Cython-0.26.1/0000775000175000017500000000000013151203436013626 5ustar stefanstefan00000000000000Cython-0.26.1/bin/0000775000175000017500000000000013151203436014376 5ustar stefanstefan00000000000000Cython-0.26.1/bin/cython_freeze0000775000175000017500000001751213023021033017163 0ustar stefanstefan00000000000000#!/usr/bin/env python """ Create a C file for embedding one or more Cython source files. Requires Cython 0.11.2 (or perhaps newer). See Demos/freeze/README.txt for more details. """ from __future__ import print_function import optparse from os.path import splitext, basename usage= '%prog [-o outfile] [-p] module [module ...]' description = 'Create a C file for embedding Cython modules.' p = optparse.OptionParser(usage=usage, description=description) p.add_option('-o', '--output', metavar='FILE', help='write output to FILE instead of standard output') p.add_option('-p', '--pymain', action='store_true', default=False, help='do not automatically run the first module as __main__') options, args = p.parse_args() if len(args) < 1: p.print_help() p.exit(1) if options.output: import sys old_stdout = sys.stdout sys.stdout = open(options.output, 'w') modules = [basename(splitext(x)[0]).replace('.', '_') for x in args] print("""\ #include #include #include #include #ifdef __FreeBSD__ #include #endif #if PY_MAJOR_VERSION < 3 # define MODINIT(name) init ## name #else # define MODINIT(name) PyInit_ ## name #endif """) for name in modules: print("PyMODINIT_FUNC MODINIT(%s) (void);" % name) print(""" static struct _inittab inittab[] = {""") for name in modules: print(' {"%(name)s", MODINIT(%(name)s)},' % {'name' : name}) print(""" {NULL, NULL} }; """, end=' ') if not options.pymain: print("\nextern int __pyx_module_is_main_%s;" % modules[0]) print(""" #if PY_MAJOR_VERSION < 3 int main(int argc, char** argv) { #elif defined(WIN32) || defined(MS_WINDOWS) int wmain(int argc, wchar_t **argv) { #else static int python_main(int argc, wchar_t **argv) { #endif """, end=' ') if not options.pymain: print("""\ PyObject *m = NULL; int r = 0; """, end=' ') print("""\ /* 754 requires that FP exceptions run in "no stop" mode by default, * and until C vendors implement C99's ways to control FP exceptions, * Python requires non-stop mode. Alas, some platforms enable FP * exceptions by default. Here we disable them. */ #ifdef __FreeBSD__ fp_except_t m; m = fpgetmask(); fpsetmask(m & ~FP_X_OFL); #endif if (PyImport_ExtendInittab(inittab)) { fprintf(stderr, "No memory\\n"); exit(1); } """, end=' ') if options.pymain: print("""\ return Py_Main(argc, argv); } """) else: print("""\ Py_SetProgramName(argv[0]); Py_Initialize(); PySys_SetArgv(argc, argv); __pyx_module_is_main_%(main)s = 1; m = PyImport_ImportModule(inittab[0].name); if (!m) { r = 1; PyErr_Print(); /* This exits with the right code if SystemExit. */ #if PY_MAJOR_VERSION < 3 if (Py_FlushLine()) PyErr_Clear(); #endif } Py_XDECREF(m); Py_Finalize(); return r; } """ % {'main' : modules[0]}, end=' ') print(r""" #if PY_MAJOR_VERSION >= 3 && !defined(WIN32) && !defined(MS_WINDOWS) static wchar_t* char2wchar(char* arg) { wchar_t *res; #ifdef HAVE_BROKEN_MBSTOWCS /* Some platforms have a broken implementation of * mbstowcs which does not count the characters that * would result from conversion. Use an upper bound. 
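     * Since each converted wide character consumes at least one input
     * byte, strlen(arg) is a safe upper bound for the buffer size used
     * below.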
*/ size_t argsize = strlen(arg); #else size_t argsize = mbstowcs(NULL, arg, 0); #endif size_t count; unsigned char *in; wchar_t *out; #ifdef HAVE_MBRTOWC mbstate_t mbs; #endif if (argsize != (size_t)-1) { res = (wchar_t *)malloc((argsize+1)*sizeof(wchar_t)); if (!res) goto oom; count = mbstowcs(res, arg, argsize+1); if (count != (size_t)-1) { wchar_t *tmp; /* Only use the result if it contains no surrogate characters. */ for (tmp = res; *tmp != 0 && (*tmp < 0xd800 || *tmp > 0xdfff); tmp++) ; if (*tmp == 0) return res; } free(res); } /* Conversion failed. Fall back to escaping with surrogateescape. */ #ifdef HAVE_MBRTOWC /* Try conversion with mbrtwoc (C99), and escape non-decodable bytes. */ /* Overallocate; as multi-byte characters are in the argument, the actual output could use less memory. */ argsize = strlen(arg) + 1; res = malloc(argsize*sizeof(wchar_t)); if (!res) goto oom; in = (unsigned char*)arg; out = res; memset(&mbs, 0, sizeof mbs); while (argsize) { size_t converted = mbrtowc(out, (char*)in, argsize, &mbs); if (converted == 0) /* Reached end of string; null char stored. */ break; if (converted == (size_t)-2) { /* Incomplete character. This should never happen, since we provide everything that we have - unless there is a bug in the C library, or I misunderstood how mbrtowc works. */ fprintf(stderr, "unexpected mbrtowc result -2\n"); return NULL; } if (converted == (size_t)-1) { /* Conversion error. Escape as UTF-8b, and start over in the initial shift state. */ *out++ = 0xdc00 + *in++; argsize--; memset(&mbs, 0, sizeof mbs); continue; } if (*out >= 0xd800 && *out <= 0xdfff) { /* Surrogate character. Escape the original byte sequence with surrogateescape. */ argsize -= converted; while (converted--) *out++ = 0xdc00 + *in++; continue; } /* successfully converted some bytes */ in += converted; argsize -= converted; out++; } #else /* Cannot use C locale for escaping; manually escape as if charset is ASCII (i.e. escape all bytes > 128. This will still roundtrip correctly in the locale's charset, which must be an ASCII superset. */ res = malloc((strlen(arg)+1)*sizeof(wchar_t)); if (!res) goto oom; in = (unsigned char*)arg; out = res; while(*in) if(*in < 128) *out++ = *in++; else *out++ = 0xdc00 + *in++; *out = 0; #endif return res; oom: fprintf(stderr, "out of memory\n"); return NULL; } int main(int argc, char **argv) { wchar_t **argv_copy = (wchar_t **)malloc(sizeof(wchar_t*)*argc); /* We need a second copies, as Python might modify the first one. 
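     * argv_copy2 keeps the original pointers so that each element can
     * still be free()d after python_main() has possibly altered
     * argv_copy.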
*/ wchar_t **argv_copy2 = (wchar_t **)malloc(sizeof(wchar_t*)*argc); int i, res; char *oldloc; if (!argv_copy || !argv_copy2) { fprintf(stderr, "out of memory\n"); return 1; } oldloc = strdup(setlocale(LC_ALL, NULL)); setlocale(LC_ALL, ""); for (i = 0; i < argc; i++) { argv_copy2[i] = argv_copy[i] = char2wchar(argv[i]); if (!argv_copy[i]) return 1; } setlocale(LC_ALL, oldloc); free(oldloc); res = python_main(argc, argv_copy); for (i = 0; i < argc; i++) { free(argv_copy2[i]); } free(argv_copy); free(argv_copy2); return res; } #endif""") Cython-0.26.1/bin/cygdb0000775000175000017500000000017312542002467015421 0ustar stefanstefan00000000000000#!/usr/bin/env python import sys from Cython.Debugger import Cygdb as cygdb if __name__ == '__main__': cygdb.main() Cython-0.26.1/bin/cython0000775000175000017500000000017212542002467015634 0ustar stefanstefan00000000000000#!/usr/bin/env python # # Cython -- Main Program, Unix # from Cython.Compiler.Main import main main(command_line = 1) Cython-0.26.1/bin/cythonrun0000775000175000017500000000055612542002467016367 0ustar stefanstefan00000000000000#!/usr/bin/env python """ Compile a Python script into an executable that embeds CPython and run it. Requires CPython to be built as a shared library ('libpythonX.Y'). Basic usage: python cythonrun somefile.py [ARGS] """ from Cython.Build.BuildExecutable import build, build_and_run if __name__ == '__main__': import sys build_and_run(sys.argv[1:]) Cython-0.26.1/bin/move-declarators.sed0000664000175000017500000000144712542002467020354 0ustar stefanstefan00000000000000# Moves # # use: sed [-E | -r] -i -f move-declarators.sed [files] # Arrays # cdef int a[5] -> cdef int[5] a s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+) +([_0-9a-zA-Z]+)((\[[0-9]*\])+)$/\1cdef \2\4 \3/ # Pointers # cdef int a, *b -> cdef int a \n cdef int *b s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+)( +[_0-9a-zA-Z]+ +(=[^()]+)?),( *[*]+ *)([^()]+)/\1cdef \2\3\ \1cdef \2\5\6/ s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+)( +[_0-9a-zA-Z]+ +(=[^()]+)?),( *[*]+ *)([^()]+)/\1cdef \2\3\ \1cdef \2\5\6/ s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+)( +[_0-9a-zA-Z]+ +(=[^()]+)?),( *[*]+ *)([^()]+)/\1cdef \2\3\ \1cdef \2\5\6/ s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+)( +[_0-9a-zA-Z]+ +(=[^()]+)?),( *[*]+ *)([^()]+)/\1cdef \2\3\ \1cdef \2\5\6/ s/^([ \t]*)cdef +([_0-9a-zA-Z. ]+)( +[_0-9a-zA-Z]+ +(=[^()]+)?),( *[*]+ *)([^()]+)/\1cdef \2\3\ \1cdef \2\5\6/ Cython-0.26.1/bin/cythonize0000775000175000017500000000016312542002467016344 0ustar stefanstefan00000000000000#!/usr/bin/env python # # command line frontend for cythonize() # from Cython.Build.Cythonize import main main() Cython-0.26.1/bin/pcython0000775000175000017500000000611513023021033016000 0ustar stefanstefan00000000000000#!/usr/bin/env python """ Script to run Cython with a Python command line. Executes Python code by compiling it in Cython and running the compiled code. 
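
Illustrative invocations (assuming the script is available as 'pcython';
the file name and directive below are only examples)::

    pcython -c "print(sum(range(10)))"        # compile and run a code string
    pcython -X boundscheck=False myscript.py  # compile and run a file with a directive
    pcython - < myscript.py                   # compile and run a program read from stdin
    pcython -V                                # print the Python and Cython versions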
""" import sys import subprocess def parse_args(args=None): import optparse # tried argparse, but it doesn't stop at the first (interspersed) positional argument parser = optparse.OptionParser(description='Run a Python command line with Cython') parser.disable_interspersed_args() parser.add_option('-c', metavar='CODE', dest='command', help='program passed in as string') parser.add_option('--python', metavar='PYTHON', dest='python', default=sys.executable, help='Python interpreter to use') parser.add_option('-X', metavar='NAME=VALUE', dest='directives', action='append', help='Compiler directives to set during compilation') parser.add_option('-V', '--version', action='store_true', help='print the Python and Cython version numbers and exit') return parser.parse_args(args) def run_python(python, command): subprocess.check_call([python, '-c', command]) def print_versions(python): """Print version numbers of Python and Cython. """ command = ( "import sys, Cython; " "print('Python {}'.format('.'.join(map(str, sys.version_info[:3])))); " "print('[Cython {}]'.format(Cython.__version__))" ) run_python(python, command) def run_cython_command(python, command, args=None): """Compile and run a Python command string. """ command = ( "import sys; " "from Cython.Build.Inline import cython_inline; " "sys.argv[1:] = {args!r}; " "(lambda: cython_inline({code!r}, quiet=True))()" ).format(code=command, args=list(args) if args else []) run_python(python, command) def run_python_stdin(python, file_args=None, directives=None): """Compile and run a Python program from stdin. """ import shutil import tempfile with tempfile.NamedTemporaryFile(suffix='.py') as f: shutil.copyfileobj(sys.stdin, f) f.flush() file_args = [f.name] + list(file_args or ()) run_python_file(python, file_args, directives) def run_python_file(python, file_args, directives=None): """Compile and run a Python program from a file. """ args = [] if directives: for directive in directives: args.extend(('-X', directive)) args.extend(file_args) command = ( "import Cython.Build.BuildExecutable as bex; " "bex.DEBUG = False; " "bex.build_and_run({args!r})" ).format(args=args) run_python(python, command) def main(): options, args = parse_args() python = options.python if options.version: print_versions(python) return if options.command: run_cython_command(python, options.command, args) if args: if args[0] == '-': run_python_stdin(python, args[1:], options.directives) else: run_python_file(python, args, options.directives) if __name__ == '__main__': main() Cython-0.26.1/bin/cython.bat0000664000175000017500000000040012542002467016370 0ustar stefanstefan00000000000000@REM Start cython from windows commandline as "cython", not "cython.py". @REM This is especially useful for windows power shell, as no extra window @REM is used. @echo OFF python -c "from Cython.Compiler.Main import main; main(command_line = 1)" %* Cython-0.26.1/Tools/0000775000175000017500000000000013151203436014726 5ustar stefanstefan00000000000000Cython-0.26.1/Tools/cystdlib.py0000664000175000017500000001260212542002467017122 0ustar stefanstefan00000000000000""" Highly experimental script that compiles the CPython standard library using Cython. Execute the script either in the CPython 'Lib' directory or pass the option '--current-python' to compile the standard library of the running Python interpreter. Pass '-j N' to get a parallel build with N processes. 
Usage example:: $ python cystdlib.py --current-python build_ext -i """ import os import sys from distutils.core import setup from Cython.Build import cythonize from Cython.Compiler import Options # improve Python compatibility by allowing some broken code Options.error_on_unknown_names = False Options.error_on_uninitialized = False exclude_patterns = ['**/test/**/*.py', '**/tests/**/*.py', '**/__init__.py'] broken = [ 'idlelib/MultiCall.py', 'email/utils.py', 'multiprocessing/reduction.py', 'multiprocessing/util.py', 'threading.py', # interrupt handling 'lib2to3/fixes/fix_sys_exc.py', 'traceback.py', 'types.py', 'enum.py', 'keyword.py', '_collections_abc.py', 'importlib/_bootstrap', ] default_directives = dict( auto_cpdef=False, # enable when it's safe, see long list of failures below binding=True, set_initial_path='SOURCEFILE') default_directives['optimize.inline_defnode_calls'] = True special_directives = [ (['pkgutil.py', 'decimal.py', 'datetime.py', 'optparse.py', 'sndhdr.py', 'opcode.py', 'ntpath.py', 'urllib/request.py', 'plat-*/TYPES.py', 'plat-*/IN.py', 'tkinter/_fix.py', 'lib2to3/refactor.py', 'webbrowser.py', 'shutil.py', 'multiprocessing/forking.py', 'xml/sax/expatreader.py', 'xmlrpc/client.py', 'pydoc.py', 'xml/etree/ElementTree.py', 'posixpath.py', 'inspect.py', 'ctypes/util.py', 'urllib/parse.py', 'warnings.py', 'tempfile.py', 'trace.py', 'heapq.py', 'pickletools.py', 'multiprocessing/connection.py', 'hashlib.py', 'getopt.py', 'os.py', 'types.py', ], dict(auto_cpdef=False)), ] del special_directives[:] # currently unused def build_extensions(includes='**/*.py', excludes=None, special_directives=special_directives, language_level=sys.version_info[0], parallel=None): if isinstance(includes, str): includes = [includes] excludes = list(excludes or exclude_patterns) + broken all_groups = (special_directives or []) + [(includes, {})] extensions = [] for modules, directives in all_groups: exclude_now = excludes[:] for other_modules, _ in special_directives: if other_modules != modules: exclude_now.extend(other_modules) d = dict(default_directives) d.update(directives) extensions.extend( cythonize( modules, exclude=exclude_now, exclude_failures=True, language_level=language_level, compiler_directives=d, nthreads=parallel, )) return extensions def build(extensions): try: setup(ext_modules=extensions) result = True except: import traceback print('error building extensions %s' % ( [ext.name for ext in extensions],)) traceback.print_exc() result = False return extensions, result def _build(args): sys_args, ext = args sys.argv[1:] = sys_args return build([ext]) def parse_args(): from optparse import OptionParser parser = OptionParser('%prog [options] [LIB_DIR (default: ./Lib)]') parser.add_option( '--current-python', dest='current_python', action='store_true', help='compile the stdlib of the running Python') parser.add_option( '-j', '--jobs', dest='parallel_jobs', metavar='N', type=int, default=1, help='run builds in N parallel jobs (default: 1)') parser.add_option( '-x', '--exclude', dest='excludes', metavar='PATTERN', action="append", help='exclude modules/packages matching PATTERN') options, args = parser.parse_args() if not args: args = ['./Lib'] elif len(args) > 1: parser.error('only one argument expected, got %d' % len(args)) return options, args if __name__ == '__main__': options, args = parse_args() if options.current_python: # assume that the stdlib is where the "os" module lives os.chdir(os.path.dirname(os.__file__)) else: os.chdir(args[0]) pool = None parallel_jobs = 
options.parallel_jobs if options.parallel_jobs: try: import multiprocessing pool = multiprocessing.Pool(parallel_jobs) print("Building in %d parallel processes" % parallel_jobs) except (ImportError, OSError): print("Not building in parallel") parallel_jobs = 0 extensions = build_extensions( parallel=parallel_jobs, excludes=options.excludes) sys_args = ['build_ext', '-i'] if pool is not None: results = pool.map(_build, [(sys_args, ext) for ext in extensions]) pool.close() pool.join() for ext, result in results: if not result: print("building extension %s failed" % (ext[0].name,)) else: sys.argv[1:] = sys_args build(extensions) Cython-0.26.1/Tools/kate.diff0000664000175000017500000013302713143605603016515 0ustar stefanstefan00000000000000# HG changeset patch # User Sturla Molden # Date 1256723843 25200 # Node ID 0a6ce52272f641d58c874fa007187778d4c2c81c # Parent db4133d43a7ee34d4f172aced054785acba65a57 Syntax highlighting for Cython and NumPy for KATE and KDevelop. diff -r db4133d43a7e -r 0a6ce52272f6 Tools/cython-numpy-mode-kate.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/Tools/cython-numpy-mode-kate.xml Wed Oct 28 02:57:23 2009 -0700 @@ -0,0 +1,1133 @@ + + + + + + + + + + + + + + + as + + + cimport + import + from + as + + + DEF + IF + ELIF + ELSE + include + + + class + cpdef + def + + + cdef + ctypedef + + + extern + api + del + global + property + nogil + gil + inline + + + readonly + public + + + and + assert + in + is + by + not + or + sizeof + + + + print + + + break + continue + elif + else + except + finally + for + if + pass + raise + return + try + while + with + + + + __import__ + abs + all + any + apply + basestring + buffer + callable + chr + classmethod + cmp + coerce + compile + delattr + dir + divmod + enumerate + eval + execfile + filter + getattr + + hasattr + hash + hex + id + input + intern + isinstance + issubclass + iter + len + + map + max + min + oct + open + ord + pow + + range + raw_input + reduce + reload + repr + reversed + round + setattr + sorted + staticmethod + sum + super + type + unichr + unicode + + xrange + zip + + + + unsigned + void + enum + double + long + short + char + Py_ssize_t + Py_intptr_t + Py_buffer + bint + struct + union + enum + + + + int + float + object + list + tuple + str + dict + set + frozenset + slice + bool + complex + file + + + + np + numpy + + + numpy + + + cython + + + dtype + flatiter + broadcast + ndarray + int8_t + int16_t + int32_t + int64_t + uint8_t + uint16_t + uint32_t + uint64_t + float32_t + float64_t + complex64_t + complex128_t + int_t + long_t + uint_t + ulong_t + float_t + double_t + longdouble_t + cfloat_t + cdouble_t + clongdouble_t + complex_t + npy_int8 + npy_int16 + npy_int32 + npy_int64 + npy_int96 + npy_int128 + npy_uint8 + npy_uint16 + npy_uint32 + npy_uint64 + npy_uint96 + npy_uint128 + npy_float32 + npy_float64 + npy_float80 + npy_float96 + npy_float128 + npy_complex64 + npy_complex128 + npy_complex120 + npy_complex192 + npy_complex256 + npy_cfloat + npy_cdouble + npy_clongdouble + npy_bool + npy_byte + npy_short + npy_int + npy_long + npy_longlong + npy_ubyte + npy_ushort + npy_uint + npy_ulong + npy_ulonglong + npy_float + npy_double + npy_longdouble + npy_intp + + + DataSource + MachAr + PackageLoader + RankWarning + Tester + abs + absolute + add + add_docstring + add_newdoc + alen + all + allclose + alltrue + alterdot + amax + amin + angle + any + append + apply_along_axis + apply_over_axes + arange + arccos + arccosh + arcsin + arcsinh + arctan + arctan2 + arctanh + argmax + argmin + 
argsort + argwhere + around + array + array2string + array_equal + array_equiv + array_repr + array_split + array_str + asanyarray + asarray + asarray_chkfinite + ascontiguousarray + asfarray + asfortranarray + asmatrix + asscalar + atleast_1d + atleast_2d + atleast_3d + average + bartlett + base_repr + bench + binary_repr + bincount + bitwise_and + bitwise_not + bitwise_or + bitwise_xor + blackman + bmat + bool + bool8 + bool_ + broadcast + broadcast_arrays + byte + byte_bounds + can_cast + cdouble + ceil + cfloat + character + chararray + choose + clip + clongdouble + clongfloat + column_stack + common_type + compare_chararrays + complex + complex128 + complex192 + complex64 + complex_ + complexfloating + compress + concatenate + conj + conjugate + convolve + copy + corrcoef + correlate + cos + cosh + cov + cross + csingle + cumprod + cumproduct + cumsum + deg2rad + degrees + delete + deprecate + deprecate_with_doc + diag + diagflat + diagonal + diff + digitize + disp + divide + dot + double + dsplit + dstack + dtype + ediff1d + empty + empty_like + equal + errstate + exp + exp2 + expand_dims + expm1 + extract + eye + fabs + fastCopyAndTranspose + find_common_type + finfo + fix + flatiter + flatnonzero + flexible + fliplr + flipud + float + float32 + float64 + float96 + float_ + floating + floor + floor_divide + fmax + fmin + fmod + frexp + frombuffer + fromfile + fromfunction + fromiter + frompyfunc + fromregex + fromstring + fv + generic + genfromtxt + get_array_wrap + get_include + get_numarray_include + get_numpy_include + get_printoptions + getbuffer + getbufsize + geterr + geterrcall + geterrobj + gradient + greater + greater_equal + hamming + hanning + histogram + histogram2d + histogramdd + hsplit + hstack + hypot + i0 + identity + imag + indices + inexact + info + inner + insert + int + int0 + int16 + int32 + int64 + int8 + int_ + int_asbuffer + intc + integer + interp + intersect1d + intersect1d_nu + intp + invert + ipmt + irr + iscomplex + iscomplexobj + isfinite + isfortran + isinf + isnan + isneginf + isposinf + isreal + isrealobj + isscalar + issctype + issubclass_ + issubdtype + issubsctype + iterable + ix_ + kaiser + kron + ldexp + left_shift + less + less_equal + lexsort + linspace + load + loads + loadtxt + log + log10 + log1p + log2 + logaddexp + logaddexp2 + logical_and + logical_not + logical_or + logical_xor + logspace + long + longcomplex + longdouble + longfloat + longlong + lookfor + mafromtxt + mat + matrix + max + maximum + maximum_sctype + may_share_memory + mean + median + memmap + meshgrid + min + minimum + mintypecode + mirr + mod + modf + msort + multiply + nan_to_num + nanargmax + nanargmin + nanmax + nanmin + nansum + ndarray + ndenumerate + ndfromtxt + ndim + ndindex + negative + newbuffer + nonzero + not_equal + nper + npv + number + obj2sctype + object + object0 + object_ + ones + ones_like + outer + packbits + piecewise + pkgload + place + pmt + poly + poly1d + polyadd + polyder + polydiv + polyfit + polyint + polymul + polysub + polyval + power + ppmt + prod + product + ptp + put + putmask + pv + rad2deg + radians + rank + rate + ravel + real + real_if_close + recarray + recfromcsv + recfromtxt + reciprocal + record + remainder + repeat + require + reshape + resize + restoredot + right_shift + rint + roll + rollaxis + roots + rot90 + round + round_ + row_stack + safe_eval + save + savetxt + savez + sctype2char + searchsorted + select + set_numeric_ops + set_printoptions + set_string_function + setbufsize + setdiff1d + seterr + seterrcall + seterrobj 
+ setmember1d + setxor1d + shape + short + show_config + sign + signbit + signedinteger + sin + sinc + single + singlecomplex + sinh + size + sometrue + sort + sort_complex + source + split + sqrt + square + squeeze + std + str + str_ + string0 + string_ + subtract + sum + swapaxes + take + tan + tanh + tensordot + test + tile + trace + transpose + trapz + tri + tril + trim_zeros + triu + true_divide + trunc + typename + ubyte + ufunc + uint + uint0 + uint16 + uint32 + uint64 + uint8 + uintc + uintp + ulonglong + unicode + unicode0 + unicode_ + union1d + unique + unique1d + unpackbits + unravel_index + unsignedinteger + unwrap + ushort + vander + var + vdot + vectorize + void + void0 + vsplit + vstack + where + who + zeros + zeros_like + + + __future__ + __import__ + __name__ + __cythonbufferdefaults__ + __weakref__ + None + self + True + False + NotImplemented + Ellipsis + NULL + + + __new__ + __init__ + __cinit__ + __dealloc__ + __cmp__ + __richcmp__ + __str__ + __repr__ + __hash__ + __call__ + __iter__ + __getattr__ + __setattr__ + __delattr__ + __add__ + __sub__ + __mul__ + __div__ + __floordiv__ + __truediv__ + __mod__ + __divmod__ + __pow__ + __neg__ + __pos__ + __abs__ + __nonzero__ + __invert__ + __lshift__ + __rshift__ + __and__ + __or__ + __xor__ + __int__ + __long__ + __float__ + __oct__ + __hex__ + __index__ + __iadd__ + __isub__ + __imul__ + __idiv__ + __ifloordiv__ + __itruediv__ + __imod__ + __ipow__ + __ilshift__ + __irshift__ + __iand__ + __ior__ + __ixor__ + __len__ + __getitem__ + __setitem__ + __delitem__ + __getslice__ + __setslice__ + __delslice__ + __contains__ + __next__ + __getreadbuffer__ + __getwritebuffer__ + __getsegcount__ + __getcharbuffer__ + __get__ + __set__ + __delete__ + __getbuffer__ + __releasebuffer__ + + + ArithmeticError + AssertionError + AttributeError + BaseException + DeprecationWarning + EnvironmentError + EOFError + Exception + FloatingPointError + FutureWarning + GeneratorExit + IOError + ImportError + ImportWarning + IndexError + KeyError + KeyboardInterrupt + LookupError + MemoryError + NameError + NotImplementedError + OSError + OverflowError + PendingDeprecationWarning + ReferenceError + RuntimeError + RuntimeWarning + StandardError + StopIteration + SyntaxError + SyntaxWarning + SystemError + SystemExit + TypeError + UnboundLocalError + UserWarning + UnicodeError + UnicodeWarning + UnicodeEncodeError + UnicodeDecodeError + UnicodeTranslateError + ValueError + Warning + WindowsError + ZeroDivisionError + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cython-0.26.1/Tools/cython-mode.el0000664000175000017500000002724313150045407017507 0ustar stefanstefan00000000000000;;; cython-mode.el --- Major mode for editing Cython files ;;; Commentary: ;; This should work with python-mode.el as well as either the new ;; python.el or the old. ;;; Code: ;; Load python-mode if available, otherwise use builtin emacs python package (when (not (require 'python-mode nil t)) (require 'python)) (eval-when-compile (require 'rx)) ;;;###autoload (add-to-list 'auto-mode-alist '("\\.pyx\\'" . 
cython-mode)) ;;;###autoload (add-to-list 'auto-mode-alist '("\\.pxd\\'" . cython-mode)) ;;;###autoload (add-to-list 'auto-mode-alist '("\\.pxi\\'" . cython-mode)) (defvar cython-buffer nil "Variable pointing to the cython buffer which was compiled.") (defun cython-compile () "Compile the file via Cython." (interactive) (let ((cy-buffer (current-buffer))) (with-current-buffer (compile compile-command) (set (make-local-variable 'cython-buffer) cy-buffer) (add-to-list (make-local-variable 'compilation-finish-functions) 'cython-compilation-finish)))) (defun cython-compilation-finish (buffer how) "Called when Cython compilation finishes." ;; XXX could annotate source here ) (defvar cython-mode-map (let ((map (make-sparse-keymap))) ;; Will inherit from `python-mode-map' thanks to define-derived-mode. (define-key map "\C-c\C-c" 'cython-compile) map) "Keymap used in `cython-mode'.") (defvar cython-font-lock-keywords `(;; ctypedef statement: "ctypedef (...type... alias)?" (,(rx ;; keyword itself symbol-start (group "ctypedef") ;; type specifier: at least 1 non-identifier symbol + 1 identifier ;; symbol and anything but a comment-starter after that. (opt (regexp "[^a-zA-z0-9_\n]+[a-zA-Z0-9_][^#\n]*") ;; type alias: an identifier symbol-start (group (regexp "[a-zA-Z_]+[a-zA-Z0-9_]*")) ;; space-or-comments till the end of the line (* space) (opt "#" (* nonl)) line-end)) (1 font-lock-keyword-face) (2 font-lock-type-face nil 'noerror)) ;; new keywords in Cython language (,(rx symbol-start (or "by" "cdef" "cimport" "cpdef" "extern" "gil" "include" "nogil" "property" "public" "readonly" "DEF" "IF" "ELIF" "ELSE" "new" "del" "cppclass" "namespace" "const" "__stdcall" "__cdecl" "__fastcall" "inline" "api") symbol-end) . font-lock-keyword-face) ;; Question mark won't match at a symbol-end, so 'except?' must be ;; special-cased. It's simpler to handle it separately than weaving it ;; into the lengthy list of other keywords. (,(rx symbol-start "except?") . font-lock-keyword-face) ;; C and Python types (highlight as builtins) (,(rx symbol-start (or "object" "dict" "list" ;; basic c type names "void" "char" "int" "float" "double" "bint" ;; longness/signed/constness "signed" "unsigned" "long" "short" ;; special basic c types "size_t" "Py_ssize_t" "Py_UNICODE" "Py_UCS4" "ssize_t" "ptrdiff_t") symbol-end) . font-lock-builtin-face) (,(rx symbol-start "NULL" symbol-end) . font-lock-constant-face) ;; cdef is used for more than functions, so simply highlighting the next ;; word is problematic. struct, enum and property work though. (,(rx symbol-start (group (or "struct" "enum" "union" (seq "ctypedef" (+ space "fused")))) (+ space) (group (regexp "[a-zA-Z_]+[a-zA-Z0-9_]*"))) (1 font-lock-keyword-face prepend) (2 font-lock-type-face)) ("\\_ (current-indentation) block-indentation) (or (cython-end-of-statement) t)) ;; comment or empty line (looking-at (rx (0+ space) (or eol "#")))))) (forward-comment -1)) ;; Count trailing space in defun (but not trailing comments). (skip-syntax-forward " >") (unless (eobp) ; e.g. missing final newline (beginning-of-line))) ;; Catch pathological cases like this, where the beginning-of-defun ;; skips to a definition we're not in: ;; if ...: ;; ... ;; else: ;; ... # point here ;; ... ;; def ... (if (< (point) orig) (goto-char (point-max))))) (defun cython-current-defun () "`add-log-current-defun-function' for Cython." (save-excursion ;; Move up the tree of nested `class' and `def' blocks until we ;; get to zero indentation, accumulating the defined names. 
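    ;; For example, point inside a "def bar" nested in "class Foo"
    ;; produces the name "Foo.bar".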
(let ((start t) accum) (while (or start (> (current-indentation) 0)) (setq start nil) (cython-beginning-of-block) (end-of-line) (beginning-of-defun) (if (looking-at (rx (0+ space) (or "def" "cdef" "cpdef" "class") (1+ space) (group (1+ (or word (syntax symbol)))))) (push (match-string 1) accum))) (if accum (mapconcat 'identity accum "."))))) ;;;###autoload (define-derived-mode cython-mode python-mode "Cython" "Major mode for Cython development, derived from Python mode. \\{cython-mode-map}" (font-lock-add-keywords nil cython-font-lock-keywords) (set (make-local-variable 'outline-regexp) (rx (* space) (or "class" "def" "cdef" "cpdef" "elif" "else" "except" "finally" "for" "if" "try" "while" "with") symbol-end)) (set (make-local-variable 'beginning-of-defun-function) #'cython-beginning-of-defun) (set (make-local-variable 'end-of-defun-function) #'cython-end-of-defun) (set (make-local-variable 'compile-command) (format cython-default-compile-format (shell-quote-argument (or buffer-file-name "")))) (set (make-local-variable 'add-log-current-defun-function) #'cython-current-defun) (add-hook 'which-func-functions #'cython-current-defun nil t) (add-to-list (make-local-variable 'compilation-finish-functions) 'cython-compilation-finish)) (provide 'cython-mode) ;;; cython-mode.el ends here Cython-0.26.1/Tools/rules.bzl0000664000175000017500000000473213150045407016600 0ustar stefanstefan00000000000000""" Defines a pyx_library() macros corresponding to py_library. Uses Cython to compile .pyx files (and .py files with corresponding .pxd files) to Python extension modules. Example: # Assuming Cython is mapped to "cython" in your workspace. load("@cython//Tools:rules.bzl", "pyx_library") pyx_library(name = 'mylib', srcs = ['a.pyx', 'a.pxd', 'b.py', 'pkg/__init__.py', 'pkg/c.pyx'], py_deps = ['//py_library/dep'], data = ['//other/data'], ) The __init__.py file must be in your srcs list so that Cython can resolve cimports using the package layout. """ def pyx_library( name, deps=[], srcs=[], cython_directives=[], cython_options=[]): # First filter out files that should be run compiled vs. passed through. py_srcs = [] pyx_srcs = [] pxd_srcs = [] for src in srcs: if src.endswith('.pyx') or (src.endswith('.py') and src[:-3] + '.pxd' in srcs): pyx_srcs.append(src) elif src.endswith('.py'): py_srcs.append(src) else: pxd_srcs.append(src) if src.endswith('__init__.py'): # TODO(robertwb): Infer __init__.py files/package root? pxd_srcs.append(src) # Invoke cythonize to produce the shared object libraries. outs = [src.split('.')[0] + '.so' for src in pyx_srcs] extra_flags = " ".join(["-X '%s=%s'" % x for x in cython_directives] + ["-s '%s=%s'" % x for x in cython_options]) # TODO(robertwb): It might be better to only generate the C files, # letting cc_library (or similar) handle the rest, but there isn't yet # suport compiling Python C extensions from bazel. native.genrule( name = name + "_cythonize", srcs = pyx_srcs, outs = outs, cmd = "PYTHONHASHSEED=0 $(location @cython//:cythonize) -k -i %s $(SRCS)" % extra_flags # Rename outputs to expected location. # TODO(robertwb): Add an option to cythonize itself to do this. + """ && python -c 'import os, sys; n = len(sys.argv); [os.rename(src.split(".")[0] + ".so", dst) for src, dst in zip(sys.argv[1:], sys.argv[1+n//2:])]' $(SRCS) $(OUTS)""", tools = ["@cython//:cythonize"] + pxd_srcs, ) # Now create a py_library with these shared objects as data. 
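    # The compiled .so files are attached as data so that they end up laid
    # out next to the remaining .py sources at runtime.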
native.py_library( name=name, srcs=py_srcs, deps=deps, data=outs + pyx_srcs + pxd_srcs ) Cython-0.26.1/Tools/BUILD.bazel0000664000175000017500000000000013023021033016557 0ustar stefanstefan00000000000000Cython-0.26.1/Tools/jedityper.py0000664000175000017500000001024613143605603017305 0ustar stefanstefan00000000000000""" Inject Cython type declarations into a .py file using the Jedi static analysis tool. """ from __future__ import absolute_import from io import open from collections import defaultdict from itertools import chain import jedi from jedi.parser.tree import Module, ImportName from jedi.evaluate.representation import Function, Instance, Class from jedi.evaluate.iterable import ArrayMixin, GeneratorComprehension from Cython.Utils import open_source_file default_type_map = { 'float': 'double', 'int': 'long', } def analyse(source_path=None, code=None): """ Analyse a Python source code file with Jedi. Returns a mapping from (scope-name, (line, column)) pairs to a name-types mapping. """ if not source_path and code is None: raise ValueError("Either 'source_path' or 'code' is required.") scoped_names = {} statement_iter = jedi.names(source=code, path=source_path, all_scopes=True) for statement in statement_iter: parent = statement.parent() scope = parent._definition evaluator = statement._evaluator # skip function/generator definitions, class definitions, and module imports if any(isinstance(statement._definition, t) for t in [Function, Class, ImportName]): continue key = (None if isinstance(scope, Module) else str(parent.name), scope.start_pos) try: names = scoped_names[key] except KeyError: names = scoped_names[key] = defaultdict(set) position = statement.start_pos if statement.name in names else None for name_type in evaluator.find_types(scope, statement.name, position=position ,search_global=True): if isinstance(name_type, Instance): if isinstance(name_type.base, Class): type_name = 'object' else: type_name = name_type.base.obj.__name__ elif isinstance(name_type, ArrayMixin): type_name = name_type.type elif isinstance(name_type, GeneratorComprehension): type_name = None else: try: type_name = type(name_type.obj).__name__ except AttributeError as error: type_name = None if type_name is not None: names[str(statement.name)].add(type_name) return scoped_names def inject_types(source_path, types, type_map=default_type_map, mode='python'): """ Hack type declarations into source code file. @param mode is currently 'python', which means that the generated type declarations use pure Python syntax. """ col_and_types_by_line = dict( # {line: (column, scope_name or None, [(name, type)])} (k[-1][0], (k[-1][1], k[0], [(n, next(iter(t))) for (n, t) in v.items() if len(t) == 1])) for (k, v) in types.items()) lines = [u'import cython\n'] with open_source_file(source_path) as f: for line_no, line in enumerate(f, 1): if line_no in col_and_types_by_line: col, scope, types = col_and_types_by_line[line_no] if types: types = ', '.join("%s='%s'" % (name, type_map.get(type_name, type_name)) for name, type_name in types) if scope is None: type_decl = u'{indent}cython.declare({types})\n' else: type_decl = u'{indent}@cython.locals({types})\n' lines.append(type_decl.format(indent=' '*col, types=types)) lines.append(line) return lines def main(file_paths=None, overwrite=False): """ Main entry point to process a list of .py files and inject type inferred declarations. 
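    If no file paths are given, they are read from the command line.
    Unless 'overwrite' is set, the annotated result is written to a new
    file with '_typed.py' appended to the input file name.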
""" if file_paths is None: import sys file_paths = sys.argv[1:] for source_path in file_paths: types = analyse(source_path) lines = inject_types(source_path, types) target_path = source_path + ('' if overwrite else '_typed.py') with open(target_path, 'w', encoding='utf8') as f: for line in lines: f.write(line) if __name__ == '__main__': main() Cython-0.26.1/Tools/site_scons/0000775000175000017500000000000013151203436017077 5ustar stefanstefan00000000000000Cython-0.26.1/Tools/site_scons/site_tools/0000775000175000017500000000000013151203436021263 5ustar stefanstefan00000000000000Cython-0.26.1/Tools/site_scons/site_tools/cython.py0000664000175000017500000000327112542002467023150 0ustar stefanstefan00000000000000""" Tool to run Cython files (.pyx) into .c and .cpp. TODO: - Add support for dynamically selecting in-process Cython through CYTHONINPROCESS variable. - Have a CYTHONCPP option which turns on C++ in flags and changes output extension at the same time VARIABLES: - CYTHON - The path to the "cython" command line tool. - CYTHONFLAGS - Flags to pass to the "cython" command line tool. AUTHORS: - David Cournapeau - Dag Sverre Seljebotn """ import SCons from SCons.Builder import Builder from SCons.Action import Action #def cython_action(target, source, env): # print target, source, env # from Cython.Compiler.Main import compile as cython_compile # res = cython_compile(str(source[0])) cythonAction = Action("$CYTHONCOM") def create_builder(env): try: cython = env['BUILDERS']['Cython'] except KeyError: cython = SCons.Builder.Builder( action = cythonAction, emitter = {}, suffix = cython_suffix_emitter, single_source = 1) env['BUILDERS']['Cython'] = cython return cython def cython_suffix_emitter(env, source): return "$CYTHONCFILESUFFIX" def generate(env): env["CYTHON"] = "cython" env["CYTHONCOM"] = "$CYTHON $CYTHONFLAGS -o $TARGET $SOURCE" env["CYTHONCFILESUFFIX"] = ".c" c_file, cxx_file = SCons.Tool.createCFileBuilders(env) c_file.suffix['.pyx'] = cython_suffix_emitter c_file.add_action('.pyx', cythonAction) c_file.suffix['.py'] = cython_suffix_emitter c_file.add_action('.py', cythonAction) create_builder(env) def exists(env): try: # import Cython return True except ImportError: return False Cython-0.26.1/Tools/site_scons/site_tools/pyext.py0000664000175000017500000002201213143605603023006 0ustar stefanstefan00000000000000"""SCons.Tool.pyext Tool-specific initialization for python extensions builder. AUTHORS: - David Cournapeau - Dag Sverre Seljebotn """ # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import sys import SCons from SCons.Tool import SourceFileScanner, ProgramScanner # Create common python builders def createPythonObjectBuilder(env): """This is a utility function that creates the PythonObject Builder in an Environment if it is not there already. If it is already there, we return the existing one. """ try: pyobj = env['BUILDERS']['PythonObject'] except KeyError: pyobj = SCons.Builder.Builder(action = {}, emitter = {}, prefix = '$PYEXTOBJPREFIX', suffix = '$PYEXTOBJSUFFIX', src_builder = ['CFile', 'CXXFile'], source_scanner = SourceFileScanner, single_source = 1) env['BUILDERS']['PythonObject'] = pyobj return pyobj def createPythonExtensionBuilder(env): """This is a utility function that creates the PythonExtension Builder in an Environment if it is not there already. If it is already there, we return the existing one. """ try: pyext = env['BUILDERS']['PythonExtension'] except KeyError: import SCons.Action import SCons.Defaults action = SCons.Action.Action("$PYEXTLINKCOM", "$PYEXTLINKCOMSTR") action_list = [ SCons.Defaults.SharedCheck, action] pyext = SCons.Builder.Builder(action = action_list, emitter = "$SHLIBEMITTER", prefix = '$PYEXTPREFIX', suffix = '$PYEXTSUFFIX', target_scanner = ProgramScanner, src_suffix = '$PYEXTOBJSUFFIX', src_builder = 'PythonObject') env['BUILDERS']['PythonExtension'] = pyext return pyext def pyext_coms(platform): """Return PYEXTCCCOM, PYEXTCXXCOM and PYEXTLINKCOM for the given platform.""" if platform == 'win32': pyext_cccom = "$PYEXTCC /Fo$TARGET /c $PYEXTCCSHARED "\ "$PYEXTCFLAGS $PYEXTCCFLAGS $_CCCOMCOM "\ "$_PYEXTCPPINCFLAGS $SOURCES" pyext_cxxcom = "$PYEXTCXX /Fo$TARGET /c $PYEXTCSHARED "\ "$PYEXTCXXFLAGS $PYEXTCCFLAGS $_CCCOMCOM "\ "$_PYEXTCPPINCFLAGS $SOURCES" pyext_linkcom = '${TEMPFILE("$PYEXTLINK $PYEXTLINKFLAGS '\ '/OUT:$TARGET.windows $( $_LIBDIRFLAGS $) '\ '$_LIBFLAGS $_PYEXTRUNTIME $SOURCES.windows")}' else: pyext_cccom = "$PYEXTCC -o $TARGET -c $PYEXTCCSHARED "\ "$PYEXTCFLAGS $PYEXTCCFLAGS $_CCCOMCOM "\ "$_PYEXTCPPINCFLAGS $SOURCES" pyext_cxxcom = "$PYEXTCXX -o $TARGET -c $PYEXTCSHARED "\ "$PYEXTCXXFLAGS $PYEXTCCFLAGS $_CCCOMCOM "\ "$_PYEXTCPPINCFLAGS $SOURCES" pyext_linkcom = "$PYEXTLINK -o $TARGET $PYEXTLINKFLAGS "\ "$SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_PYEXTRUNTIME" if platform == 'darwin': pyext_linkcom += ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' return pyext_cccom, pyext_cxxcom, pyext_linkcom def set_basic_vars(env): # Set construction variables which are independant on whether we are using # distutils or not. env['PYEXTCPPPATH'] = SCons.Util.CLVar('$PYEXTINCPATH') env['_PYEXTCPPINCFLAGS'] = '$( ${_concat(INCPREFIX, PYEXTCPPPATH, '\ 'INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' env['PYEXTOBJSUFFIX'] = '$SHOBJSUFFIX' env['PYEXTOBJPREFIX'] = '$SHOBJPREFIX' env['PYEXTRUNTIME'] = SCons.Util.CLVar("") # XXX: this should be handled with different flags env['_PYEXTRUNTIME'] = '$( ${_concat(LIBLINKPREFIX, PYEXTRUNTIME, '\ 'LIBLINKSUFFIX, __env__)} $)' # XXX: This won't work in all cases (using mingw, for example). To make # this work, we need to know whether PYEXTCC accepts /c and /Fo or -c -o. # This is difficult with the current way tools work in scons. 
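    # pyext_coms() selects MSVC-style (/c, /Fo) or GCC-style (-c, -o)
    # command templates based on sys.platform.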
pycc, pycxx, pylink = pyext_coms(sys.platform) env['PYEXTLINKFLAGSEND'] = SCons.Util.CLVar('$LINKFLAGSEND') env['PYEXTCCCOM'] = pycc env['PYEXTCXXCOM'] = pycxx env['PYEXTLINKCOM'] = pylink def _set_configuration_nodistutils(env): # Set env variables to sensible values when not using distutils def_cfg = {'PYEXTCC' : '$SHCC', 'PYEXTCFLAGS' : '$SHCFLAGS', 'PYEXTCCFLAGS' : '$SHCCFLAGS', 'PYEXTCXX' : '$SHCXX', 'PYEXTCXXFLAGS' : '$SHCXXFLAGS', 'PYEXTLINK' : '$LDMODULE', 'PYEXTSUFFIX' : '$LDMODULESUFFIX', 'PYEXTPREFIX' : ''} if sys.platform == 'darwin': def_cfg['PYEXTSUFFIX'] = '.so' for k, v in def_cfg.items(): ifnotset(env, k, v) ifnotset(env, 'PYEXT_ALLOW_UNDEFINED', SCons.Util.CLVar('$ALLOW_UNDEFINED')) ifnotset(env, 'PYEXTLINKFLAGS', SCons.Util.CLVar('$LDMODULEFLAGS')) env.AppendUnique(PYEXTLINKFLAGS = env['PYEXT_ALLOW_UNDEFINED']) def ifnotset(env, name, value): if name not in env: env[name] = value def set_configuration(env, use_distutils): """Set construction variables which are platform dependants. If use_distutils == True, use distutils configuration. Otherwise, use 'sensible' default. Any variable already defined is untouched.""" # We define commands as strings so that we can either execute them using # eval (same python for scons and distutils) or by executing them through # the shell. dist_cfg = {'PYEXTCC': ("sysconfig.get_config_var('CC')", False), 'PYEXTCFLAGS': ("sysconfig.get_config_var('CFLAGS')", True), 'PYEXTCCSHARED': ("sysconfig.get_config_var('CCSHARED')", False), 'PYEXTLINKFLAGS': ("sysconfig.get_config_var('LDFLAGS')", True), 'PYEXTLINK': ("sysconfig.get_config_var('LDSHARED')", False), 'PYEXTINCPATH': ("sysconfig.get_python_inc()", False), 'PYEXTSUFFIX': ("sysconfig.get_config_var('SO')", False)} from distutils import sysconfig # We set the python path even when not using distutils, because we rarely # want to change this, even if not using distutils ifnotset(env, 'PYEXTINCPATH', sysconfig.get_python_inc()) if use_distutils: for k, (v, should_split) in dist_cfg.items(): val = eval(v) if should_split: val = val.split() ifnotset(env, k, val) else: _set_configuration_nodistutils(env) def generate(env): """Add Builders and construction variables for python extensions to an Environment.""" if 'PYEXT_USE_DISTUTILS' not in env: env['PYEXT_USE_DISTUTILS'] = False # This sets all constructions variables used for pyext builders. set_basic_vars(env) set_configuration(env, env['PYEXT_USE_DISTUTILS']) # Create the PythonObject builder pyobj = createPythonObjectBuilder(env) action = SCons.Action.Action("$PYEXTCCCOM", "$PYEXTCCCOMSTR") pyobj.add_emitter('.c', SCons.Defaults.SharedObjectEmitter) pyobj.add_action('.c', action) action = SCons.Action.Action("$PYEXTCXXCOM", "$PYEXTCXXCOMSTR") pyobj.add_emitter('$CXXFILESUFFIX', SCons.Defaults.SharedObjectEmitter) pyobj.add_action('$CXXFILESUFFIX', action) # Create the PythonExtension builder createPythonExtensionBuilder(env) def exists(env): try: # This is not quite right: if someone defines all variables by himself, # it would work without distutils from distutils import sysconfig return True except ImportError: return False Cython-0.26.1/Tools/cython.st0000664000175000017500000000115212542002467016605 0ustar stefanstefan00000000000000/** * Name: pyrex * Description: Pyrex - a Language for Writing Python Extension Modules * Author: Markku Rossi */ state pyrex extends python { /* Additional keywords. 
(build-re '( NULL as cdef char ctypedef double enum extern float include int long private public short signed sizeof struct union unsigned void )) */ /\b(NULL|as|c(def|har|typedef)|double|e(num|xtern)|float|in(clude|t)\ |long|p(rivate|ublic)|s(hort|i(gned|zeof)|truct)|un(ion|signed)|void)\b/ { keyword_face(true); language_print($0); keyword_face(false); } } /* Local variables: mode: c End: */ Cython-0.26.1/Tools/cython-numpy-mode-kate.xml0000664000175000017500000012774213143605603022006 0ustar stefanstefan00000000000000 as cimport import from as DEF IF ELIF ELSE include class cpdef def cdef ctypedef extern api del global property nogil gil inline readonly public and assert in is by not or sizeof print break continue elif else except finally for if pass raise return try while with __import__ abs all any apply basestring buffer callable chr classmethod cmp coerce compile delattr dir divmod enumerate eval execfile filter getattr hasattr hash hex id input intern isinstance issubclass iter len map max min oct open ord pow range raw_input reduce reload repr reversed round setattr sorted staticmethod sum super type unichr unicode xrange zip unsigned void enum double long short char Py_ssize_t Py_intptr_t Py_buffer bint struct union enum int float object list tuple str dict set frozenset slice bool complex file np numpy numpy cython dtype flatiter broadcast ndarray int8_t int16_t int32_t int64_t uint8_t uint16_t uint32_t uint64_t float32_t float64_t complex64_t complex128_t int_t long_t uint_t ulong_t float_t double_t longdouble_t cfloat_t cdouble_t clongdouble_t complex_t npy_int8 npy_int16 npy_int32 npy_int64 npy_int96 npy_int128 npy_uint8 npy_uint16 npy_uint32 npy_uint64 npy_uint96 npy_uint128 npy_float32 npy_float64 npy_float80 npy_float96 npy_float128 npy_complex64 npy_complex128 npy_complex120 npy_complex192 npy_complex256 npy_cfloat npy_cdouble npy_clongdouble npy_bool npy_byte npy_short npy_int npy_long npy_longlong npy_ubyte npy_ushort npy_uint npy_ulong npy_ulonglong npy_float npy_double npy_longdouble npy_intp DataSource MachAr PackageLoader RankWarning Tester abs absolute add add_docstring add_newdoc alen all allclose alltrue alterdot amax amin angle any append apply_along_axis apply_over_axes arange arccos arccosh arcsin arcsinh arctan arctan2 arctanh argmax argmin argsort argwhere around array array2string array_equal array_equiv array_repr array_split array_str asanyarray asarray asarray_chkfinite ascontiguousarray asfarray asfortranarray asmatrix asscalar atleast_1d atleast_2d atleast_3d average bartlett base_repr bench binary_repr bincount bitwise_and bitwise_not bitwise_or bitwise_xor blackman bmat bool bool8 bool_ broadcast broadcast_arrays byte byte_bounds can_cast cdouble ceil cfloat character chararray choose clip clongdouble clongfloat column_stack common_type compare_chararrays complex complex128 complex192 complex64 complex_ complexfloating compress concatenate conj conjugate convolve copy corrcoef correlate cos cosh cov cross csingle cumprod cumproduct cumsum deg2rad degrees delete deprecate deprecate_with_doc diag diagflat diagonal diff digitize disp divide dot double dsplit dstack dtype ediff1d empty empty_like equal errstate exp exp2 expand_dims expm1 extract eye fabs fastCopyAndTranspose find_common_type finfo fix flatiter flatnonzero flexible fliplr flipud float float32 float64 float96 float_ floating floor floor_divide fmax fmin fmod frexp frombuffer fromfile fromfunction fromiter frompyfunc fromregex fromstring fv generic genfromtxt get_array_wrap get_include 
get_numarray_include get_numpy_include get_printoptions getbuffer getbufsize geterr geterrcall geterrobj gradient greater greater_equal hamming hanning histogram histogram2d histogramdd hsplit hstack hypot i0 identity imag indices inexact info inner insert int int0 int16 int32 int64 int8 int_ int_asbuffer intc integer interp intersect1d intersect1d_nu intp invert ipmt irr iscomplex iscomplexobj isfinite isfortran isinf isnan isneginf isposinf isreal isrealobj isscalar issctype issubclass_ issubdtype issubsctype iterable ix_ kaiser kron ldexp left_shift less less_equal lexsort linspace load loads loadtxt log log10 log1p log2 logaddexp logaddexp2 logical_and logical_not logical_or logical_xor logspace long longcomplex longdouble longfloat longlong lookfor mafromtxt mat matrix max maximum maximum_sctype may_share_memory mean median memmap meshgrid min minimum mintypecode mirr mod modf msort multiply nan_to_num nanargmax nanargmin nanmax nanmin nansum ndarray ndenumerate ndfromtxt ndim ndindex negative newbuffer nonzero not_equal nper npv number obj2sctype object object0 object_ ones ones_like outer packbits piecewise pkgload place pmt poly poly1d polyadd polyder polydiv polyfit polyint polymul polysub polyval power ppmt prod product ptp put putmask pv rad2deg radians rank rate ravel real real_if_close recarray recfromcsv recfromtxt reciprocal record remainder repeat require reshape resize restoredot right_shift rint roll rollaxis roots rot90 round round_ row_stack safe_eval save savetxt savez sctype2char searchsorted select set_numeric_ops set_printoptions set_string_function setbufsize setdiff1d seterr seterrcall seterrobj setmember1d setxor1d shape short show_config sign signbit signedinteger sin sinc single singlecomplex sinh size sometrue sort sort_complex source split sqrt square squeeze std str str_ string0 string_ subtract sum swapaxes take tan tanh tensordot test tile trace transpose trapz tri tril trim_zeros triu true_divide trunc typename ubyte ufunc uint uint0 uint16 uint32 uint64 uint8 uintc uintp ulonglong unicode unicode0 unicode_ union1d unique unique1d unpackbits unravel_index unsignedinteger unwrap ushort vander var vdot vectorize void void0 vsplit vstack where who zeros zeros_like __future__ __import__ __name__ __cythonbufferdefaults__ __weakref__ None self True False NotImplemented Ellipsis NULL __new__ __init__ __cinit__ __dealloc__ __cmp__ __richcmp__ __str__ __repr__ __hash__ __call__ __iter__ __getattr__ __setattr__ __delattr__ __add__ __sub__ __mul__ __div__ __floordiv__ __truediv__ __mod__ __divmod__ __pow__ __neg__ __pos__ __abs__ __nonzero__ __invert__ __lshift__ __rshift__ __and__ __or__ __xor__ __int__ __long__ __float__ __oct__ __hex__ __index__ __iadd__ __isub__ __imul__ __idiv__ __ifloordiv__ __itruediv__ __imod__ __ipow__ __ilshift__ __irshift__ __iand__ __ior__ __ixor__ __len__ __getitem__ __setitem__ __delitem__ __getslice__ __setslice__ __delslice__ __contains__ __next__ __getreadbuffer__ __getwritebuffer__ __getsegcount__ __getcharbuffer__ __get__ __set__ __delete__ __getbuffer__ __releasebuffer__ ArithmeticError AssertionError AttributeError BaseException DeprecationWarning EnvironmentError EOFError Exception FloatingPointError FutureWarning GeneratorExit IOError ImportError ImportWarning IndexError KeyError KeyboardInterrupt LookupError MemoryError NameError NotImplementedError OSError OverflowError PendingDeprecationWarning ReferenceError RuntimeError RuntimeWarning StandardError StopIteration SyntaxError SyntaxWarning SystemError SystemExit TypeError 
UnboundLocalError UserWarning UnicodeError UnicodeWarning UnicodeEncodeError UnicodeDecodeError UnicodeTranslateError ValueError Warning WindowsError ZeroDivisionError Cython-0.26.1/Tools/cython-epydoc.py0000664000175000017500000000263412542002467020076 0ustar stefanstefan00000000000000#! /usr/bin/env python # -------------------------------------------------------------------- import re from epydoc import docstringparser as dsp CYTHON_SIGNATURE_RE = re.compile( # Class name (for builtin methods) r'^\s*((?P\w+)\.)?' + # The function name r'(?P\w+)' + # The parameters r'\(((?P(?:self|cls|mcs)),?)?(?P.*)\)' + # The return value (optional) r'(\s*(->)\s*(?P\w+(?:\s*\w+)))?' + # The end marker r'\s*(?:\n|$)') parse_signature = dsp.parse_function_signature def parse_function_signature(func_doc, doc_source, docformat, parse_errors): PYTHON_SIGNATURE_RE = dsp._SIGNATURE_RE assert PYTHON_SIGNATURE_RE is not CYTHON_SIGNATURE_RE try: dsp._SIGNATURE_RE = CYTHON_SIGNATURE_RE found = parse_signature(func_doc, doc_source, docformat, parse_errors) dsp._SIGNATURE_RE = PYTHON_SIGNATURE_RE if not found: found = parse_signature(func_doc, doc_source, docformat, parse_errors) return found finally: dsp._SIGNATURE_RE = PYTHON_SIGNATURE_RE dsp.parse_function_signature = parse_function_signature # -------------------------------------------------------------------- from epydoc.cli import cli cli() # -------------------------------------------------------------------- Cython-0.26.1/setup.py0000775000175000017500000002724013143605603015353 0ustar stefanstefan00000000000000#!/usr/bin/env python try: from setuptools import setup, Extension except ImportError: from distutils.core import setup, Extension import os import stat import subprocess import textwrap import sys import platform is_cpython = platform.python_implementation() == 'CPython' if sys.platform == "darwin": # Don't create resource files on OS X tar. 
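    # Setting these environment variables keeps BSD tar from adding
    # AppleDouble ("._*") metadata files to the source archive.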
os.environ['COPY_EXTENDED_ATTRIBUTES_DISABLE'] = 'true' os.environ['COPYFILE_DISABLE'] = 'true' setup_args = {} def add_command_class(name, cls): cmdclasses = setup_args.get('cmdclass', {}) cmdclasses[name] = cls setup_args['cmdclass'] = cmdclasses from distutils.command.sdist import sdist as sdist_orig class sdist(sdist_orig): def run(self): self.force_manifest = 1 if (sys.platform != "win32" and os.path.isdir('.git')): assert os.system("git rev-parse --verify HEAD > .gitrev") == 0 sdist_orig.run(self) add_command_class('sdist', sdist) if sys.version_info[:2] == (3, 2): import lib2to3.refactor from distutils.command.build_py \ import build_py_2to3 as build_py # need to convert sources to Py3 on installation with open('2to3-fixers.txt') as f: fixers = [line.strip() for line in f if line.strip()] build_py.fixer_names = fixers add_command_class("build_py", build_py) pxd_include_dirs = [ directory for directory, dirs, files in os.walk(os.path.join('Cython', 'Includes')) if '__init__.pyx' in files or '__init__.pxd' in files or directory == os.path.join('Cython', 'Includes') or directory == os.path.join('Cython', 'Includes', 'Deprecated')] pxd_include_patterns = [ p+'/*.pxd' for p in pxd_include_dirs ] + [ p+'/*.pyx' for p in pxd_include_dirs ] setup_args['package_data'] = { 'Cython.Plex' : ['*.pxd'], 'Cython.Compiler' : ['*.pxd'], 'Cython.Runtime' : ['*.pyx', '*.pxd'], 'Cython.Utility' : ['*.pyx', '*.pxd', '*.c', '*.h', '*.cpp'], 'Cython' : [ p[7:] for p in pxd_include_patterns ], } # This dict is used for passing extra arguments that are setuptools # specific to setup setuptools_extra_args = {} # tells whether to include cygdb (the script and the Cython.Debugger package include_debugger = sys.version_info[:2] > (2, 5) if 'setuptools' in sys.modules: setuptools_extra_args['zip_safe'] = False setuptools_extra_args['entry_points'] = { 'console_scripts': [ 'cython = Cython.Compiler.Main:setuptools_main', 'cythonize = Cython.Build.Cythonize:main' ] } scripts = [] else: if os.name == "posix": scripts = ["bin/cython", 'bin/cythonize'] else: scripts = ["cython.py", "cythonize.py"] if include_debugger: if 'setuptools' in sys.modules: setuptools_extra_args['entry_points']['console_scripts'].append( 'cygdb = Cython.Debugger.Cygdb:main') else: if os.name == "posix": scripts.append('bin/cygdb') else: scripts.append('cygdb.py') def compile_cython_modules(profile=False, compile_more=False, cython_with_refnanny=False): source_root = os.path.abspath(os.path.dirname(__file__)) compiled_modules = [ "Cython.Plex.Scanners", "Cython.Plex.Actions", "Cython.Compiler.Lexicon", "Cython.Compiler.Scanning", "Cython.Compiler.Parsing", "Cython.Compiler.Visitor", "Cython.Compiler.FlowControl", "Cython.Compiler.Code", "Cython.Runtime.refnanny", # "Cython.Compiler.FusedNode", "Cython.Tempita._tempita", ] if compile_more: compiled_modules.extend([ "Cython.Build.Dependencies", "Cython.Compiler.ParseTreeTransforms", "Cython.Compiler.Nodes", "Cython.Compiler.ExprNodes", "Cython.Compiler.ModuleNode", "Cython.Compiler.Optimize", ]) from distutils.spawn import find_executable from distutils.sysconfig import get_python_inc pgen = find_executable( 'pgen', os.pathsep.join([os.environ['PATH'], os.path.join(get_python_inc(), '..', 'Parser')])) if not pgen: sys.stderr.write("Unable to find pgen, not compiling formal grammar.\n") else: parser_dir = os.path.join(os.path.dirname(__file__), 'Cython', 'Parser') grammar = os.path.join(parser_dir, 'Grammar') subprocess.check_call([ pgen, os.path.join(grammar), os.path.join(parser_dir, 
'graminit.h'), os.path.join(parser_dir, 'graminit.c'), ]) cst_pyx = os.path.join(parser_dir, 'ConcreteSyntaxTree.pyx') if os.stat(grammar)[stat.ST_MTIME] > os.stat(cst_pyx)[stat.ST_MTIME]: mtime = os.stat(grammar)[stat.ST_MTIME] os.utime(cst_pyx, (mtime, mtime)) compiled_modules.extend([ "Cython.Parser.ConcreteSyntaxTree", ]) defines = [] if cython_with_refnanny: defines.append(('CYTHON_REFNANNY', '1')) extensions = [] for module in compiled_modules: source_file = os.path.join(source_root, *module.split('.')) if os.path.exists(source_file + ".py"): pyx_source_file = source_file + ".py" else: pyx_source_file = source_file + ".pyx" dep_files = [] if os.path.exists(source_file + '.pxd'): dep_files.append(source_file + '.pxd') if '.refnanny' in module: defines_for_module = [] else: defines_for_module = defines extensions.append(Extension( module, sources=[pyx_source_file], define_macros=defines_for_module, depends=dep_files)) # XXX hack around setuptools quirk for '*.pyx' sources extensions[-1].sources[0] = pyx_source_file if sys.version_info[:2] == (3, 2): # Python 3.2: can only run Cython *after* running 2to3 build_ext = _defer_cython_import_in_py32(source_root, profile) else: from Cython.Distutils import build_ext if profile: from Cython.Compiler.Options import get_directive_defaults get_directive_defaults()['profile'] = True sys.stderr.write("Enabled profiling for the Cython binary modules\n") # not using cythonize() here to let distutils decide whether building extensions was requested add_command_class("build_ext", build_ext) setup_args['ext_modules'] = extensions def _defer_cython_import_in_py32(source_root, profile=False): # Python 3.2: can only run Cython *after* running 2to3 # => re-import Cython inside of build_ext from distutils.command.build_ext import build_ext as build_ext_orig class build_ext(build_ext_orig): # we must keep the original modules alive to make sure # their code keeps working when we remove them from # sys.modules dead_modules = [] def __reimport(self): if self.dead_modules: return # add path where 2to3 installed the transformed sources # and make sure Python (re-)imports them from there already_imported = [ module for module in sys.modules if module == 'Cython' or module.startswith('Cython.') ] keep_alive = self.dead_modules.append for module in already_imported: keep_alive(sys.modules[module]) del sys.modules[module] sys.path.insert(0, os.path.join(source_root, self.build_lib)) def build_extensions(self): self.__reimport() if profile: from Cython.Compiler.Options import directive_defaults directive_defaults['profile'] = True print("Enabled profiling for the Cython binary modules") from Cython.Build.Dependencies import cythonize self.distribution.ext_modules[:] = cythonize( self.distribution.ext_modules) build_ext_orig.build_extensions(self) return build_ext cython_profile = '--cython-profile' in sys.argv if cython_profile: sys.argv.remove('--cython-profile') try: sys.argv.remove("--cython-compile-all") cython_compile_more = True except ValueError: cython_compile_more = False try: sys.argv.remove("--cython-with-refnanny") cython_with_refnanny = True except ValueError: cython_with_refnanny = False try: sys.argv.remove("--no-cython-compile") compile_cython_itself = False except ValueError: compile_cython_itself = True if compile_cython_itself and (is_cpython or cython_compile_more): compile_cython_modules(cython_profile, cython_compile_more, cython_with_refnanny) setup_args.update(setuptools_extra_args) from Cython import __version__ as version def dev_status(): if 
'b' in version or 'c' in version: # 1b1, 1beta1, 2rc1, ... return 'Development Status :: 4 - Beta' elif 'a' in version: # 1a1, 1alpha1, ... return 'Development Status :: 3 - Alpha' else: return 'Development Status :: 5 - Production/Stable' packages = [ 'Cython', 'Cython.Build', 'Cython.Compiler', 'Cython.Runtime', 'Cython.Distutils', 'Cython.Plex', 'Cython.Tests', 'Cython.Build.Tests', 'Cython.Compiler.Tests', 'Cython.Utility', 'Cython.Tempita', 'pyximport', ] if include_debugger: packages.append('Cython.Debugger') packages.append('Cython.Debugger.Tests') # it's enough to do this for Py2.5+: setup_args['package_data']['Cython.Debugger.Tests'] = ['codefile', 'cfuncs.c'] setup( name='Cython', version=version, url='http://cython.org/', author='Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al.', author_email='cython-devel@python.org', description="The Cython compiler for writing C extensions for the Python language.", long_description=textwrap.dedent("""\ The Cython language makes writing C extensions for the Python language as easy as Python itself. Cython is a source code translator based on Pyrex_, but supports more cutting edge functionality and optimizations. The Cython language is a superset of the Python language (almost all Python code is also valid Cython code), but Cython additionally supports optional static typing to natively call C functions, operate with C++ classes and declare fast C types on variables and class attributes. This allows the compiler to generate very efficient C code from Cython code. This makes Cython the ideal language for writing glue code for external C/C++ libraries, and for fast C modules that speed up the execution of Python code. Note that for one-time builds, e.g. for CI/testing, on platforms that are not covered by one of the wheel packages provided on PyPI, it is substantially faster than a full source build to install an uncompiled (slower) version of Cython with:: pip install Cython --install-option="--no-cython-compile" .. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ """), license='Apache', classifiers=[ dev_status(), "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: C", "Programming Language :: Cython", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Compilers", "Topic :: Software Development :: Libraries :: Python Modules" ], scripts=scripts, packages=packages, py_modules=["cython"], **setup_args ) Cython-0.26.1/PKG-INFO0000664000175000017500000000424713151203436014732 0ustar stefanstefan00000000000000Metadata-Version: 1.1 Name: Cython Version: 0.26.1 Summary: The Cython compiler for writing C extensions for the Python language. Home-page: http://cython.org/ Author: Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al. Author-email: cython-devel@python.org License: Apache Description: The Cython language makes writing C extensions for the Python language as easy as Python itself. Cython is a source code translator based on Pyrex_, but supports more cutting edge functionality and optimizations. 
The Cython language is a superset of the Python language (almost all Python code is also valid Cython code), but Cython additionally supports optional static typing to natively call C functions, operate with C++ classes and declare fast C types on variables and class attributes. This allows the compiler to generate very efficient C code from Cython code. This makes Cython the ideal language for writing glue code for external C/C++ libraries, and for fast C modules that speed up the execution of Python code. Note that for one-time builds, e.g. for CI/testing, on platforms that are not covered by one of the wheel packages provided on PyPI, it is substantially faster than a full source build to install an uncompiled (slower) version of Cython with:: pip install Cython --install-option="--no-cython-compile" .. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: C Classifier: Programming Language :: Cython Classifier: Topic :: Software Development :: Code Generators Classifier: Topic :: Software Development :: Compilers Classifier: Topic :: Software Development :: Libraries :: Python Modules Cython-0.26.1/setupegg.py0000775000175000017500000000024412542002467016032 0ustar stefanstefan00000000000000#!/usr/bin/env python """Wrapper to run setup.py using setuptools.""" import setuptools with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec')) Cython-0.26.1/pyximport/0000775000175000017500000000000013151203436015701 5ustar stefanstefan00000000000000Cython-0.26.1/pyximport/PKG-INFO0000664000175000017500000000046612542002467017010 0ustar stefanstefan00000000000000Metadata-Version: 1.0 Name: pyximport Version: 1.0 Summary: Hooks to build and run Pyrex files as if they were simple Python files Home-page: http://www.prescod.net/pyximport Author: Paul Prescod Author-email: paul@prescod.net License: Python Description: UNKNOWN Keywords: pyrex import hook Platform: UNKNOWN Cython-0.26.1/pyximport/pyximport.py0000664000175000017500000005520613143605603020341 0ustar stefanstefan00000000000000""" Import hooks; when installed with the install() function, these hooks allow importing .pyx files as if they were Python modules. If you want the hook installed every time you run Python you can add it to your Python version by adding these lines to sitecustomize.py (which you can create from scratch in site-packages if it doesn't exist there or somewhere else on your python path):: import pyximport pyximport.install() For instance on the Mac with a non-system Python 2.3, you could create sitecustomize.py with only those two lines at /usr/local/lib/python2.3/site-packages/sitecustomize.py . A custom distutils.core.Extension instance and setup() args (Distribution) for for the build can be defined by a .pyxbld file like: # examplemod.pyxbld def make_ext(modname, pyxfilename): from distutils.extension import Extension return Extension(name = modname, sources=[pyxfilename, 'hello.c'], include_dirs=['/myinclude'] ) def make_setup_args(): return dict(script_args=["--compiler=mingw32"]) Extra dependencies can be defined by a .pyxdep . See README. 
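For example, extra dependencies for the hypothetical module above could be
declared in a file "examplemod.pyxdep" next to it, containing one file name
or glob pattern per line (the names below are purely illustrative):

    examplemod.pxd
    include/*.h

Whenever one of the listed files is newer than the .pyx file, pyximport
forces a rebuild of the module.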
Since Cython 0.11, the :mod:`pyximport` module also has experimental compilation support for normal Python modules. This allows you to automatically run Cython on every .pyx and .py module that Python imports, including parts of the standard library and installed packages. Cython will still fail to compile a lot of Python modules, in which case the import mechanism will fall back to loading the Python source modules instead. The .py import mechanism is installed like this:: pyximport.install(pyimport = True) Running this module as a top-level script will run a test and then print the documentation. This code is based on the Py2.3+ import protocol as described in PEP 302. """ import glob import imp import os import sys from zipimport import zipimporter, ZipImportError mod_name = "pyximport" PYX_EXT = ".pyx" PYXDEP_EXT = ".pyxdep" PYXBLD_EXT = ".pyxbld" DEBUG_IMPORT = False def _print(message, args): if args: message = message % args print(message) def _debug(message, *args): if DEBUG_IMPORT: _print(message, args) def _info(message, *args): _print(message, args) # Performance problem: for every PYX file that is imported, we will # invoke the whole distutils infrastructure even if the module is # already built. It might be more efficient to only do it when the # mod time of the .pyx is newer than the mod time of the .so but # the question is how to get distutils to tell me the name of the .so # before it builds it. Maybe it is easy...but maybe the peformance # issue isn't real. def _load_pyrex(name, filename): "Load a pyrex file given a name and filename." def get_distutils_extension(modname, pyxfilename, language_level=None): # try: # import hashlib # except ImportError: # import md5 as hashlib # extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() # modname = modname + extra extension_mod,setup_args = handle_special_build(modname, pyxfilename) if not extension_mod: if not isinstance(pyxfilename, str): # distutils is stupid in Py2 and requires exactly 'str' # => encode accidentally coerced unicode strings back to str pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) from distutils.extension import Extension extension_mod = Extension(name = modname, sources=[pyxfilename]) if language_level is not None: extension_mod.cython_directives = {'language_level': language_level} return extension_mod,setup_args def handle_special_build(modname, pyxfilename): special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT ext = None setup_args={} if os.path.exists(special_build): # globls = {} # locs = {} # execfile(special_build, globls, locs) # ext = locs["make_ext"](modname, pyxfilename) mod = imp.load_source("XXXX", special_build, open(special_build)) make_ext = getattr(mod,'make_ext',None) if make_ext: ext = make_ext(modname, pyxfilename) assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build make_setup_args = getattr(mod, 'make_setup_args',None) if make_setup_args: setup_args = make_setup_args() assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" % special_build) assert set or setup_args, ("neither make_ext nor make_setup_args %s" % special_build) ext.sources = [os.path.join(os.path.dirname(special_build), source) for source in ext.sources] return ext, setup_args def handle_dependencies(pyxfilename): testing = '_test_files' in globals() dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT # by default let distutils decide whether to rebuild on its own # (it has a better idea of what the output file will be) # 
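    # Rough behaviour sketch (names purely illustrative): for a plain module,
    # get_distutils_extension('foo', 'foo.pyx') falls through to returning
    #     (Extension(name='foo', sources=['foo.pyx']), {})
    # while a neighbouring "foo.pyxbld" file (see handle_special_build below)
    # can supply its own Extension instance and setup() arguments instead.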
but we know more about dependencies so force a rebuild if # some of the dependencies are newer than the pyxfile. if os.path.exists(dependfile): depends = open(dependfile).readlines() depends = [depend.strip() for depend in depends] # gather dependencies in the "files" variable # the dependency file is itself a dependency files = [dependfile] for depend in depends: fullpath = os.path.join(os.path.dirname(dependfile), depend) files.extend(glob.glob(fullpath)) # only for unit testing to see we did the right thing if testing: _test_files[:] = [] #$pycheck_no # if any file that the pyxfile depends upon is newer than # the pyx file, 'touch' the pyx file so that distutils will # be tricked into rebuilding it. for file in files: from distutils.dep_util import newer if newer(file, pyxfilename): _debug("Rebuilding %s because of %s", pyxfilename, file) filetime = os.path.getmtime(file) os.utime(pyxfilename, (filetime, filetime)) if testing: _test_files.append(file) def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename handle_dependencies(pyxfilename) extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) build_in_temp = pyxargs.build_in_temp sargs = pyxargs.setup_args.copy() sargs.update(setup_args) build_in_temp = sargs.pop('build_in_temp',build_in_temp) from . import pyxbuild so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, build_in_temp=build_in_temp, pyxbuild_dir=pyxbuild_dir, setup_args=sargs, inplace=inplace, reload_support=pyxargs.reload_support) assert os.path.exists(so_path), "Cannot find: %s" % so_path junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;) junkstuff = glob.glob(junkpath) for path in junkstuff: if path != so_path: try: os.remove(path) except IOError: _info("Couldn't remove %s", path) return so_path def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False, build_inplace=False, language_level=None, so_path=None): try: if so_path is None: if is_package: module_name = name + '.__init__' else: module_name = name so_path = build_module(module_name, pyxfilename, pyxbuild_dir, inplace=build_inplace, language_level=language_level) mod = imp.load_dynamic(name, so_path) if is_package and not hasattr(mod, '__path__'): mod.__path__ = [os.path.dirname(so_path)] assert mod.__file__ == so_path, (mod.__file__, so_path) except Exception: if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'): # try to fall back to normal import mod = imp.load_source(name, pyxfilename) assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename) else: tb = sys.exc_info()[2] import traceback exc = ImportError("Building module %s failed: %s" % ( name, traceback.format_exception_only(*sys.exc_info()[:2]))) if sys.version_info[0] >= 3: raise exc.with_traceback(tb) else: exec("raise exc, None, tb", {'exc': exc, 'tb': tb}) return mod # import hooks class PyxImporter(object): """A meta-path importer for .pyx files. 
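    Instances are not normally created directly; the install() function
    below builds one and appends it to sys.meta_path, roughly (illustrative)::

        importer = PyxImporter(pyxbuild_dir=build_dir)
        sys.meta_path.append(importer)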
""" def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None): self.extension = extension self.pyxbuild_dir = pyxbuild_dir self.inplace = inplace self.language_level = language_level def find_module(self, fullname, package_path=None): if fullname in sys.modules and not pyxargs.reload_support: return None # only here when reload() try: fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path) if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open! if pathname and ty == imp.PKG_DIRECTORY: pkg_file = os.path.join(pathname, '__init__'+self.extension) if os.path.isfile(pkg_file): return PyxLoader(fullname, pathname, init_path=pkg_file, pyxbuild_dir=self.pyxbuild_dir, inplace=self.inplace, language_level=self.language_level) if pathname and pathname.endswith(self.extension): return PyxLoader(fullname, pathname, pyxbuild_dir=self.pyxbuild_dir, inplace=self.inplace, language_level=self.language_level) if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next! return None # find .pyx fast, when .so/.pyd exist --inplace pyxpath = os.path.splitext(pathname)[0]+self.extension if os.path.isfile(pyxpath): return PyxLoader(fullname, pyxpath, pyxbuild_dir=self.pyxbuild_dir, inplace=self.inplace, language_level=self.language_level) # .so/.pyd's on PATH should not be remote from .pyx's # think no need to implement PyxArgs.importer_search_remote here? except ImportError: pass # searching sys.path ... #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path mod_parts = fullname.split('.') module_name = mod_parts[-1] pyx_module_name = module_name + self.extension # this may work, but it returns the file content, not its path #import pkgutil #pyx_source = pkgutil.get_data(package, pyx_module_name) paths = package_path or sys.path for path in paths: pyx_data = None if not path: path = os.getcwd() elif os.path.isfile(path): try: zi = zipimporter(path) pyx_data = zi.get_data(pyx_module_name) except (ZipImportError, IOError, OSError): continue # Module not found. # unzip the imported file into the build dir # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file path = self.pyxbuild_dir elif not os.path.isabs(path): path = os.path.abspath(path) pyx_module_path = os.path.join(path, pyx_module_name) if pyx_data is not None: if not os.path.exists(path): try: os.makedirs(path) except OSError: # concurrency issue? if not os.path.exists(path): raise with open(pyx_module_path, "wb") as f: f.write(pyx_data) elif not os.path.isfile(pyx_module_path): continue # Module not found. return PyxLoader(fullname, pyx_module_path, pyxbuild_dir=self.pyxbuild_dir, inplace=self.inplace, language_level=self.language_level) # not found, normal package, not a .pyx file, none of our business _debug("%s not found" % fullname) return None class PyImporter(PyxImporter): """A meta-path importer for normal .py files. 
""" def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None): if language_level is None: language_level = sys.version_info[0] self.super = super(PyImporter, self) self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace, language_level=language_level) self.uncompilable_modules = {} self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', 'distutils.extension', 'distutils.sysconfig'] def find_module(self, fullname, package_path=None): if fullname in sys.modules: return None if fullname.startswith('Cython.'): return None if fullname in self.blocked_modules: # prevent infinite recursion return None if _lib_loader.knows(fullname): return _lib_loader _debug("trying import of module '%s'", fullname) if fullname in self.uncompilable_modules: path, last_modified = self.uncompilable_modules[fullname] try: new_last_modified = os.stat(path).st_mtime if new_last_modified > last_modified: # import would fail again return None except OSError: # module is no longer where we found it, retry the import pass self.blocked_modules.append(fullname) try: importer = self.super.find_module(fullname, package_path) if importer is not None: if importer.init_path: path = importer.init_path real_name = fullname + '.__init__' else: path = importer.path real_name = fullname _debug("importer found path %s for module %s", path, real_name) try: so_path = build_module( real_name, path, pyxbuild_dir=self.pyxbuild_dir, language_level=self.language_level, inplace=self.inplace) _lib_loader.add_lib(fullname, path, so_path, is_package=bool(importer.init_path)) return _lib_loader except Exception: if DEBUG_IMPORT: import traceback traceback.print_exc() # build failed, not a compilable Python module try: last_modified = os.stat(path).st_mtime except OSError: last_modified = 0 self.uncompilable_modules[fullname] = (path, last_modified) importer = None finally: self.blocked_modules.pop() return importer class LibLoader(object): def __init__(self): self._libs = {} def load_module(self, fullname): try: source_path, so_path, is_package = self._libs[fullname] except KeyError: raise ValueError("invalid module %s" % fullname) _debug("Loading shared library module '%s' from %s", fullname, so_path) return load_module(fullname, source_path, so_path=so_path, is_package=is_package) def add_lib(self, fullname, path, so_path, is_package): self._libs[fullname] = (path, so_path, is_package) def knows(self, fullname): return fullname in self._libs _lib_loader = LibLoader() class PyxLoader(object): def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None, inplace=False, language_level=None): _debug("PyxLoader created for loading %s from %s (init path: %s)", fullname, path, init_path) self.fullname = fullname self.path, self.init_path = path, init_path self.pyxbuild_dir = pyxbuild_dir self.inplace = inplace self.language_level = language_level def load_module(self, fullname): assert self.fullname == fullname, ( "invalid module, expected %s, got %s" % ( self.fullname, fullname)) if self.init_path: # package #print "PACKAGE", fullname module = load_module(fullname, self.init_path, self.pyxbuild_dir, is_package=True, build_inplace=self.inplace, language_level=self.language_level) module.__path__ = [self.path] else: #print "MODULE", fullname module = load_module(fullname, self.path, self.pyxbuild_dir, build_inplace=self.inplace, language_level=self.language_level) return module #install args class PyxArgs(object): build_dir=True build_in_temp=True setup_args={} #None ##pyxargs=None def 
_have_importers(): has_py_importer = False has_pyx_importer = False for importer in sys.meta_path: if isinstance(importer, PyxImporter): if isinstance(importer, PyImporter): has_py_importer = True else: has_pyx_importer = True return has_py_importer, has_pyx_importer def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, setup_args=None, reload_support=False, load_py_module_on_import_failure=False, inplace=False, language_level=None): """Main entry point. Call this to install the .pyx import hook in your meta-path for a single Python process. If you want it to be installed whenever you use Python, add it to your sitecustomize (as described above). You can pass ``pyimport=True`` to also install the .py import hook in your meta-path. Note, however, that it is highly experimental, will not work for most .py files, and will therefore only slow down your imports. Use at your own risk. By default, compiled modules will end up in a ``.pyxbld`` directory in the user's home directory. Passing a different path as ``build_dir`` will override this. ``build_in_temp=False`` will produce the C files locally. Working with complex dependencies and debugging becomes more easy. This can principally interfere with existing files of the same name. build_in_temp can be overriden by .pyxbld/make_setup_args() by a dict item of 'build_in_temp' ``setup_args``: dict of arguments for Distribution - see distutils.core.setup() . They are extended/overriden by those of .pyxbld/make_setup_args() ``reload_support``: Enables support for dynamic reload(), e.g. after a change in the Cython code. Additional files .reloadNN may arise on that account, when the previously loaded module file cannot be overwritten. ``load_py_module_on_import_failure``: If the compilation of a .py file succeeds, but the subsequent import fails for some reason, retry the import with the normal .py module instead of the compiled module. Note that this may lead to unpredictable results for modules that change the system state during their import, as the second import will rerun these modifications in whatever state the system was left after the import of the compiled module failed. ``inplace``: Install the compiled module next to the source file. ``language_level``: The source language level to use: 2 or 3. The default is to use the language level of the current Python runtime for .py files and Py2 for .pyx files. """ if setup_args is None: setup_args = {} if not build_dir: build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') global pyxargs pyxargs = PyxArgs() #$pycheck_no pyxargs.build_dir = build_dir pyxargs.build_in_temp = build_in_temp pyxargs.setup_args = (setup_args or {}).copy() pyxargs.reload_support = reload_support pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure has_py_importer, has_pyx_importer = _have_importers() py_importer, pyx_importer = None, None if pyimport and not has_py_importer: py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace, language_level=language_level) # make sure we import Cython before we install the import hook import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize sys.meta_path.insert(0, py_importer) if pyximport and not has_pyx_importer: pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace, language_level=language_level) sys.meta_path.append(pyx_importer) return py_importer, pyx_importer def uninstall(py_importer, pyx_importer): """ Uninstall an import hook. 
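    Typically called with the importer pair returned by install(), e.g.
    (illustrative)::

        py_importer, pyx_importer = pyximport.install()
        # ... work with the import hooks installed ...
        uninstall(py_importer, pyx_importer)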
""" try: sys.meta_path.remove(py_importer) except ValueError: pass try: sys.meta_path.remove(pyx_importer) except ValueError: pass # MAIN def show_docs(): import __main__ __main__.__name__ = mod_name for name in dir(__main__): item = getattr(__main__, name) try: setattr(item, "__module__", mod_name) except (AttributeError, TypeError): pass help(__main__) if __name__ == '__main__': show_docs() Cython-0.26.1/pyximport/pyxbuild.py0000664000175000017500000001310613023021033020101 0ustar stefanstefan00000000000000"""Build a Pyrex file from .pyx source to .so loadable module using the installed distutils infrastructure. Call: out_fname = pyx_to_dll("foo.pyx") """ import os import sys from distutils.errors import DistutilsArgError, DistutilsError, CCompilerError from distutils.extension import Extension from distutils.util import grok_environment_error try: from Cython.Distutils.old_build_ext import old_build_ext as build_ext HAS_CYTHON = True except ImportError: HAS_CYTHON = False DEBUG = 0 _reloads={} def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuild_dir=None, setup_args=None, reload_support=False, inplace=False): """Compile a PYX file to a DLL and return the name of the generated .so or .dll .""" assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename) path, name = os.path.split(os.path.abspath(filename)) if not ext: modname, extension = os.path.splitext(name) assert extension in (".pyx", ".py"), extension if not HAS_CYTHON: filename = filename[:-len(extension)] + '.c' ext = Extension(name=modname, sources=[filename]) if setup_args is None: setup_args = {} if not pyxbuild_dir: pyxbuild_dir = os.path.join(path, "_pyxbld") package_base_dir = path for package_name in ext.name.split('.')[-2::-1]: package_base_dir, pname = os.path.split(package_base_dir) if pname != package_name: # something is wrong - package path doesn't match file path package_base_dir = None break script_args=setup_args.get("script_args",[]) if DEBUG or "--verbose" in script_args: quiet = "--verbose" else: quiet = "--quiet" args = [quiet, "build_ext"] if force_rebuild: args.append("--force") if inplace and package_base_dir: args.extend(['--build-lib', package_base_dir]) if ext.name == '__init__' or ext.name.endswith('.__init__'): # package => provide __path__ early if not hasattr(ext, 'cython_directives'): ext.cython_directives = {'set_initial_path' : 'SOURCEFILE'} elif 'set_initial_path' not in ext.cython_directives: ext.cython_directives['set_initial_path'] = 'SOURCEFILE' if HAS_CYTHON and build_in_temp: args.append("--pyrex-c-in-temp") sargs = setup_args.copy() sargs.update({ "script_name": None, "script_args": args + script_args, }) # late import, in case setuptools replaced it from distutils.dist import Distribution dist = Distribution(sargs) if not dist.ext_modules: dist.ext_modules = [] dist.ext_modules.append(ext) if HAS_CYTHON: dist.cmdclass = {'build_ext': build_ext} build = dist.get_command_obj('build') build.build_base = pyxbuild_dir cfgfiles = dist.find_config_files() dist.parse_config_files(cfgfiles) try: ok = dist.parse_command_line() except DistutilsArgError: raise if DEBUG: print("options (after parsing command line):") dist.dump_option_dicts() assert ok try: obj_build_ext = dist.get_command_obj("build_ext") dist.run_commands() so_path = obj_build_ext.get_outputs()[0] if obj_build_ext.inplace: # Python distutils get_outputs()[ returns a wrong so_path # when --inplace ; see http://bugs.python.org/issue5977 # workaround: so_path = 
os.path.join(os.path.dirname(filename), os.path.basename(so_path)) if reload_support: org_path = so_path timestamp = os.path.getmtime(org_path) global _reloads last_timestamp, last_path, count = _reloads.get(org_path, (None,None,0) ) if last_timestamp == timestamp: so_path = last_path else: basename = os.path.basename(org_path) while count < 100: count += 1 r_path = os.path.join(obj_build_ext.build_lib, basename + '.reload%s'%count) try: import shutil # late import / reload_support is: debugging try: # Try to unlink first --- if the .so file # is mmapped by another process, # overwriting its contents corrupts the # loaded image (on Linux) and crashes the # other process. On Windows, unlinking an # open file just fails. if os.path.isfile(r_path): os.unlink(r_path) except OSError: continue shutil.copy2(org_path, r_path) so_path = r_path except IOError: continue break else: # used up all 100 slots raise ImportError("reload count for %s reached maximum"%org_path) _reloads[org_path]=(timestamp, so_path, count) return so_path except KeyboardInterrupt: sys.exit(1) except (IOError, os.error): exc = sys.exc_info()[1] error = grok_environment_error(exc) if DEBUG: sys.stderr.write(error + "\n") raise if __name__=="__main__": pyx_to_dll("dummy.pyx") from . import test Cython-0.26.1/pyximport/README0000664000175000017500000000630113143605603016564 0ustar stefanstefan00000000000000 == Pyximport == Download: pyx-import-1.0.tar.gz Pyrex is a compiler. Therefore it is natural that people tend to go through an edit/compile/test cycle with Pyrex modules. But my personal opinion is that one of the deep insights in Python's implementation is that a language can be compiled (Python modules are compiled to .pyc) files and hide that compilation process from the end-user so that they do not have to worry about it. Pyximport does this for Pyrex modules. For instance if you write a Pyrex module called "foo.pyx", with Pyximport you can import it in a regular Python module like this: import pyximport; pyximport.install() import foo Doing so will result in the compilation of foo.pyx (with appropriate exceptions if it has an error in it). If you would always like to import pyrex files without building them specially, you can also the first line above to your sitecustomize.py. That will install the hook every time you run Python. Then you can use Pyrex modules just with simple import statements. I like to test my Pyrex modules like this: python -c "import foo" See help(pyximport.install) to learn its options for controlling the default behavior of "import" and "reload". == Dependency Handling == In Pyximport 1.1 it is possible to declare that your module depends on multiple files, (likely ".h" and ".pxd" files). If your Pyrex module is named "foo" and thus has the filename "foo.pyx" then you should make another file in the same directory called "foo.pyxdep". The "modname.pyxdep" file can be a list of filenames or "globs" (like "*.pxd" or "include/*.h"). Each filename or glob must be on a separate line. Pyximport will check the file date for each of those files before deciding whether to rebuild the module. In order to keep track of the fact that the dependency has been handled, Pyximport updates the modification time of your ".pyx" source file. Future versions may do something more sophisticated like informing distutils of the dependencies directly. == Limitations == Pyximport does not give you any control over how your Pyrex file is compiled. Usually the defaults are fine. 
You might run into problems if you wanted to write your program in half-C, half-Pyrex and build them into a single library. Pyximport 1.2 will probably do this. Pyximport does not hide the Distutils/GCC warnings and errors generated by the import process. Arguably this will give you better feedback if something went wrong and why. And if nothing went wrong it will give you the warm fuzzy that pyximport really did rebuild your module as it was supposed to. == For further thought and discussion == "setup.py install" does not modify sitecustomize.py for you. Should it? Modifying Python's "standard interpreter" behaviour may be more than most people expect of a package they install.. Pyximport puts your ".c" file beside your ".pyx" file (analogous to ".pyc" beside ".py"). But it puts the platform-specific binary in a build directory as per normal for Distutils. If I could wave a magic wand and get Pyrex or distutils or whoever to put the build directory I might do it but not necessarily: having it at the top level is VERY HELPFUL for debugging Pyrex problems. Cython-0.26.1/pyximport/__init__.py0000664000175000017500000000011712574327400020017 0ustar stefanstefan00000000000000from .pyximport import * # replicate docstring from .pyximport import __doc__ Cython-0.26.1/pyximport/test/0000775000175000017500000000000013151203436016660 5ustar stefanstefan00000000000000Cython-0.26.1/pyximport/test/test_pyximport.py0000664000175000017500000000576313143605603022362 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function from pyximport import pyximport pyximport.install(reload_support=True) import os import shutil import sys import tempfile import time from zipfile import ZipFile try: from __builtin__ import reload except ImportError: from importlib import reload def make_tempdir(): tempdir = os.path.join(tempfile.gettempdir(), "pyrex_temp") if os.path.exists(tempdir): remove_tempdir(tempdir) os.mkdir(tempdir) return tempdir def remove_tempdir(tempdir): shutil.rmtree(tempdir, 0, on_remove_file_error) def on_remove_file_error(func, path, excinfo): print("Sorry! 
Could not remove a temp file:", path) print("Extra information.") print(func, excinfo) print("You may want to delete this yourself when you get a chance.") def test_with_reload(): pyximport._test_files = [] tempdir = make_tempdir() sys.path.append(tempdir) filename = os.path.join(tempdir, "dummy.pyx") open(filename, "w").write("print 'Hello world from the Pyrex install hook'") import dummy reload(dummy) depend_filename = os.path.join(tempdir, "dummy.pyxdep") depend_file = open(depend_filename, "w") depend_file.write("*.txt\nfoo.bar") depend_file.close() build_filename = os.path.join(tempdir, "dummy.pyxbld") build_file = open(build_filename, "w") build_file.write(""" from distutils.extension import Extension def make_ext(name, filename): return Extension(name=name, sources=[filename]) """) build_file.close() open(os.path.join(tempdir, "foo.bar"), "w").write(" ") open(os.path.join(tempdir, "1.txt"), "w").write(" ") open(os.path.join(tempdir, "abc.txt"), "w").write(" ") reload(dummy) assert len(pyximport._test_files)==1, pyximport._test_files reload(dummy) time.sleep(1) # sleep a second to get safer mtimes open(os.path.join(tempdir, "abc.txt"), "w").write(" ") print("Here goes the reolad") reload(dummy) assert len(pyximport._test_files) == 1, pyximport._test_files reload(dummy) assert len(pyximport._test_files) == 0, pyximport._test_files remove_tempdir(tempdir) def test_zip(): try: import test_zip_module except ImportError: pass else: assert False, "test_zip_module already exists" fd, zip_path = tempfile.mkstemp(suffix=".zip") os.close(fd) try: with ZipFile(zip_path, "w") as zf: zf.writestr("test_zip_module.pyx", b"x = 42") sys.path.insert(0, zip_path) import test_zip_module assert test_zip_module.x == 42 finally: if zip_path in sys.path: sys.path.remove(zip_path) os.remove(zip_path) def test_zip_nonexisting(): sys.path.append("nonexisting_zip_module.zip") try: import nonexisting_zip_module except ImportError: pass finally: sys.path.remove("nonexisting_zip_module.zip") if __name__== "__main__": test_with_reload() test_zip() test_zip_nonexisting() Cython-0.26.1/pyximport/test/test_reload.py0000664000175000017500000000162413143605603021545 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import time, os, sys from . import test_pyximport if 1: from distutils import sysconfig try: sysconfig.set_python_build() except AttributeError: pass import pyxbuild print(pyxbuild.distutils.sysconfig == sysconfig) def test(): tempdir = test_pyximport.make_tempdir() sys.path.append(tempdir) hello_file = os.path.join(tempdir, "hello.pyx") open(hello_file, "w").write("x = 1; print x; before = 'before'\n") import hello assert hello.x == 1 time.sleep(1) # sleep to make sure that new "hello.pyx" has later # timestamp than object file. open(hello_file, "w").write("x = 2; print x; after = 'after'\n") reload(hello) assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2" test_pyximport.remove_tempdir(tempdir) if __name__=="__main__": test() Cython-0.26.1/Makefile0000664000175000017500000000147613143605603015301 0ustar stefanstefan00000000000000PYTHON?=python TESTOPTS?= REPO = git://github.com/cython/cython.git all: local local: ${PYTHON} setup.py build_ext --inplace TMPDIR = .repo_tmp .git: .gitrev rm -rf $(TMPDIR) git clone -n $(REPO) $(TMPDIR) cd $(TMPDIR) && git reset -q "$(shell cat .gitrev)" mv $(TMPDIR)/.git . 
rm -rf $(TMPDIR) git ls-files -d | xargs git checkout -- repo: .git clean: @echo Cleaning Source @rm -fr build @rm -f *.py[co] */*.py[co] */*/*.py[co] */*/*/*.py[co] @rm -f *.so */*.so */*/*.so @rm -f *.pyd */*.pyd */*/*.pyd @rm -f *~ */*~ */*/*~ @rm -f core */core @rm -f Cython/Compiler/*.c @rm -f Cython/Plex/*.c @rm -f Cython/Tempita/*.c @rm -f Cython/Runtime/refnanny.c @(cd Demos; $(MAKE) clean) testclean: rm -fr BUILD TEST_TMP test: testclean ${PYTHON} runtests.py -vv ${TESTOPTS} s5: $(MAKE) -C Doc/s5 slides Cython-0.26.1/INSTALL.txt0000664000175000017500000000103612542002467015501 0ustar stefanstefan00000000000000Cython - Installation Instructions ================================== You have two installation options: (1) Run the setup.py script in this directory as follows: python setup.py install This will install the Cython package into your Python system. OR (2) If you prefer not to modify your Python installation, arrange for the directory containing this file (INSTALL.txt) to be in your PYTHONPATH. On unix, also put the bin directory on your PATH. See README.txt for pointers to other documentation. Cython-0.26.1/cythonize.py0000775000175000017500000000022212542002467016217 0ustar stefanstefan00000000000000#!/usr/bin/env python # # Cython -- enhanced main program # if __name__ == '__main__': from Cython.Build.Cythonize import main main() Cython-0.26.1/2to3-fixers.txt0000664000175000017500000000003212574327400016455 0ustar stefanstefan00000000000000lib2to3.fixes.fix_unicode Cython-0.26.1/LICENSE.txt0000664000175000017500000002367512542002467015472 0ustar stefanstefan00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Cython-0.26.1/USAGE.txt0000664000175000017500000000513012542002467015236 0ustar stefanstefan00000000000000Cython - Usage Instructions ========================== Building Cython extensions using distutils ----------------------------------------- Cython comes with an experimental distutils extension for compiling Cython modules, contributed by Graham Fawcett of the University of Windsor (fawcett@uwindsor.ca). The Demos directory contains a setup.py file demonstrating its use. To compile the demos: (1) cd Demos (2) python setup.py build_ext --inplace or python setup.py build --build-lib=. (You may get a screed of warnings from the C compiler, but you can ignore these -- as long as there are no actual errors, things are probably okay.) Try out the extensions with: python run_primes.py python run_spam.py python run_numeric_demo.py Building Cython extensions by hand --------------------------------- You can also invoke the Cython compiler on its own to translate a .pyx file to a .c file. On Unix, cython filename.pyx On other platforms, python cython.py filename.pyx It's then up to you to compile and link the .c file using whatever procedure is appropriate for your platform. The file Makefile.nodistutils in the Demos directory shows how to do this for one particular Unix system. Command line options -------------------- The cython command supports the following options: Short Long Argument Description ----------------------------------------------------------------------------- -v --version Display version number of cython compiler -l --create-listing Write error messages to a .lis file -I --include-dir Search for include files in named directory (may be repeated) -o --output-file Specify name of generated C file (only one source file allowed if this is used) -p, --embed-positions If specified, the positions in Cython files of each function definition is embedded in its docstring. -z, --pre-import If specified, assume undeclared names in this module. Emulates the behavior of putting "from import *" at the top of the file. Anything else is taken as the name of a Cython source file and compiled to a C source file. 
Multiple Cython source files can be specified (unless -o is used), in which case each source file is treated as the source of a distinct extension module and compiled separately to produce its own C file. Cython-0.26.1/tests/0000775000175000017500000000000013151203436014770 5ustar stefanstefan00000000000000Cython-0.26.1/tests/build/0000775000175000017500000000000013151203436016067 5ustar stefanstefan00000000000000Cython-0.26.1/tests/build/package_compilation.srctree0000664000175000017500000000226212542002467023457 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import toppkg; assert '.py' not in toppkg.__file__; assert toppkg.PACKAGE == 1" PYTHON -c "import toppkg.subpkg; assert '.py' not in toppkg.__file__; assert '.py' not in toppkg.subpkg.__file__; assert toppkg.subpkg.PACKAGE == 2" PYTHON -c "import toppkg.a; assert toppkg.a.MODULE == 'a'" PYTHON -c "from toppkg.subpkg import a; assert a.MODULE == 'subpkg.a'" ######## setup.py ######## from Cython.Build import cythonize from distutils.core import setup setup( ext_modules = cythonize("toppkg/**/*.py"), ) ######## toppkg/__init__.py ######## import sys assert 'toppkg' in sys.modules assert __path__ is not None, "__path__ is None" assert __path__, "__path__ is empty" assert 'toppkg' in __path__[0], "toppkg not in __path__[0]" assert 'toppkg' in __file__ from . import a assert a.MODULE == 'a' from . import b assert b.MODULE == 'b' PACKAGE = 1 ######## toppkg/a.py ######## MODULE = 'a' ######## toppkg/b.py ######## MODULE = 'b' ######## toppkg/subpkg/__init__.py ######## PACKAGE = 2 from . import a assert a.__name__ == 'toppkg.subpkg.a' assert a.MODULE == 'subpkg.a' ######## toppkg/subpkg/a.py ######## MODULE = 'subpkg.a' Cython-0.26.1/tests/build/build_dir.srctree0000664000175000017500000000357312542002467021431 0ustar stefanstefan00000000000000PYTHON symlink_or_copy.py subdir fake PYTHON setup.py build_ext --inplace PYTHON -c "import a" PYTHON -c "import pkg.b" PYTHON check_paths.py ######## symlink_or_copy.py ######## import platform import sys if platform.system() == "Windows": import shutil shutil.copytree(sys.argv[1], sys.argv[2]) else: import os os.symlink(sys.argv[1], sys.argv[2]) ######## setup.py ######## # TODO: Better interface... 
from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = (cythonize("*.pyx", build_dir="scratchA") + cythonize("pkg/*.pyx", build_dir="scratchB")), ) ######## a.pyx ######## cdef extern from "helper.h": int value1 cdef extern from "subdir/helper.h": int value2 cdef extern from "pkg/pkg_helper.h": int value3 assert value1 == 100 assert value2 == 200 assert value3 == 300 ######## helper.h ######## int value1 = 100; ######## subdir/helper.h ######## int value2 = 200; ######## pkg/__init__.py ######## ######## pkg/b.pyx ######## cdef extern from "../fake/helper.h": int value2 cdef extern from "pkg_helper.h": int value3 cdef extern from "subdir/pkg_helper.h": int value4 assert value2 == 200 assert value3 == 300 assert value4 == 400 ######## pkg/pkg_helper.h ######## int value3 = 300; ######## pkg/subdir/pkg_helper.h ######## int value4 = 400; ######## check_paths.py ######## import os assert os.path.exists("scratchA/a.c") assert os.path.exists("scratchA/helper.h") assert os.path.exists("scratchA/subdir/helper.h") assert os.path.exists("scratchA/pkg/pkg_helper.h") assert not os.path.exists("a.c") assert os.path.exists("scratchB/pkg/b.c") assert os.path.exists("scratchB/pkg/pkg_helper.h") assert os.path.exists("scratchB/pkg/subdir/pkg_helper.h") assert os.path.exists("scratchB/fake/helper.h") assert not os.path.exists("b.c") assert not os.path.exists("pkg/b.c") Cython-0.26.1/tests/build/cythonize_additional_sources_ext.srctree0000664000175000017500000000120712542002467026313 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "from pkg import a; assert a.test() == 43" ######## setup.py ######## from Cython.Build import cythonize from distutils.core import setup, Extension extensions = [ Extension('pkg.a', sources=['pkg/a.pyx', 'pkg/alib.c'], include_dirs=['pkg']) ] setup( ext_modules = cythonize(extensions) ) ######## pkg/__init__.py ######## ######## pkg/a.pyx ######## cdef extern from "alib.h": int c_function(int x) def test(): return c_function(42) ######## pkg/alib.c ######## int c_function(int x) { return x + 1; } ######## pkg/alib.h ######## int c_function(int x); Cython-0.26.1/tests/build/module_api.srctree0000664000175000017500000000457112542002467021611 0ustar stefanstefan00000000000000# tag: cpp PYTHON setup.py build_ext --inplace PYTHON test.py ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup exts = cythonize("*.pyx") for e in exts: if e.name == "d": e.sources.append("a.c") setup( ext_modules = exts, ) ######## a.pxd ######## ctypedef api float flt cdef int int0 cdef float flt0 cdef api int int1 cdef api float flt1 cdef public api int int2 cdef public api flt flt2 cdef class A0: pass ctypedef api class A1 [ type A1_Type, object A1Object ]: pass ctypedef public api class A2 [ type A2_Type, object A2Object ]: pass cdef A0 a0 cdef api A1 a1 cdef public api A2 a2 ######## a.pyx ######## cdef int int0 = 1, int1 = 1, int2 = 1 cdef float flt0 = 1, flt1 = 1, flt2 = 1 cdef api int int3 = 1 cdef api flt flt3 = 1 cdef public int int4 = 1 cdef public flt flt4 = 1 def get_int(): return (int0, int1, int2, int3, int4) def get_flt(): return (flt0, flt1, flt2, flt3, flt4) cdef class A0: pass cdef class A1: pass cdef class A2: pass cdef A0 a0 = A0() cdef api A1 a1 = A1() cdef public api A2 a2 = A2() ######## b.pyx ######## from a cimport * int0 = int1 = int2 = 7 flt0 = flt1 = flt2 = 7 ######## c.pyx ######## # distutils: language = c++ cdef extern from "a_api.h": int 
import_a() except -1 ctypedef float flt int int1, int2, int3 flt flt1, flt2, flt3 import_a() int1 = int2 = int3 = 5 flt1 = flt2 = flt3 = 5 ######## inita.h ######## #if PY_MAJOR_VERSION >= 3 void inita(void) { PyObject *sys_modules = NULL; PyObject *mod = NULL; sys_modules = PyImport_GetModuleDict(); if (!sys_modules) return; mod = PyInit_a(); if (!mod) return; PyDict_SetItemString(sys_modules, (char*)"a", mod); } #endif ######## d.pyx ######## cdef extern from "a.h": pass cdef extern from "inita.h": pass cdef extern from "a.h": void inita() except * ctypedef float flt int int2, int4 flt flt2, flt4 inita() int2 = int4 = 3 flt2 = flt4 = 3 ######## test.py ######## import a assert a.get_int() == (1,1,1,1,1) assert a.get_flt() == (1,1,1,1,1) import b assert a.get_int() == (7,7,7,1,1) assert a.get_flt() == (7,7,7,1,1) import c assert a.get_int() == (7,5,5,5,1) assert a.get_flt() == (7,5,5,5,1) import d import a assert a.get_int() == (1,1,3,1,3) assert a.get_flt() == (1,1,3,1,3) Cython-0.26.1/tests/build/cythonize_options.srctree0000664000175000017500000000076512542002467023263 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("*.pyx", include_path=['subdir'], compiler_directives={'cdivision': True}), ) ######## a.pyx ######## cimport x include "y.pxi" # cdivision from setup.py def mod_int_c(int a, int b): return a % b assert mod_int_c(-1, 10) < 0 ######## subdir/x.pxd ######## ######## subdir/y.pxi ######## Cython-0.26.1/tests/build/cythonize_script_excludes.srctree0000664000175000017500000000223312542002467024760 0ustar stefanstefan00000000000000PYTHON -m Cython.Build.Cythonize -i '**/*.pyx' -x '**/t/**/*.pyx' -x '**/m/**/*.pyx' PYTHON -c "import tests; assert tests.X.x == 2" ######## tests.py ######## import sys sys.path.append('src') import a.f.c.d.x as X assert X.x == 2 assert 'src/a/' in X.__file__ or 'src\\a\\' in X.__file__ try: import a.t.c.d.x except ImportError: pass else: assert False, "ImportError not raised - exclude of 't' package did not work" try: import a.m.c.d.x except ImportError: pass else: assert False, "ImportError not raised - exclude of 'm' package did not work" ######## src/a/__init__.py ######## ######## src/a/t/__init__.py ######## ######## src/a/t/c/__init__.py ######## ######## src/a/t/c/d/__init__.py ######## ######## src/a/t/c/d/x.pyx ######## x = 1 ######## src/a/__init__.py ######## ######## src/a/f/__init__.py ######## ######## src/a/f/c/__init__.py ######## ######## src/a/f/c/d/__init__.py ######## ######## src/a/f/c/d/x.pyx ######## x = 2 ######## src/a/__init__.py ######## ######## src/a/m/__init__.py ######## ######## src/a/m/c/__init__.py ######## ######## src/a/m/c/d/__init__.py ######## ######## src/a/m/c/d/x.pyx ######## x = 3 Cython-0.26.1/tests/build/cythonize_additional_sources.srctree0000664000175000017500000000077212542002467025441 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a; assert a.test() == 43" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("*.pyx"), ) ######## a.pyx ######## # distutils: sources=alib.c cdef extern from "alib.h": int c_function(int x) def test(): return c_function(42) ######## alib.c ######## int c_function(int x) { return x + 1; } ######## alib.h ######## int c_function(int x); 
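The build tests above cover the usual ways of feeding extra information to cythonize():
per-module "# distutils:" source comments (cythonize_additional_sources), explicit
Extension objects carrying additional C sources (cythonize_additional_sources_ext), and
the include_path/compiler_directives options (cythonize_options). As a rough sketch only
-- the module and file names below simply mirror those tests and are not themselves part
of the suite -- one setup.py could combine these features like this:

# hypothetical setup.py sketch, not one of the srctree tests above
from distutils.core import setup, Extension
from Cython.Build import cythonize

extensions = [
    # extra C sources and include dirs belong on the Extension itself
    Extension('pkg.a', sources=['pkg/a.pyx', 'pkg/alib.c'], include_dirs=['pkg']),
]

setup(
    ext_modules=cythonize(
        extensions,
        include_path=['subdir'],                  # where extra .pxd/.pxi files live
        compiler_directives={'cdivision': True},  # applied to every compiled module
    ),
)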
Cython-0.26.1/tests/build/cythonize_script.srctree0000664000175000017500000000151312542002467023064 0ustar stefanstefan00000000000000''' PYTHON -m Cython.Build.Cythonize -i '**/*_test.py' PYTHON -c "import cy_test; assert cy_test.TEST == 'cy_test', cy_test.TEST; assert '.py' not in cy_test.__file__, cy_test.__file__" PYTHON -c "import pkg.cy_test; assert pkg.cy_test.TEST == 'pkg.cy_test', pkg.cy_test.TEST; assert '.py' not in pkg.cy_test.__file__, pkg.cy_test.__file__" PYTHON -c "import pkg.sub.cy_test; assert pkg.sub.cy_test.TEST == 'pkg.sub.cy_test', pkg.sub.cy_test.TEST; assert '.py' not in pkg.sub.cy_test.__file__, pkg.cy_test.__file__" ''' ######## cy_test.py ######## TEST = 'cy_test' ######## pkg/__init__.py ######## ######## pkg/cy_test.py ######## TEST = 'pkg.cy_test' ######## pkg/sub/__init__.py ######## ######## pkg/sub/cy_test.py ######## # cython: language_level=3 TEST = 'pkg.sub.cy_test' ustring = 'abc' assert isinstance(ustring, unicode) Cython-0.26.1/tests/build/cythonize_glob.srctree0000664000175000017500000000124312542002467022503 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import runner" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("**/a*.pyx", include_path=['subdir'], compiler_directives={'cdivision': True}), ) ######## a.pyx ######## ######## p1/__init__.py ######## ######## p1/a.pyx ######## ######## p1/ab.pyx ######## ######## p1/b.pyx ######## ######## p1/p2/__init__.py ######## ######## p1/p2/a.pyx ######## ######## runner.py ######## import a import p1.a import p1.ab import p1.p2.a try: import p1.b assert False, "b should not be complied" except ImportError: pass Cython-0.26.1/tests/build/inline_distutils.srctree0000664000175000017500000000122212542002467023043 0ustar stefanstefan00000000000000# tag: cpp PYTHON setup.py build_ext --inplace PYTHON -c "import a" ######## setup.py ######## # TODO: Better interface... from Cython.Build.Dependencies import cythonize from distutils.core import setup import sys if sys.platform == 'win32': MATH_LIBS = [] else: MATH_LIBS = ['m'] setup( ext_modules = cythonize("*.pyx", aliases={'MATH_LIBS': MATH_LIBS}), ) ######## my_lib.pxd ######## # distutils: language = c++ # distutils: libraries = MATH_LIBS cdef extern from "my_lib_helper.cpp" namespace "A": int x ######## my_lib_helper.cpp ####### namespace A { int x = 100; }; ######## a.pyx ######## from my_lib cimport x print x Cython-0.26.1/tests/build/basic_cythonize.srctree0000664000175000017500000000041412542002467022640 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a" ######## setup.py ######## # TODO: Better interface... 
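# Minimal pattern: cythonize("*.pyx") compiles every .pyx file found next to this
# setup.py and returns the corresponding Extension objects for ext_modules.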
from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("*.pyx"), ) ######## a.pyx ######## Cython-0.26.1/tests/build/cpp_cythonize.srctree0000664000175000017500000000075713023021033022334 0ustar stefanstefan00000000000000# tag: cpp PYTHON setup.py build_ext --inplace PYTHON -c "import a; a.use_vector([1,2,3])" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("*.pyx"), ) ######## a.pyx ######## # distutils: language=c++ from libcpp.vector cimport vector def use_vector(L): try: v = new vector[int]() for a in L: v.push_back(a) return v.size() finally: del v Cython-0.26.1/tests/build/basic_distutils.srctree0000664000175000017500000000050012542002467022644 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a" ######## setup.py ######## from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext setup( cmdclass = {'build_ext': build_ext}, ext_modules = [Extension("a", ["a.pyx"])], ) ######## a.pyx ######## Cython-0.26.1/tests/build/build_dir_src.srctree0000664000175000017500000000403313023021033022251 0ustar stefanstefan00000000000000# Mostly the same test as build_dir.srctree but with everything inside # a common "src" directory. We don't use --inplace and don't actually # import the built modules. PYTHON shutil_copy.py src/subdir src/fake PYTHON setup.py build_ext PYTHON check_paths.py ######## shutil_copy.py ######## import shutil, sys shutil.copytree(sys.argv[1], sys.argv[2]) ######## setup.py ######## from Cython.Build.Dependencies import cythonize from Cython.Distutils.extension import Extension from distutils.core import setup ext_modules = cythonize( Extension("a", ["src/a.pyx"]), build_dir="scratchA") ext_modules += cythonize( Extension("pkg.b", ["src/pkg/b.pyx"]), build_dir="scratchB") setup(ext_modules=ext_modules) ######## src/a.pyx ######## cdef extern from "helper.h": int value1 cdef extern from "subdir/helper.h": int value2 cdef extern from "pkg/pkg_helper.h": int value3 assert value1 == 100 assert value2 == 200 assert value3 == 300 ######## src/helper.h ######## int value1 = 100; ######## src/subdir/helper.h ######## int value2 = 200; ######## src/pkg/__init__.py ######## ######## src/pkg/b.pyx ######## cdef extern from "../fake/helper.h": int value2 cdef extern from "pkg_helper.h": int value3 cdef extern from "subdir/pkg_helper.h": int value4 assert value2 == 200 assert value3 == 300 assert value4 == 400 ######## src/pkg/pkg_helper.h ######## int value3 = 300; ######## src/pkg/subdir/pkg_helper.h ######## int value4 = 400; ######## check_paths.py ######## import os assert os.path.exists("scratchA/src/a.c") assert os.path.exists("scratchA/src/helper.h") assert os.path.exists("scratchA/src/subdir/helper.h") assert os.path.exists("scratchA/src/pkg/pkg_helper.h") assert not os.path.exists("src/a.c") assert os.path.exists("scratchB/src/pkg/b.c") assert os.path.exists("scratchB/src/pkg/pkg_helper.h") assert os.path.exists("scratchB/src/pkg/subdir/pkg_helper.h") assert os.path.exists("scratchB/src/fake/helper.h") assert not os.path.exists("src/b.c") assert not os.path.exists("src/pkg/b.c") Cython-0.26.1/tests/build/common_include_dir.srctree0000664000175000017500000000361512542002467023322 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import runner" # Verify some files were created. 
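# (setup.py below passes common_utility_include_dir='common' to cythonize(), so shared
# utility code such as the AddTraceback/RaiseException helpers is written once into
# common/ and #include'd from a.c, b.c and c.c instead of being duplicated per module.)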
# ls common/AddTraceback_impl*.h common/RaiseException_impl_*.h PYTHON -c "import glob; assert glob.glob('common/AddTraceback_impl*.h')" PYTHON -c "import glob; assert glob.glob('common/RaiseException_impl_*.h')" # Verify that they're used. PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' a.c PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' b.c PYTHON fake_grep.py -c '#include "common/AddTraceback_impl_.*h"' c.c ######## setup.py ######## import sys from Cython.Build.Dependencies import cythonize from distutils.core import setup # Test concurrent safety if multiprocessing is available. # (In particular, TravisCI does not support spawning processes from tests.) nthreads = 0 if not hasattr(sys, 'pypy_version_info'): try: import multiprocessing multiprocessing.Pool(2).close() nthreads = 2 except (ImportError, OSError): pass setup( ext_modules = cythonize("*.pyx", common_utility_include_dir='common', nthreads=nthreads), ) ######## a.pyx ######## def generator(n): for k in range(n): yield k assert list(generator(10)) == list(range(10)) ######## b.pyx ######## def generator(n): for k in range(n): yield k assert list(generator(10)) == list(range(10)) if __name__ == "__main__": print("here b") ######## c.pyx ######## if __name__ == "__main__": print("here c") ######## runner.py ######## import a, b, c ######## fake_grep.py ######## import re import sys if sys.platform == 'win32': opt, pattern, file = sys.argv[1:] assert opt == '-c' count = 0 regex = re.compile(pattern) for line in open(file): if regex.search(line): count += 1 print(count) sys.exit(count == 0) else: import subprocess sys.exit(subprocess.call(['grep'] + sys.argv[1:])) Cython-0.26.1/tests/build/cythonize_script_package.srctree0000664000175000017500000000122612542002467024540 0ustar stefanstefan00000000000000''' PYTHON -m Cython.Build.Cythonize -i pkg -j1 PYTHON package_test.py ''' ######## package_test.py ######## import sys if sys.version_info[0] < 3 or sys.version_info >= (3,3): # __init__.py compilation isn't supported in Py 3.[012] import pkg.sub.test assert pkg.sub.test.TEST == 'pkg.sub.test' assert '.py' not in pkg.sub.test.__file__ ######## test.py ######## TEST = 'test' ######## pkg/__init__.py ######## ######## pkg/test.py ######## TEST = 'pkg.test' ######## pkg/sub/__init__.py ######## ######## pkg/sub/test.py ######## # cython: language_level=3 TEST = 'pkg.sub.test' ustring = 'abc' assert isinstance(ustring, unicode) Cython-0.26.1/tests/build/compile_env_distutils.srctree0000664000175000017500000000101713023021033024050 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a; import sys; sys.exit(a.compile_env_test())" ######## setup.py ######## from distutils.core import setup from Cython.Distutils.extension import Extension from Cython.Distutils.old_build_ext import old_build_ext setup( cmdclass = {'build_ext': old_build_ext}, ext_modules = [Extension( "a", ["a.pyx"], pyrex_compile_time_env = {'TEST': True}, )], ) ######## a.pyx ######## def compile_env_test(): IF TEST: return 0 ELSE: return 1 Cython-0.26.1/tests/pyximport/0000775000175000017500000000000013151203436017043 5ustar stefanstefan00000000000000Cython-0.26.1/tests/pyximport/pyximport_basic.srctree0000664000175000017500000000063013023021033023634 0ustar stefanstefan00000000000000 PYTHON -c "import basic_test; basic_test.test()" ######## basic_test.py ######## import os.path import pyximport pyximport.install(build_dir=os.path.join(os.path.dirname(__file__), "TEST_TMP")) def test(): import mymodule 
assert mymodule.test_string == "TEST" assert not mymodule.__file__.rstrip('oc').endswith('.py'), mymodule.__file__ ######## mymodule.pyx ######## test_string = "TEST" Cython-0.26.1/tests/pyximport/pyximport_pyimport.srctree0000664000175000017500000000114113023021033024434 0ustar stefanstefan00000000000000 PYTHON -c "import pyimport_test; pyimport_test.test()" ######## pyimport_test.py ######## import os.path import pyximport # blacklist for speed import pyximport.pyxbuild, Cython.Compiler.Pipeline import distutils.core, distutils.ccompiler, distutils.command.build pyximport.install(pyximport=False, pyimport=True, build_dir=os.path.join(os.path.dirname(__file__), "TEST_TMP")) def test(): import mymodule assert mymodule.test_string == "TEST" assert not mymodule.__file__.rstrip('oc').endswith('.py'), mymodule.__file__ ######## mymodule.py ######## test_string = "TEST" Cython-0.26.1/tests/pyximport/pyximport_errors.srctree0000664000175000017500000000135113023021033024070 0ustar stefanstefan00000000000000 PYTHON -c "import pyximport_test; pyximport_test.test()" ######## pyximport_test.py ######## import os.path from contextlib import contextmanager import pyximport pyximport.install(build_dir=os.path.join(os.path.dirname(__file__), "TEST_TMP")) @contextmanager def fails(exc=ImportError): try: yield except exc: pass else: raise RuntimeError("NOT RAISED!") def test(): with fails(): import compiler_error with fails(): import syntax_error with fails(): import runtime_error ######## compiler_error.pyx ######## from __future__ import braces ######## syntax_error.pyx ######## def test { BRACES! } ######## runtime_error.pyx ######## raise ValueError() Cython-0.26.1/tests/buffers/0000775000175000017500000000000013151203436016424 5ustar stefanstefan00000000000000Cython-0.26.1/tests/buffers/buffmt.pyx0000664000175000017500000002327412542002467020465 0ustar stefanstefan00000000000000from __future__ import unicode_literals # Tests buffer format string parsing. __test__ = {} def testcase(func): __test__[func.__name__] = func.__doc__ return func from libc cimport stdlib def little_endian(): cdef unsigned int n = 1 return (&n)[0] != 0 if little_endian(): current_endian = '<' other_endian = '>' else: current_endian = '>' other_endian = '<' cdef struct align_of_float_helper: char ch float d cdef struct align_of_int_helper: char ch int i float_align = sizeof(align_of_float_helper) - sizeof(float) int_align = sizeof(align_of_int_helper) - sizeof(int) if float_align != 4 or sizeof(float) != 4: raise RuntimeError("Alignment or size of float is %d on this system, please report to cython-dev for a testcase fix" % float_align) if int_align != 4 or sizeof(int) != 4: raise RuntimeError("Alignment or size of int is %d on this system, please report to cython-dev for a testcase fix" % int_align) cdef class MockBuffer: cdef Py_ssize_t zero cdef Py_ssize_t minusone cdef object format cdef object itemsize def __init__(self, format, itemsize): self.format = unicode(format).encode(u"ASCII") self.itemsize = itemsize self.zero = 0 self.minusone = -1 def __getbuffer__(self, Py_buffer* info, int flags): info.buf = NULL info.strides = &self.zero info.suboffsets = &self.minusone info.shape = &self.zero info.ndim = 1 info.format = self.format info.itemsize = self.itemsize @testcase def _int(fmt): """ >>> _int("i") >>> _int("b") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'int' but got 'signed char' >>> _int("if") Traceback (most recent call last): ... 
ValueError: Buffer dtype mismatch, expected end but got 'float' >>> _int("$$") Traceback (most recent call last): ... ValueError: Does not understand character buffer dtype format string ('$') """ cdef object[int] buf = MockBuffer(fmt, sizeof(int)) @testcase def _ulong(fmt): """ >>> _ulong("L") """ cdef object[unsigned long] buf = MockBuffer(fmt, sizeof(unsigned long)) @testcase def wrongsize(): """ >>> wrongsize() Traceback (most recent call last): ... ValueError: Item size of buffer (1 byte) does not match size of 'float' (4 bytes) """ cdef object[float] buf = MockBuffer("f", 1) @testcase def _obj(fmt): """ >>> _obj("O") >>> _obj("i") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'Python object' but got 'int' """ cdef object[object] buf = MockBuffer(fmt, sizeof(void*)) cdef struct ComplexFloat: float real float imag ctypedef struct Char3Int: char a int b int c int d cdef struct CharIntCFloat: char a int b ComplexFloat c float d cdef struct UnpackedStruct1: char a int b ComplexFloat c float c2 Char3Int d ctypedef struct UnpackedStruct2: CharIntCFloat a Char3Int b ctypedef struct UnpackedStruct3: CharIntCFloat a char b int c, d, e cdef struct UnpackedStruct4: char a int b ComplexFloat c float c2 char d int e, f, g @testcase def char3int(fmt): """ >>> char3int("ciii") >>> char3int("c1i1i1i") >>> char3int("c3i") >>> char3int("ci2i") >>> char3int("c@i@2i") Extra pad bytes (assuming int size is 4 or more) >>> char3int("cxiii") >>> char3int("c3xiii") >>> char3int("cxxxiii") Standard alignment (assming int size is 4) >>> char3int("=c3xiii") >>> char3int("=ciii") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 1 but 4 expected >>> char3int("=cxxx@iii") Error: >>> char3int("cii") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'int' but got end in 'Char3Int.d' """ cdef object obj = MockBuffer(fmt, sizeof(Char3Int)) cdef object[Char3Int, ndim=1] buf = obj @testcase def unpacked_struct(fmt): """ Native formats: >>> unpacked_struct("ciZffciii") >>> unpacked_struct("@ci3fc3i") >>> unpacked_struct("@ciZffci2i") >>> unpacked_struct("ciZffT{ciii}") >>> unpacked_struct("cT{ifffc2i}i") >>> unpacked_struct("ciZffc3T{i}") >>> unpacked_struct("T{c}T{T{iZffT{ci}}}2T{T{i}}") """ assert (sizeof(UnpackedStruct1) == sizeof(UnpackedStruct2) == sizeof(UnpackedStruct3) == sizeof(UnpackedStruct4)) cdef object obj = MockBuffer(fmt, sizeof(UnpackedStruct1)) cdef object[UnpackedStruct1, ndim=1] buf1 = obj cdef object[UnpackedStruct2, ndim=1] buf2 = obj cdef object[UnpackedStruct3, ndim=1] buf3 = obj cdef object[UnpackedStruct4, ndim=1] buf4 = obj cdef struct ComplexTest: ComplexFloat a, b, c @testcase def complex_test(fmt): """ >>> complex_test("ZfZfZf") >>> complex_test("3Zf") >>> complex_test("6f") >>> complex_test("3T{Zf}") >>> complex_test("fZfZff") Traceback (most recent call last): ... 
ValueError: Buffer dtype mismatch, expected 'float' but got 'complex float' in 'ComplexFloat.imag' """ cdef object obj = MockBuffer(fmt, sizeof(ComplexTest)) cdef object[ComplexTest] buf1 = obj @testcase def alignment_string(fmt, exc=None): """ >>> alignment_string("@i") >>> alignment_string("%si" % current_endian) >>> alignment_string("%si" % other_endian, "X-endian buffer not supported on X-endian compiler") >>> alignment_string("=i") """ cdef object[int] buf try: buf = MockBuffer(fmt, sizeof(int)) except ValueError, e: msg = unicode(e).replace("Big", "X").replace("Little", "X").replace("big", "X").replace("little", "X") if msg != exc: print msg print " is not equal to" print exc return if exc: print "fail" @testcase def int_and_long_are_same(): """ >>> int_and_long_are_same() """ cdef object[int] intarr cdef object[long] longarr if sizeof(int) == sizeof(long): intarr = MockBuffer("l", sizeof(int)) longarr = MockBuffer("i", sizeof(int)) cdef struct MixedComplex: double real float imag @testcase def mixed_complex_struct(): """ Triggering a specific execution path for this case. >>> mixed_complex_struct() Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'double' but got 'complex double' in 'MixedComplex.real' """ cdef object[MixedComplex] buf = MockBuffer("Zd", sizeof(MixedComplex)) cdef packed struct PackedSubStruct: char x int y cdef struct UnpackedSubStruct: char x int y cdef packed struct PackedStruct: char a int b PackedSubStruct sub cdef struct PartiallyPackedStruct: char a int b PackedSubStruct sub cdef packed struct PartiallyPackedStruct2: char a UnpackedSubStruct sub char b int c @testcase def packed_struct(fmt): """ Assuming int is four bytes: >>> packed_struct("^cici") >>> packed_struct("=cici") However aligned access won't work: >>> packed_struct("^c@i^ci") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 4 but 1 expected >>> packed_struct("@cici") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 4 but 1 expected """ cdef object[PackedStruct] buf = MockBuffer(fmt, sizeof(PackedStruct)) @testcase def partially_packed_struct(fmt): """ Assuming int is four bytes: >>> partially_packed_struct("^c@i^ci") >>> partially_packed_struct("@ci^ci") >>> partially_packed_struct("^c@i=ci") >>> partially_packed_struct("@ci=ci") >>> partially_packed_struct("ci^ci") >>> partially_packed_struct("ci=ci") Incorrectly aligned accesses won't work: >>> partially_packed_struct("^cici") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 1 but 4 expected >>> partially_packed_struct("=cibi") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 1 but 4 expected """ cdef object[PartiallyPackedStruct] buf = MockBuffer( fmt, sizeof(PartiallyPackedStruct)) @testcase def partially_packed_struct_2(fmt): """ Assuming int is four bytes: >>> partially_packed_struct_2("^ccxxxici") >>> partially_packed_struct_2("^ccxxxi^ci") >>> partially_packed_struct_2("c=cxxxi^ci") >>> partially_packed_struct_2("c^cxxxi^ci") >>> partially_packed_struct_2("c^cxxxi=ci") >>> partially_packed_struct_2("ccxxx^i@c^i") Incorrectly aligned accesses won't work: >>> partially_packed_struct_2("ccxxxici") Traceback (most recent call last): ... ValueError: Buffer dtype mismatch; next field is at offset 8 but 5 expected >>> partially_packed_struct_2("ccici") Traceback (most recent call last): ... 
ValueError: Buffer dtype mismatch; next field is at offset 4 but 5 expected """ cdef object[PartiallyPackedStruct2] buf = MockBuffer( fmt, sizeof(PartiallyPackedStruct2)) cdef packed struct PackedStructWithCharArrays: float a int b char[5] c char[3] d @testcase def packed_struct_with_strings(fmt): """ >>> packed_struct_with_strings("T{f:a:i:b:5s:c:3s:d:}") """ cdef object[PackedStructWithCharArrays] buf = MockBuffer( fmt, sizeof(PackedStructWithCharArrays)) # TODO: empty struct # TODO: Incomplete structs # TODO: mixed structs Cython-0.26.1/tests/buffers/bufaccess.pyx0000664000175000017500000007020213023021033021112 0ustar stefanstefan00000000000000# Tests the buffer access syntax functionality by constructing # mock buffer objects. # # Note that the buffers are mock objects created for testing # the buffer access behaviour -- for instance there is no flag # checking in the buffer objects (why test our test case?), rather # what we want to test is what is passed into the flags argument. # from __future__ import unicode_literals from cpython.object cimport PyObject from cpython.ref cimport Py_INCREF, Py_DECREF cimport cython __test__ = {} import sys #import re exclude = []#re.compile('object').search] if getattr(sys, 'pypy_version_info', None) is not None: # disable object-in-buffer tests in PyPy import re exclude.append(re.compile('object').search) def testcase(func): for e in exclude: if e(func.__name__): return func __test__[func.__name__] = func.__doc__ return func include "mockbuffers.pxi" # # Buffer acquire and release tests # def nousage(): """ The challenge here is just compilation. """ cdef object[int, ndim=2] buf @testcase def disabled_usage(obj): """ The challenge here is just compilation. >>> disabled_usage(None) """ cdef object[int, ndim=2] buf if False: buf = obj return obj @testcase def nousage_cleanup(x): """ >>> nousage_cleanup(False) >>> nousage_cleanup(True) Traceback (most recent call last): RuntimeError """ cdef object[int, ndim=2] buf if x: raise RuntimeError() @testcase def acquire_release(o1, o2): """ >>> A = IntMockBuffer("A", range(6)) >>> B = IntMockBuffer("B", range(6)) >>> acquire_release(A, B) acquired A released A acquired B released B >>> acquire_release(None, None) >>> acquire_release(None, B) acquired B released B """ cdef object[int] buf buf = o1 buf = o2 @testcase def acquire_raise(o): """ Apparently, doctest won't handle mixed exceptions and print stats, so need to circumvent this. >>> A = IntMockBuffer("A", range(6)) >>> A.resetlog() >>> acquire_raise(A) Traceback (most recent call last): ... 
Exception: on purpose >>> A.printlog() acquired A released A """ cdef object[int] buf buf = o raise Exception("on purpose") @testcase def acquire_failure1(): """ >>> acquire_failure1() acquired working 0 3 0 3 released working """ cdef object[int] buf buf = IntMockBuffer("working", range(4)) print buf[0], buf[3] try: buf = ErrorBuffer() assert False except Exception: print buf[0], buf[3] @testcase def acquire_failure2(): """ >>> acquire_failure2() acquired working 0 3 0 3 released working """ cdef object[int] buf = IntMockBuffer("working", range(4)) print buf[0], buf[3] try: buf = ErrorBuffer() assert False except Exception: print buf[0], buf[3] @testcase def acquire_failure3(): """ >>> acquire_failure3() acquired working 0 3 released working acquired working 0 3 released working """ cdef object[int] buf buf = IntMockBuffer("working", range(4)) print buf[0], buf[3] try: buf = 3 assert False except Exception: print buf[0], buf[3] @testcase def acquire_failure4(): """ >>> acquire_failure4() acquired working 0 3 released working acquired working 0 3 released working """ cdef object[int] buf = IntMockBuffer("working", range(4)) print buf[0], buf[3] try: buf = 2 assert False except Exception: print buf[0], buf[3] @testcase def acquire_failure5(): """ >>> acquire_failure5() Traceback (most recent call last): ... ValueError: Buffer acquisition failed on assignment; and then reacquiring the old buffer failed too! """ cdef object[int] buf buf = IntMockBuffer("working", range(4)) buf.fail = True buf = 3 @testcase def acquire_nonbuffer1(first, second=None): """ >>> acquire_nonbuffer1(3) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError:... 'int'... >>> acquire_nonbuffer1(type) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError:... 'type'... >>> acquire_nonbuffer1(None, 2) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError:... 'int'... 
""" cdef object[int] buf buf = first buf = second @testcase def acquire_nonbuffer2(): """ >>> acquire_nonbuffer2() acquired working 0 3 released working acquired working 0 3 released working """ cdef object[int] buf = IntMockBuffer("working", range(4)) print buf[0], buf[3] try: buf = ErrorBuffer assert False except Exception: print buf[0], buf[3] @testcase def as_argument(object[int] bufarg, int n): """ >>> A = IntMockBuffer("A", range(6)) >>> as_argument(A, 6) acquired A 0 1 2 3 4 5 END released A """ cdef int i for i in range(n): print bufarg[i], print 'END' @testcase def as_argument_not_none(object[int] bufarg not None): """ >>> A = IntMockBuffer("A", range(6)) >>> as_argument_not_none(A) acquired A ACCEPTED released A >>> as_argument_not_none(None) Traceback (most recent call last): TypeError: Argument 'bufarg' must not be None """ print 'ACCEPTED' @testcase def as_argument_defval(object[int] bufarg=IntMockBuffer('default', range(6)), int n=6): """ >>> as_argument_defval() acquired default 0 1 2 3 4 5 END released default >>> A = IntMockBuffer("A", range(6)) >>> as_argument_defval(A, 6) acquired A 0 1 2 3 4 5 END released A """ cdef int i for i in range(n): print bufarg[i], print 'END' @testcase def cdef_assignment(obj, n): """ >>> A = IntMockBuffer("A", range(6)) >>> cdef_assignment(A, 6) acquired A 0 1 2 3 4 5 END released A """ cdef object[int] buf = obj cdef int i for i in range(n): print buf[i], print 'END' @testcase def forin_assignment(objs, int pick): """ >>> A = IntMockBuffer("A", range(6)) >>> B = IntMockBuffer("B", range(6)) >>> forin_assignment([A, B, A, A], 2) acquired A 2 released A acquired B 2 released B acquired A 2 released A acquired A 2 released A """ cdef object[int] buf for buf in objs: print buf[pick] @testcase def cascaded_buffer_assignment(obj): """ >>> A = IntMockBuffer("A", range(6)) >>> cascaded_buffer_assignment(A) acquired A acquired A released A released A """ cdef object[int] a, b a = b = obj @testcase def tuple_buffer_assignment1(a, b): """ >>> A = IntMockBuffer("A", range(6)) >>> B = IntMockBuffer("B", range(6)) >>> tuple_buffer_assignment1(A, B) acquired A acquired B released A released B """ cdef object[int] x, y x, y = a, b @testcase def tuple_buffer_assignment2(tup): """ >>> A = IntMockBuffer("A", range(6)) >>> B = IntMockBuffer("B", range(6)) >>> tuple_buffer_assignment2((A, B)) acquired A acquired B released A released B """ cdef object[int] x, y x, y = tup @testcase def explicitly_release_buffer(): """ >>> explicitly_release_buffer() acquired A released A After release """ cdef object[int] x = IntMockBuffer("A", range(10)) x = None print "After release" # # Getting items and index bounds checking # @testcase def get_int_2d(object[int, ndim=2] buf, int i, int j): """ >>> C = IntMockBuffer("C", range(6), (2,3)) >>> get_int_2d(C, 1, 1) acquired C released C 4 Check negative indexing: >>> get_int_2d(C, -1, 0) acquired C released C 3 >>> get_int_2d(C, -1, -2) acquired C released C 4 >>> get_int_2d(C, -2, -3) acquired C released C 0 Out-of-bounds errors: >>> get_int_2d(C, 2, 0) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) >>> get_int_2d(C, 0, -4) Traceback (most recent call last): ... 
IndexError: Out of bounds on buffer access (axis 1) """ return buf[i, j] @testcase def get_int_2d_uintindex(object[int, ndim=2] buf, unsigned int i, unsigned int j): """ Unsigned indexing: >>> C = IntMockBuffer("C", range(6), (2,3)) >>> get_int_2d_uintindex(C, 0, 0) acquired C released C 0 >>> get_int_2d_uintindex(C, 1, 2) acquired C released C 5 """ # This is most interesting with regards to the C code # generated. return buf[i, j] @testcase def set_int_2d(object[int, ndim=2] buf, int i, int j, int value): """ Uses get_int_2d to read back the value afterwards. For pure unit test, one should support reading in MockBuffer instead. >>> C = IntMockBuffer("C", range(6), (2,3)) >>> set_int_2d(C, 1, 1, 10) acquired C released C >>> get_int_2d(C, 1, 1) acquired C released C 10 Check negative indexing: >>> set_int_2d(C, -1, 0, 3) acquired C released C >>> get_int_2d(C, -1, 0) acquired C released C 3 >>> set_int_2d(C, -1, -2, 8) acquired C released C >>> get_int_2d(C, -1, -2) acquired C released C 8 >>> set_int_2d(C, -2, -3, 9) acquired C released C >>> get_int_2d(C, -2, -3) acquired C released C 9 Out-of-bounds errors: >>> set_int_2d(C, 2, 0, 19) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) >>> set_int_2d(C, 0, -4, 19) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 1) """ buf[i, j] = value @testcase def set_int_2d_cascaded(object[int, ndim=2] buf, int i, int j, int value): """ Uses get_int_2d to read back the value afterwards. For pure unit test, one should support reading in MockBuffer instead. >>> C = IntMockBuffer("C", range(6), (2,3)) >>> set_int_2d_cascaded(C, 1, 1, 10) acquired C released C 10 >>> get_int_2d(C, 1, 1) acquired C released C 10 Check negative indexing: >>> set_int_2d_cascaded(C, -1, 0, 3) acquired C released C 3 >>> get_int_2d(C, -1, 0) acquired C released C 3 >>> set_int_2d_cascaded(C, -1, -2, 8) acquired C released C 8 >>> get_int_2d(C, -1, -2) acquired C released C 8 >>> set_int_2d_cascaded(C, -2, -3, 9) acquired C released C 9 >>> get_int_2d(C, -2, -3) acquired C released C 9 Out-of-bounds errors: >>> set_int_2d_cascaded(C, 2, 0, 19) Traceback (most recent call last): IndexError: Out of bounds on buffer access (axis 0) >>> set_int_2d_cascaded(C, 0, -4, 19) Traceback (most recent call last): IndexError: Out of bounds on buffer access (axis 1) """ cdef int casc_value buf[i, j] = casc_value = value return casc_value @testcase def list_comprehension(object[int] buf, len): """ >>> list_comprehension(IntMockBuffer(None, [1,2,3]), 3) 1|2|3 """ cdef int i print u"|".join([unicode(buf[i]) for i in range(len)]) # # The negative_indices buffer option # @testcase def no_negative_indices(object[int, negative_indices=False] buf, int idx): """ The most interesting thing here is to inspect the C source and make sure optimal code is produced. >>> A = IntMockBuffer(None, range(6)) >>> no_negative_indices(A, 3) 3 >>> no_negative_indices(A, -1) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) """ return buf[idx] @testcase @cython.wraparound(False) def wraparound_directive(object[int] buf, int pos_idx, int neg_idx): """ Again, the most interesting thing here is to inspect the C source. >>> A = IntMockBuffer(None, range(4)) >>> wraparound_directive(A, 2, -1) 5 >>> wraparound_directive(A, -1, 2) Traceback (most recent call last): ... 
IndexError: Out of bounds on buffer access (axis 0) """ cdef int byneg with cython.wraparound(True): byneg = buf[neg_idx] return buf[pos_idx] + byneg # # Test which flags are passed. # @testcase def readonly(obj): """ >>> R = UnsignedShortMockBuffer("R", range(27), shape=(3, 3, 3)) >>> readonly(R) acquired R 25 released R >>> [str(x) for x in R.recieved_flags] # Works in both py2 and py3 ['FORMAT', 'INDIRECT', 'ND', 'STRIDES'] """ cdef object[unsigned short int, ndim=3] buf = obj print buf[2, 2, 1] @testcase def writable(obj): """ >>> R = UnsignedShortMockBuffer("R", range(27), shape=(3, 3, 3)) >>> writable(R) acquired R released R >>> [str(x) for x in R.recieved_flags] # Py2/3 ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE'] """ cdef object[unsigned short int, ndim=3] buf = obj buf[2, 2, 1] = 23 @testcase def strided(object[int, ndim=1, mode='strided'] buf): """ >>> A = IntMockBuffer("A", range(4)) >>> strided(A) acquired A released A 2 >>> [str(x) for x in A.recieved_flags] # Py2/3 ['FORMAT', 'ND', 'STRIDES'] Check that the suboffsets were patched back prior to release. >>> A.release_ok True """ return buf[2] @testcase def c_contig(object[int, ndim=1, mode='c'] buf): """ >>> A = IntMockBuffer(None, range(4)) >>> c_contig(A) 2 >>> [str(x) for x in A.recieved_flags] ['FORMAT', 'ND', 'STRIDES', 'C_CONTIGUOUS'] """ return buf[2] @testcase def c_contig_2d(object[int, ndim=2, mode='c'] buf): """ Multi-dim has seperate implementation >>> A = IntMockBuffer(None, range(12), shape=(3,4)) >>> c_contig_2d(A) 7 >>> [str(x) for x in A.recieved_flags] ['FORMAT', 'ND', 'STRIDES', 'C_CONTIGUOUS'] """ return buf[1, 3] @testcase def f_contig(object[int, ndim=1, mode='fortran'] buf): """ >>> A = IntMockBuffer(None, range(4)) >>> f_contig(A) 2 >>> [str(x) for x in A.recieved_flags] ['FORMAT', 'ND', 'STRIDES', 'F_CONTIGUOUS'] """ return buf[2] @testcase def f_contig_2d(object[int, ndim=2, mode='fortran'] buf): """ Must set up strides manually to ensure Fortran ordering. >>> A = IntMockBuffer(None, range(12), shape=(4,3), strides=(1, 4)) >>> f_contig_2d(A) 7 >>> [str(x) for x in A.recieved_flags] ['FORMAT', 'ND', 'STRIDES', 'F_CONTIGUOUS'] """ return buf[3, 1] # # Test compiler options for bounds checking. We create an array with a # safe "boundary" (memory # allocated outside of what it published) and then check whether we get back # what we stored in the memory or an error. @testcase def safe_get(object[int] buf, int idx): """ >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5) Validate our testing buffer... >>> safe_get(A, 0) 5 >>> safe_get(A, 2) 7 >>> safe_get(A, -3) 5 Access outside it. This is already done above for bounds check testing but we include it to tell the story right. >>> safe_get(A, -4) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) >>> safe_get(A, 3) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) """ return buf[idx] @testcase @cython.boundscheck(False) # outer decorators should take precedence @cython.boundscheck(True) def unsafe_get(object[int] buf, int idx): """ Access outside of the area the buffer publishes. >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5) >>> unsafe_get(A, -4) 4 >>> unsafe_get(A, -5) 3 >>> unsafe_get(A, 3) 8 """ return buf[idx] @testcase @cython.boundscheck(False) def unsafe_get_nonegative(object[int, negative_indices=False] buf, int idx): """ Also inspect the C source to see that it is optimal... 
>>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5) >>> unsafe_get_nonegative(A, -2) 3 """ return buf[idx] @testcase def mixed_get(object[int] buf, int unsafe_idx, int safe_idx): """ >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5) >>> mixed_get(A, -4, 0) (4, 5) >>> mixed_get(A, 0, -4) Traceback (most recent call last): ... IndexError: Out of bounds on buffer access (axis 0) """ with cython.boundscheck(False): one = buf[unsafe_idx] with cython.boundscheck(True): two = buf[safe_idx] return (one, two) # # Coercions # ## @testcase ## def coercions(object[unsigned char] uc): ## """ ## TODO ## """ ## print type(uc[0]) ## uc[0] = -1 ## print uc[0] ## uc[0] = 3.14 ## print uc[0] ## cdef char* ch = b"asfd" ## cdef object[object] objbuf ## objbuf[3] = ch # # Testing that accessing data using various types of buffer access # all works. # def printbuf_int(object[int] buf, shape): # Utility func cdef int i for i in range(shape[0]): print buf[i], print 'END' @testcase def printbuf_int_2d(o, shape): """ Strided: >>> printbuf_int_2d(IntMockBuffer("A", range(6), (2,3)), (2,3)) acquired A 0 1 2 END 3 4 5 END released A >>> printbuf_int_2d(IntMockBuffer("A", range(100), (3,3), strides=(20,5)), (3,3)) acquired A 0 5 10 END 20 25 30 END 40 45 50 END released A Indirect: >>> printbuf_int_2d(IntMockBuffer("A", [[1,2],[3,4]]), (2,2)) acquired A 1 2 END 3 4 END released A """ # should make shape builtin cdef object[int, ndim=2] buf buf = o cdef int i, j for i in range(shape[0]): for j in range(shape[1]): print buf[i, j], print 'END' @testcase def printbuf_float(o, shape): """ >>> printbuf_float(FloatMockBuffer("F", [1.0, 1.25, 0.75, 1.0]), (4,)) acquired F 1.0 1.25 0.75 1.0 END released F """ # should make shape builtin cdef object[float] buf buf = o cdef int i, j for i in range(shape[0]): print buf[i], print "END" # # Test assignments # @testcase def inplace_operators(object[int] buf): """ >>> buf = IntMockBuffer(None, [2, 2]) >>> inplace_operators(buf) >>> printbuf_int(buf, (2,)) 0 3 END """ cdef int j = 0 buf[1] += 1 buf[j] *= 2 buf[0] -= 4 # # Typedefs # # Test three layers of typedefs going through a h file for plain int, and # simply a header file typedef for floats and unsigned. ctypedef int td_cy_int cdef extern from "bufaccess.h": ctypedef td_cy_int td_h_short # Defined as short, but Cython doesn't know this! ctypedef float td_h_double # Defined as double ctypedef unsigned int td_h_ushort # Defined as unsigned short ctypedef td_h_short td_h_cy_short @testcase def printbuf_td_cy_int(object[td_cy_int] buf, shape): """ >>> printbuf_td_cy_int(IntMockBuffer(None, range(3)), (3,)) 0 1 2 END >>> printbuf_td_cy_int(ShortMockBuffer(None, range(3)), (3,)) Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'td_cy_int' but got 'short' """ cdef int i for i in range(shape[0]): print buf[i], print 'END' @testcase def printbuf_td_h_short(object[td_h_short] buf, shape): """ >>> printbuf_td_h_short(ShortMockBuffer(None, range(3)), (3,)) 0 1 2 END >>> printbuf_td_h_short(IntMockBuffer(None, range(3)), (3,)) Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'td_h_short' but got 'int' """ cdef int i for i in range(shape[0]): print buf[i], print 'END' @testcase def printbuf_td_h_cy_short(object[td_h_cy_short] buf, shape): """ >>> printbuf_td_h_cy_short(ShortMockBuffer(None, range(3)), (3,)) 0 1 2 END >>> printbuf_td_h_cy_short(IntMockBuffer(None, range(3)), (3,)) Traceback (most recent call last): ... 
ValueError: Buffer dtype mismatch, expected 'td_h_cy_short' but got 'int' """ cdef int i for i in range(shape[0]): print buf[i], print 'END' @testcase def printbuf_td_h_ushort(object[td_h_ushort] buf, shape): """ >>> printbuf_td_h_ushort(UnsignedShortMockBuffer(None, range(3)), (3,)) 0 1 2 END >>> printbuf_td_h_ushort(ShortMockBuffer(None, range(3)), (3,)) Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'td_h_ushort' but got 'short' """ cdef int i for i in range(shape[0]): print buf[i], print 'END' @testcase def printbuf_td_h_double(object[td_h_double] buf, shape): """ >>> printbuf_td_h_double(DoubleMockBuffer(None, [0.25, 1, 3.125]), (3,)) 0.25 1.0 3.125 END >>> printbuf_td_h_double(FloatMockBuffer(None, [0.25, 1, 3.125]), (3,)) Traceback (most recent call last): ... ValueError: Buffer dtype mismatch, expected 'td_h_double' but got 'float' """ cdef int i for i in range(shape[0]): print buf[i], print 'END' # # Object access # def addref(*args): for item in args: Py_INCREF(item) def decref(*args): for item in args: Py_DECREF(item) def get_refcount(x): return (x).ob_refcnt @testcase def printbuf_object(object[object] buf, shape): """ Only play with unique objects, interned numbers etc. will have unpredictable refcounts. ObjectMockBuffer doesn't do anything about increfing/decrefing, we to the "buffer implementor" refcounting directly in the testcase. >>> a, b, c = "globally_unique_string_23234123", {4:23}, [34,3] >>> get_refcount(a), get_refcount(b), get_refcount(c) (2, 2, 2) >>> A = ObjectMockBuffer(None, [a, b, c]) >>> printbuf_object(A, (3,)) 'globally_unique_string_23234123' 2 {4: 23} 2 [34, 3] 2 """ cdef int i for i in range(shape[0]): print repr(buf[i]), (buf[i]).ob_refcnt @testcase def assign_to_object(object[object] buf, int idx, obj): """ See comments on printbuf_object above. >>> a, b = [1, 2, 3], [4, 5, 6] >>> get_refcount(a), get_refcount(b) (2, 2) >>> addref(a) >>> A = ObjectMockBuffer(None, [1, a]) # 1, ...,otherwise it thinks nested lists... >>> get_refcount(a), get_refcount(b) (3, 2) >>> assign_to_object(A, 1, b) >>> get_refcount(a), get_refcount(b) (2, 3) >>> decref(b) """ buf[idx] = obj @testcase def assign_temporary_to_object(object[object] buf): """ See comments on printbuf_object above. >>> a, b = [1, 2, 3], {4:23} >>> get_refcount(a) 2 >>> addref(a) >>> A = ObjectMockBuffer(None, [b, a]) >>> get_refcount(a) 3 >>> assign_temporary_to_object(A) >>> get_refcount(a) 2 >>> printbuf_object(A, (2,)) {4: 23} 2 {1: 8} 2 To avoid leaking a reference in our testcase we need to replace the temporary with something we can manually decref :-) >>> assign_to_object(A, 1, a) >>> decref(a) """ buf[1] = {3-2: 2+(2*4)-2} # # cast option # @testcase def buffer_cast(object[unsigned int, cast=True] buf, int idx): """ Round-trip a signed int through unsigned int buffer access. >>> A = IntMockBuffer(None, [-100]) >>> buffer_cast(A, 0) -100 """ cdef unsigned int data = buf[idx] return data @testcase def buffer_cast_fails(object[char, cast=True] buf): """ Cannot cast between datatype of different sizes. >>> buffer_cast_fails(IntMockBuffer(None, [0])) Traceback (most recent call last): ... ValueError: Item size of buffer (4 bytes) does not match size of 'char' (1 byte) """ return buf[0] # # Typed buffers # @testcase def typedbuffer1(obj): """ >>> typedbuffer1(IntMockBuffer("A", range(10))) acquired A released A >>> typedbuffer1(None) >>> typedbuffer1(4) Traceback (most recent call last): ... 
TypeError: Cannot convert int to bufaccess.IntMockBuffer """ cdef IntMockBuffer[int, ndim=1] buf = obj @testcase def typedbuffer2(IntMockBuffer[int, ndim=1] obj): """ >>> typedbuffer2(IntMockBuffer("A", range(10))) acquired A released A >>> typedbuffer2(None) >>> typedbuffer2(4) Traceback (most recent call last): ... TypeError: Argument 'obj' has incorrect type (expected bufaccess.IntMockBuffer, got int) """ pass # # Test __cythonbufferdefaults__ # @testcase def bufdefaults1(IntStridedMockBuffer[int, ndim=1] buf): """ For IntStridedMockBuffer, mode should be "strided" by defaults which should show up in the flags. >>> A = IntStridedMockBuffer("A", range(10)) >>> bufdefaults1(A) acquired A released A >>> [str(x) for x in A.recieved_flags] ['FORMAT', 'ND', 'STRIDES'] """ pass @testcase def basic_struct(object[MyStruct] buf): """ See also buffmt.pyx >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)])) 1 2 3 4 5 >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ccqii")) 1 2 3 4 5 """ print buf[0].a, buf[0].b, buf[0].c, buf[0].d, buf[0].e @testcase def nested_struct(object[NestedStruct] buf): """ See also buffmt.pyx >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)])) 1 2 3 4 5 >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="T{ii}T{2i}i")) 1 2 3 4 5 """ print buf[0].x.a, buf[0].x.b, buf[0].y.a, buf[0].y.b, buf[0].z @testcase def packed_struct(object[PackedStruct] buf): """ See also buffmt.pyx >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)])) 1 2 >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c^i}")) 1 2 >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c=i}")) 1 2 """ print buf[0].a, buf[0].b @testcase def nested_packed_struct(object[NestedPackedStruct] buf): """ See also buffmt.pyx >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)])) 1 2 3 4 5 >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ci^ci@i")) 1 2 3 4 5 >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="^c@i^ci@i")) 1 2 3 4 5 """ print buf[0].a, buf[0].b, buf[0].sub.a, buf[0].sub.b, buf[0].c @testcase def complex_dtype(object[long double complex] buf): """ >>> complex_dtype(LongComplexMockBuffer(None, [(0, -1)])) -1j """ print buf[0] @testcase def complex_inplace(object[long double complex] buf): """ >>> complex_inplace(LongComplexMockBuffer(None, [(0, -1)])) (1+1j) """ buf[0] = buf[0] + 1 + 2j print buf[0] @testcase def complex_struct_dtype(object[LongComplex] buf): """ Note that the format string is "Zg" rather than "2g", yet a struct is accessed. >>> complex_struct_dtype(LongComplexMockBuffer(None, [(0, -1)])) 0.0 -1.0 """ print buf[0].real, buf[0].imag @testcase def complex_struct_inplace(object[LongComplex] buf): """ >>> complex_struct_inplace(LongComplexMockBuffer(None, [(0, -1)])) 1.0 1.0 """ buf[0].real += 1 buf[0].imag += 2 print buf[0].real, buf[0].imag # # Nogil # @testcase @cython.boundscheck(False) def buffer_nogil(): """ >>> buffer_nogil() 10 """ cdef object[int] buf = IntMockBuffer(None, [1,2,3]) with nogil: buf[1] = 10 return buf[1] @testcase def buffer_nogil_oob(): """ >>> buffer_nogil_oob() Traceback (most recent call last): ... 
IndexError: Out of bounds on buffer access (axis 0) """ cdef object[int] buf = IntMockBuffer(None, [1,2,3]) with nogil: buf[5] = 10 return buf[1] def get_int(): return 10 @testcase def test_inplace_assignment(): """ >>> test_inplace_assignment() 10 """ cdef object[int, ndim=1] buf = IntMockBuffer(None, [1, 2, 3]) buf[0] = get_int() print buf[0] @testcase def test_nested_assignment(): """ >>> test_nested_assignment() 100 """ cdef object[int] inner = IntMockBuffer(None, [1, 2, 3]) cdef object[int] outer = IntMockBuffer(None, [1, 2, 3]) outer[inner[0]] = 100 return outer[inner[0]] Cython-0.26.1/tests/buffers/buffer.pyx0000664000175000017500000000236512542002467020451 0ustar stefanstefan00000000000000__doc__ = u""" >>> b1 = TestBuffer() >>> b2 = TestBufferRelease() """ import sys if sys.version_info[0] >= 3: __doc__ += u""" >>> ms = memoryview(s) >>> ms.tobytes() b'abcdefg' >>> m1 = memoryview(b1) __getbuffer__ called Semantics changed in python 3.3 >> m1.tobytes() __getbuffer__ called b'abcdefg' >>> m2 = memoryview(b2) __getbuffer__ called Semantics changed in python 3.3 >> m2.tobytes() __getbuffer__ called releasing! b'abcdefg' >>> del m1 >>> del m2 releasing! """ s = b"abcdefg" cdef class TestBuffer: def __getbuffer__(self, Py_buffer* buffer, int flags): print u"__getbuffer__ called" buffer.buf = s buffer.obj = self buffer.len = len(s) buffer.readonly = 0 buffer.format = "B" buffer.ndim = 0 buffer.shape = NULL buffer.strides = NULL buffer.suboffsets = NULL buffer.itemsize = 1 buffer.internal = NULL cdef class TestBufferRelease(TestBuffer): def __releasebuffer__(self, Py_buffer* buffer): print u"releasing!" cdef class TestCompileWithDocstring(object): def __getbuffer__(self, Py_buffer* buffer, int flags): "I am a docstring!" def __releasebuffer__(self, Py_buffer* buf): "I am a docstring!" Cython-0.26.1/tests/buffers/mockbuffers.pxi0000664000175000017500000002555113023021033021451 0ustar stefanstefan00000000000000from libc cimport stdlib from libc cimport stdio cimport cpython.buffer import sys available_flags = ( ('FORMAT', cpython.buffer.PyBUF_FORMAT), ('INDIRECT', cpython.buffer.PyBUF_INDIRECT), ('ND', cpython.buffer.PyBUF_ND), ('STRIDES', cpython.buffer.PyBUF_STRIDES), ('C_CONTIGUOUS', cpython.buffer.PyBUF_C_CONTIGUOUS), ('F_CONTIGUOUS', cpython.buffer.PyBUF_F_CONTIGUOUS), ('WRITABLE', cpython.buffer.PyBUF_WRITABLE) ) cdef class MockBuffer: cdef object format, offset cdef void* buffer cdef Py_ssize_t len, itemsize cdef int ndim cdef Py_ssize_t* strides cdef Py_ssize_t* shape cdef Py_ssize_t* suboffsets cdef object label, log cdef readonly object recieved_flags, release_ok cdef public object fail def __init__(self, label, data, shape=None, strides=None, format=None, offset=0): # It is important not to store references to data after the constructor # as refcounting is checked on object buffers. 
self.label = label self.release_ok = True self.log = "" self.offset = offset self.itemsize = self.get_itemsize() if format is None: format = self.get_default_format() if shape is None: shape = (len(data),) if strides is None: strides = [] cumprod = 1 rshape = list(shape) rshape.reverse() for s in rshape: strides.append(cumprod) cumprod *= s strides.reverse() strides = [x * self.itemsize for x in strides] suboffsets = [-1] * len(shape) datashape = [len(data)] p = data while True: p = p[0] if isinstance(p, list): datashape.append(len(p)) else: break if len(datashape) > 1: # indirect access self.ndim = len(datashape) shape = datashape self.buffer = self.create_indirect_buffer(data, shape) suboffsets = [0] * (self.ndim-1) + [-1] strides = [sizeof(void*)] * (self.ndim-1) + [self.itemsize] self.suboffsets = self.list_to_sizebuf(suboffsets) else: # strided and/or simple access self.buffer = self.create_buffer(data) self.ndim = len(shape) self.suboffsets = NULL try: format = format.encode('ASCII') except AttributeError: pass self.format = format self.len = len(data) * self.itemsize self.strides = self.list_to_sizebuf(strides) self.shape = self.list_to_sizebuf(shape) def __dealloc__(self): stdlib.free(self.strides) stdlib.free(self.shape) if self.suboffsets != NULL: stdlib.free(self.suboffsets) # must recursively free indirect... else: stdlib.free(self.buffer) cdef void* create_buffer(self, data) except NULL: cdef size_t n = (len(data) * self.itemsize) cdef char* buf = stdlib.malloc(n) if buf == NULL: raise MemoryError cdef char* it = buf for value in data: self.write(it, value) it += self.itemsize return buf cdef void* create_indirect_buffer(self, data, shape) except NULL: cdef size_t n = 0 cdef void** buf assert shape[0] == len(data), (shape[0], len(data)) if len(shape) == 1: return self.create_buffer(data) else: shape = shape[1:] n = len(data) * sizeof(void*) buf = stdlib.malloc(n) if buf == NULL: return NULL for idx, subdata in enumerate(data): buf[idx] = self.create_indirect_buffer(subdata, shape) return buf cdef Py_ssize_t* list_to_sizebuf(self, l): cdef size_t n = len(l) * sizeof(Py_ssize_t) cdef Py_ssize_t* buf = stdlib.malloc(n) for i, x in enumerate(l): buf[i] = x return buf def __getbuffer__(MockBuffer self, Py_buffer* buffer, int flags): if self.fail: raise ValueError("Failing on purpose") self.recieved_flags = [] cdef int value for name, value in available_flags: if (value & flags) == value: self.recieved_flags.append(name) buffer.buf = (self.buffer + (self.offset * self.itemsize)) buffer.obj = self buffer.len = self.len buffer.readonly = 0 buffer.format = self.format buffer.ndim = self.ndim buffer.shape = self.shape buffer.strides = self.strides buffer.suboffsets = self.suboffsets buffer.itemsize = self.itemsize buffer.internal = NULL if self.label: msg = "acquired %s" % self.label print msg self.log += msg + "\n" def __releasebuffer__(MockBuffer self, Py_buffer* buffer): if buffer.suboffsets != self.suboffsets: self.release_ok = False if self.label: msg = "released %s" % self.label print msg self.log += msg + "\n" def printlog(self): print self.log[:-1] def resetlog(self): self.log = "" cdef int write(self, char* buf, object value) except -1: raise Exception() cdef get_itemsize(self): print "ERROR, not subclassed", self.__class__ cdef get_default_format(self): print "ERROR, not subclassed", self.__class__ cdef class CharMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(char) cdef 
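# The constructor therefore only keeps plain values and C-level copies: it derives
# default C-contiguous strides from the shape when none are given, then packs the data
# either into a single flat C buffer or, for nested lists, into an indirect buffer of
# pointer arrays with matching suboffsets.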
get_default_format(self): return b"@b" cdef class IntMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(int) cdef get_default_format(self): return b"@i" cdef class UnsignedIntMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(unsigned int) cdef get_default_format(self): return b"@I" cdef class ShortMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(short) cdef get_default_format(self): return b"h" # Try without endian specifier cdef class UnsignedShortMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(unsigned short) cdef get_default_format(self): return b"@1H" # Try with repeat count cdef class FloatMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = (value) return 0 cdef get_itemsize(self): return sizeof(float) cdef get_default_format(self): return b"f" cdef class DoubleMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = value return 0 cdef get_itemsize(self): return sizeof(double) cdef get_default_format(self): return b"d" cdef extern from *: void* addr_of_pyobject "(void*)"(object) cdef class ObjectMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: (buf)[0] = addr_of_pyobject(value) return 0 cdef get_itemsize(self): return sizeof(void*) cdef get_default_format(self): return b"@O" cdef class IntStridedMockBuffer(IntMockBuffer): cdef __cythonbufferdefaults__ = {"mode" : "strided"} cdef class ErrorBuffer: cdef object label def __init__(self, label): self.label = label def __getbuffer__(ErrorBuffer self, Py_buffer* buffer, int flags): raise Exception("acquiring %s" % self.label) def __releasebuffer__(ErrorBuffer self, Py_buffer* buffer): raise Exception("releasing %s" % self.label) # # Structs # cdef struct MyStruct: signed char a signed char b long long int c int d int e cdef struct SmallStruct: int a int b cdef struct NestedStruct: SmallStruct x SmallStruct y int z cdef packed struct PackedStruct: signed char a int b cdef struct NestedPackedStruct: signed char a int b PackedStruct sub int c cdef class MyStructMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: cdef MyStruct* s s = buf s.a, s.b, s.c, s.d, s.e = value return 0 cdef get_itemsize(self): return sizeof(MyStruct) cdef get_default_format(self): return b"2cq2i" cdef class NestedStructMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: cdef NestedStruct* s s = buf s.x.a, s.x.b, s.y.a, s.y.b, s.z = value return 0 cdef get_itemsize(self): return sizeof(NestedStruct) cdef get_default_format(self): return b"2T{ii}i" cdef class PackedStructMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: cdef PackedStruct* s s = buf s.a, s.b = value return 0 cdef get_itemsize(self): return sizeof(PackedStruct) cdef get_default_format(self): return b"^ci" cdef class NestedPackedStructMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: cdef NestedPackedStruct* s s = buf s.a, s.b, s.sub.a, s.sub.b, s.c = value return 0 cdef get_itemsize(self): return sizeof(NestedPackedStruct) cdef get_default_format(self): return b"ci^ci@i" cdef struct LongComplex: long 
double real long double imag cdef class LongComplexMockBuffer(MockBuffer): cdef int write(self, char* buf, object value) except -1: cdef LongComplex* s s = buf s.real, s.imag = value return 0 cdef get_itemsize(self): return sizeof(LongComplex) cdef get_default_format(self): return b"Zg" def print_offsets(*args, size, newline=True): sys.stdout.write(' '.join([str(item // size) for item in args])) if newline: sys.stdout.write('\n') def print_int_offsets(*args, newline=True): print_offsets(*args, size=sizeof(int), newline=newline) shape_5_3_4_list = [[list(range(k * 12 + j * 4, k * 12 + j * 4 + 4)) for j in range(3)] for k in range(5)] stride1 = 21 * 14 stride2 = 21 shape_9_14_21_list = [[list(range(k * stride1 + j * stride2, k * stride1 + j * stride2 + 21)) for j in range(14)] for k in range(9)] Cython-0.26.1/tests/buffers/bufaccess.h0000664000175000017500000000016312542002467020537 0ustar stefanstefan00000000000000/* See bufaccess.pyx */ typedef short td_h_short; typedef double td_h_double; typedef unsigned short td_h_ushort; Cython-0.26.1/tests/windows_bugs.txt0000664000175000017500000000041513023021033020230 0ustar stefanstefan00000000000000common_include_dir cythonize_script cythonize_script_excludes cythonize_script_package initial_file_path package_compilation carray_coercion ctuple int_float_builtins_as_casts_T400 list_pop test_coroutines_pep492 type_inference parallel py_unicode_type test_grammar Cython-0.26.1/tests/compile/0000775000175000017500000000000013151203436016420 5ustar stefanstefan00000000000000Cython-0.26.1/tests/compile/libc_signal.pyx0000664000175000017500000000053212542002467021434 0ustar stefanstefan00000000000000# mode: compile from libc.signal cimport * cdef void sighdl(int signum) nogil: pass cdef sighandler_t h h = signal(SIGABRT, sighdl) if h == SIG_ERR: pass h = signal(SIGABRT, SIG_IGN) if h == SIG_ERR: pass h = signal(SIGABRT, SIG_DFL) if h == SIG_ERR: pass h = signal(SIGABRT, SIG_IGN) cdef int e = raise_(SIGABRT) h = signal(SIGABRT, h) Cython-0.26.1/tests/compile/johnson1.pyx0000664000175000017500000000016512542002467020727 0ustar stefanstefan00000000000000# mode: compile ctypedef enum foo: FOO cdef void func(): cdef foo x map = [FOO] x = map[0] func() Cython-0.26.1/tests/compile/cassign.pyx0000664000175000017500000000036412542002467020620 0ustar stefanstefan00000000000000# mode: compile cdef void foo(): cdef int i1, i2=0 cdef char c1=0, c2 cdef char *p1, *p2=NULL cdef object obj1 i1 = i2 i1 = c1 p1 = p2 obj1 = i1 i1 = obj1 p1 = obj1 p1 = "spanish inquisition" foo() Cython-0.26.1/tests/compile/complexbasetype.pyx0000664000175000017500000000016212542002467022371 0ustar stefanstefan00000000000000# mode: compile cdef extern (int *[42]) spam, grail, swallow cdef (int (*)()) brian(): return NULL brian() Cython-0.26.1/tests/compile/libc_stdio.pyx0000664000175000017500000000066313143605603021305 0ustar stefanstefan00000000000000# mode: compile cimport libc.stdio from libc cimport stdio from libc.stdio cimport printf, puts, fputs, putchar, fputc, putc, stdout with nogil: libc.stdio.printf("hello %s\n", b"world") stdio.printf("hello %s\n", b"world") printf("hello %s\n", b"world") printf("printf_output %d %d\n", 1, 2) puts("puts_output") fputs("fputs_output", stdout) putchar(b'z') fputc(b'x', stdout) putc(b'c', stdout) Cython-0.26.1/tests/compile/nogil.h0000664000175000017500000000065212542002467017710 0ustar stefanstefan00000000000000#ifdef __cplusplus extern "C" { #endif extern DL_EXPORT(void) e1(void); extern DL_EXPORT(int*) e2(void); #ifdef __cplusplus } #endif void 
e1(void) {return;} int* e2(void) {return 0;} #ifdef __cplusplus extern "C" { #endif extern DL_EXPORT(PyObject *) g(PyObject*); extern DL_EXPORT(void) g2(PyObject*); #ifdef __cplusplus } #endif PyObject *g(PyObject* o) {if (o) {}; return 0;} void g2(PyObject* o) {if (o) {}; return;} Cython-0.26.1/tests/compile/hinsen1.pyx.BROKEN0000664000175000017500000000043512542002467021514 0ustar stefanstefan00000000000000__doc__ = """ >>> test() 1 """ cdef extern from "hinsen1.h": ctypedef class spam.Spam [object PySpamObject]: pass cdef class SpamAndEggs(Spam): cdef cook(self): return 1 def test(): cdef SpamAndEggs s s = SpamAndEggs() return s.cook() Cython-0.26.1/tests/compile/nullptr.pyx0000664000175000017500000000015212542002467020664 0ustar stefanstefan00000000000000# mode: compile cdef char *p1 cdef int *p2 cdef int x p1 = NULL p2 = NULL x = p1 == NULL x = NULL == p2 Cython-0.26.1/tests/compile/types_and_names.pyx0000664000175000017500000000100212542002467022330 0ustar stefanstefan00000000000000# mode: compile print sizeof(point*) cdef foo(int i0, int i, list L0, list L, point p0, point p, point* ps): pass cdef class A: cdef list cdef list L # Possibly empty declarators cdef point(self, int, int i, list, list L, point, point p, point* ps): pass cdef class B(A): cdef point(self, o, int i, oo, list L, ooo, point p, point* ps): pass cdef point P cdef point *Ps cdef A a foo(2, 3, [], [], P, P, &P) a.point("something", 3, "anything", [], "an object", P, &P) Cython-0.26.1/tests/compile/callingconvention.srctree0000664000175000017500000000403013143605603023525 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import callingconvention" ######## setup.py ######## from distutils.core import setup from Cython.Distutils import build_ext from Cython.Distutils.extension import Extension setup( ext_modules = [ Extension("callingconvention", ["callingconvention.pyx", "external_callingconvention.c"]), ], cmdclass={'build_ext': build_ext}, ) ######## callingconvention.pyx ######## # mode: compile cdef extern from "callingconvention.h": pass cdef extern int f1() cdef extern int __cdecl f2() cdef extern int __stdcall f3() cdef extern int __fastcall f4() cdef extern int (*p1)() cdef extern int (__cdecl *p2)() cdef extern int (__stdcall *p3)() cdef extern int (__fastcall *p4)() p1 = f1 p2 = f2 p3 = f3 p4 = f4 ######## callingconvention.h ######## #define DLL_EXPORT #include "external_callingconvention.h" ######## external_callingconvention.h ######## #ifndef DL_IMPORT #define DL_IMPORT(t) t #elif defined(DLL_EXPORT) #define DL_IMPORT(t) DL_EXPORT(t) #endif #ifdef __cplusplus extern "C" { #endif extern DL_IMPORT(int) f1(void); extern DL_IMPORT(int) __cdecl f2(void); extern DL_IMPORT(int) __stdcall f3(void); extern DL_IMPORT(int) __fastcall f4(void); extern DL_IMPORT(int) (*p1)(void); extern DL_IMPORT(int) (__cdecl *p2)(void); extern DL_IMPORT(int) (__stdcall *p3)(void); extern DL_IMPORT(int) (__fastcall *p4)(void); #ifdef __cplusplus } #endif ######## external_callingconvention.c ######## #include #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif DL_EXPORT(int) f1(void) {return 0;} DL_EXPORT(int) __cdecl f2(void) {return 0;} DL_EXPORT(int) __stdcall f3(void) {return 0;} DL_EXPORT(int) __fastcall f4(void) {return 0;} DL_EXPORT(int) (*p1)(void); DL_EXPORT(int) (__cdecl *p2)(void); DL_EXPORT(int) (__stdcall *p3)(void); 
DL_EXPORT(int) (__fastcall *p4)(void); Cython-0.26.1/tests/compile/inplace_lhs.pyx0000664000175000017500000000075312542002467021454 0ustar stefanstefan00000000000000# mode: compile cdef struct S: int q def test(): cdef int i = 1, j = 2, k = 3 cdef float x = 1, y = 2, z = 3 cdef object a = 1, b = 2, c = 3, d = 4, e = 5 cdef int[3] m m[0] = 0 m[1] = 1 m[2] = 1 cdef S s = [1] global g i += j + k x += y + z x += i a += b + c g += a m[i] += j a[i] += b + c a[b + c] += d (a + b)[c] += d a[i : j] += b (a + b)[i : j] += c a.b += c + d (a + b).c += d s.q += i Cython-0.26.1/tests/compile/cpp_nogil.pyx0000664000175000017500000000111113143605603021131 0ustar stefanstefan00000000000000# tag: cpp # mode: compile cdef extern from "cpp_nogil.h" nogil: cdef cppclass NoGilTest1: NoGilTest1() void doSomething() # This is declared in cpp_nogil.h, but here we're testing # that we can put nogil directly on the cppclass. cdef extern from *: cdef cppclass NoGilTest2 nogil: NoGilTest2() void doSomething() with nogil: NoGilTest1().doSomething() NoGilTest2().doSomething() # We can override nogil methods as with gil methods. cdef cppclass WithGilSubclass(NoGilTest1): void doSomething() with gil: print "have the gil" Cython-0.26.1/tests/compile/extdescrset.pyx0000664000175000017500000000011412542002467021517 0ustar stefanstefan00000000000000# mode: compile cdef class Foo: def __set__(self, i, v): pass Cython-0.26.1/tests/compile/longunsigned.pyx0000664000175000017500000000011112542002467021653 0ustar stefanstefan00000000000000# mode: compile cdef extern unsigned long x cdef extern long unsigned y Cython-0.26.1/tests/compile/globalstmt.pyx0000664000175000017500000000010112542002467021326 0ustar stefanstefan00000000000000# mode: compile def f(): global a,b,c,d a = b c = d Cython-0.26.1/tests/compile/cpp_enums.h0000664000175000017500000000014412542002467020565 0ustar stefanstefan00000000000000enum Enum1 { Item1, Item2 }; namespace Namespace1 { enum Enum2 { Item3, Item4 }; } Cython-0.26.1/tests/compile/point.h0000664000175000017500000000022612542002467017726 0ustar stefanstefan00000000000000#ifndef POINT_H #define POINT_H namespace geometry { struct Point { double x; double y; int color; }; } #endif Cython-0.26.1/tests/compile/globvardef.pyx0000664000175000017500000000017512542002467021304 0ustar stefanstefan00000000000000# mode: compile cdef int a_global_int cdef a_global_pyobject a_global_int = 0 a_global_pyobject = None cdef object unused Cython-0.26.1/tests/compile/extimported.pyx0000664000175000017500000000012512542002467021530 0ustar stefanstefan00000000000000# mode: compile cdef extern class Spam.Eggs.Ham: pass cdef Ham ham ham = None Cython-0.26.1/tests/compile/tryexcept.pyx0000664000175000017500000000204212542002467021213 0ustar stefanstefan00000000000000# mode: compile def f(a, b, c, x): cdef int i a = b + c try: i = 1 raise x i = 2 except a: i = 3 try: i = 1 except a: i = 2 except b: i = 3 try: i = 1 except a, b: i = 2 try: i = 1 except a: i = 2 except: i = 3 try: i = 1 except (a, b), c[42]: i = 2 for a in b: try: c = x * 42 except: i = 17 try: i = 1 except: raise def g(a, b, c, x): cdef int i a = b + c try: i = 1 raise x i = 2 except a: i = 3 try: i = 1 except a: i = 2 except b: i = 3 try: i = 1 except a as b: i = 2 try: i = 1 except a: i = 2 except: i = 3 try: i = 1 except (a, b) as c: i = 2 except (b, a) as c: i = 3 except: i = 4 else: i = 5 Cython-0.26.1/tests/compile/crunchytype.h0000664000175000017500000000007312542002467021152 0ustar stefanstefan00000000000000 struct CrunchyType { int number; PyObject* 
string; }; Cython-0.26.1/tests/compile/inplace_ops.pyx0000664000175000017500000000036212542002467021463 0ustar stefanstefan00000000000000# mode: compile def test(): cdef object a = 1, b = 2 cdef char *p = 'abc' a += b a -= b a *= b a /= b a %= b a **= b a <<= b a >>= b a &= b a ^= b a |= b p += 42 p -= 42 p += a Cython-0.26.1/tests/compile/cimported_class_base.srctree0000664000175000017500000000127013143605603024161 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace ######## setup.py ######## from Cython.Build import cythonize from Cython.Distutils.extension import Extension import sys sys.path.append("path") ext_modules = [ Extension("importer", ["importer.pyx"]), ] ext_modules = cythonize(ext_modules, include_path=["include"]) ######## pkg/__init__.py ######## ######## pkg/a.pxd ######## cdef class A(object): pass ######## importer.pyx ######## cimport pkg.a cimport pkg.a as a cimport pkg.a as a_by_another_name from pkg cimport a as from_cimported_a cdef class A1(a.A): pass cdef class A2(a_by_another_name.A): pass cdef class A3(pkg.a.A): pass cdef class A4(from_cimported_a.A): pass Cython-0.26.1/tests/compile/doda1.pyx0000664000175000017500000000026212542002467020156 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: pass cdef Spam foo(): return blarg() #cdef Spam grail #grail = blarg() #return grail cdef object blarg(): pass foo() Cython-0.26.1/tests/compile/extdelitem.pyx0000664000175000017500000000011612542002467021330 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __delitem__(self, i): pass Cython-0.26.1/tests/compile/omittedargnames.pyx0000664000175000017500000000006512542002467022352 0ustar stefanstefan00000000000000# mode: compile cdef extern void spam(int, char *) Cython-0.26.1/tests/compile/find_pxd.srctree0000664000175000017500000000146512574327400021620 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace ######## setup.py ######## from Cython.Build import cythonize from Cython.Distutils.extension import Extension import sys sys.path.append("path") ext_modules = [ Extension("a", ["a.pyx"]), Extension("b", ["b.pyx"]), Extension("c", ["c.pyx"]), ] ext_modules = cythonize(ext_modules, include_path=["include"]) ######## a.pyx ######## # Implicit cimport looking in include_path cdef my_type foo ######## include/a.pxd ######## ctypedef int my_type ######## b.pyx ######## # Explicit cimport looking in sys.path from b cimport * cdef my_type foo ######## path/b.pxd ######## ctypedef int my_type ######## c.pyx ######## # Implicit cimport NOT looking in sys.path ######## path/c.pxd ######## +++syntax error just to show that this file is not actually cimported+++ Cython-0.26.1/tests/compile/pyclass.pyx0000664000175000017500000000007712542002467020650 0ustar stefanstefan00000000000000# mode: compile class Spam: def eggs(self): pass Cython-0.26.1/tests/compile/ewing8.pxd0000664000175000017500000000024412542002467020342 0ustar stefanstefan00000000000000cdef struct Foo cdef class Blarg ctypedef Foo FooType ctypedef Blarg BlargType cdef struct Foo: FooType *f cdef class Blarg: cdef FooType *f cdef BlargType b Cython-0.26.1/tests/compile/cpp_enums.pyx0000664000175000017500000000051312542002467021156 0ustar stefanstefan00000000000000# tag: cpp # mode: compile cdef extern from "cpp_enums.h": cdef enum Enum1: Item1 Item2 a = Item1 b = Item2 cdef Enum1 x, y x = Item1 y = Item2 cdef extern from "cpp_enums.h" namespace "Namespace1": cdef enum Enum2: Item3 Item4 c = Item3 d = Item4 cdef Enum2 z, w z = Item3 w = Item4 
Cython-0.26.1/tests/compile/hinsen2.pyx0000664000175000017500000000021512542002467020532 0ustar stefanstefan00000000000000# mode: compile cdef class vector: def __div__(vector self, double factor): cdef object result = vector() return result Cython-0.26.1/tests/compile/cheese.h0000664000175000017500000000000012542002467020017 0ustar stefanstefan00000000000000Cython-0.26.1/tests/compile/getattr3ref.pyx.BROKEN0000664000175000017500000000012512542002467022375 0ustar stefanstefan00000000000000__doc__ = """ >>> print f() """ def f(): g = getattr3 return g.__name__ Cython-0.26.1/tests/compile/hinsen1.h0000664000175000017500000000006012542002467020136 0ustar stefanstefan00000000000000typedef struct { PyObject_HEAD } PySpamObject; Cython-0.26.1/tests/compile/extindex.pyx0000664000175000017500000000011612542002467021014 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __index__(self): return 42 Cython-0.26.1/tests/compile/none.pyx0000664000175000017500000000007312542002467020125 0ustar stefanstefan00000000000000# mode: compile cdef void spam(): eggs = None spam() Cython-0.26.1/tests/compile/index.pyx0000664000175000017500000000064612542002467020303 0ustar stefanstefan00000000000000# mode: compile def f(obj1, obj2, obj3): cdef int int1, int2=0, int3=0 cdef float flt1, *ptr1=NULL cdef int[42] array1 array1[int2] = 0 int1 = array1[int2] flt1 = ptr1[int2] array1[int1] = int2 ptr1[int1] = int2 obj1 = obj2[obj3] int1 = array1[obj3] obj1 = obj2[int3] obj1[obj2] = obj3 array1[obj2] = int3 obj1[int2] = obj3 obj1[obj2] = 42 f(None, None, None) Cython-0.26.1/tests/compile/ewing5.pyx0000664000175000017500000000007212542002467020363 0ustar stefanstefan00000000000000# mode: compile cdef char *f(): raise Exception f() Cython-0.26.1/tests/compile/notnonearg.pyx0000664000175000017500000000024512542002467021341 0ustar stefanstefan00000000000000# mode: compile cdef extern class external.Spam [object Spam]: pass cdef extern class external.Eggs [object Eggs]: pass def ham(Spam s, Eggs e not None): pass Cython-0.26.1/tests/compile/a_capi.pyx0000664000175000017500000000072312542002467020404 0ustar stefanstefan00000000000000# mode: compile cdef public struct Foo: int a, b ctypedef struct Blarg: int c, d ctypedef public Foo Zax cdef public class C[type C_Type, object C_Obj]: pass cdef public Zax *blarg cdef public C c_pub = C() cdef api C c_api = C() cdef public dict o_pub = C() cdef api list o_api = C() cdef api float f(Foo *x): pass cdef public void g(Blarg *x): pass cdef public api void h(Zax *x): pass cdef extern from "a_capi.h": pass Cython-0.26.1/tests/compile/extgetitem.pyx0000664000175000017500000000011712542002467021344 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __getitem__(self, x): pass Cython-0.26.1/tests/compile/funcptr.pyx0000664000175000017500000000026512542002467020652 0ustar stefanstefan00000000000000# mode: compile cdef int grail(): cdef int (*spam)() spam = &grail spam = grail spam() ctypedef int funcptr_t() cdef inline funcptr_t* dummy(): return &grail Cython-0.26.1/tests/compile/cpp_structs.pyx0000664000175000017500000000046213023021033021522 0ustar stefanstefan00000000000000# tag: cpp # mode: compile from cython.view import array cdef extern from "point.h" namespace "geometry": cdef struct Point: double x double y int color cdef Point p = Point(0.0, 0.0, 0) the_point = p cdef Point[::1] ps = array((10,), itemsize=sizeof(Point), format='ddi') Cython-0.26.1/tests/compile/distutils_libraries_T845.srctree0000664000175000017500000000123712574327400024626 
0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace ######## setup.py ######## from Cython.Build import cythonize from Cython.Distutils.extension import Extension ext_modules = [ Extension("a", ["a.pyx"]), Extension("x", ["x.pyx"]), Extension("y", ["y.pyx"]), ] ext_modules = cythonize(ext_modules) assert len(ext_modules[0].libraries) == 2 assert ext_modules[1].libraries == ["lib_x"] assert ext_modules[2].libraries == ["lib_y"] ######## libx.pxd ######## # distutils: libraries = lib_x ######## liby.pxd ######## # distutils: libraries = lib_y ######## a.pyx ######## cimport libx , liby ######## x.pyx ######## cimport libx ######## y.pyx ######## cimport liby Cython-0.26.1/tests/compile/ctypedef_public_class_T355.pxd0000664000175000017500000000030712542002467024207 0ustar stefanstefan00000000000000ctypedef public class Time [type MyTime_Type, object MyTimeObject]: cdef public double seconds ctypedef public class Event [type MyEvent_Type, object MyEventObject]: cdef public Time time Cython-0.26.1/tests/compile/cpdef.pyx0000664000175000017500000000013512542002467020246 0ustar stefanstefan00000000000000# mode: compile cdef class A: cpdef a(self): ma(self) cpdef ma(x): print x Cython-0.26.1/tests/compile/drake1.pyx0000664000175000017500000000005412542002467020334 0ustar stefanstefan00000000000000# mode: compile cdef char *s s = r'\"HT\"' Cython-0.26.1/tests/compile/dotted_cimport_submodule/0000775000175000017500000000000013151203436023517 5ustar stefanstefan00000000000000Cython-0.26.1/tests/compile/dotted_cimport_submodule/__init__.pyx0000664000175000017500000000001512542002467026020 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/tests/compile/dotted_cimport_submodule/a.pxd0000664000175000017500000000000012542002467024446 0ustar stefanstefan00000000000000Cython-0.26.1/tests/compile/signedtypes.pyx0000664000175000017500000000046112542002467021525 0ustar stefanstefan00000000000000# mode: compile cdef struct S: char c unsigned char uc signed char sc short s unsigned short us signed short ss int i unsigned int ui signed int si long l unsigned long ul signed long sl long long ll unsigned long long ull signed long long sll Cython-0.26.1/tests/compile/specmethargdefault.pyx0000664000175000017500000000021312542002467023031 0ustar stefanstefan00000000000000# mode: compile cdef class Grail: def __cinit__(self, spam = None): pass def __init__(self, parrot = 42): pass Cython-0.26.1/tests/compile/ctypedef_public_class_T355.pyx0000664000175000017500000000045312542002467024236 0ustar stefanstefan00000000000000# ticket: 355 # mode: compile ctypedef public class Time [type MyTime_Type, object MyTimeObject]: def __init__(self, seconds): self.seconds = seconds ctypedef public class Event [type MyEvent_Type, object MyEventObject]: def __init__(self, Time time): self.time = time Cython-0.26.1/tests/compile/cnamespec.pyx0000664000175000017500000000070713023021033021111 0ustar stefanstefan00000000000000# mode: compile cdef extern from "cnamespec.h": int a "c_a", b "c_b" cdef struct foo "c_foo": int i "c_i" ctypedef enum blarg "c_blarg": x "c_x" y "c_y" = 42 cdef double spam "c_spam" (int i, float f): cdef double d "c_d" cdef foo *p global b if i: d = spam(a, f) cdef foo q q.i = 7 p = &q b = p.i p.i = x p.i = y cdef inline double spam2 "c_spam2" (int i, float f): return spam(i,f) Cython-0.26.1/tests/compile/gustafsson2.pyx0000664000175000017500000000031412542002467021442 0ustar stefanstefan00000000000000# mode: compile ctypedef enum someenum_t: ENUMVALUE_1 ENUMVALUE_2 cdef 
somefunction(someenum_t val): if val == ENUMVALUE_1: pass somefunction(ENUMVALUE_1) somefunction(ENUMVALUE_2) Cython-0.26.1/tests/compile/excvalreturn.pyx0000664000175000017500000000010212542002467021701 0ustar stefanstefan00000000000000# mode: compile cdef int spam() except -1: eggs = 42 spam() Cython-0.26.1/tests/compile/const_T42.srctree0000664000175000017500000000121412542002467021572 0ustar stefanstefan00000000000000# tag: cpp PYTHON setup.py build_ext --inplace PYTHON -c "import test" ######## test.pyx ######## # distutils: language = c++ cdef extern from "test.h": ctypedef struct c_Test "Test": const char *getString() except +RuntimeError cdef class Test: cdef c_Test *thisptr def getString(self): return self.thisptr.getString() ######## test.h ######## static const char *astring = "123"; class Test { public: const char *getString(void) { return astring; } }; ######## setup.py ######## from distutils.core import setup from Cython.Build.Dependencies import cythonize setup(name='test', ext_modules=cythonize('*.pyx')) Cython-0.26.1/tests/compile/cvardef.pyx0000664000175000017500000000244412574327400020606 0ustar stefanstefan00000000000000# mode: compile # tag: cdef def f(): cdef char a_char cdef short a_short cdef int i1, i2 cdef long a_long cdef float a_float cdef double a_double cdef unsigned char an_unsigned_char cdef unsigned short an_unsigned_short cdef unsigned int an_unsigned_int cdef unsigned long an_unsigned_long cdef char *a_char_ptr, *another_char_ptr cdef char **a_char_ptr_ptr cdef char ***a_char_ptr_ptr_ptr cdef char[10] a_sized_char_array cdef char[10][20] a_2d_char_array cdef char *a_2d_char_ptr_array[10][20] cdef char **a_2d_char_ptr_ptr_array[10][20] cdef int (*a_0arg_function)() cdef int (*a_1arg_function)(int i) cdef int (*a_2arg_function)(int i, int j) cdef void (*a_void_function)() a_char = 0 a_short = 0 i1 = 0 i2 = 0 a_long = 0 a_float = 0 a_double = 0 an_unsigned_char = 0 an_unsigned_short = 0 an_unsigned_int = 0 an_unsigned_long = 0 a_char_ptr = NULL another_char_ptr = NULL a_char_ptr_ptr = NULL a_char_ptr_ptr_ptr = NULL a_sized_char_array[0] = 0 a_2d_char_array[0][0] = 0 a_2d_char_ptr_array[0][0] = NULL a_2d_char_ptr_ptr_array[0][0] = NULL a_0arg_function = NULL a_1arg_function = NULL a_2arg_function = NULL a_void_function = NULL Cython-0.26.1/tests/compile/types_and_names.pxd0000664000175000017500000000021412542002467022307 0ustar stefanstefan00000000000000cdef struct point: double x double y double z cdef foo(int, int i, list, list L, point, point p, point* ps) Cython-0.26.1/tests/compile/belchenko2.pyx.BROKEN0000664000175000017500000000031112542002467022154 0ustar stefanstefan00000000000000cdef extern from "belchenko2.h": void c_func(unsigned char pixel) def f(unsigned char pixel): c_func(pixel) def g(signed char pixel): c_func(pixel) def h(char pixel): c_func(pixel) Cython-0.26.1/tests/compile/ishimoto4.pyx0000664000175000017500000000007012542002467021102 0ustar stefanstefan00000000000000# mode: compile cdef void __stdcall f(): pass f() Cython-0.26.1/tests/compile/withgil.pyx0000664000175000017500000000015312542002467020634 0ustar stefanstefan00000000000000# mode: compile cdef void f() with gil: x = 42 cdef int g(void* x) with gil: pass f() g("test") Cython-0.26.1/tests/compile/crunchytype.pxd0000664000175000017500000000022012542002467021510 0ustar stefanstefan00000000000000cdef extern from "crunchytype.h": cdef class crunchytype.Crunchy [ object CrunchyType ]: cdef int number cdef object string 
Cython-0.26.1/tests/compile/ctypedefclass.pyx0000664000175000017500000000010612542002467022014 0ustar stefanstefan00000000000000# mode: compile ctypedef class spam: pass cdef spam s s = None Cython-0.26.1/tests/compile/cdefexternfromstar.pyx0000664000175000017500000000006312542002467023072 0ustar stefanstefan00000000000000# mode: compile cdef extern from *: int spam Cython-0.26.1/tests/compile/kleckner1.pyx0000664000175000017500000000011212542002467021037 0ustar stefanstefan00000000000000# mode: compile def f(x,): pass cdef void g(int x,): pass g(0) Cython-0.26.1/tests/compile/extpropertyall.pyx0000664000175000017500000000032012542002467022257 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: property eggs: "Ova" def __get__(self): pass def __set__(self, x): pass def __del__(self): pass Cython-0.26.1/tests/compile/carrdecl.pyx0000664000175000017500000000006312542002467020744 0ustar stefanstefan00000000000000# mode: compile cdef extern: cdef func(int[]) Cython-0.26.1/tests/compile/behnel4.pyx0000664000175000017500000000013312542002467020504 0ustar stefanstefan00000000000000# mode: compile cdef enum E: spam, eggs cdef E f() except spam: return eggs f() Cython-0.26.1/tests/compile/declarations.srctree0000664000175000017500000000455113143605603022471 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import declarations" ######## setup.py ######## from distutils.core import setup from Cython.Distutils import build_ext from Cython.Distutils.extension import Extension setup( ext_modules = [ Extension("declarations", ["declarations.pyx", "external_declarations.c"]), ], cmdclass={'build_ext': build_ext}, ) ######## declarations.pyx ######## # mode: compile cdef extern from "declarations.h": pass cdef extern char *cp cdef extern char *cpa[5] cdef extern int (*ifnpa[5])() cdef extern char *(*cpfnpa[5])() cdef extern int (*ifnp)() cdef extern int (*iap)[5] cdef extern int ifn() cdef extern char *cpfn() cdef extern int (*iapfn())[5] cdef extern char *(*cpapfn())[5] cdef extern int fnargfn(int ()) cdef extern int ia[] cdef extern int iaa[][3] cdef extern int a(int[][3], int[][3][5]) cdef void f(): cdef void *p=NULL global ifnp, cpa ifnp = p cdef char *g(): pass f() g() ######## declarations.h ######## #define DLL_EXPORT #include "external_declarations.h" ######## external_declarations.h ######## #ifndef DL_IMPORT #define DL_IMPORT(t) t #elif defined(DLL_EXPORT) #define DL_IMPORT(t) DL_EXPORT(t) #endif #ifdef __cplusplus extern "C" { #endif extern DL_IMPORT(char) *cp; extern DL_IMPORT(char) *cpa[5]; extern DL_IMPORT(int) (*ifnpa[5])(void); extern DL_IMPORT(char) *(*cpfnpa[5])(void); extern DL_IMPORT(int) (*ifnp)(void); extern DL_IMPORT(int) (*iap)[5]; #ifdef __cplusplus } #endif #ifdef __cplusplus extern "C" { #endif extern DL_IMPORT(int) ifn(void); extern DL_IMPORT(char *) cpfn(void); extern DL_IMPORT(int) fnargfn(int (void)); extern DL_IMPORT(int) (*iapfn(void))[5]; extern DL_IMPORT(char *)(*cpapfn(void))[5]; extern DL_IMPORT(int) ia[]; extern DL_IMPORT(int) iaa[][3]; extern DL_IMPORT(int) a(int[][3], int[][3][5]); #ifdef __cplusplus } #endif ######## external_declarations.c ######## #include #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif DL_EXPORT(char) *cp; DL_EXPORT(char) *cpa[5]; DL_EXPORT(int) (*ifnpa[5])(void); DL_EXPORT(char) *(*cpfnpa[5])(void); DL_EXPORT(int) (*ifnp)(void); DL_EXPORT(int) (*iap)[5]; DL_EXPORT(int) ifn(void) {return 0;} DL_EXPORT(char) *cpfn(void) {return 0;} DL_EXPORT(int) fnargfn(int f(void)) {return 0;} DL_EXPORT(int) ia[1]; 
DL_EXPORT(int) iaa[1][3]; DL_EXPORT(int) a(int a[][3], int b[][3][5]) {return 0;} Cython-0.26.1/tests/compile/arrayptrcompat.pyx0000664000175000017500000000036412542002467022241 0ustar stefanstefan00000000000000# mode: compile cdef enum E: z cdef void f(): cdef int *p cdef void *v cdef int[5] a cdef int i=0 cdef E e=z p = a v = a p = a + i p = a + e p = i + a p = e + a p = a - i p = a - e f() Cython-0.26.1/tests/compile/typecast.pyx0000664000175000017500000000023612542002467021023 0ustar stefanstefan00000000000000# mode: compile cdef void f(obj): cdef size_t i=0 cdef char *p p = i p = &i obj = p p = obj f(None) Cython-0.26.1/tests/compile/khavkine1.pyx0000664000175000017500000000020212542002467021041 0ustar stefanstefan00000000000000# mode: compile cdef class T: cdef int[1] a cdef object b cdef void f(void *obj): ( obj).a[0] = 1 b = None f(NULL) Cython-0.26.1/tests/compile/ellipsis_T488.pyx0000664000175000017500000000015712542002467021544 0ustar stefanstefan00000000000000# ticket: 488 # mode: compile #from ... import foo print ... def test(): x = ... assert x is Ellipsis Cython-0.26.1/tests/compile/johnson2.pyx0000664000175000017500000000012012542002467020717 0ustar stefanstefan00000000000000# mode: compile cdef class C: cdef object foo cdef object __weakref__ Cython-0.26.1/tests/compile/jiba6.pyx0000664000175000017500000000030112542002467020153 0ustar stefanstefan00000000000000# mode: compile cdef extern from "string.h": void memcpy(void* des, void* src, int size) cdef void f(): cdef float[3] f1 cdef float* f2 f2 = f1 + 1 memcpy(f1, f2, 1) f() Cython-0.26.1/tests/compile/jiba4.pyx0000664000175000017500000000005712542002467020161 0ustar stefanstefan00000000000000# mode: compile cdef class A: cdef object x Cython-0.26.1/tests/compile/pxd_override_T230.py0000664000175000017500000000016312542002467022200 0ustar stefanstefan00000000000000# mode: compile class A: def foo(self): return "A" class B(A): def foo(self): return "B" Cython-0.26.1/tests/compile/finally_GH1744.pyx0000664000175000017500000000350313143605603021522 0ustar stefanstefan00000000000000# mode: compile # This caused a "maximum recursion depth exceeded" at some point, # see https://github.com/cython/cython/issues/1744 cdef inline bint g(int x, int y): return True cdef cython_bug(int u): try: g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) g(u, u) finally: g(u, u) cython_bug(1) Cython-0.26.1/tests/compile/constcast.pyx0000664000175000017500000000024412542002467021167 0ustar stefanstefan00000000000000# mode: compile from libc.stdlib cimport malloc, free cdef void f(): cdef const int **allocated = malloc(sizeof(int *)) free(allocated) f() Cython-0.26.1/tests/compile/enumintcompat.pyx0000664000175000017500000000061012542002467022046 0ustar stefanstefan00000000000000# mode: compile cdef enum E: a cdef 
enum G: b cdef void f(): cdef E e=a cdef G g=b cdef int i, j=0 cdef float f, h=0 i = j | e i = e | j i = j ^ e i = j & e i = j << e i = j >> e i = j + e i = j - e i = j * e i = j / e i = j % e # f = j ** e # Cython prohibits this i = e + g f = h i = ~a i = -a f() Cython-0.26.1/tests/compile/argdefault.pyx0000664000175000017500000000013412542002467021302 0ustar stefanstefan00000000000000# mode: compile cdef swallow def spam(w, int x = 42, y = "grail", z = swallow): pass Cython-0.26.1/tests/compile/publicapi_api.pyx0000664000175000017500000000215712542002467021774 0ustar stefanstefan00000000000000# mode: compile # -- ctypedef int Int0 ctypedef api int Int1 ctypedef enum EnumA0: EA0 ctypedef api enum EnumA1: EA1 cdef enum EnumB0: EB0=0 cdef api enum EnumB1: EB1=1 cdef Int0 i0 = 0 cdef EnumA0 ea0 = EA0 cdef EnumB0 eb0 = EB0 cdef api Int1 i1 = 0 cdef api EnumA1 ea1 = EA1 cdef api EnumB1 eb1 = EB1 # -- ctypedef struct StructA0: int SA0 ctypedef api struct StructA1: int SA1 cdef struct StructB0: int SB0 cdef api struct StructB1: int SB1 cdef StructA0 sa0 = {'SA0':0} cdef StructB0 sb0 = {'SB0':2} cdef api StructA1 sa1 = {'SA1':1} cdef api StructB1 sb1 = {'SB1':3} # -- ctypedef class Foo0: pass ctypedef api class Foo1 [type PyFoo1_Type, object PyFoo1_Object]: pass cdef class Bar0: pass cdef api class Bar1 [type PyBar1_Type, object PyBar1_Object]: pass cdef Foo0 f0 = None cdef Bar0 b0 = None cdef api Foo1 f1 = None cdef api Bar1 b1 = None # -- cdef void bar0(): pass cdef api void bar1(): pass cdef void* spam0(object o) except NULL: return NULL cdef api void* spam1(object o) except NULL: return NULL bar0() bar1() spam0(None) spam1(None) # -- Cython-0.26.1/tests/compile/extargdefault.pyx0000664000175000017500000000021312542002467022021 0ustar stefanstefan00000000000000# mode: compile cdef extern class somewhere.Swallow: pass cdef Swallow swallow def spam(x = swallow, Swallow y = swallow): pass Cython-0.26.1/tests/compile/docstrings.pyx0000664000175000017500000000044012542002467021343 0ustar stefanstefan00000000000000# mode: compile "Welcome to the parrot module. It is currently resting." def zap(polly, volts): "Wake up polly." class Parrot: "Standard Norwegian Blue." def admire_plumage(self): "Lovely, ain't it?" cdef class SuperParrot: "Special high-performance model." Cython-0.26.1/tests/compile/excvalcheck.pyx0000664000175000017500000000041712542002467021450 0ustar stefanstefan00000000000000# mode: compile cdef extern from "excvalcheck.h": pass cdef extern int spam() except -1 cdef extern void grail() except * cdef extern char *tomato() except? 
NULL cdef void eggs(): cdef int i cdef char *p i = spam() grail() p = tomato() eggs() Cython-0.26.1/tests/compile/const_decl.pyx0000664000175000017500000000052012542002467021300 0ustar stefanstefan00000000000000# mode: compile cdef const_args(const int a, const int *b, const (int*) c, int *const d): print a print b[0] b = NULL # OK, the pointer itself is not const c[0] = 4 # OK, the value is not const d[0] = 7 # OK, the value is not const def call_const_args(x): cdef int k = x const_args(x, &k, &k, &k) Cython-0.26.1/tests/compile/builtinlist.pyx0000664000175000017500000000044312542002467021531 0ustar stefanstefan00000000000000# mode: compile cdef int f() except -1: cdef list l cdef object x = (), y = (1,), z z = list l = list(x) l = list(*y) z = l.insert l.insert(17, 42) l.append(88) l.sort() l.reverse() z = l.as_tuple() return z is not None def test(): f() Cython-0.26.1/tests/compile/extimportedsubtype.pyx0000664000175000017500000000020612542002467023144 0ustar stefanstefan00000000000000# mode: compile from crunchytype cimport Crunchy cdef class Sub2(Crunchy): cdef char character cdef class Sub1(Sub2): pass Cython-0.26.1/tests/compile/declandimpl.pyx0000664000175000017500000000021112542002467021434 0ustar stefanstefan00000000000000# mode: compile cdef class Tomato: def eject(self): pass cdef extern Sandwich butty cdef Tomato supertom supertom = None Cython-0.26.1/tests/compile/builtinfuncs.pyx0000664000175000017500000000130212542002467021667 0ustar stefanstefan00000000000000# mode: compile cdef int f() except -1: cdef object x, y = 0, z = 0, w = 0 cdef str sstring cdef basestring sustring cdef int i cdef long lng cdef Py_ssize_t s x = abs(y) delattr(x, 'spam') x = dir(y) x = divmod(y, z) x = getattr(y, 'spam') i = hasattr(y, 'spam') lng = hash(y) x = intern(y) i = isinstance(y, z) i = issubclass(y, z) x = iter(y) s = len(x) x = open(y, z) x = pow(y, z, w) x = pow(y, z) x = reload(y) x = repr(y) sstring = repr(x) sustring = repr(x) setattr(x, y, z) #i = typecheck(x, y) #i = issubtype(x, y) x = abs def not_called(): response = raw_input('xyz') f() Cython-0.26.1/tests/compile/extinheritset.pyx0000664000175000017500000000032512542002467022065 0ustar stefanstefan00000000000000# mode: compile cdef class Parrot: pass cdef class Norwegian(Parrot): def __setitem__(self, i, x): pass def __setattr__(self, n, x): pass def __set__(self, i, v): pass Cython-0.26.1/tests/compile/cdef_syntax.pyx0000664000175000017500000000032713023021033021460 0ustar stefanstefan00000000000000# mode: compile # the following are valid syntax constructs and should not produce errors ctypedef int x; cdef no_semi(): cdef int i cdef with_semi(): cdef int i; def use_cdef(): &no_semi, &with_semi Cython-0.26.1/tests/compile/specmethdocstring.pyx0000664000175000017500000000146612542002467022722 0ustar stefanstefan00000000000000# mode: compile cdef class C: def __cinit__(self): "This is an unusable docstring." def __init__(self): "This is an unusable docstring." def __dealloc__(self): "This is an unusable docstring." def __richcmp__(self, other, int op): "This is an unusable docstring." def __nonzero__(self): "This is an unusable docstring." return False def __contains__(self, other): "This is an unusable docstring." property foo: def __get__(self): "So is this." def __set__(self, x): "And here is another one." 
def __div__(self, other): "usable docstring" def __iter__(self): "usable docstring" return False def __next__(self): "usable docstring" return False Cython-0.26.1/tests/compile/food.h0000664000175000017500000000010612542002467017521 0ustar stefanstefan00000000000000struct Tomato { PyObject_HEAD }; struct Bicycle{ PyObject_HEAD }; Cython-0.26.1/tests/compile/extdelattr.pyx0000664000175000017500000000011612542002467021344 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __delattr__(self, n): pass Cython-0.26.1/tests/compile/ia_cdefblock.pyx0000664000175000017500000000063412542002467021556 0ustar stefanstefan00000000000000# mode: compile cdef: struct PrivFoo: int i int priv_i void priv_f(): global priv_i priv_i = 42 cdef public: struct PubFoo: int i int pub_v void pub_f(): pass class PubBlarg [object PubBlargObj, type PubBlargType]: pass cdef api: void api_f(): pass cdef public api: void pub_api_f(): pass priv_f() Cython-0.26.1/tests/compile/create_extension.srctree0000664000175000017500000000120713143605603023353 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace ######## setup.py ######## from Cython.Build import cythonize from Cython.Distutils.extension import Extension ext_modules = [ Extension("foo", ["foo.pyx"]), ] # Example documented in docs/src/reference/compilation.rst from Cython.Build.Dependencies import default_create_extension def my_create_extension(template, kwds): libs = kwds.get('libraries', []) + ["mylib"] kwds['libraries'] = libs return default_create_extension(template, kwds) ext_modules = cythonize(ext_modules, create_extension=my_create_extension) assert ext_modules[0].libraries == ["mylib"] ######## foo.pyx ######## Cython-0.26.1/tests/compile/extern.pyx0000664000175000017500000000023612542002467020474 0ustar stefanstefan00000000000000# mode: compile cdef extern int i cdef extern char *s[] cdef extern void spam(char c) cdef extern int eggs(): pass cdef int grail(): pass grail() Cython-0.26.1/tests/compile/publicapi_mix.pyx0000664000175000017500000000375612542002467022026 0ustar stefanstefan00000000000000# mode: compile # -- ctypedef int Int0 ctypedef public int Int1 ctypedef api int Int2 ctypedef public api int Int3 ctypedef enum EnumA0: EA0 ctypedef public enum EnumA1: EA1 ctypedef api enum EnumA2: EA2 ctypedef public api enum EnumA3: EA3 cdef enum EnumB0: EB0=0 cdef public enum EnumB1: EB1=1 cdef api enum EnumB2: EB2=2 cdef public api enum EnumB3: EB3=3 # -- ctypedef struct StructA0: int SA0 ctypedef public struct StructA1: int SA1 ctypedef api struct StructA2: int SA2 ctypedef public api struct StructA3: int SA3 cdef struct StructB0: int SB0 cdef public struct StructB1: int SB1 cdef api struct StructB2: int SB2 cdef public api struct StructB3: int SB3 # -- ctypedef class Foo0: pass ctypedef public class Foo1 [type PyFoo1_Type, object PyFoo1_Object]: pass ctypedef api class Foo2 [type PyFoo2_Type, object PyFoo2_Object]: pass ctypedef public api class Foo3 [type PyFoo3_Type, object PyFoo3_Object]: pass cdef class Bar0: pass cdef public class Bar1 [type PyBar1_Type, object PyBar1_Object]: pass cdef api class Bar2 [type PyBar2_Type, object PyBar2_Object]: pass cdef public api class Bar3 [type PyBar3_Type, object PyBar3_Object]: pass # -- cdef void bar0(): pass cdef public void bar1(): pass cdef api void bar2(): pass cdef public api void bar3(): pass cdef void* spam0(object o) except NULL: return NULL cdef public void* spam1(object o) except NULL: return NULL cdef api void* spam2(object o) except NULL: return NULL cdef public api 
void* spam3(object o) except NULL: return NULL bar0() spam0(None) # -- cdef double d0 = 0 cdef public double d1 = 1 cdef api double d2 = 2 cdef public api double d3 = 3 cdef object o0 = None cdef public object o1 = None cdef api object o2 = None cdef public api object o3 = None # -- Cython-0.26.1/tests/compile/indices.pyx0000664000175000017500000000027112542002467020604 0ustar stefanstefan00000000000000# mode: compile cdef int* a cdef object x cdef int f(int i): print i return i x[f(1)] = 3 a[f(1)] = 3 x[f(2)] += 4 a[f(2)] += 4 print x[1] print a[1] x[f(1)] = 15 Cython-0.26.1/tests/compile/conditional_dependencies.srctree0000664000175000017500000000124513023021033025011 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace ######## setup.py ######## from Cython.Build import cythonize ext_modules = cythonize("foo.pyx") assert set(ext_modules[0].depends) == set(["a.h", "b.h", "c.h", "d.h"]) ######## foo.pyx ######## IF 1: cimport a from b cimport something include "c.pxi" cdef extern from "d.h": pass ######## a.pxd ######## cdef extern from "a.h": pass ######## b.pxd ######## cdef extern from "b.h": cdef void something() ######## c.pxi ######## cdef extern from "c.h": pass ######## a.h ######## /* empty */ ######## b.h ######## /* empty */ ######## c.h ######## /* empty */ ######## d.h ######## /* empty */ Cython-0.26.1/tests/compile/jiba3.pyx0000664000175000017500000000062712542002467020163 0ustar stefanstefan00000000000000# mode: compile cdef class Position cdef class Point(Position) cdef class Vector(Point) cdef class CoordSyst cdef void test(float* f): pass cdef class Position: cdef readonly CoordSyst parent cdef class Point(Position): cdef void bug(self): test(self.parent._matrix) cdef class Vector(Point): cdef void bug(self): test(self.parent._matrix) cdef class CoordSyst: cdef float* _matrix Cython-0.26.1/tests/compile/cforfromloop.pyx0000664000175000017500000000067612542002467021706 0ustar stefanstefan00000000000000# mode: compile cdef int i, j, k cdef object a, b, x for i from 0 <= i < 10: pass for i from 0 < i <= 10: pass for i from 10 >= i > 0: pass for i from 10 > i >= 0: pass for x from 0 <= x <= 10: pass for i from a <= i <= b: pass for i from k <= i <= j: pass for i from k * 42 <= i <= j / 18: pass while j: for i from 0 <= i <= 10: continue break else: continue break Cython-0.26.1/tests/compile/burton1.pyx.BROKEN0000664000175000017500000000011612542002467021535 0ustar stefanstefan00000000000000cdef void f(): cdef void (*p)() p = 0 (p)() Cython-0.26.1/tests/compile/magcmp.pyx0000664000175000017500000000025112542002467020430 0ustar stefanstefan00000000000000# mode: compile cdef void foo(): cdef int bool, int1=0, int2=0 bool = int1 < int2 bool = int1 > int2 bool = int1 <= int2 bool = int1 >= int2 foo() Cython-0.26.1/tests/compile/cargdef.pyx0000664000175000017500000000037312542002467020564 0ustar stefanstefan00000000000000# mode: compile def f(obj, int i, double f, char *s1, char s2[]): pass cdef g(obj, int i, double f, char *s1, char s2[]): pass cdef do_g(object (*func)(object, int, double, char*, char*)): return func(1, 2, 3.14159, "a", "b") do_g(&g) Cython-0.26.1/tests/compile/extgetattr.pyx0000664000175000017500000000011612542002467021357 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __getattr__(self, x): pass Cython-0.26.1/tests/compile/del.pyx0000664000175000017500000000016612542002467017735 0ustar stefanstefan00000000000000# mode: compile def f(a, b): global g del g del a[b] del a[b][42] del a.spam del a.spam.eggs 
Cython-0.26.1/tests/compile/cdefexternempty.pyx0000664000175000017500000000007012542002467022371 0ustar stefanstefan00000000000000# mode: compile cdef extern from "cheese.h": pass Cython-0.26.1/tests/compile/import.pyx0000664000175000017500000000016412542002467020501 0ustar stefanstefan00000000000000# mode: compile def f(): import spam import spam.eggs import spam, eggs, ham import spam as tasty Cython-0.26.1/tests/compile/for.pyx0000664000175000017500000000046312542002467017757 0ustar stefanstefan00000000000000# mode: compile def f(a, b, c): cdef int i for a in b: i = 1 continue i = 2 break i = 3 for i in b: i = 1 for a in "spam": i = 1 for a[b] in c: i = 1 for a,b in c: i = 1 for a in b,c: i = 1 Cython-0.26.1/tests/compile/slicex.pyx0000664000175000017500000000012612542002467020454 0ustar stefanstefan00000000000000# mode: compile def f(a, b, c, d, e, f, g, h, i): a = b[c:d, e:f:g, ..., h, :i:] Cython-0.26.1/tests/compile/parallel_compile_float_rank.pyx0000664000175000017500000000017212542002467024672 0ustar stefanstefan00000000000000# mode: compile from cython.parallel import * cdef ssize_t i with nogil, parallel(): for i in range(10): pass Cython-0.26.1/tests/compile/assert2.pyx0000664000175000017500000000006012542002467020545 0ustar stefanstefan00000000000000# mode: compile def f(a, b): assert a, a+b Cython-0.26.1/tests/compile/first_assignment.pyx0000664000175000017500000000146312542002467022551 0ustar stefanstefan00000000000000# mode: compile cimport cython @cython.test_assert_path_exists( "//SingleAssignmentNode", "//SingleAssignmentNode[./NameNode[@name = 'a']]", "//SingleAssignmentNode[./NameNode[@name = 'a'] and @first = True]", ) def test_cdef(): cdef int a = 1 @cython.test_assert_path_exists( "//SingleAssignmentNode", "//SingleAssignmentNode[./NameNode[@name = 'a']]", # FIXME: currently not working # "//SingleAssignmentNode[./NameNode[@name = 'a'] and @first = True]", ) def test_py(): a = 1 @cython.test_assert_path_exists( "//SingleAssignmentNode", "//SingleAssignmentNode[./NameNode[@name = 'a']]", # FIXME: currently not working # "//SingleAssignmentNode[./NameNode[@name = 'a'] and @first = True]", ) def test_cond(): if True: a = 1 else: a = 2 Cython-0.26.1/tests/compile/publicapi_pxd_mix.pyx0000664000175000017500000000127312542002467022671 0ustar stefanstefan00000000000000# mode: compile cdef class Foo1: pass cdef class Foo2: pass cdef class Foo3: pass cdef class Bar1: pass cdef class Bar2: pass cdef class Bar3: pass cdef void bar0(): pass cdef public void bar1(): pass cdef api void bar2(): pass cdef public api void bar3(): pass cdef void* spam0(object o) except NULL: return NULL cdef public void* spam1(object o) except NULL: return NULL cdef api void* spam2(object o) nogil except NULL: return NULL cdef public api void* spam3(object o) except NULL with gil: return NULL cdef int i0 = 0 # XXX This should not be required! cdef public int i1 = 1 cdef api int i2 = 2 cdef public api int i3 = 3 Cython-0.26.1/tests/compile/ewing4.pyx0000664000175000017500000000011012542002467020353 0ustar stefanstefan00000000000000# mode: compile cdef void f(): "This is a pseudo doc string." 
f() Cython-0.26.1/tests/compile/altet1.pyx.BROKEN0000664000175000017500000000033112542002467021334 0ustar stefanstefan00000000000000__doc__ = """ >>> flub(25) 25 >>> g() 0 """ cdef extern from "altet1.h": ctypedef int blarg cdef blarg globvar def flub(blarg bobble): print bobble globvar = 0 def g(): return globvar Cython-0.26.1/tests/compile/ewing7.pyx0000664000175000017500000000027712542002467020374 0ustar stefanstefan00000000000000# mode: compile cdef class A: cdef void f(self, x): pass cdef class B(A): cdef void f(self, object x): pass cdef extern void g(A a, b) cdef extern void g(A a, b) Cython-0.26.1/tests/compile/extsetslice.pyx0000664000175000017500000000015312542002467021521 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __setslice__(self, Py_ssize_t i, Py_ssize_t j, x): pass Cython-0.26.1/tests/compile/lepage_2.pyx0000664000175000017500000000010112542002467020634 0ustar stefanstefan00000000000000# mode: compile ctypedef struct BB: void (*f) (void* state) Cython-0.26.1/tests/compile/extdelslice.pyx0000664000175000017500000000015012542002467021467 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __delslice__(self, Py_ssize_t i, Py_ssize_t j): pass Cython-0.26.1/tests/compile/pxd_override_T230.pxd0000664000175000017500000000011012542002467022333 0ustar stefanstefan00000000000000cdef class A: cpdef foo(self) cdef class B(A): cpdef foo(self) Cython-0.26.1/tests/compile/watts2.pyx0000664000175000017500000000005312542002467020410 0ustar stefanstefan00000000000000# mode: compile cdef int x x = 0xFFFFFFFF Cython-0.26.1/tests/compile/posix_pxds.pyx0000664000175000017500000000163713023021023021355 0ustar stefanstefan00000000000000# tag: posix # mode: compile cimport posix cimport posix.unistd from posix cimport unistd from posix.unistd cimport * cimport posix.fcntl from posix cimport fcntl from posix.fcntl cimport * cimport posix.types from posix cimport types from posix.types cimport * cimport posix.signal from posix cimport signal from posix.signal cimport * cimport posix.stat from posix cimport stat from posix.stat cimport * cimport posix.stdio from posix cimport stdio from posix.stdio cimport * cimport posix.stdlib from posix cimport stdlib from posix.stdlib cimport * cimport posix.time from posix cimport time from posix.time cimport * cimport posix.resource from posix cimport resource from posix.resource cimport * cimport posix.wait from posix cimport wait from posix.wait cimport * cimport posix.mman from posix cimport mman from posix.mman cimport * cimport posix.dlfcn from posix cimport dlfcn from posix.dlfcn cimport * Cython-0.26.1/tests/compile/extpropertydoc.pyx0000664000175000017500000000010512542002467022255 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: property eggs: "Ova" Cython-0.26.1/tests/compile/belchenko1.pyx0000664000175000017500000000025212542002467021200 0ustar stefanstefan00000000000000# mode: compile cdef extern from *: ctypedef int intptr_t cdef int _is_aligned(void *ptr): return ((ptr) & ((sizeof(int))-1)) == 0 _is_aligned(NULL) Cython-0.26.1/tests/compile/specmethextarg.pyx0000664000175000017500000000020612542002467022207 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: cdef int eggs def __iadd__(self, Spam other): self.eggs = self.eggs + other.eggs Cython-0.26.1/tests/compile/extinheritdel.pyx0000664000175000017500000000031712542002467022037 0ustar stefanstefan00000000000000# mode: compile cdef class Parrot: pass cdef class Norwegian(Parrot): def __delitem__(self, i): pass def 
__delattr__(self, n): pass def __delete__(self, i): pass Cython-0.26.1/tests/compile/cnamespec.h0000664000175000017500000000001612542002467020530 0ustar stefanstefan00000000000000int c_a, c_b; Cython-0.26.1/tests/compile/tryfinally.pyx0000664000175000017500000000041212542002467021360 0ustar stefanstefan00000000000000# mode: compile def f(a, b, c, x): cdef int i a = b + c try: return raise a finally: c = a - b for a in b: try: continue break c = a * b finally: i = 42 Cython-0.26.1/tests/compile/altet1.h0000664000175000017500000000002412542002467017763 0ustar stefanstefan00000000000000typedef int blarg; Cython-0.26.1/tests/compile/extpymemberdef.pyx0000664000175000017500000000057012542002467022210 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: cdef public char c cdef public int i cdef public long l cdef public unsigned char uc cdef public unsigned int ui cdef public unsigned long ul cdef public float f cdef public double d cdef public char *s cdef readonly char[42] a cdef public object o cdef readonly int r cdef readonly Spam e Cython-0.26.1/tests/compile/cpp_class_redefinition.pyx0000664000175000017500000000032012542002467023667 0ustar stefanstefan00000000000000# tag: cpp # mode: compile cdef extern from "templates.h": cdef cppclass TemplateTest1[T]: TemplateTest1() T value int t T getValue() cdef cppclass TemplateTest1[T] Cython-0.26.1/tests/compile/msvc_strings.pyx0000664000175000017500000021454112542002467021716 0ustar stefanstefan00000000000000# mode: compile """A long module docstring. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer sit amet mauris mauris, sit amet venenatis nisl. Vivamus a est porta enim sollicitudin mollis. Proin fringilla massa vel ante gravida luctus. Nunc quis nunc id quam hendrerit posuere. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam porttitor interdum sollicitudin. Mauris malesuada tellus tellus. Mauris condimentum nunc et sapien pellentesque gravida. Suspendisse sed ipsum orci. Duis ut lacus dui. Integer ac gravida sem. Vivamus fermentum porttitor velit ac blandit. Maecenas pulvinar ullamcorper enim, vitae aliquet tortor scelerisque et. Vestibulum ante massa, sodales et bibendum dignissim, consectetur vitae metus. Quisque vel dui erat, vel commodo metus. Aliquam arcu dolor, viverra sit amet porttitor a, faucibus eu augue. Sed ornare, enim eget ultricies suscipit, nunc dui lacinia enim, vitae tempus nunc libero vitae ligula. Nam et commodo ligula. Pellentesque tincidunt lorem at elit aliquam at fringilla libero tempor. Donec molestie consectetur nibh, ac varius ante dictum id. Suspendisse lectus nibh, molestie vel dapibus eget, egestas ut eros. Mauris vel mauris turpis, vitae bibendum nunc. Vestibulum nulla enim, vestibulum vitae tincidunt et, gravida eu metus. Nulla sagittis, odio a placerat laoreet, arcu lectus vestibulum nunc, in hendrerit tortor quam sit amet turpis. In et purus vel dui pellentesque tincidunt. Donec dictum nibh sed quam luctus sit amet luctus justo dapibus. Integer nulla elit, lacinia aliquet euismod sed, tempus vitae lectus. Fusce non sapien dolor. Suspendisse ut est ut dui tempor ultricies id ut elit. Aenean adipiscing sollicitudin enim, nec porttitor est porttitor eget. Proin lobortis ante ut diam sodales volutpat. Donec urna diam, porttitor nec laoreet ut, rhoncus non diam. Ut sed mi vitae turpis semper semper. Integer sit amet lorem sapien. Aliquam risus diam, vulputate id sagittis et, molestie ut lectus. Aliquam erat volutpat. 
Morbi aliquet venenatis metus in posuere. Cras vitae purus nunc, ut vestibulum ipsum. Nullam vehicula dui in urna iaculis lobortis. Ut a est non est tincidunt iaculis. Vivamus rutrum velit non nunc malesuada sed bibendum mi iaculis. Sed id lacus in sem tempor vestibulum. Cras bibendum accumsan suscipit. Phasellus congue nisl consectetur turpis rhoncus aliquet posuere libero fringilla. Sed eros tellus, hendrerit nec imperdiet vitae, blandit ac dolor. Nulla facilisi. Morbi ullamcorper libero odio, at cursus tortor. Cras ultricies tellus eget justo cursus cursus. Donec at mi massa, auctor suscipit sem. Proin dolor purus, semper sed ultrices ut, iaculis at tortor. Donec risus enim, interdum et convallis nec, aliquam eget velit. Curabitur eget lectus dolor. Integer id turpis eu nulla euismod tincidunt. Fusce elit nibh, dapibus sit amet tempus ac, convallis eu libero. Donec dui justo, molestie sed euismod porta, ultricies id orci. Praesent a tellus et risus faucibus porttitor pellentesque in purus. Fusce blandit risus ac tortor viverra vitae molestie odio convallis. Donec rhoncus volutpat mauris, sit amet mattis libero dapibus id. Ut rhoncus venenatis nisi ac dictum. In non nulla eget massa convallis facilisis. Praesent nec odio id odio semper lobortis non eu erat. Proin quis gravida magna. Sed rhoncus lectus auctor arcu posuere a auctor dui pellentesque. Sed enim nulla, luctus quis sagittis sed, vestibulum eget metus. Mauris ornare pretium fringilla. Proin ligula eros, fermentum in placerat sit amet, placerat vel mauris. Nulla magna enim, luctus eget euismod ac, lacinia vel lorem. Duis mi leo, porttitor vitae dictum ac, ultrices iaculis metus. Quisque libero mi, aliquet quis vestibulum eget, porttitor non justo. Praesent ac metus felis. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec vel enim sit amet ante imperdiet commodo sed vel nisi. Praesent semper viverra nulla vehicula sollicitudin. Fusce lacinia aliquet ullamcorper. Donec vitae diam est. Integer volutpat hendrerit turpis ut bibendum. Integer et dui augue. Nunc ut nisl in felis feugiat semper nec sit amet purus. Proin convallis ultrices nisl ut vehicula. Pellentesque neque mi, elementum vel placerat nec, laoreet ac nulla. Pellentesque aliquam dui a metus iaculis posuere. Curabitur dapibus faucibus metus. Donec quis diam dui. Proin at mi nec augue cursus pulvinar eu vel metus. Curabitur eget turpis ac risus dignissim luctus sed id ligula. Etiam lectus neque, varius ut euismod nec, euismod quis nulla. Ut feugiat, quam id tempor luctus, metus eros lacinia diam, nec dapibus tellus dui quis diam. Nam interdum, orci id fringilla mattis, ipsum eros pellentesque turpis, hendrerit dignissim justo dui interdum ante. Curabitur aliquam nisi ut dui lacinia tempor. Nulla lobortis tellus non sapien dignissim ut dapibus dui aliquet. Nam scelerisque, urna a aliquam malesuada, mi tortor scelerisque libero, quis pellentesque erat eros ut justo. Phasellus nulla purus, suscipit vel gravida euismod, malesuada et odio. Vestibulum non libero eget lacus venenatis auctor quis a est. Nunc id leo est. Curabitur pulvinar viverra sapien at viverra. Cras pretium justo et lorem lobortis id tempor nisi accumsan. Cras egestas tortor in risus hendrerit eu varius purus suscipit. Nullam mauris eros, mattis at tempor vitae, mollis vitae velit. Etiam at adipiscing lectus. Quisque molestie, metus id posuere pharetra, lorem enim vehicula mauris, ut ultricies purus justo a lacus. Vivamus blandit euismod adipiscing. 
Nam eu ligula at elit ultricies tempus. Nunc ac sodales neque. Ut dui diam, porttitor a pulvinar vel, sodales sit amet turpis. Donec vitae eros at neque luctus scelerisque. In consequat elementum iaculis. Donec ullamcorper dolor eu quam volutpat rhoncus. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras laoreet ante eget dolor sagittis imperdiet. Proin magna urna, porta id blandit nec, commodo eget lorem. Etiam imperdiet, orci sit amet rutrum consectetur, orci augue tempus lacus, id venenatis sapien nisl a est. Sed accumsan massa sed libero consectetur scelerisque. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed nunc risus, lobortis id egestas nec, suscipit id magna. Morbi at iaculis mauris. Proin felis sem, tempus non pellentesque congue, vehicula sit amet eros. Maecenas porttitor erat ac dolor pharetra iaculis. Cras tincidunt, nulla eget malesuada egestas, sem diam consequat quam, sed feugiat nulla orci at mauris. Quisque non arcu diam, ac lacinia felis. Nunc iaculis mollis egestas. Etiam imperdiet dolor consectetur eros feugiat fringilla sed in lacus. Nunc nec tincidunt dolor. Etiam sagittis tortor condimentum nunc fermentum vestibulum. Vivamus lobortis, magna sit amet scelerisque lobortis, sem eros molestie leo, eget aliquet ligula est in lectus. Duis placerat porta pulvinar. Sed sed adipiscing ante. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam accumsan iaculis augue, sed varius dui sagittis id. Etiam sit amet eleifend augue. Ut sit amet nibh sit amet justo tempor condimentum. Ut faucibus sagittis volutpat. Duis vestibulum feugiat sollicitudin. Aenean cursus luctus urna at consectetur. Nullam tincidunt, eros a iaculis sodales, tellus est imperdiet arcu, sit amet tincidunt orci felis et tortor. Mauris rutrum venenatis nunc ut rutrum. Phasellus nec erat magna, in tincidunt orci. Sed sit amet suscipit tellus. Mauris ut nisi turpis. Suspendisse augue turpis, condimentum ac bibendum in, vestibulum nec eros. Curabitur dapibus pulvinar vehicula. Fusce consequat, erat in malesuada hendrerit, tellus urna pharetra lacus, sed euismod nisi urna sed nisi. Etiam fermentum accumsan nunc, sed bibendum dui iaculis id. Etiam blandit fermentum ligula nec viverra. Vivamus venenatis arcu in nulla euismod euismod. Donec sit amet augue nec metus varius fringilla. Vivamus pulvinar elit ac mi rhoncus in luctus diam egestas. Curabitur a felis eget arcu pretium tempus eu sed mauris. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris malesuada, nibh ac venenatis hendrerit, ligula dui condimentum tellus, sit amet pretium diam tortor vel risus. Suspendisse suscipit consequat eros id dignissim. Cras interdum lorem ac massa euismod non porta enim pretium. Aliquam ultrices nibh vitae ligula consectetur vel sollicitudin lacus volutpat. Phasellus vulputate iaculis sem nec laoreet. Nam leo sem, tempor eu condimentum id, imperdiet sed dolor. Donec pharetra velit non libero euismod tempor. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Sed fermentum, libero a posuere posuere, enim elit imperdiet enim, a sollicitudin est felis non libero. Sed vel dolor ut arcu dapibus iaculis nec a mauris. Morbi ullamcorper ultrices venenatis. Fusce luctus ante sit amet lacus venenatis ut rutrum elit lobortis. Nulla fermentum tortor ac sapien fringilla quis iaculis quam egestas. Aliquam et tortor est, at elementum mauris. 
Morbi posuere erat nec leo vulputate in pellentesque tortor condimentum. Vestibulum at orci augue. Aenean pellentesque sapien id felis consequat varius. Suspendisse bibendum enim sit amet mi imperdiet vel suscipit nisi tristique. Curabitur velit massa, consectetur ac mattis vel, accumsan at nunc. Donec porta, nibh nec consequat convallis, urna neque auctor erat, eu convallis lorem leo convallis turpis. Morbi non mauris non metus ornare vulputate. Sed aliquet, dolor ut egestas fermentum, metus purus mollis elit, nec commodo odio quam quis nisl. Aliquam erat volutpat. Suspendisse sed faucibus urna. Integer suscipit rutrum condimentum. Praesent dignissim libero eget metus luctus consectetur. Vestibulum ac erat felis, vitae iaculis erat. Duis interdum lacinia arcu, non lacinia urna luctus in. Curabitur feugiat sapien sapien, in vestibulum diam. Phasellus lobortis massa ut metus pretium dignissim. Fusce quis sem odio. Integer pellentesque sodales augue id tincidunt. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum lorem odio, semper vel scelerisque sit amet, sagittis ac libero. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam egestas ultricies dui at gravida. Duis tristique, eros id consectetur pellentesque, nulla arcu ultricies tortor, ut pulvinar sapien lacus in elit. Vivamus dolor massa, pulvinar at mollis vitae, euismod ut dolor. Vivamus a magna ante. Vestibulum vitae fringilla leo. Ut gravida magna in quam fringilla ultricies. Mauris rhoncus enim id sem interdum blandit. Pellentesque luctus leo sit amet felis viverra ac accumsan purus mollis. Aenean pretium fringilla quam nec laoreet. Nulla id mauris mauris. Nam varius bibendum tristique. Integer ante felis, volutpat sed dignissim vel, interdum molestie nisi. Etiam mollis accumsan elit, ut gravida eros molestie nec. Nullam quis velit ac purus imperdiet sodales. Donec semper placerat venenatis. Cras dolor risus, sodales sed scelerisque nec, sollicitudin pretium felis. Quisque pretium felis id turpis bibendum pulvinar ornare id nibh. Morbi lobortis leo non mi porttitor vulputate. Vestibulum nec odio tellus, ut blandit ligula. In pellentesque neque sit amet dui pulvinar sed laoreet dui vehicula. In hac habitasse platea dictumst. Etiam feugiat dictum blandit. Praesent lacinia tincidunt elit, quis consectetur tortor molestie commodo. Ut sit amet accumsan lorem. Cras quam nunc, malesuada tempor volutpat vitae, aliquam eu diam. Sed sem nibh, bibendum nec sollicitudin at, interdum et magna. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam vitae sem non elit pharetra vestibulum et condimentum libero. Nam egestas ultricies hendrerit. Nunc nec fringilla nulla. Aliquam risus tellus, hendrerit non dapibus a, tincidunt vel ante. Vivamus mollis, magna et lacinia tincidunt, dui massa porta odio, ac ornare felis massa nec lorem. Mauris sagittis lacus sed metus mollis ac egestas lectus porttitor. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Integer vitae lacinia libero. Phasellus at neque enim, sit amet dictum mi. Ut risus nisi, vestibulum vel congue eget, egestas in ipsum. Duis faucibus tempus sodales. Duis convallis pellentesque arcu rhoncus congue. Nunc ac mauris eu purus vestibulum congue. Praesent convallis semper augue vel volutpat. Integer dictum varius placerat. 
Vestibulum convallis tortor non mi lacinia ac aliquet dui ultricies. Donec ultrices purus eros. Maecenas venenatis posuere massa, nec consectetur lacus cursus eget. Donec quam lacus, tempus id placerat et, posuere sed libero. Proin auctor diam ut arcu viverra ut imperdiet tellus dapibus. Morbi ac mauris quis tellus porttitor eleifend. Sed et ante magna, ut sodales sapien. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Integer mattis venenatis mi non ullamcorper. Vestibulum magna enim, aliquam non interdum ut, dignissim vitae ante. Praesent dignissim, est at pretium posuere, nisl ante varius felis, vitae posuere enim nulla et nunc. Morbi sagittis suscipit leo, eu accumsan ligula volutpat non. Donec ut tincidunt magna. Integer ac libero mi. Sed non eros dolor, in tincidunt enim. Curabitur iaculis erat quis felis iaculis ut volutpat augue malesuada. Pellentesque eget arcu ligula, ut volutpat purus. Suspendisse dictum lorem quis sapien lacinia pretium. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin sagittis egestas massa et tempor. Mauris et eros ante, id porta sem. Duis ac eros vitae ipsum ultrices malesuada eget a risus. Morbi imperdiet, est a hendrerit tristique, mi erat molestie lacus, ac tempor risus nulla id erat. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Etiam congue, lacus quis ultricies consequat, diam metus convallis enim, ut volutpat enim urna vitae erat. In quam risus, molestie et dapibus id, elementum sit amet ligula. Nam faucibus lacus id dolor facilisis viverra. Nullam vehicula massa ac arcu consectetur vulputate. Praesent nec augue ac justo dapibus vehicula. Aliquam consectetur hendrerit dolor, et mollis nisl auctor ut. Ut sagittis risus at felis fringilla ultricies. Vestibulum non urna nibh, nec pretium dolor. Nulla imperdiet lobortis eros at pharetra. Vivamus cursus pellentesque ornare. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce arcu quam, pulvinar at rutrum vitae, ornare vitae leo. Maecenas vehicula magna sit amet nulla bibendum condimentum. Curabitur ultrices tempor fringilla. Vivamus pretium suscipit molestie. Donec arcu diam, ultricies ac pellentesque eu, venenatis et sapien. Nam dictum orci augue, vel eleifend leo. Nam at lacus sapien, nec pretium eros. In egestas, enim sed sagittis feugiat, purus odio tristique lectus, vel condimentum leo turpis ac odio. Nam iaculis mi quis odio posuere et venenatis velit ultricies. Nulla facilisi. Proin nec dolor ac quam euismod gravida quis et eros. Nam interdum condimentum mattis. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Praesent a nisi eu massa mollis posuere sed a nunc. Aenean tempus enim a justo rhoncus quis ultrices nisl commodo. Aenean imperdiet mauris a ipsum venenatis vel scelerisque lorem rutrum. Donec ut nunc eros, eget accumsan felis. Nullam ullamcorper porta dictum. Donec accumsan cursus vestibulum. Aenean in sapien velit. Vivamus nec massa mi. Fusce felis tortor, bibendum non volutpat a, fringilla quis nisi. Duis varius bibendum erat, quis fermentum sem accumsan eget. Maecenas massa felis, porta sed laoreet eu, luctus eu lectus. Cras id nibh vitae erat fringilla rutrum. Maecenas eget consequat est. Vivamus viverra, felis vel faucibus rhoncus, quam ipsum elementum libero, quis convallis urna purus ut mauris. Nam quis urna vitae enim consequat placerat. Vivamus congue augue sit amet lectus luctus tempor. 
Cras ut justo convallis est egestas pellentesque ac nec orci. Vivamus rutrum bibendum ante, at cursus erat pulvinar ornare. Proin imperdiet scelerisque ante eu vestibulum. Nullam ullamcorper metus nec purus auctor lobortis. Proin sed lacus et ipsum tempor tempus. Vivamus odio dolor, vulputate vitae semper sit amet, aliquet egestas orci. Nullam non quam eu quam sagittis porta. Nunc in velit id erat commodo viverra. Praesent nec est augue, nec sagittis erat. Cras sed turpis quis enim tempor sagittis. Donec in justo ac nisl porta condimentum id vestibulum nulla. Nam elementum ultricies nunc a bibendum. Aenean tincidunt nisl non augue pellentesque sit amet convallis neque semper. Cras placerat suscipit massa sed volutpat. Integer vulputate imperdiet enim, vitae vulputate sapien mattis feugiat. Vivamus pharetra facilisis mauris a gravida. Nulla non venenatis est. Duis lobortis consectetur sem ac aliquam. In eget sapien odio. Vivamus pulvinar ultricies magna, quis laoreet dui porta et. Integer tempus malesuada velit, et consequat odio ultrices sed. Aliquam malesuada commodo diam vel posuere. Morbi porttitor, elit vitae auctor gravida, lorem massa bibendum arcu, vel placerat nulla justo at augue. Aliquam libero quam, mattis blandit congue sit amet, fermentum ac augue. Aliquam malesuada molestie vulputate. Duis id porta augue. Vestibulum diam dolor, ultrices sit amet porttitor id, convallis id lectus. Etiam ac augue tincidunt nisi tempor molestie euismod id nisl. Nam et tortor ac arcu viverra pulvinar. Fusce pulvinar rhoncus leo, a faucibus enim interdum non. Aliquam vulputate mattis consectetur. Pellentesque sit amet quam sem. Cras eget arcu eu elit volutpat volutpat. Integer sed varius enim. Integer sit amet felis orci, id dignissim sapien. Sed vitae lorem sed libero facilisis fringilla. Pellentesque congue tristique purus, eleifend semper risus suscipit quis. Phasellus rutrum quam vitae arcu vulputate porta. Sed tristique arcu nec mi porttitor lacinia. Donec congue feugiat diam quis pretium. Vivamus at luctus nunc. Integer vulputate laoreet mauris quis auctor. Nunc at ultrices libero. Maecenas porta faucibus purus non vehicula. Sed sit amet metus vitae mi ultrices scelerisque nec quis risus. Phasellus pellentesque tincidunt massa id ultricies. Aliquam dictum arcu ac dolor interdum rutrum. Nulla facilisi. Duis nisi est, tincidunt a sagittis id, sollicitudin at odio. Curabitur sed est eu sapien faucibus dignissim in quis tortor. Nunc ac elit tortor, non lobortis massa. Proin posuere ante ut metus vehicula suscipit. Proin mattis mauris ac lectus consequat rutrum. Nam arcu lectus, commodo non pretium a, pharetra semper dolor. Fusce eleifend hendrerit adipiscing. Nunc et eleifend erat. Suspendisse tempus nisl ut arcu blandit ut adipiscing nisi tristique. Suspendisse molestie facilisis risus sed fermentum. Praesent tempor convallis ultricies. Integer et elit velit, at consectetur risus. Vestibulum mollis adipiscing sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris dictum molestie porta. Nam convallis nisl quis lacus vulputate in convallis risus sagittis. Vivamus accumsan faucibus cursus. Ut ultricies imperdiet ligula scelerisque blandit. In ornare egestas purus, at convallis velit egestas laoreet. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nulla quis imperdiet est. Nunc tempus magna quis lacus feugiat et posuere lacus vehicula. Cras lacinia aliquam est at vehicula. 
Aenean congue elit in ante dignissim vitae fermentum lectus aliquam. Mauris at odio magna, at interdum dui. Cras fringilla mi velit, nec varius neque. Fusce et volutpat lacus. Suspendisse id turpis et urna varius convallis in eu purus. Nulla facilisi. Etiam mauris nisl, ultrices ac porttitor sit amet, facilisis ut neque. Nullam ut velit quis velit tincidunt rhoncus. Praesent tristique porttitor euismod. Nulla non felis ante, feugiat commodo turpis. In nec venenatis mi. Duis tempus tempor purus, vitae consectetur mi ornare eu. Proin sed consequat erat. Quisque nec sem dui. Nam semper, ligula facilisis pretium interdum, diam lectus sollicitudin lorem, in elementum nisi lorem scelerisque justo. Nullam ac fringilla nunc. Maecenas malesuada ligula in massa sollicitudin sit amet auctor ipsum malesuada. Vestibulum ut augue in magna lobortis varius eget in ipsum. In hac habitasse platea dictumst. Cras vel sagittis mi. Aenean urna sapien, ultrices et tristique et, aliquam vel libero. Nullam in consequat ante. Suspendisse libero augue, pulvinar a dignissim vitae, fringilla malesuada dui. Phasellus augue ante, pulvinar eget tincidunt vel, venenatis sed arcu. Pellentesque ac purus orci, vel molestie turpis. Nulla consectetur sollicitudin dolor, sed ornare arcu accumsan fermentum. Fusce vestibulum nisi at leo interdum eu sollicitudin lacus dictum. Fusce malesuada consequat ipsum ut convallis. Maecenas in eros sit amet elit consectetur fringilla nec a nibh. Ut et velit vel ligula pharetra elementum. Nullam aliquam, tellus vel cursus lacinia, dui libero dictum turpis, nec lacinia dolor nunc vel diam. Pellentesque convallis dui quis lacus ornare at rutrum lorem pellentesque. Suspendisse potenti. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nunc ac nibh sed mauris ornare cursus. Praesent enim mauris, tincidunt vitae convallis ac, ultricies imperdiet sapien. Duis sodales ligula eget lorem suscipit sed consectetur metus pretium. Nam in magna augue, quis volutpat mauris. Quisque pretium lobortis orci quis laoreet. Nam ut nisi diam. Sed ultrices ultrices dapibus. Integer feugiat mauris id orci pulvinar eu tempus nibh viverra. Etiam venenatis bibendum massa a consequat. Fusce interdum velit ac mauris rhoncus non cursus neque consectetur. Vestibulum dictum eros ac metus fringilla venenatis. Phasellus auctor dui non nulla molestie id malesuada mauris euismod. Aenean id tortor ac justo eleifend mollis non vel arcu. Duis ac lorem tortor. Donec volutpat purus sed nunc luctus interdum hendrerit nulla ullamcorper. Sed consectetur interdum aliquet. Proin ullamcorper risus ut ante lacinia sagittis. Nunc varius eleifend purus, ac pellentesque urna viverra id. Praesent euismod, sapien accumsan gravida dictum, massa massa euismod sapien, ut auctor tellus arcu sed diam. Vivamus tincidunt dolor non lorem pellentesque at tempus elit adipiscing. Vestibulum tempor aliquam consectetur. Mauris nec dictum nisl. Donec scelerisque ornare condimentum. Phasellus laoreet justo nec nibh convallis convallis. Duis id orci sapien, eget pulvinar justo. Aenean id arcu felis, eu iaculis nibh. Aenean eleifend pretium rutrum. Aliquam molestie sem quis tellus aliquam eleifend. Mauris et purus orci. Nunc et accumsan tortor. Phasellus semper eleifend nisi, a faucibus risus vehicula id. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas in felis et est lacinia eleifend vel sed ipsum. Aliquam commodo molestie lorem id hendrerit. Nam sed tellus urna, sed dignissim eros. 
""" A_LONG_STRING = """A long string. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer sit amet mauris mauris, sit amet venenatis nisl. Vivamus a est porta enim sollicitudin mollis. Proin fringilla massa vel ante gravida luctus. Nunc quis nunc id quam hendrerit posuere. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam porttitor interdum sollicitudin. Mauris malesuada tellus tellus. Mauris condimentum nunc et sapien pellentesque gravida. Suspendisse sed ipsum orci. Duis ut lacus dui. Integer ac gravida sem. Vivamus fermentum porttitor velit ac blandit. Maecenas pulvinar ullamcorper enim, vitae aliquet tortor scelerisque et. Vestibulum ante massa, sodales et bibendum dignissim, consectetur vitae metus. Quisque vel dui erat, vel commodo metus. Aliquam arcu dolor, viverra sit amet porttitor a, faucibus eu augue. Sed ornare, enim eget ultricies suscipit, nunc dui lacinia enim, vitae tempus nunc libero vitae ligula. Nam et commodo ligula. Pellentesque tincidunt lorem at elit aliquam at fringilla libero tempor. Donec molestie consectetur nibh, ac varius ante dictum id. Suspendisse lectus nibh, molestie vel dapibus eget, egestas ut eros. Mauris vel mauris turpis, vitae bibendum nunc. Vestibulum nulla enim, vestibulum vitae tincidunt et, gravida eu metus. Nulla sagittis, odio a placerat laoreet, arcu lectus vestibulum nunc, in hendrerit tortor quam sit amet turpis. In et purus vel dui pellentesque tincidunt. Donec dictum nibh sed quam luctus sit amet luctus justo dapibus. Integer nulla elit, lacinia aliquet euismod sed, tempus vitae lectus. Fusce non sapien dolor. Suspendisse ut est ut dui tempor ultricies id ut elit. Aenean adipiscing sollicitudin enim, nec porttitor est porttitor eget. Proin lobortis ante ut diam sodales volutpat. Donec urna diam, porttitor nec laoreet ut, rhoncus non diam. Ut sed mi vitae turpis semper semper. Integer sit amet lorem sapien. Aliquam risus diam, vulputate id sagittis et, molestie ut lectus. Aliquam erat volutpat. Morbi aliquet venenatis metus in posuere. Cras vitae purus nunc, ut vestibulum ipsum. Nullam vehicula dui in urna iaculis lobortis. Ut a est non est tincidunt iaculis. Vivamus rutrum velit non nunc malesuada sed bibendum mi iaculis. Sed id lacus in sem tempor vestibulum. Cras bibendum accumsan suscipit. Phasellus congue nisl consectetur turpis rhoncus aliquet posuere libero fringilla. Sed eros tellus, hendrerit nec imperdiet vitae, blandit ac dolor. Nulla facilisi. Morbi ullamcorper libero odio, at cursus tortor. Cras ultricies tellus eget justo cursus cursus. Donec at mi massa, auctor suscipit sem. Proin dolor purus, semper sed ultrices ut, iaculis at tortor. Donec risus enim, interdum et convallis nec, aliquam eget velit. Curabitur eget lectus dolor. Integer id turpis eu nulla euismod tincidunt. Fusce elit nibh, dapibus sit amet tempus ac, convallis eu libero. Donec dui justo, molestie sed euismod porta, ultricies id orci. Praesent a tellus et risus faucibus porttitor pellentesque in purus. Fusce blandit risus ac tortor viverra vitae molestie odio convallis. Donec rhoncus volutpat mauris, sit amet mattis libero dapibus id. Ut rhoncus venenatis nisi ac dictum. In non nulla eget massa convallis facilisis. Praesent nec odio id odio semper lobortis non eu erat. Proin quis gravida magna. Sed rhoncus lectus auctor arcu posuere a auctor dui pellentesque. Sed enim nulla, luctus quis sagittis sed, vestibulum eget metus. Mauris ornare pretium fringilla. 
Proin ligula eros, fermentum in placerat sit amet, placerat vel mauris. Nulla magna enim, luctus eget euismod ac, lacinia vel lorem. Duis mi leo, porttitor vitae dictum ac, ultrices iaculis metus. Quisque libero mi, aliquet quis vestibulum eget, porttitor non justo. Praesent ac metus felis. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec vel enim sit amet ante imperdiet commodo sed vel nisi. Praesent semper viverra nulla vehicula sollicitudin. Fusce lacinia aliquet ullamcorper. Donec vitae diam est. Integer volutpat hendrerit turpis ut bibendum. Integer et dui augue. Nunc ut nisl in felis feugiat semper nec sit amet purus. Proin convallis ultrices nisl ut vehicula. Pellentesque neque mi, elementum vel placerat nec, laoreet ac nulla. Pellentesque aliquam dui a metus iaculis posuere. Curabitur dapibus faucibus metus. Donec quis diam dui. Proin at mi nec augue cursus pulvinar eu vel metus. Curabitur eget turpis ac risus dignissim luctus sed id ligula. Etiam lectus neque, varius ut euismod nec, euismod quis nulla. Ut feugiat, quam id tempor luctus, metus eros lacinia diam, nec dapibus tellus dui quis diam. Nam interdum, orci id fringilla mattis, ipsum eros pellentesque turpis, hendrerit dignissim justo dui interdum ante. Curabitur aliquam nisi ut dui lacinia tempor. Nulla lobortis tellus non sapien dignissim ut dapibus dui aliquet. Nam scelerisque, urna a aliquam malesuada, mi tortor scelerisque libero, quis pellentesque erat eros ut justo. Phasellus nulla purus, suscipit vel gravida euismod, malesuada et odio. Vestibulum non libero eget lacus venenatis auctor quis a est. Nunc id leo est. Curabitur pulvinar viverra sapien at viverra. Cras pretium justo et lorem lobortis id tempor nisi accumsan. Cras egestas tortor in risus hendrerit eu varius purus suscipit. Nullam mauris eros, mattis at tempor vitae, mollis vitae velit. Etiam at adipiscing lectus. Quisque molestie, metus id posuere pharetra, lorem enim vehicula mauris, ut ultricies purus justo a lacus. Vivamus blandit euismod adipiscing. Nam eu ligula at elit ultricies tempus. Nunc ac sodales neque. Ut dui diam, porttitor a pulvinar vel, sodales sit amet turpis. Donec vitae eros at neque luctus scelerisque. In consequat elementum iaculis. Donec ullamcorper dolor eu quam volutpat rhoncus. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras laoreet ante eget dolor sagittis imperdiet. Proin magna urna, porta id blandit nec, commodo eget lorem. Etiam imperdiet, orci sit amet rutrum consectetur, orci augue tempus lacus, id venenatis sapien nisl a est. Sed accumsan massa sed libero consectetur scelerisque. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed nunc risus, lobortis id egestas nec, suscipit id magna. Morbi at iaculis mauris. Proin felis sem, tempus non pellentesque congue, vehicula sit amet eros. Maecenas porttitor erat ac dolor pharetra iaculis. Cras tincidunt, nulla eget malesuada egestas, sem diam consequat quam, sed feugiat nulla orci at mauris. Quisque non arcu diam, ac lacinia felis. Nunc iaculis mollis egestas. Etiam imperdiet dolor consectetur eros feugiat fringilla sed in lacus. Nunc nec tincidunt dolor. Etiam sagittis tortor condimentum nunc fermentum vestibulum. Vivamus lobortis, magna sit amet scelerisque lobortis, sem eros molestie leo, eget aliquet ligula est in lectus. Duis placerat porta pulvinar. Sed sed adipiscing ante. 
Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam accumsan iaculis augue, sed varius dui sagittis id. Etiam sit amet eleifend augue. Ut sit amet nibh sit amet justo tempor condimentum. Ut faucibus sagittis volutpat. Duis vestibulum feugiat sollicitudin. Aenean cursus luctus urna at consectetur. Nullam tincidunt, eros a iaculis sodales, tellus est imperdiet arcu, sit amet tincidunt orci felis et tortor. Mauris rutrum venenatis nunc ut rutrum. Phasellus nec erat magna, in tincidunt orci. Sed sit amet suscipit tellus. Mauris ut nisi turpis. Suspendisse augue turpis, condimentum ac bibendum in, vestibulum nec eros. Curabitur dapibus pulvinar vehicula. Fusce consequat, erat in malesuada hendrerit, tellus urna pharetra lacus, sed euismod nisi urna sed nisi. Etiam fermentum accumsan nunc, sed bibendum dui iaculis id. Etiam blandit fermentum ligula nec viverra. Vivamus venenatis arcu in nulla euismod euismod. Donec sit amet augue nec metus varius fringilla. Vivamus pulvinar elit ac mi rhoncus in luctus diam egestas. Curabitur a felis eget arcu pretium tempus eu sed mauris. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris malesuada, nibh ac venenatis hendrerit, ligula dui condimentum tellus, sit amet pretium diam tortor vel risus. Suspendisse suscipit consequat eros id dignissim. Cras interdum lorem ac massa euismod non porta enim pretium. Aliquam ultrices nibh vitae ligula consectetur vel sollicitudin lacus volutpat. Phasellus vulputate iaculis sem nec laoreet. Nam leo sem, tempor eu condimentum id, imperdiet sed dolor. Donec pharetra velit non libero euismod tempor. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Sed fermentum, libero a posuere posuere, enim elit imperdiet enim, a sollicitudin est felis non libero. Sed vel dolor ut arcu dapibus iaculis nec a mauris. Morbi ullamcorper ultrices venenatis. Fusce luctus ante sit amet lacus venenatis ut rutrum elit lobortis. Nulla fermentum tortor ac sapien fringilla quis iaculis quam egestas. Aliquam et tortor est, at elementum mauris. Morbi posuere erat nec leo vulputate in pellentesque tortor condimentum. Vestibulum at orci augue. Aenean pellentesque sapien id felis consequat varius. Suspendisse bibendum enim sit amet mi imperdiet vel suscipit nisi tristique. Curabitur velit massa, consectetur ac mattis vel, accumsan at nunc. Donec porta, nibh nec consequat convallis, urna neque auctor erat, eu convallis lorem leo convallis turpis. Morbi non mauris non metus ornare vulputate. Sed aliquet, dolor ut egestas fermentum, metus purus mollis elit, nec commodo odio quam quis nisl. Aliquam erat volutpat. Suspendisse sed faucibus urna. Integer suscipit rutrum condimentum. Praesent dignissim libero eget metus luctus consectetur. Vestibulum ac erat felis, vitae iaculis erat. Duis interdum lacinia arcu, non lacinia urna luctus in. Curabitur feugiat sapien sapien, in vestibulum diam. Phasellus lobortis massa ut metus pretium dignissim. Fusce quis sem odio. Integer pellentesque sodales augue id tincidunt. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum lorem odio, semper vel scelerisque sit amet, sagittis ac libero. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam egestas ultricies dui at gravida. Duis tristique, eros id consectetur pellentesque, nulla arcu ultricies tortor, ut pulvinar sapien lacus in elit. 
Vivamus dolor massa, pulvinar at mollis vitae, euismod ut dolor. Vivamus a magna ante. Vestibulum vitae fringilla leo. Ut gravida magna in quam fringilla ultricies. Mauris rhoncus enim id sem interdum blandit. Pellentesque luctus leo sit amet felis viverra ac accumsan purus mollis. Aenean pretium fringilla quam nec laoreet. Nulla id mauris mauris. Nam varius bibendum tristique. Integer ante felis, volutpat sed dignissim vel, interdum molestie nisi. Etiam mollis accumsan elit, ut gravida eros molestie nec. Nullam quis velit ac purus imperdiet sodales. Donec semper placerat venenatis. Cras dolor risus, sodales sed scelerisque nec, sollicitudin pretium felis. Quisque pretium felis id turpis bibendum pulvinar ornare id nibh. Morbi lobortis leo non mi porttitor vulputate. Vestibulum nec odio tellus, ut blandit ligula. In pellentesque neque sit amet dui pulvinar sed laoreet dui vehicula. In hac habitasse platea dictumst. Etiam feugiat dictum blandit. Praesent lacinia tincidunt elit, quis consectetur tortor molestie commodo. Ut sit amet accumsan lorem. Cras quam nunc, malesuada tempor volutpat vitae, aliquam eu diam. Sed sem nibh, bibendum nec sollicitudin at, interdum et magna. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam vitae sem non elit pharetra vestibulum et condimentum libero. Nam egestas ultricies hendrerit. Nunc nec fringilla nulla. Aliquam risus tellus, hendrerit non dapibus a, tincidunt vel ante. Vivamus mollis, magna et lacinia tincidunt, dui massa porta odio, ac ornare felis massa nec lorem. Mauris sagittis lacus sed metus mollis ac egestas lectus porttitor. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Integer vitae lacinia libero. Phasellus at neque enim, sit amet dictum mi. Ut risus nisi, vestibulum vel congue eget, egestas in ipsum. Duis faucibus tempus sodales. Duis convallis pellentesque arcu rhoncus congue. Nunc ac mauris eu purus vestibulum congue. Praesent convallis semper augue vel volutpat. Integer dictum varius placerat. Vestibulum convallis tortor non mi lacinia ac aliquet dui ultricies. Donec ultrices purus eros. Maecenas venenatis posuere massa, nec consectetur lacus cursus eget. Donec quam lacus, tempus id placerat et, posuere sed libero. Proin auctor diam ut arcu viverra ut imperdiet tellus dapibus. Morbi ac mauris quis tellus porttitor eleifend. Sed et ante magna, ut sodales sapien. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Integer mattis venenatis mi non ullamcorper. Vestibulum magna enim, aliquam non interdum ut, dignissim vitae ante. Praesent dignissim, est at pretium posuere, nisl ante varius felis, vitae posuere enim nulla et nunc. Morbi sagittis suscipit leo, eu accumsan ligula volutpat non. Donec ut tincidunt magna. Integer ac libero mi. Sed non eros dolor, in tincidunt enim. Curabitur iaculis erat quis felis iaculis ut volutpat augue malesuada. Pellentesque eget arcu ligula, ut volutpat purus. Suspendisse dictum lorem quis sapien lacinia pretium. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin sagittis egestas massa et tempor. Mauris et eros ante, id porta sem. Duis ac eros vitae ipsum ultrices malesuada eget a risus. Morbi imperdiet, est a hendrerit tristique, mi erat molestie lacus, ac tempor risus nulla id erat. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. 
Etiam congue, lacus quis ultricies consequat, diam metus convallis enim, ut volutpat enim urna vitae erat. In quam risus, molestie et dapibus id, elementum sit amet ligula. Nam faucibus lacus id dolor facilisis viverra. Nullam vehicula massa ac arcu consectetur vulputate. Praesent nec augue ac justo dapibus vehicula. Aliquam consectetur hendrerit dolor, et mollis nisl auctor ut. Ut sagittis risus at felis fringilla ultricies. Vestibulum non urna nibh, nec pretium dolor. Nulla imperdiet lobortis eros at pharetra. Vivamus cursus pellentesque ornare. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce arcu quam, pulvinar at rutrum vitae, ornare vitae leo. Maecenas vehicula magna sit amet nulla bibendum condimentum. Curabitur ultrices tempor fringilla. Vivamus pretium suscipit molestie. Donec arcu diam, ultricies ac pellentesque eu, venenatis et sapien. Nam dictum orci augue, vel eleifend leo. Nam at lacus sapien, nec pretium eros. In egestas, enim sed sagittis feugiat, purus odio tristique lectus, vel condimentum leo turpis ac odio. Nam iaculis mi quis odio posuere et venenatis velit ultricies. Nulla facilisi. Proin nec dolor ac quam euismod gravida quis et eros. Nam interdum condimentum mattis. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Praesent a nisi eu massa mollis posuere sed a nunc. Aenean tempus enim a justo rhoncus quis ultrices nisl commodo. Aenean imperdiet mauris a ipsum venenatis vel scelerisque lorem rutrum. Donec ut nunc eros, eget accumsan felis. Nullam ullamcorper porta dictum. Donec accumsan cursus vestibulum. Aenean in sapien velit. Vivamus nec massa mi. Fusce felis tortor, bibendum non volutpat a, fringilla quis nisi. Duis varius bibendum erat, quis fermentum sem accumsan eget. Maecenas massa felis, porta sed laoreet eu, luctus eu lectus. Cras id nibh vitae erat fringilla rutrum. Maecenas eget consequat est. Vivamus viverra, felis vel faucibus rhoncus, quam ipsum elementum libero, quis convallis urna purus ut mauris. Nam quis urna vitae enim consequat placerat. Vivamus congue augue sit amet lectus luctus tempor. Cras ut justo convallis est egestas pellentesque ac nec orci. Vivamus rutrum bibendum ante, at cursus erat pulvinar ornare. Proin imperdiet scelerisque ante eu vestibulum. Nullam ullamcorper metus nec purus auctor lobortis. Proin sed lacus et ipsum tempor tempus. Vivamus odio dolor, vulputate vitae semper sit amet, aliquet egestas orci. Nullam non quam eu quam sagittis porta. Nunc in velit id erat commodo viverra. Praesent nec est augue, nec sagittis erat. Cras sed turpis quis enim tempor sagittis. Donec in justo ac nisl porta condimentum id vestibulum nulla. Nam elementum ultricies nunc a bibendum. Aenean tincidunt nisl non augue pellentesque sit amet convallis neque semper. Cras placerat suscipit massa sed volutpat. Integer vulputate imperdiet enim, vitae vulputate sapien mattis feugiat. Vivamus pharetra facilisis mauris a gravida. Nulla non venenatis est. Duis lobortis consectetur sem ac aliquam. In eget sapien odio. Vivamus pulvinar ultricies magna, quis laoreet dui porta et. Integer tempus malesuada velit, et consequat odio ultrices sed. Aliquam malesuada commodo diam vel posuere. Morbi porttitor, elit vitae auctor gravida, lorem massa bibendum arcu, vel placerat nulla justo at augue. Aliquam libero quam, mattis blandit congue sit amet, fermentum ac augue. Aliquam malesuada molestie vulputate. Duis id porta augue. Vestibulum diam dolor, ultrices sit amet porttitor id, convallis id lectus. 
Etiam ac augue tincidunt nisi tempor molestie euismod id nisl. Nam et tortor ac arcu viverra pulvinar. Fusce pulvinar rhoncus leo, a faucibus enim interdum non. Aliquam vulputate mattis consectetur. Pellentesque sit amet quam sem. Cras eget arcu eu elit volutpat volutpat. Integer sed varius enim. Integer sit amet felis orci, id dignissim sapien. Sed vitae lorem sed libero facilisis fringilla. Pellentesque congue tristique purus, eleifend semper risus suscipit quis. Phasellus rutrum quam vitae arcu vulputate porta. Sed tristique arcu nec mi porttitor lacinia. Donec congue feugiat diam quis pretium. Vivamus at luctus nunc. Integer vulputate laoreet mauris quis auctor. Nunc at ultrices libero. Maecenas porta faucibus purus non vehicula. Sed sit amet metus vitae mi ultrices scelerisque nec quis risus. Phasellus pellentesque tincidunt massa id ultricies. Aliquam dictum arcu ac dolor interdum rutrum. Nulla facilisi. Duis nisi est, tincidunt a sagittis id, sollicitudin at odio. Curabitur sed est eu sapien faucibus dignissim in quis tortor. Nunc ac elit tortor, non lobortis massa. Proin posuere ante ut metus vehicula suscipit. Proin mattis mauris ac lectus consequat rutrum. Nam arcu lectus, commodo non pretium a, pharetra semper dolor. Fusce eleifend hendrerit adipiscing. Nunc et eleifend erat. Suspendisse tempus nisl ut arcu blandit ut adipiscing nisi tristique. Suspendisse molestie facilisis risus sed fermentum. Praesent tempor convallis ultricies. Integer et elit velit, at consectetur risus. Vestibulum mollis adipiscing sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris dictum molestie porta. Nam convallis nisl quis lacus vulputate in convallis risus sagittis. Vivamus accumsan faucibus cursus. Ut ultricies imperdiet ligula scelerisque blandit. In ornare egestas purus, at convallis velit egestas laoreet. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nulla quis imperdiet est. Nunc tempus magna quis lacus feugiat et posuere lacus vehicula. Cras lacinia aliquam est at vehicula. Aenean congue elit in ante dignissim vitae fermentum lectus aliquam. Mauris at odio magna, at interdum dui. Cras fringilla mi velit, nec varius neque. Fusce et volutpat lacus. Suspendisse id turpis et urna varius convallis in eu purus. Nulla facilisi. Etiam mauris nisl, ultrices ac porttitor sit amet, facilisis ut neque. Nullam ut velit quis velit tincidunt rhoncus. Praesent tristique porttitor euismod. Nulla non felis ante, feugiat commodo turpis. In nec venenatis mi. Duis tempus tempor purus, vitae consectetur mi ornare eu. Proin sed consequat erat. Quisque nec sem dui. Nam semper, ligula facilisis pretium interdum, diam lectus sollicitudin lorem, in elementum nisi lorem scelerisque justo. Nullam ac fringilla nunc. Maecenas malesuada ligula in massa sollicitudin sit amet auctor ipsum malesuada. Vestibulum ut augue in magna lobortis varius eget in ipsum. In hac habitasse platea dictumst. Cras vel sagittis mi. Aenean urna sapien, ultrices et tristique et, aliquam vel libero. Nullam in consequat ante. Suspendisse libero augue, pulvinar a dignissim vitae, fringilla malesuada dui. Phasellus augue ante, pulvinar eget tincidunt vel, venenatis sed arcu. Pellentesque ac purus orci, vel molestie turpis. Nulla consectetur sollicitudin dolor, sed ornare arcu accumsan fermentum. Fusce vestibulum nisi at leo interdum eu sollicitudin lacus dictum. Fusce malesuada consequat ipsum ut convallis. 
Maecenas in eros sit amet elit consectetur fringilla nec a nibh. Ut et velit vel ligula pharetra elementum. Nullam aliquam, tellus vel cursus lacinia, dui libero dictum turpis, nec lacinia dolor nunc vel diam. Pellentesque convallis dui quis lacus ornare at rutrum lorem pellentesque. Suspendisse potenti. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nunc ac nibh sed mauris ornare cursus. Praesent enim mauris, tincidunt vitae convallis ac, ultricies imperdiet sapien. Duis sodales ligula eget lorem suscipit sed consectetur metus pretium. Nam in magna augue, quis volutpat mauris. Quisque pretium lobortis orci quis laoreet. Nam ut nisi diam. Sed ultrices ultrices dapibus. Integer feugiat mauris id orci pulvinar eu tempus nibh viverra. Etiam venenatis bibendum massa a consequat. Fusce interdum velit ac mauris rhoncus non cursus neque consectetur. Vestibulum dictum eros ac metus fringilla venenatis. Phasellus auctor dui non nulla molestie id malesuada mauris euismod. Aenean id tortor ac justo eleifend mollis non vel arcu. Duis ac lorem tortor. Donec volutpat purus sed nunc luctus interdum hendrerit nulla ullamcorper. Sed consectetur interdum aliquet. Proin ullamcorper risus ut ante lacinia sagittis. Nunc varius eleifend purus, ac pellentesque urna viverra id. Praesent euismod, sapien accumsan gravida dictum, massa massa euismod sapien, ut auctor tellus arcu sed diam. Vivamus tincidunt dolor non lorem pellentesque at tempus elit adipiscing. Vestibulum tempor aliquam consectetur. Mauris nec dictum nisl. Donec scelerisque ornare condimentum. Phasellus laoreet justo nec nibh convallis convallis. Duis id orci sapien, eget pulvinar justo. Aenean id arcu felis, eu iaculis nibh. Aenean eleifend pretium rutrum. Aliquam molestie sem quis tellus aliquam eleifend. Mauris et purus orci. Nunc et accumsan tortor. Phasellus semper eleifend nisi, a faucibus risus vehicula id. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas in felis et est lacinia eleifend vel sed ipsum. Aliquam commodo molestie lorem id hendrerit. Nam sed tellus urna, sed dignissim eros. """ def a_function(): """A long function docstring. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer sit amet mauris mauris, sit amet venenatis nisl. Vivamus a est porta enim sollicitudin mollis. Proin fringilla massa vel ante gravida luctus. Nunc quis nunc id quam hendrerit posuere. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam porttitor interdum sollicitudin. Mauris malesuada tellus tellus. Mauris condimentum nunc et sapien pellentesque gravida. Suspendisse sed ipsum orci. Duis ut lacus dui. Integer ac gravida sem. Vivamus fermentum porttitor velit ac blandit. Maecenas pulvinar ullamcorper enim, vitae aliquet tortor scelerisque et. Vestibulum ante massa, sodales et bibendum dignissim, consectetur vitae metus. Quisque vel dui erat, vel commodo metus. Aliquam arcu dolor, viverra sit amet porttitor a, faucibus eu augue. Sed ornare, enim eget ultricies suscipit, nunc dui lacinia enim, vitae tempus nunc libero vitae ligula. Nam et commodo ligula. Pellentesque tincidunt lorem at elit aliquam at fringilla libero tempor. Donec molestie consectetur nibh, ac varius ante dictum id. Suspendisse lectus nibh, molestie vel dapibus eget, egestas ut eros. Mauris vel mauris turpis, vitae bibendum nunc. Vestibulum nulla enim, vestibulum vitae tincidunt et, gravida eu metus. 
Nulla sagittis, odio a placerat laoreet, arcu lectus vestibulum nunc, in hendrerit tortor quam sit amet turpis. In et purus vel dui pellentesque tincidunt. Donec dictum nibh sed quam luctus sit amet luctus justo dapibus. Integer nulla elit, lacinia aliquet euismod sed, tempus vitae lectus. Fusce non sapien dolor. Suspendisse ut est ut dui tempor ultricies id ut elit. Aenean adipiscing sollicitudin enim, nec porttitor est porttitor eget. Proin lobortis ante ut diam sodales volutpat. Donec urna diam, porttitor nec laoreet ut, rhoncus non diam. Ut sed mi vitae turpis semper semper. Integer sit amet lorem sapien. Aliquam risus diam, vulputate id sagittis et, molestie ut lectus. Aliquam erat volutpat. Morbi aliquet venenatis metus in posuere. Cras vitae purus nunc, ut vestibulum ipsum. Nullam vehicula dui in urna iaculis lobortis. Ut a est non est tincidunt iaculis. Vivamus rutrum velit non nunc malesuada sed bibendum mi iaculis. Sed id lacus in sem tempor vestibulum. Cras bibendum accumsan suscipit. Phasellus congue nisl consectetur turpis rhoncus aliquet posuere libero fringilla. Sed eros tellus, hendrerit nec imperdiet vitae, blandit ac dolor. Nulla facilisi. Morbi ullamcorper libero odio, at cursus tortor. Cras ultricies tellus eget justo cursus cursus. Donec at mi massa, auctor suscipit sem. Proin dolor purus, semper sed ultrices ut, iaculis at tortor. Donec risus enim, interdum et convallis nec, aliquam eget velit. Curabitur eget lectus dolor. Integer id turpis eu nulla euismod tincidunt. Fusce elit nibh, dapibus sit amet tempus ac, convallis eu libero. Donec dui justo, molestie sed euismod porta, ultricies id orci. Praesent a tellus et risus faucibus porttitor pellentesque in purus. Fusce blandit risus ac tortor viverra vitae molestie odio convallis. Donec rhoncus volutpat mauris, sit amet mattis libero dapibus id. Ut rhoncus venenatis nisi ac dictum. In non nulla eget massa convallis facilisis. Praesent nec odio id odio semper lobortis non eu erat. Proin quis gravida magna. Sed rhoncus lectus auctor arcu posuere a auctor dui pellentesque. Sed enim nulla, luctus quis sagittis sed, vestibulum eget metus. Mauris ornare pretium fringilla. Proin ligula eros, fermentum in placerat sit amet, placerat vel mauris. Nulla magna enim, luctus eget euismod ac, lacinia vel lorem. Duis mi leo, porttitor vitae dictum ac, ultrices iaculis metus. Quisque libero mi, aliquet quis vestibulum eget, porttitor non justo. Praesent ac metus felis. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec vel enim sit amet ante imperdiet commodo sed vel nisi. Praesent semper viverra nulla vehicula sollicitudin. Fusce lacinia aliquet ullamcorper. Donec vitae diam est. Integer volutpat hendrerit turpis ut bibendum. Integer et dui augue. Nunc ut nisl in felis feugiat semper nec sit amet purus. Proin convallis ultrices nisl ut vehicula. Pellentesque neque mi, elementum vel placerat nec, laoreet ac nulla. Pellentesque aliquam dui a metus iaculis posuere. Curabitur dapibus faucibus metus. Donec quis diam dui. Proin at mi nec augue cursus pulvinar eu vel metus. Curabitur eget turpis ac risus dignissim luctus sed id ligula. Etiam lectus neque, varius ut euismod nec, euismod quis nulla. Ut feugiat, quam id tempor luctus, metus eros lacinia diam, nec dapibus tellus dui quis diam. Nam interdum, orci id fringilla mattis, ipsum eros pellentesque turpis, hendrerit dignissim justo dui interdum ante. Curabitur aliquam nisi ut dui lacinia tempor. 
Nulla lobortis tellus non sapien dignissim ut dapibus dui aliquet. Nam scelerisque, urna a aliquam malesuada, mi tortor scelerisque libero, quis pellentesque erat eros ut justo. Phasellus nulla purus, suscipit vel gravida euismod, malesuada et odio. Vestibulum non libero eget lacus venenatis auctor quis a est. Nunc id leo est. Curabitur pulvinar viverra sapien at viverra. Cras pretium justo et lorem lobortis id tempor nisi accumsan. Cras egestas tortor in risus hendrerit eu varius purus suscipit. Nullam mauris eros, mattis at tempor vitae, mollis vitae velit. Etiam at adipiscing lectus. Quisque molestie, metus id posuere pharetra, lorem enim vehicula mauris, ut ultricies purus justo a lacus. Vivamus blandit euismod adipiscing. Nam eu ligula at elit ultricies tempus. Nunc ac sodales neque. Ut dui diam, porttitor a pulvinar vel, sodales sit amet turpis. Donec vitae eros at neque luctus scelerisque. In consequat elementum iaculis. Donec ullamcorper dolor eu quam volutpat rhoncus. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras laoreet ante eget dolor sagittis imperdiet. Proin magna urna, porta id blandit nec, commodo eget lorem. Etiam imperdiet, orci sit amet rutrum consectetur, orci augue tempus lacus, id venenatis sapien nisl a est. Sed accumsan massa sed libero consectetur scelerisque. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed nunc risus, lobortis id egestas nec, suscipit id magna. Morbi at iaculis mauris. Proin felis sem, tempus non pellentesque congue, vehicula sit amet eros. Maecenas porttitor erat ac dolor pharetra iaculis. Cras tincidunt, nulla eget malesuada egestas, sem diam consequat quam, sed feugiat nulla orci at mauris. Quisque non arcu diam, ac lacinia felis. Nunc iaculis mollis egestas. Etiam imperdiet dolor consectetur eros feugiat fringilla sed in lacus. Nunc nec tincidunt dolor. Etiam sagittis tortor condimentum nunc fermentum vestibulum. Vivamus lobortis, magna sit amet scelerisque lobortis, sem eros molestie leo, eget aliquet ligula est in lectus. Duis placerat porta pulvinar. Sed sed adipiscing ante. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam accumsan iaculis augue, sed varius dui sagittis id. Etiam sit amet eleifend augue. Ut sit amet nibh sit amet justo tempor condimentum. Ut faucibus sagittis volutpat. Duis vestibulum feugiat sollicitudin. Aenean cursus luctus urna at consectetur. Nullam tincidunt, eros a iaculis sodales, tellus est imperdiet arcu, sit amet tincidunt orci felis et tortor. Mauris rutrum venenatis nunc ut rutrum. Phasellus nec erat magna, in tincidunt orci. Sed sit amet suscipit tellus. Mauris ut nisi turpis. Suspendisse augue turpis, condimentum ac bibendum in, vestibulum nec eros. Curabitur dapibus pulvinar vehicula. Fusce consequat, erat in malesuada hendrerit, tellus urna pharetra lacus, sed euismod nisi urna sed nisi. Etiam fermentum accumsan nunc, sed bibendum dui iaculis id. Etiam blandit fermentum ligula nec viverra. Vivamus venenatis arcu in nulla euismod euismod. Donec sit amet augue nec metus varius fringilla. Vivamus pulvinar elit ac mi rhoncus in luctus diam egestas. Curabitur a felis eget arcu pretium tempus eu sed mauris. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris malesuada, nibh ac venenatis hendrerit, ligula dui condimentum tellus, sit amet pretium diam tortor vel risus. Suspendisse suscipit consequat eros id dignissim. 
Cras interdum lorem ac massa euismod non porta enim pretium. Aliquam ultrices nibh vitae ligula consectetur vel sollicitudin lacus volutpat. Phasellus vulputate iaculis sem nec laoreet. Nam leo sem, tempor eu condimentum id, imperdiet sed dolor. Donec pharetra velit non libero euismod tempor. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Sed fermentum, libero a posuere posuere, enim elit imperdiet enim, a sollicitudin est felis non libero. Sed vel dolor ut arcu dapibus iaculis nec a mauris. Morbi ullamcorper ultrices venenatis. Fusce luctus ante sit amet lacus venenatis ut rutrum elit lobortis. Nulla fermentum tortor ac sapien fringilla quis iaculis quam egestas. Aliquam et tortor est, at elementum mauris. Morbi posuere erat nec leo vulputate in pellentesque tortor condimentum. Vestibulum at orci augue. Aenean pellentesque sapien id felis consequat varius. Suspendisse bibendum enim sit amet mi imperdiet vel suscipit nisi tristique. Curabitur velit massa, consectetur ac mattis vel, accumsan at nunc. Donec porta, nibh nec consequat convallis, urna neque auctor erat, eu convallis lorem leo convallis turpis. Morbi non mauris non metus ornare vulputate. Sed aliquet, dolor ut egestas fermentum, metus purus mollis elit, nec commodo odio quam quis nisl. Aliquam erat volutpat. Suspendisse sed faucibus urna. Integer suscipit rutrum condimentum. Praesent dignissim libero eget metus luctus consectetur. Vestibulum ac erat felis, vitae iaculis erat. Duis interdum lacinia arcu, non lacinia urna luctus in. Curabitur feugiat sapien sapien, in vestibulum diam. Phasellus lobortis massa ut metus pretium dignissim. Fusce quis sem odio. Integer pellentesque sodales augue id tincidunt. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum lorem odio, semper vel scelerisque sit amet, sagittis ac libero. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nam egestas ultricies dui at gravida. Duis tristique, eros id consectetur pellentesque, nulla arcu ultricies tortor, ut pulvinar sapien lacus in elit. Vivamus dolor massa, pulvinar at mollis vitae, euismod ut dolor. Vivamus a magna ante. Vestibulum vitae fringilla leo. Ut gravida magna in quam fringilla ultricies. Mauris rhoncus enim id sem interdum blandit. Pellentesque luctus leo sit amet felis viverra ac accumsan purus mollis. Aenean pretium fringilla quam nec laoreet. Nulla id mauris mauris. Nam varius bibendum tristique. Integer ante felis, volutpat sed dignissim vel, interdum molestie nisi. Etiam mollis accumsan elit, ut gravida eros molestie nec. Nullam quis velit ac purus imperdiet sodales. Donec semper placerat venenatis. Cras dolor risus, sodales sed scelerisque nec, sollicitudin pretium felis. Quisque pretium felis id turpis bibendum pulvinar ornare id nibh. Morbi lobortis leo non mi porttitor vulputate. Vestibulum nec odio tellus, ut blandit ligula. In pellentesque neque sit amet dui pulvinar sed laoreet dui vehicula. In hac habitasse platea dictumst. Etiam feugiat dictum blandit. Praesent lacinia tincidunt elit, quis consectetur tortor molestie commodo. Ut sit amet accumsan lorem. Cras quam nunc, malesuada tempor volutpat vitae, aliquam eu diam. Sed sem nibh, bibendum nec sollicitudin at, interdum et magna. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam vitae sem non elit pharetra vestibulum et condimentum libero. Nam egestas ultricies hendrerit. Nunc nec fringilla nulla. 
Aliquam risus tellus, hendrerit non dapibus a, tincidunt vel ante. Vivamus mollis, magna et lacinia tincidunt, dui massa porta odio, ac ornare felis massa nec lorem. Mauris sagittis lacus sed metus mollis ac egestas lectus porttitor. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Integer vitae lacinia libero. Phasellus at neque enim, sit amet dictum mi. Ut risus nisi, vestibulum vel congue eget, egestas in ipsum. Duis faucibus tempus sodales. Duis convallis pellentesque arcu rhoncus congue. Nunc ac mauris eu purus vestibulum congue. Praesent convallis semper augue vel volutpat. Integer dictum varius placerat. Vestibulum convallis tortor non mi lacinia ac aliquet dui ultricies. Donec ultrices purus eros. Maecenas venenatis posuere massa, nec consectetur lacus cursus eget. Donec quam lacus, tempus id placerat et, posuere sed libero. Proin auctor diam ut arcu viverra ut imperdiet tellus dapibus. Morbi ac mauris quis tellus porttitor eleifend. Sed et ante magna, ut sodales sapien. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Integer mattis venenatis mi non ullamcorper. Vestibulum magna enim, aliquam non interdum ut, dignissim vitae ante. Praesent dignissim, est at pretium posuere, nisl ante varius felis, vitae posuere enim nulla et nunc. Morbi sagittis suscipit leo, eu accumsan ligula volutpat non. Donec ut tincidunt magna. Integer ac libero mi. Sed non eros dolor, in tincidunt enim. Curabitur iaculis erat quis felis iaculis ut volutpat augue malesuada. Pellentesque eget arcu ligula, ut volutpat purus. Suspendisse dictum lorem quis sapien lacinia pretium. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin sagittis egestas massa et tempor. Mauris et eros ante, id porta sem. Duis ac eros vitae ipsum ultrices malesuada eget a risus. Morbi imperdiet, est a hendrerit tristique, mi erat molestie lacus, ac tempor risus nulla id erat. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Etiam congue, lacus quis ultricies consequat, diam metus convallis enim, ut volutpat enim urna vitae erat. In quam risus, molestie et dapibus id, elementum sit amet ligula. Nam faucibus lacus id dolor facilisis viverra. Nullam vehicula massa ac arcu consectetur vulputate. Praesent nec augue ac justo dapibus vehicula. Aliquam consectetur hendrerit dolor, et mollis nisl auctor ut. Ut sagittis risus at felis fringilla ultricies. Vestibulum non urna nibh, nec pretium dolor. Nulla imperdiet lobortis eros at pharetra. Vivamus cursus pellentesque ornare. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce arcu quam, pulvinar at rutrum vitae, ornare vitae leo. Maecenas vehicula magna sit amet nulla bibendum condimentum. Curabitur ultrices tempor fringilla. Vivamus pretium suscipit molestie. Donec arcu diam, ultricies ac pellentesque eu, venenatis et sapien. Nam dictum orci augue, vel eleifend leo. Nam at lacus sapien, nec pretium eros. In egestas, enim sed sagittis feugiat, purus odio tristique lectus, vel condimentum leo turpis ac odio. Nam iaculis mi quis odio posuere et venenatis velit ultricies. Nulla facilisi. Proin nec dolor ac quam euismod gravida quis et eros. Nam interdum condimentum mattis. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Praesent a nisi eu massa mollis posuere sed a nunc. Aenean tempus enim a justo rhoncus quis ultrices nisl commodo. 
Aenean imperdiet mauris a ipsum venenatis vel scelerisque lorem rutrum. Donec ut nunc eros, eget accumsan felis. Nullam ullamcorper porta dictum. Donec accumsan cursus vestibulum. Aenean in sapien velit. Vivamus nec massa mi. Fusce felis tortor, bibendum non volutpat a, fringilla quis nisi. Duis varius bibendum erat, quis fermentum sem accumsan eget. Maecenas massa felis, porta sed laoreet eu, luctus eu lectus. Cras id nibh vitae erat fringilla rutrum. Maecenas eget consequat est. Vivamus viverra, felis vel faucibus rhoncus, quam ipsum elementum libero, quis convallis urna purus ut mauris. Nam quis urna vitae enim consequat placerat. Vivamus congue augue sit amet lectus luctus tempor. Cras ut justo convallis est egestas pellentesque ac nec orci. Vivamus rutrum bibendum ante, at cursus erat pulvinar ornare. Proin imperdiet scelerisque ante eu vestibulum. Nullam ullamcorper metus nec purus auctor lobortis. Proin sed lacus et ipsum tempor tempus. Vivamus odio dolor, vulputate vitae semper sit amet, aliquet egestas orci. Nullam non quam eu quam sagittis porta. Nunc in velit id erat commodo viverra. Praesent nec est augue, nec sagittis erat. Cras sed turpis quis enim tempor sagittis. Donec in justo ac nisl porta condimentum id vestibulum nulla. Nam elementum ultricies nunc a bibendum. Aenean tincidunt nisl non augue pellentesque sit amet convallis neque semper. Cras placerat suscipit massa sed volutpat. Integer vulputate imperdiet enim, vitae vulputate sapien mattis feugiat. Vivamus pharetra facilisis mauris a gravida. Nulla non venenatis est. Duis lobortis consectetur sem ac aliquam. In eget sapien odio. Vivamus pulvinar ultricies magna, quis laoreet dui porta et. Integer tempus malesuada velit, et consequat odio ultrices sed. Aliquam malesuada commodo diam vel posuere. Morbi porttitor, elit vitae auctor gravida, lorem massa bibendum arcu, vel placerat nulla justo at augue. Aliquam libero quam, mattis blandit congue sit amet, fermentum ac augue. Aliquam malesuada molestie vulputate. Duis id porta augue. Vestibulum diam dolor, ultrices sit amet porttitor id, convallis id lectus. Etiam ac augue tincidunt nisi tempor molestie euismod id nisl. Nam et tortor ac arcu viverra pulvinar. Fusce pulvinar rhoncus leo, a faucibus enim interdum non. Aliquam vulputate mattis consectetur. Pellentesque sit amet quam sem. Cras eget arcu eu elit volutpat volutpat. Integer sed varius enim. Integer sit amet felis orci, id dignissim sapien. Sed vitae lorem sed libero facilisis fringilla. Pellentesque congue tristique purus, eleifend semper risus suscipit quis. Phasellus rutrum quam vitae arcu vulputate porta. Sed tristique arcu nec mi porttitor lacinia. Donec congue feugiat diam quis pretium. Vivamus at luctus nunc. Integer vulputate laoreet mauris quis auctor. Nunc at ultrices libero. Maecenas porta faucibus purus non vehicula. Sed sit amet metus vitae mi ultrices scelerisque nec quis risus. Phasellus pellentesque tincidunt massa id ultricies. Aliquam dictum arcu ac dolor interdum rutrum. Nulla facilisi. Duis nisi est, tincidunt a sagittis id, sollicitudin at odio. Curabitur sed est eu sapien faucibus dignissim in quis tortor. Nunc ac elit tortor, non lobortis massa. Proin posuere ante ut metus vehicula suscipit. Proin mattis mauris ac lectus consequat rutrum. Nam arcu lectus, commodo non pretium a, pharetra semper dolor. Fusce eleifend hendrerit adipiscing. Nunc et eleifend erat. Suspendisse tempus nisl ut arcu blandit ut adipiscing nisi tristique. Suspendisse molestie facilisis risus sed fermentum. 
Praesent tempor convallis ultricies. Integer et elit velit, at consectetur risus. Vestibulum mollis adipiscing sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris dictum molestie porta. Nam convallis nisl quis lacus vulputate in convallis risus sagittis. Vivamus accumsan faucibus cursus. Ut ultricies imperdiet ligula scelerisque blandit. In ornare egestas purus, at convallis velit egestas laoreet. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nulla quis imperdiet est. Nunc tempus magna quis lacus feugiat et posuere lacus vehicula. Cras lacinia aliquam est at vehicula. Aenean congue elit in ante dignissim vitae fermentum lectus aliquam. Mauris at odio magna, at interdum dui. Cras fringilla mi velit, nec varius neque. Fusce et volutpat lacus. Suspendisse id turpis et urna varius convallis in eu purus. Nulla facilisi. Etiam mauris nisl, ultrices ac porttitor sit amet, facilisis ut neque. Nullam ut velit quis velit tincidunt rhoncus. Praesent tristique porttitor euismod. Nulla non felis ante, feugiat commodo turpis. In nec venenatis mi. Duis tempus tempor purus, vitae consectetur mi ornare eu. Proin sed consequat erat. Quisque nec sem dui. Nam semper, ligula facilisis pretium interdum, diam lectus sollicitudin lorem, in elementum nisi lorem scelerisque justo. Nullam ac fringilla nunc. Maecenas malesuada ligula in massa sollicitudin sit amet auctor ipsum malesuada. Vestibulum ut augue in magna lobortis varius eget in ipsum. In hac habitasse platea dictumst. Cras vel sagittis mi. Aenean urna sapien, ultrices et tristique et, aliquam vel libero. Nullam in consequat ante. Suspendisse libero augue, pulvinar a dignissim vitae, fringilla malesuada dui. Phasellus augue ante, pulvinar eget tincidunt vel, venenatis sed arcu. Pellentesque ac purus orci, vel molestie turpis. Nulla consectetur sollicitudin dolor, sed ornare arcu accumsan fermentum. Fusce vestibulum nisi at leo interdum eu sollicitudin lacus dictum. Fusce malesuada consequat ipsum ut convallis. Maecenas in eros sit amet elit consectetur fringilla nec a nibh. Ut et velit vel ligula pharetra elementum. Nullam aliquam, tellus vel cursus lacinia, dui libero dictum turpis, nec lacinia dolor nunc vel diam. Pellentesque convallis dui quis lacus ornare at rutrum lorem pellentesque. Suspendisse potenti. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nunc ac nibh sed mauris ornare cursus. Praesent enim mauris, tincidunt vitae convallis ac, ultricies imperdiet sapien. Duis sodales ligula eget lorem suscipit sed consectetur metus pretium. Nam in magna augue, quis volutpat mauris. Quisque pretium lobortis orci quis laoreet. Nam ut nisi diam. Sed ultrices ultrices dapibus. Integer feugiat mauris id orci pulvinar eu tempus nibh viverra. Etiam venenatis bibendum massa a consequat. Fusce interdum velit ac mauris rhoncus non cursus neque consectetur. Vestibulum dictum eros ac metus fringilla venenatis. Phasellus auctor dui non nulla molestie id malesuada mauris euismod. Aenean id tortor ac justo eleifend mollis non vel arcu. Duis ac lorem tortor. Donec volutpat purus sed nunc luctus interdum hendrerit nulla ullamcorper. Sed consectetur interdum aliquet. Proin ullamcorper risus ut ante lacinia sagittis. Nunc varius eleifend purus, ac pellentesque urna viverra id. Praesent euismod, sapien accumsan gravida dictum, massa massa euismod sapien, ut auctor tellus arcu sed diam. 
Vivamus tincidunt dolor non lorem pellentesque at tempus elit adipiscing. Vestibulum tempor aliquam consectetur. Mauris nec dictum nisl. Donec scelerisque ornare condimentum. Phasellus laoreet justo nec nibh convallis convallis. Duis id orci sapien, eget pulvinar justo. Aenean id arcu felis, eu iaculis nibh. Aenean eleifend pretium rutrum. Aliquam molestie sem quis tellus aliquam eleifend. Mauris et purus orci. Nunc et accumsan tortor. Phasellus semper eleifend nisi, a faucibus risus vehicula id. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas in felis et est lacinia eleifend vel sed ipsum. Aliquam commodo molestie lorem id hendrerit. Nam sed tellus urna, sed dignissim eros. """ return A_LONG_STRING Cython-0.26.1/tests/compile/extsetattr.pyx0000664000175000017500000000012112542002467021367 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __setattr__(self, n, x): pass Cython-0.26.1/tests/compile/excvalcheck.h0000664000175000017500000000040512542002467021054 0ustar stefanstefan00000000000000#ifdef __cplusplus extern "C" { #endif extern DL_EXPORT(int) spam(void); extern DL_EXPORT(void) grail(void); extern DL_EXPORT(char *)tomato(void); #ifdef __cplusplus } #endif int spam(void) {return 0;} void grail(void) {return;} char *tomato(void) {return 0;} Cython-0.26.1/tests/compile/ewing9.pxd0000664000175000017500000000002312542002467020336 0ustar stefanstefan00000000000000cdef struct xmlDoc Cython-0.26.1/tests/compile/extern_packed_struct.pyx0000664000175000017500000000012512542002467023404 0ustar stefanstefan00000000000000# mode: compile cdef extern from *: cdef packed struct MyStruct: char a Cython-0.26.1/tests/compile/ishimoto1.pyx0000664000175000017500000000011312542002467021075 0ustar stefanstefan00000000000000# mode: compile cdef class A: def __getitem__(self, x): pass Cython-0.26.1/tests/compile/emptytry.pyx0000664000175000017500000000012512542002467021061 0ustar stefanstefan00000000000000# mode: compile cdef void f(): try: pass finally: pass f() Cython-0.26.1/tests/compile/templates.h0000664000175000017500000000067412542002467020602 0ustar stefanstefan00000000000000#ifndef _TEMPLATES_H_ #define _TEMPLATES_H_ template class TemplateTest1 { public: T value; int t; TemplateTest1() { } T getValue() { return value; } }; template class TemplateTest2 { public: T value1; U value2; TemplateTest2() { } T getValue1() { return value1; } U getValue2() { return value2; } }; template void template_function(TemplateTest1 &) { } #endif Cython-0.26.1/tests/compile/fromimport_star.pyx0000664000175000017500000000014012542002467022410 0ustar stefanstefan00000000000000# mode: compile from spam import * from ...spam.foo import * from . import * from ... 
import * Cython-0.26.1/tests/compile/pinard4.pyx0000664000175000017500000000014412542002467020526 0ustar stefanstefan00000000000000# mode: compile __doc__ = u""" >>> fiches_CP [] """ fiches_CP = [1,2,3] fiches_CP[:] = [] Cython-0.26.1/tests/compile/extpropertydel.pyx0000664000175000017500000000014312542002467022256 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: property eggs: def __del__(self): pass Cython-0.26.1/tests/compile/coventry1.pyx0000664000175000017500000000005712542002467021122 0ustar stefanstefan00000000000000# mode: compile cdef class Tst: cdef foo, Cython-0.26.1/tests/compile/specialfloatvals.pyx0000664000175000017500000000026212542002467022522 0ustar stefanstefan00000000000000# mode: compile DEF nan = float('nan') DEF inf = float('inf') DEF minf = -float('inf') cdef int f() except -1: cdef float x, y, z x = nan y = inf z = minf f() Cython-0.26.1/tests/compile/forward.pyx0000664000175000017500000000121612542002467020632 0ustar stefanstefan00000000000000# mode: compile ctypedef enum MyEnum: Value1 Value2 Value3 = 100 cdef MyEnum my_enum = Value3 ctypedef struct StructA: StructA *a StructB *b cdef struct StructB: StructA *a StructB *b cdef class ClassA: cdef ClassA a cdef ClassB b ctypedef public class ClassB [ object ClassB, type TypeB ]: cdef ClassA a cdef ClassB b cdef StructA struct_a cdef StructB struct_b struct_a.a = &struct_a struct_a.b = &struct_b struct_b.a = &struct_a struct_b.b = &struct_b cdef ClassA class_a = ClassA() cdef ClassB class_b = ClassB() class_a.a = class_a class_a.b = class_b class_b.a = class_a class_b.b = class_b Cython-0.26.1/tests/compile/libc_errno.pyx0000664000175000017500000000020612542002467021302 0ustar stefanstefan00000000000000# mode: compile from libc.errno cimport * if errno == EDOM : pass if errno == EILSEQ : pass if errno == ERANGE : pass errno = 0 Cython-0.26.1/tests/compile/fromimport.pyx0000664000175000017500000000063112542002467021364 0ustar stefanstefan00000000000000# mode: compile def f(): from spam import eggs from spam.morespam import bacon, eggs, ham from spam import eggs as ova from . import spam from ... import spam from .. import spam, foo from ... 
import spam, foobar from .spam import foo from ...spam import foo, bar from ...spam.foo import bar from ...spam.foo import foo, bar from ...spam.foo import (foo, bar) Cython-0.26.1/tests/compile/publicapi_cimport.pyx0000664000175000017500000000023512542002467022673 0ustar stefanstefan00000000000000# mode: compile from publicapi_pxd_mix cimport * bar0() bar1() bar2() bar3() spam0(None) spam1(None) spam2(None) spam3(None) i0 = 0 i1 = 1 i2 = 2 i3 = 3 Cython-0.26.1/tests/compile/arrayargs.pyx0000664000175000017500000000062312542002467021162 0ustar stefanstefan00000000000000# mode: compile cdef extern from *: cdef void foo(int[]) ctypedef int MyInt cdef void foo(MyInt[]) struct MyStruct: pass cdef void bar(MyStruct[]) ctypedef MyStruct* MyStructP cdef void baz(MyStructP[]) cdef struct OtherStruct: int a a = sizeof(int[23][34]) b = sizeof(OtherStruct[43]) DEF COUNT = 4 c = sizeof(int[COUNT]) d = sizeof(OtherStruct[COUNT]) Cython-0.26.1/tests/compile/r_pernici1.pyx0000664000175000017500000000053512542002467021224 0ustar stefanstefan00000000000000# mode: compile __doc__ = u""" >>> main() 3.14159265358979323846 3.14159265358979323846 3.14159265358979323846 """ cdef extern from "math.h": double M_PI #cdef unsigned long int n1 #n1 = 4293858116 cdef double pi pi = 3.14159265358979323846 def main(): #print n1 print "%.18f" % M_PI print "%.18f" % ( M_PI) print "%.18f" % pi Cython-0.26.1/tests/compile/cpp_templates.pyx0000664000175000017500000000221212542002467022023 0ustar stefanstefan00000000000000# tag: cpp # mode: compile # ticket: 767 cdef extern from "templates.h": cdef cppclass TemplateTest1[T]: TemplateTest1() T value int t T getValue() cdef cppclass TemplateTest2[T, U]: TemplateTest2() T value1 U value2 T getValue1() U getValue2() void template_function[T](TemplateTest1[T] &) cdef TemplateTest1[int] a cdef TemplateTest1[int]* b = new TemplateTest1[int]() cdef int c = a.getValue() c = b.getValue() cdef TemplateTest2[int, char] d cdef TemplateTest2[int, char]* e = new TemplateTest2[int, char]() c = d.getValue1() c = e.getValue2() cdef char f = d.getValue2() f = e.getValue2() del b, e ctypedef TemplateTest1[int] TemplateTest1_int cdef TemplateTest1_int aa # Verify that T767 is fixed. cdef public int func(int arg): return arg # Regression test: the function call used to produce # template_function>(__pyx_v_t); # which is valid C++11, but not valid C++98 because the ">>" would be # parsed as a single token. 
cdef public void use_nested_templates(): cdef TemplateTest1[TemplateTest1[int]] t template_function(t) Cython-0.26.1/tests/compile/extdescrdel.pyx0000664000175000017500000000011412542002467021470 0ustar stefanstefan00000000000000# mode: compile cdef class Foo: def __delete__(self, i): pass Cython-0.26.1/tests/compile/extdescrget.pyx0000664000175000017500000000011412542002467021503 0ustar stefanstefan00000000000000# mode: compile cdef class Foo: def __get__(self, i, c): pass Cython-0.26.1/tests/compile/pylong.pyx0000664000175000017500000000102612542002467020475 0ustar stefanstefan00000000000000# mode: compile cdef extern from "Python.h": ctypedef struct PyTypeObject: pass ctypedef struct PyObject: Py_ssize_t ob_refcnt PyTypeObject *ob_type cdef extern from "longintrepr.h": cdef struct _longobject: int ob_refcnt PyTypeObject *ob_type # int ob_size # not in Py3k unsigned int *ob_digit def test(temp = long(0)): cdef _longobject *l l = <_longobject *> temp #print sizeof(l.ob_size) # not in Py3k print sizeof(l.ob_digit[0]) Cython-0.26.1/tests/compile/ass2longlong.pyx0000664000175000017500000000024312542002467021575 0ustar stefanstefan00000000000000# mode: compile cdef void spam(): cdef long long L cdef unsigned long long U cdef object x = object() L = x x = L U = x x = U spam() Cython-0.26.1/tests/compile/libc_all.pyx0000664000175000017500000000144312542002467020731 0ustar stefanstefan00000000000000# mode: compile cimport libc cimport libc.stdio cimport libc.errno cimport libc.float cimport libc.limits cimport libc.locale cimport libc.signal cimport libc.stddef #cimport libc.stdint # XXX MSVC cimport libc.stdio cimport libc.stdlib cimport libc.string from libc cimport errno from libc cimport float from libc cimport limits from libc cimport locale from libc cimport signal from libc cimport stddef #from libc cimport stdint # XXX MSVC from libc cimport stdio from libc cimport stdlib from libc cimport string from libc.errno cimport * from libc.float cimport * from libc.limits cimport * from libc.locale cimport * from libc.signal cimport * from libc.stddef cimport * #from libc.stdint cimport * # XXX MSVC from libc.stdio cimport * from libc.stdlib cimport * from libc.string cimport * Cython-0.26.1/tests/compile/stop_async_iteration_exception_pep492.pyx0000664000175000017500000000114713143605603026611 0ustar stefanstefan00000000000000# mode: compile # tag: pep492 # make sure async iterators also compile correctly without using 'await' cdef class AsyncIter: cdef long i cdef long aiter_calls cdef long max_iter_calls def __init__(self, long max_iter_calls=1): self.i = 0 self.aiter_calls = 0 self.max_iter_calls = max_iter_calls def __aiter__(self): self.aiter_calls += 1 return self async def __anext__(self): self.i += 1 assert self.aiter_calls <= self.max_iter_calls if self.i > 10: raise StopAsyncIteration return self.i, self.i Cython-0.26.1/tests/compile/weakref_T276.pyx0000664000175000017500000000035512542002467021337 0ustar stefanstefan00000000000000# ticket: 276 # mode: compile __doc__ = u""" """ cdef class A: cdef __weakref__ ctypedef public class B [type B_Type, object BObject]: cdef __weakref__ cdef public class C [type C_Type, object CObject]: cdef __weakref__ Cython-0.26.1/tests/compile/a/0000775000175000017500000000000013151203436016640 5ustar stefanstefan00000000000000Cython-0.26.1/tests/compile/a/b.pxd0000664000175000017500000000002612542002467017600 0ustar stefanstefan00000000000000cdef int **foo(void*) Cython-0.26.1/tests/compile/a/__init__.py0000664000175000017500000000001512542002467020751 
0ustar stefanstefan00000000000000# empty file Cython-0.26.1/tests/compile/extcmethcall.pyx0000664000175000017500000000046512542002467021650 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: cdef int tons cdef void add_tons(self, int x): pass cdef class SuperSpam(Spam): pass cdef void tomato(): cdef Spam spam cdef SuperSpam superspam = SuperSpam() spam = superspam spam.add_tons(42) superspam.add_tons(1764) tomato() Cython-0.26.1/tests/compile/exthash.pyx0000664000175000017500000000011112542002467020623 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __hash__(self): pass Cython-0.26.1/tests/compile/ctypedef.pyx0000664000175000017500000000016312542002467020771 0ustar stefanstefan00000000000000# mode: compile ctypedef int *IntPtr ctypedef unsigned long ULong cdef extern IntPtr spam cdef extern ULong grail Cython-0.26.1/tests/compile/publicapi_pxd_mix.pxd0000664000175000017500000000362712542002467022651 0ustar stefanstefan00000000000000# -- ctypedef int Int0 ctypedef public int Int1 ctypedef api int Int2 ctypedef public api int Int3 ctypedef enum EnumA0: EA0 ctypedef public enum EnumA1: EA1 ctypedef api enum EnumA2: EA2 ctypedef public api enum EnumA3: EA3 cdef enum EnumB0: EB0=0 cdef public enum EnumB1: EB1=1 cdef api enum EnumB2: EB2=2 cdef public api enum EnumB3: EB3=3 # -- ctypedef struct StructA0: int SA0 ctypedef public struct StructA1: int SA1 ctypedef api struct StructA2: int SA2 ctypedef public api struct StructA3: int SA3 cdef struct StructB0: int SB0 cdef public struct StructB1: int SB1 cdef api struct StructB2: int SB2 cdef public api struct StructB3: int SB3 # -- ctypedef class Foo0: pass ctypedef public class Foo1 [type PyFoo1_Type, object PyFoo1_Object]: pass ctypedef api class Foo2 [type PyFoo2_Type, object PyFoo2_Object]: pass ctypedef public api class Foo3 [type PyFoo3_Type, object PyFoo3_Object]: pass cdef class Bar0: pass cdef public class Bar1 [type PyBar1_Type, object PyBar1_Object]: pass cdef api class Bar2 [type PyBar2_Type, object PyBar2_Object]: pass cdef public api class Bar3 [type PyBar3_Type, object PyBar3_Object]: pass # -- cdef extern from *: void foo() cdef inline void bar (): pass cdef void bar0() cdef public void bar1() cdef api void bar2() cdef public api void bar3() cdef inline void* spam (object o) except NULL: return NULL cdef void* spam0(object o) except NULL cdef public void* spam1(object o) except NULL cdef api void* spam2(object o) nogil except NULL cdef public api void* spam3(object o) except NULL with gil # -- cdef int i0 = 0 # XXX implement initialization!!! cdef public int i1 cdef api int i2 cdef public api int i3 # -- Cython-0.26.1/tests/compile/excvaldecl.pyx0000664000175000017500000000045212542002467021301 0ustar stefanstefan00000000000000# mode: compile cdef int spam() except 42: pass cdef float eggs() except 3.14: pass cdef char *grail() except NULL: pass cdef int tomato() except *: pass cdef int brian() except? 
0: pass cdef int silly() except -1: pass spam() eggs() grail() tomato() brian() silly() Cython-0.26.1/tests/compile/extpropertyset.pyx0000664000175000017500000000014612542002467022310 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: property eggs: def __set__(self, x): pass Cython-0.26.1/tests/compile/coercetovoidptr.pyx0000664000175000017500000000012612542002467022400 0ustar stefanstefan00000000000000# mode: compile cdef void f(): cdef void *p cdef char *q=NULL p = q f() Cython-0.26.1/tests/compile/casttoexttype.pyx0000664000175000017500000000027712542002467022114 0ustar stefanstefan00000000000000# mode: compile cdef extern class external.Spam: pass cdef void foo(object x): pass cdef void blarg(void *y, object z): foo(y) foo(z) blarg(None, None) Cython-0.26.1/tests/compile/extcoerce.pyx0000664000175000017500000000041412542002467021146 0ustar stefanstefan00000000000000# mode: compile cdef class Grail: def __add__(int x, float y): pass cdef class Swallow: pass def f(Grail g): cdef int i = 0 cdef Swallow s = Swallow() cdef object x = Grail() g = x x = g g = i i = g g = s s = g Cython-0.26.1/tests/compile/ewing8.pyx0000664000175000017500000000005412542002467020366 0ustar stefanstefan00000000000000# mode: compile cdef class Blarg: pass Cython-0.26.1/tests/compile/traceback.pyx0000664000175000017500000000021012542002467021076 0ustar stefanstefan00000000000000# mode: compile def spam(): raise Exception cdef int grail() except -1: raise Exception def tomato(): spam() grail() Cython-0.26.1/tests/compile/cast_ctypedef_array_T518.pyx0000664000175000017500000000054412542002467023725 0ustar stefanstefan00000000000000# ticket: 518 # mode: compile cdef extern from "cast_ctypedef_array_T518_helper.h": cdef struct __foo_struct: int i, j ctypedef __foo_struct foo_t[1] void foo_init(foo_t) void foo_clear(foo_t) cdef foo_t value foo_init(value) foo_clear(value) cdef void *pointer = value foo_init(pointer) foo_clear(pointer) Cython-0.26.1/tests/compile/ctypedefunion.pyx0000664000175000017500000000015112542002467022037 0ustar stefanstefan00000000000000# mode: compile ctypedef union pet: int cat float dog cdef pet sam sam.cat = 1 sam.dog = 2.7 Cython-0.26.1/tests/compile/extforward.pyx0000664000175000017500000000014312542002467021351 0ustar stefanstefan00000000000000# mode: compile cdef class Spam cdef class Grail: cdef Spam spam cdef class Spam: pass Cython-0.26.1/tests/compile/classmethargdefault.pyx0000664000175000017500000000061312542002467023210 0ustar stefanstefan00000000000000# mode: compile __doc__ = u""" >>> s = Swallow() >>> s.spam(1) 1 42 'grail' True >>> s.spam(1, 2) 1 2 'grail' True >>> s.spam(1, z = 2) 1 42 'grail' 2 >>> s.spam(1, y = 2) 1 42 2 True >>> s.spam(1, x = 2, y = 'test') 1 2 'test' True """ swallow = True class Swallow: def spam(w, int x = 42, y = "grail", z = swallow): print w, x, y, z Cython-0.26.1/tests/compile/forfromelse.pyx0000664000175000017500000000032312542002467021507 0ustar stefanstefan00000000000000# mode: compile cdef void spam(): cdef int i, j=0, k=0 for i from 0 <= i < 10: j = k else: k = j # new syntax for 0 <= i < 10: j = i else: j = k spam() Cython-0.26.1/tests/compile/cstructreturn.pyx0000664000175000017500000000022012542002467022107 0ustar stefanstefan00000000000000# mode: compile ctypedef struct Foo: int blarg cdef Foo f(): blarg = 1 + 2 cdef Foo foo foo.blarg = blarg return foo f() Cython-0.26.1/tests/compile/dotted_cimport.pyx0000664000175000017500000000012612542002467022205 0ustar stefanstefan00000000000000# mode: compile cimport 
dotted_cimport_submodule.a import dotted_cimport_submodule.b Cython-0.26.1/tests/compile/cimportfrom_T248.pyx0000664000175000017500000000032612542002467022251 0ustar stefanstefan00000000000000# ticket: 248 # mode: compile from ewing8 cimport (Foo, Blarg) from declandimpl cimport (Sandwich , Tomato) cdef extern Foo yummy Cython-0.26.1/tests/compile/belchenko2.h0000664000175000017500000000004212542002467020605 0ustar stefanstefan00000000000000void c_func(unsigned char pixel); Cython-0.26.1/tests/compile/gencall.pyx0000664000175000017500000000024312542002467020572 0ustar stefanstefan00000000000000# mode: compile def f(x, y): x = y def z(a, b, c): f(x = 42, y = "spam") f(*a) f(**b) f(x = 42, **b) f(a, *b) f(a, x = 42, *b, **c) Cython-0.26.1/tests/compile/formfeed.pyx0000664000175000017500000000005712542002467020757 0ustar stefanstefan00000000000000# mode: compile cdef int x x = 42 y = 88 Cython-0.26.1/tests/compile/cpp_nogil.h0000664000175000017500000000020712542002467020546 0ustar stefanstefan00000000000000struct NoGilTest1 { NoGilTest1() { } void doSomething() { } }; struct NoGilTest2 { NoGilTest2() { } void doSomething() { } }; Cython-0.26.1/tests/compile/ctypedefpubapi.pyx0000664000175000017500000000043112542002467022170 0ustar stefanstefan00000000000000# mode: compile ctypedef public api class Foo [type PyFoo_Type, object PyFooObject]: pass cdef api: ctypedef public class Bar [type PyBar_Type, object PyBarObject]: pass cdef public api: ctypedef class Baz [type PyBaz_Type, object PyBazObject]: pass Cython-0.26.1/tests/compile/arraytoptrarg.pyx0000664000175000017500000000015212542002467022065 0ustar stefanstefan00000000000000# mode: compile cdef void f1(char *argv[]): f2(argv) cdef void f2(char *argv[]): pass f1(NULL) Cython-0.26.1/tests/compile/jiba5.pyx0000664000175000017500000000013212542002467020154 0ustar stefanstefan00000000000000# mode: compile def f(): cdef int i=0 global mylist del mylist[i] return Cython-0.26.1/tests/compile/constexpr.pyx0000664000175000017500000000016112542002467021211 0ustar stefanstefan00000000000000# mode: compile cdef enum Grail: k = 42 cdef enum Spam: a = -1 b = 2 + 3 c = 42 > 17 d = k Cython-0.26.1/tests/compile/nogil.pyx0000664000175000017500000000060512542002467020277 0ustar stefanstefan00000000000000# mode: compile cdef extern object g(object x) nogil cdef extern void g2(object x) nogil cdef extern from "nogil.h": void e1() nogil int *e2() nogil cdef void f(int x) nogil: cdef int y y = 42 cdef void h(object x) nogil: cdef void *p=None g2(x) g2(p) p = x e1() e2() f(0) h(None) Cython-0.26.1/tests/compile/builtin.pyx0000664000175000017500000000005612542002467020635 0ustar stefanstefan00000000000000# mode: compile def f(): x = open("foo") Cython-0.26.1/tests/compile/nonctypedefclass.pyx0000664000175000017500000000010212542002467022523 0ustar stefanstefan00000000000000# mode: compile cdef class spam: pass cdef spam s s = None Cython-0.26.1/tests/compile/cenum.pyx0000664000175000017500000000025312542002467020275 0ustar stefanstefan00000000000000# mode: compile cdef enum Spam: a b, c, d, e, f g = 42 cdef void eggs(): cdef Spam s1, s2=a cdef int i s1 = s2 s1 = c i = s1 eggs() Cython-0.26.1/tests/compile/ctypedefstruct.pyx0000664000175000017500000000025212542002467022235 0ustar stefanstefan00000000000000# mode: compile ctypedef struct order: int spam int eggs cdef order order1 order1.spam = 7 order1.eggs = 2 ctypedef struct linked: int a linked *next Cython-0.26.1/tests/compile/ctypedefenum.pyx0000664000175000017500000000016012542002467021653 0ustar 
stefanstefan00000000000000# mode: compile ctypedef enum parrot_state: alive = 1 dead = 2 cdef parrot_state polly polly = dead Cython-0.26.1/tests/compile/ewing3.pyx0000664000175000017500000000007612542002467020365 0ustar stefanstefan00000000000000# mode: compile cdef class C: cdef f(self): pass Cython-0.26.1/tests/compile/cimport_package_module_T4.pyx0000664000175000017500000000011212542002467024224 0ustar stefanstefan00000000000000# ticket: 4 # mode: compile from a cimport b cdef int **t = b.foo(NULL) Cython-0.26.1/tests/compile/operators.h0000664000175000017500000000216612542002467020620 0ustar stefanstefan00000000000000#ifndef _OPERATORS_H_ #define _OPERATORS_H_ class Operators { public: int value; Operators() { } Operators(int value) { this->value = value; } virtual ~Operators() { } Operators operator+(Operators f) { return Operators(this->value + f.value); } Operators operator-(Operators f) { return Operators(this->value - f.value); } Operators operator*(Operators f) { return Operators(this->value * f.value); } Operators operator/(Operators f) { return Operators(this->value / f.value); } bool operator<(Operators f) { return this->value < f.value; } bool operator<=(Operators f) { return this->value <= f.value; } bool operator==(Operators f) { return this->value == f.value; } bool operator!=(Operators f) { return this->value != f.value; } bool operator>(Operators f) { return this->value > f.value; } bool operator>=(Operators f) { return this->value >= f.value; } Operators operator>>(int v) { return Operators(this->value >> v); } Operators operator<<(int v) { return Operators(this->value << v); } Operators operator%(int v) { return Operators(this->value % v); } }; #endif Cython-0.26.1/tests/compile/cascmp.pyx0000664000175000017500000000055412542002467020440 0ustar stefanstefan00000000000000# mode: compile cdef void foo(): cdef int bool, int1=0, int2=0, int3=0, int4=0 cdef object obj1, obj2, obj3, obj4 obj1 = 1 obj2 = 2 obj3 = 3 obj4 = 4 bool = int1 < int2 < int3 bool = obj1 < obj2 < obj3 bool = int1 < int2 < obj3 bool = obj1 < 2 < 3 bool = obj1 < 2 < 3 < 4 bool = int1 < (int2 == int3) < int4 foo() Cython-0.26.1/tests/compile/publicapi_pub.pyx0000664000175000017500000000227212542002467022007 0ustar stefanstefan00000000000000# mode: compile # -- ctypedef int Int0 ctypedef public int Int1 ctypedef enum EnumA0: EA0 ctypedef public enum EnumA1: EA1 cdef enum EnumB0: EB0=0 cdef public enum EnumB1: EB1=1 cdef Int0 i0 = 0 cdef EnumA0 ea0 = EA0 cdef EnumB0 eb0 = EB0 cdef public Int1 i1 = 0 cdef public EnumA1 ea1 = EA1 cdef public EnumB1 eb1 = EB1 # -- ctypedef struct StructA0: int SA0 ctypedef public struct StructA1: int SA1 cdef struct StructB0: int SB0 cdef public struct StructB1: int SB1 cdef StructA0 sa0 = {'SA0':0} cdef StructB0 sb0 = {'SB0':2} cdef public StructA1 sa1 = {'SA1':1} cdef public StructB1 sb1 = {'SB1':3} # -- ctypedef class Foo0: pass ctypedef public class Foo1 [type PyFoo1_Type, object PyFoo1_Object]: pass cdef class Bar0: pass cdef public class Bar1 [type PyBar1_Type, object PyBar1_Object]: pass cdef Foo0 f0 = None cdef Bar0 b0 = None cdef public Foo1 f1 = None cdef public Bar1 b1 = None # -- cdef void bar0(): pass cdef public void bar1(): pass cdef void* spam0(object o) except NULL: return NULL cdef public void* spam1(object o) except NULL: return NULL bar0() bar1() spam0(None) spam1(None) # -- Cython-0.26.1/tests/compile/extexttype.pyx0000664000175000017500000000043612542002467021414 0ustar stefanstefan00000000000000# mode: compile cdef extern class external.Spam [object 
SpamObject]: pass ctypedef extern class external.Grail [object Grail]: pass cdef extern from "food.h": class external.Tomato [object Tomato]: pass class external.Bicycle [object Bicycle]: pass Cython-0.26.1/tests/compile/cnumop.pyx0000664000175000017500000000045312542002467020471 0ustar stefanstefan00000000000000# mode: compile def f(): cdef int int1, int2=0, int3=1 cdef char char1=0 cdef long long1, long2=0 cdef float float1, float2=0 cdef double double1 int1 = int2 * int3 int1 = int2 / int3 long1 = long2 * char1 float1 = int1 * float2 double1 = float1 * int2 f() Cython-0.26.1/tests/compile/declandimpl.pxd0000664000175000017500000000011512542002467021412 0ustar stefanstefan00000000000000cdef struct Sandwich: int i char *s cdef class Tomato: cdef float danger Cython-0.26.1/tests/compile/extsetitem.pyx0000664000175000017500000000012112542002467021353 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: def __setitem__(self, i, x): pass Cython-0.26.1/tests/compile/while.pyx0000664000175000017500000000045312542002467020300 0ustar stefanstefan00000000000000# mode: compile def f(a, b): cdef int i = 5 while a: x = 1 while a+b: x = 1 while i: x = 1 else: x = 2 while i: x = 1 break x = 2 else: x = 3 while i: x = 1 continue x = 2 Cython-0.26.1/tests/compile/extpropertyget.pyx0000664000175000017500000000014412542002467022272 0ustar stefanstefan00000000000000# mode: compile cdef class Spam: property eggs: def __get__(self): pass Cython-0.26.1/tests/compile/c_directives.pyx0000664000175000017500000000155012542002467021632 0ustar stefanstefan00000000000000# mode: compile # cython: boundscheck = False # cython: ignoreme = OK # cython: warn.undeclared = False # This testcase is most useful if you inspect the generated C file print 3 cimport cython as cy def e(object[int, ndim=2] buf): print buf[3, 2] # no bc @cy.boundscheck(False) def f(object[int, ndim=2] buf): print buf[3, 2] # no bc @cy.boundscheck(True) def g(object[int, ndim=2] buf): # The below line should have no meaning # cython: boundscheck = False # even if the above line doesn't follow indentation. 
print buf[3, 2] # bc def h(object[int, ndim=2] buf): print buf[3, 2] # no bc with cy.boundscheck(True): print buf[3,2] # bc from cython cimport boundscheck as bc def i(object[int] buf): with bc(True): print buf[3] # bs from cython cimport warn as my_warn @my_warn(undeclared=True) def j(): pass Cython-0.26.1/tests/compile/future_imports.pyx0000664000175000017500000000026012542002467022253 0ustar stefanstefan00000000000000# mode: compile from __future__ import nested_scopes from __future__ import with_statement pass from __future__ import nested_scopes ; from __future__ import nested_scopes Cython-0.26.1/tests/compile/cast_ctypedef_array_T518_helper.h0000664000175000017500000000030212542002467024663 0ustar stefanstefan00000000000000struct __foo_struct { int i, j; }; typedef struct __foo_struct foo_t[1]; static void foo_init (foo_t v) { v[0].i = 0; v[0].j = 0; } static void foo_clear (foo_t v) { v[0].i = 0; v[0].j = 0; } Cython-0.26.1/tests/compile/globalonly.pyx0000664000175000017500000000006112542002467021325 0ustar stefanstefan00000000000000# mode: compile global __name__ print __name__ Cython-0.26.1/tests/compile/ewing6.pyx0000664000175000017500000000056312542002467020371 0ustar stefanstefan00000000000000# mode: compile # Spurious gcc3.3 warnings about incompatible pointer # types passed to C method # Ordering of declarations in C code is important cdef class C cdef class D(C) cdef class E cdef class C: cdef void a(self): pass cdef class D(C): cdef void m(self, E e): pass cdef class E: pass cdef void f(D d, E e): d.m(e) f(D(),E()) Cython-0.26.1/tests/compile/cpp_templated_ctypedef.pyx0000664000175000017500000000016412542002467023673 0ustar stefanstefan00000000000000# tag: cpp # mode: compile cdef extern from *: cdef cppclass Foo[T]: pass ctypedef Foo[int] IntFoo Cython-0.26.1/tests/compile/cverylongtypes.pyx0000664000175000017500000000016412542002467022264 0ustar stefanstefan00000000000000# mode: compile cdef extern short int s cdef extern long int l cdef extern long long ll cdef extern long double ld Cython-0.26.1/tests/compile/ewing9.pyx0000664000175000017500000000005712542002467020372 0ustar stefanstefan00000000000000# mode: compile cdef struct xmlDoc: int i Cython-0.26.1/tests/compile/cunsignedlong.pyx0000664000175000017500000000014612542002467022026 0ustar stefanstefan00000000000000# mode: compile cdef void f(): cdef unsigned long x cdef object y=0 x = y y = x f() Cython-0.26.1/tests/compile/eqcmp.pyx0000664000175000017500000000052712542002467020277 0ustar stefanstefan00000000000000# mode: compile cdef void foo(): cdef int bool, int1=0, int2=0 cdef float float1=0, float2=0 cdef char *ptr1=NULL, *ptr2=NULL cdef int *ptr3 bool = int1 == int2 bool = int1 != int2 bool = float1 == float2 bool = ptr1 == ptr2 bool = int1 == float2 bool = ptr1 is ptr2 bool = ptr1 is not ptr2 foo() Cython-0.26.1/tests/compile/utf8bom.pyx0000664000175000017500000000023412542002467020551 0ustar stefanstefan00000000000000# coding: utf-8 # mode: compile # this file starts with a UTF-8 encoded BOM # the only thing we test is that it properly compiles def test(): pass Cython-0.26.1/tests/compile/huss2.pyx0000664000175000017500000000030512542002467020230 0ustar stefanstefan00000000000000# mode: compile cdef enum Color: red white blue cdef void f(): cdef Color e cdef int i i = red i = red + 1 i = red | 1 e = white i = e i = e + 1 f() Cython-0.26.1/tests/compile/ewing1.pyx0000664000175000017500000000022012542002467020352 0ustar stefanstefan00000000000000# mode: compile cdef int blarg(int i): pass cdef void foo(): cdef 
float f=0 cdef int i if blarg( f): pass foo() Cython-0.26.1/tests/run/0000775000175000017500000000000013151203436015574 5ustar stefanstefan00000000000000Cython-0.26.1/tests/run/ext_attr_assign.pyx0000664000175000017500000000413112542002467021537 0ustar stefanstefan00000000000000# mode: run # tag: assign, exttype cdef struct X: int ix X* x cdef class A: cdef int i cdef list l cdef object o cdef X x def assign_A(self): """ >>> A().assign_A() (2, [1, 2, 3]) """ a = A() a.i = 1 a.l = [1, 2, 3] a.o = a.l a.o = a.o a.l = a.o a.i = a.l[1] return a.i, a.l def assign_A_struct(self): """ >>> A().assign_A_struct() (5, 2, 2, 5) """ cdef X x a = A() a.x.ix = 2 a.x.x = &x x.ix = 5 x.x = &a.x assert a.x.x.x is &a.x a.x.x.x.x.x.x.x = a.x.x.x.x assert x.x is &x assert x.x.x is &x assert a.x.x is &x a.x.x.x.x.x.x.x, a.x.x.x = a.x.x.x.x, &a.x # replay+undo :) assert x.x is &a.x assert x.x.x is &x return x.ix, x.x.ix, a.x.ix, a.x.x.ix cdef class B(A): cdef int ib cdef object ob cdef A a def assign_B(self): """ >>> B().assign_B() (1, 2, 5, 9, 2) """ b = B() b.i = 1 b.ib = 2 b.l = [b.i, b.ib] b.o = b.l b.ob = b.o assert b.ob == b.l b.o = b.ob = b.l b.a = A() # only one reference! b.a.o = 5 b.a.i = 5 b.a, b.a.i = A(), b.a.i # overwrite b.a but keep b.a.i assert b.a.i == 5 assert b.a.o is None b.a.o = 9 b.a, b.a.i, b.a.o = A(), b.a.i, b.a.o return b.i, b.ib, b.a.i, b.a.o, b.o[1] def cross_assign_Ba(self): """ >>> B().cross_assign_Ba() 2 """ b = B() b.a = A() b.a.i = 1 b.a.o = A() # only one reference! (b.a.o).i = 2 b.a = b.a.o return b.a.i def cascaded_assign_B(self): """ >>> B().cascaded_assign_B() (2, 2) """ cdef B b = B() b.ib = 1 b.a = A() b.a.o = B() # only one reference! (b.a.o).ib = 2 b = b.ob = b.a.o return b.ib, (b.ob).ib Cython-0.26.1/tests/run/decorator_lambda.pyx0000664000175000017500000000057212542002467021630 0ustar stefanstefan00000000000000# mode: run # tag: decorator, lambda def decorate(f): return f @decorate(lambda x: x) class TestClassDecorator(object): """ >>> obj = TestClassDecorator() >>> obj.hello() 'Hello, world!' """ def hello(self): return "Hello, world!" 
@decorate(lambda x: x) def test_function(): """ >>> test_function() 123 """ return 123 Cython-0.26.1/tests/run/non_dict_kwargs_T470.pyx0000664000175000017500000000156512542002467022242 0ustar stefanstefan00000000000000# mode: run # ticket: 470 def func(**kwargs): """ >>> func(**{'a' : 7}) True >>> func(**SubDict()) True >>> func(**NonDict()) True """ return type(kwargs) is dict and kwargs['a'] == 7 class NonDict(object): def __getitem__(self, k): assert k == 'a' return 7 def keys(self): return ['a'] def call_non_dict_test(): """ >>> call_non_dict_test() True """ return func(**NonDict()) def call_non_dict_test_kw(): """ >>> call_non_dict_test_kw() True """ return func(b=5, **NonDict()) class SubDict(dict): def __init__(self): self['a'] = 7 def call_sub_dict_test(): """ >>> call_sub_dict_test() True """ return func(**SubDict()) def call_sub_dict_test_kw(): """ >>> call_sub_dict_test_kw() True """ return func(b=5, **SubDict()) Cython-0.26.1/tests/run/pynumop.pyx0000664000175000017500000000037112542002467020052 0ustar stefanstefan00000000000000def f(): """ >>> f() 6 """ obj1 = 1 obj2 = 2 obj3 = 3 obj1 = obj2 * obj3 return obj1 def g(): """ >>> g() 2 """ obj1 = 12 obj2 = 6 obj3 = 3 obj1 = obj2 / obj3 return int(obj1) Cython-0.26.1/tests/run/include.pyx0000664000175000017500000000015212542002467017763 0ustar stefanstefan00000000000000__doc__ = u""" >>> D 2 >>> XYZ 5 """ D = 1 include "testinclude.pxi" include "includes/includefile.pxi" Cython-0.26.1/tests/run/dict_setdefault.py0000664000175000017500000000601112542002467021313 0ustar stefanstefan00000000000000 import cython class Unhashable(object): def __hash__(self): raise TypeError('I am not hashable') class Hashable(object): def __hash__(self): return 1 def __eq__(self, other): return isinstance(other, Hashable) class CountedHashable(object): def __init__(self): self.hash_count = 0 self.eq_count = 0 def __hash__(self): self.hash_count += 1 return 42 def __eq__(self, other): self.eq_count += 1 return id(self) == id(other) @cython.test_fail_if_path_exists('//AttributeNode') @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.locals(d=dict) def setdefault1(d, key): """ >>> d = {} >>> setdefault1(d, 1) >>> len(d) 1 >>> setdefault1(d, 1) >>> len(d) 1 >>> d[1] >>> setdefault1(d, Unhashable()) Traceback (most recent call last): TypeError: I am not hashable >>> len(d) 1 >>> h1 = setdefault1(d, Hashable()) >>> len(d) 2 >>> h2 = setdefault1(d, Hashable()) >>> len(d) 2 >>> d[Hashable()] # CPython's behaviour depends on version and py_debug setting, so just compare to it >>> py_hashed1 = CountedHashable() >>> y = {py_hashed1: 5} >>> py_hashed2 = CountedHashable() >>> y.setdefault(py_hashed2) >>> cy_hashed1 = CountedHashable() >>> y = {cy_hashed1: 5} >>> cy_hashed2 = CountedHashable() >>> setdefault1(y, cy_hashed2) >>> py_hashed1.hash_count - cy_hashed1.hash_count 0 >>> py_hashed2.hash_count - cy_hashed2.hash_count 0 >>> (py_hashed1.eq_count + py_hashed2.eq_count) - (cy_hashed1.eq_count + cy_hashed2.eq_count) 0 """ return d.setdefault(key) @cython.test_fail_if_path_exists('//AttributeNode') @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.locals(d=dict) def setdefault2(d, key, value): """ >>> d = {} >>> setdefault2(d, 1, 2) 2 >>> len(d) 1 >>> setdefault2(d, 1, 2) 2 >>> len(d) 1 >>> l = setdefault2(d, 2, []) >>> len(d) 2 >>> l.append(1) >>> setdefault2(d, 2, []) [1] >>> len(d) 2 >>> setdefault2(d, Unhashable(), 1) Traceback (most recent call last): TypeError: I am not hashable >>> h1 = setdefault2(d, Hashable(), 55) >>> len(d) 3 
>>> h2 = setdefault2(d, Hashable(), 66) >>> len(d) 3 >>> d[Hashable()] 55 # CPython's behaviour depends on version and py_debug setting, so just compare to it >>> py_hashed1 = CountedHashable() >>> y = {py_hashed1: 5} >>> py_hashed2 = CountedHashable() >>> y.setdefault(py_hashed2, []) [] >>> cy_hashed1 = CountedHashable() >>> y = {cy_hashed1: 5} >>> cy_hashed2 = CountedHashable() >>> setdefault2(y, cy_hashed2, []) [] >>> py_hashed1.hash_count - cy_hashed1.hash_count 0 >>> py_hashed2.hash_count - cy_hashed2.hash_count 0 >>> (py_hashed1.eq_count + py_hashed2.eq_count) - (cy_hashed1.eq_count + cy_hashed2.eq_count) 0 """ return d.setdefault(key, value) Cython-0.26.1/tests/run/ctypedef_char_types.pyx0000664000175000017500000000264212542002467022372 0ustar stefanstefan00000000000000 cimport cython from cython cimport typeof from libc.string cimport const_char, const_uchar @cython.test_assert_path_exists( "//NameNode[@name = 'st' and @type.is_string = True]", "//NameNode[@name = 'ust' and @type.is_string = True]", "//NameNode[@name = 'my_st' and @type.is_string = True]", "//NameNode[@name = 'my_ust' and @type.is_string = True]", ) def const_charptrs(): """ >>> const_charptrs() """ cdef object obj cdef const_char* st = b'XYZ' cdef const_uchar* ust = b'XYZ' # needs cast to unsigned assert typeof(st) == "const_char *", typeof(st) my_st = st assert typeof(my_st) == "const_char *", typeof(my_st) obj = my_st assert obj == b'XYZ', obj assert typeof(ust) == "const_uchar *", typeof(ust) my_ust = ust assert typeof(my_ust) == "const_uchar *", typeof(my_ust) obj = my_ust assert obj == b'XYZ', obj ctypedef char mychar ctypedef unsigned char myuchar def const_char_arrays(): """ >>> const_char_arrays() """ cdef int i cdef object obj cdef mychar[4] st cdef myuchar[4] ust cdef char ch i = 0 for ch in b'XYZ\0': st[i] = ch ust[i] = ch i += 1 assert typeof(st) == "mychar [4]", typeof(st) obj = st assert obj == b'XYZ', obj assert typeof(ust) == "myuchar [4]", typeof(ust) obj = ust assert obj == b'XYZ', obj Cython-0.26.1/tests/run/dictcomp.pyx0000664000175000017500000000241612574327400020151 0ustar stefanstefan00000000000000 cimport cython def dictcomp(): """ >>> sorted(dictcomp().items()) [(2, 0), (4, 4), (6, 8)] >>> sorted(dictcomp().items()) [(2, 0), (4, 4), (6, 8)] """ x = 'abc' result = { x+2:x*2 for x in range(5) if x % 2 == 0 } assert x == 'abc' # do not leak! return result @cython.test_assert_path_exists( "//InlinedGeneratorExpressionNode", "//DictComprehensionAppendNode") def genexpr(): """ >>> type(genexpr()) is dict True >>> type(genexpr()) is dict True """ x = 'abc' result = dict( (x+2,x*2) for x in range(5) if x % 2 == 0 ) assert x == 'abc' return result cdef class A: def __repr__(self): return u"A" def __richcmp__(one, other, int op): return one is other def __hash__(self): return id(self) % 65536 def typed_dictcomp(): """ >>> list(typed_dictcomp().items()) [(A, 1), (A, 1), (A, 1)] """ cdef A obj return {obj:1 for obj in [A(), A(), A()]} def iterdict_dictcomp(): """ >>> sorted(iterdict_dictcomp().items()) [(1, 'a'), (2, 'b'), (3, 'c')] """ cdef dict d = dict(a=1,b=2,c=3) return {d[key]:key for key in d} def sorted(it): l = list(it) l.sort() return l Cython-0.26.1/tests/run/weakfail.pyx0000664000175000017500000000072312542002467020127 0ustar stefanstefan00000000000000import gc import weakref foo_dict = weakref.WeakValueDictionary() cdef class Foo: cdef object __weakref__ def test_weakref(key): """ Test af9cfeb5f94d9cd4f2989fc8e111c33208494ba4 fix. 
Originally running it using debug build of python lead to:: visit_decref: Assertion `gc->gc.gc_refs != 0' failed >>> _ = gc.collect() >>> _ = test_weakref(48) >>> _ = gc.collect() """ obj = Foo() foo_dict[key] = obj return obj Cython-0.26.1/tests/run/autotestdict_cdef.pyx0000664000175000017500000000622612542002467022045 0ustar stefanstefan00000000000000# cython: autotestdict=True, autotestdict.cdef=True """ Tests autotestdict compiler directive. Both module test and individual tests are run; finally, all_tests_run() is executed which does final validation. >>> items = list(__test__.items()) >>> items.sort() >>> for key, value in items: ... print('%s ; %s' % (key, value)) MyCdefClass.cdef_method (line 78) ; >>> add_log("cdef class cmethod") MyCdefClass.cpdef_method (line 75) ; >>> add_log("cpdef class method") MyCdefClass.method (line 72) ; >>> add_log("cdef class method") MyClass.method (line 61) ; >>> add_log("class method") cdeffunc (line 25) ; >>> add_log("cdef") mycpdeffunc (line 48) ; >>> add_log("cpdef") myfunc (line 39) ; >>> add_log("def") """ import sys log = [] cdef cdeffunc(): """>>> add_log("cdef")""" cdeffunc() # make sure it's being used def all_tests_run(): assert sorted(log) == sorted([u'cdef', u'cdef class', u'cdef class cmethod', u'class'] + ( ((1 if sys.version_info < (3, 4) else 2) * [u'cdef class method', u'class method', u'cpdef', u'cpdef class method', u'def']))), sorted(log) def add_log(s): log.append(unicode(s)) if len(log) == len(__test__) + (2 if sys.version_info < (3, 4) else 7): # Final per-function doctest executed all_tests_run() def myfunc(): """>>> add_log("def")""" def doc_without_test(): """Some docs""" def nodocstring(): pass cpdef mycpdeffunc(): """>>> add_log("cpdef")""" class MyClass: """ Needs no hack >>> add_log("class") >>> True True """ def method(self): """>>> add_log("class method")""" cdef class MyCdefClass: """ Needs no hack >>> add_log("cdef class") >>> True True """ def method(self): """>>> add_log("cdef class method")""" cpdef cpdef_method(self): """>>> add_log("cpdef class method")""" cdef cdef_method(self): """>>> add_log("cdef class cmethod")""" def __cinit__(self): """ Should not be included, as it can't be looked up with getattr >>> True False """ def __dealloc__(self): """ Should not be included, as it can't be looked up with getattr >>> True False """ def __richcmp__(self, other, int op): """ Should not be included, as it can't be looked up with getattr in Py 2 >>> True False """ def __nonzero__(self): """ Should not be included, as it can't be looked up with getattr in Py 3.1 >>> True False """ def __len__(self): """ Should not be included, as it can't be looked up with getattr in Py 3.1 >>> sys.version_info < (3, 4) False """ def __contains__(self, value): """ Should not be included, as it can't be looked up with getattr in Py 3.1 >>> sys.version_info < (3, 4) False """ cdef class MyOtherCdefClass: """ Needs no hack >>> True True """ def __bool__(self): """ Should not be included, as it can't be looked up with getattr in Py 2 >>> True False """ Cython-0.26.1/tests/run/bint_property_T354.pyx0000664000175000017500000000051012542002467021755 0ustar stefanstefan00000000000000# ticket: 354 cdef class Test: """ >>> t = Test(True) >>> t.some_ro_bool True >>> t.some_public_bool True """ cdef public bint some_public_bool cdef readonly bint some_ro_bool def __init__(self, bint boolval): self.some_ro_bool = boolval self.some_public_bool = boolval Cython-0.26.1/tests/run/listcomp.pyx0000664000175000017500000000521612542002467020200 0ustar 
stefanstefan00000000000000cimport cython def smoketest(): """ >>> smoketest() [0, 4, 8] """ x = 'abc' result = [x*2 for x in range(5) if x % 2 == 0] assert x != 'abc' return result def list_genexp(): """ >>> list_genexp() [0, 4, 8] """ x = 'abc' result = list(x*2 for x in range(5) if x % 2 == 0) assert x == 'abc' return result def int_runvar(): """ >>> int_runvar() [0, 4, 8] """ cdef int x print [x*2 for x in range(5) if x % 2 == 0] cdef class A: def __repr__(self): return u"A" def typed(): """ >>> typed() [A, A, A] """ cdef A obj print [obj for obj in [A(), A(), A()]] def inferred_type(): """ >>> inferred_type() ['A', 'A', 'A'] """ print [cython.typeof(obj) for obj in [A(), A(), A()]] def not_inferred_type(): """ >>> not_inferred_type() ['Python object', 'Python object', 'Python object'] """ print [cython.typeof(obj) for obj in [1, A(), 'abc']] def iterdict(): """ >>> iterdict() [1, 2, 3] """ cdef dict d = dict(a=1,b=2,c=3) l = [d[key] for key in d] l.sort() print l listcomp_result = [ i*i for i in range(5) ] def global_listcomp(): """ >>> [ i*i for i in range(5) ] [0, 1, 4, 9, 16] >>> listcomp_result [0, 1, 4, 9, 16] """ def nested_result(): """ >>> nested_result() [[], [-1], [-1, 0], [-1, 0, 1]] """ result = [[a-1 for a in range(b)] for b in range(4)] return result def listcomp_as_condition(sequence): """ >>> listcomp_as_condition(['a', 'b', '+']) True >>> listcomp_as_condition('ab+') True >>> listcomp_as_condition('abc') False """ if [1 for c in sequence if c in '+-*/<=>!%&|([^~,']: return True return False @cython.test_fail_if_path_exists("//SimpleCallNode//ComprehensionNode") @cython.test_assert_path_exists("//ComprehensionNode") def sorted_listcomp(sequence): """ >>> sorted_listcomp([3,2,4]) [3, 4, 5] """ return sorted([ n+1 for n in sequence ]) @cython.test_fail_if_path_exists("//IfStatNode", "//ComprehensionAppendNode") @cython.test_assert_path_exists("//ComprehensionNode") def listcomp_const_condition_false(): """ >>> listcomp_const_condition_false() [] """ return [x*2 for x in range(3) if False] @cython.test_fail_if_path_exists("//IfStatNode") @cython.test_assert_path_exists("//ComprehensionNode", "//ComprehensionAppendNode") def listcomp_const_condition_true(): """ >>> listcomp_const_condition_true() [0, 2, 4] """ return [x*2 for x in range(3) if True] Cython-0.26.1/tests/run/inop.pyx0000664000175000017500000002510112542002467017306 0ustar stefanstefan00000000000000 cimport cython def f(a,b): """ >>> f(1,[1,2,3]) True >>> f(5,[1,2,3]) False >>> f(2,(1,2,3)) True """ cdef object result = a in b return result def g(a,b): """ >>> g(1,[1,2,3]) 1 >>> g(5,[1,2,3]) 0 >>> g(2,(1,2,3)) 1 """ cdef int result = a in b return result def h(b): """ >>> h([1,2,3,4]) True >>> h([1,3,4]) False """ cdef object result = 2 in b return result def j(b): """ >>> j([1,2,3,4]) 1 >>> j([1,3,4]) 0 """ cdef int result = 2 in b return result @cython.test_fail_if_path_exists("//SwitchStatNode") def k(a): """ >>> k(1) 1 >>> k(5) 0 """ cdef int result = a in [1,2,3,4] return result @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_list(int a): """ >>> m_list(2) 1 >>> m_list(5) 0 """ cdef int result = a in [1,2,3,4] return result @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_tuple(int a): """ >>> m_tuple(2) 1 >>> m_tuple(5) 0 """ cdef int result = a in (1,2,3,4) return result @cython.test_assert_path_exists("//SwitchStatNode") 
@cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_set(int a): """ >>> m_set(2) 1 >>> m_set(5) 0 """ cdef int result = a in {1,2,3,4} return result cdef bytes bytes_string = b'ab\0cde\0f\0g' py_bytes_string = bytes_string @cython.test_assert_path_exists("//PrimaryCmpNode") @cython.test_fail_if_path_exists("//SwitchStatNode", "//BoolBinopNode") def m_bytes(char a, bytes bytes_string): """ >>> m_bytes(ord('f'), py_bytes_string) 1 >>> m_bytes(ord('X'), py_bytes_string) 0 >>> 'f'.encode('ASCII') in None # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... >>> m_bytes(ord('f'), None) Traceback (most recent call last): TypeError: argument of type 'NoneType' is not iterable """ cdef int result = a in bytes_string return result @cython.test_assert_path_exists("//PrimaryCmpNode") @cython.test_fail_if_path_exists("//SwitchStatNode", "//BoolBinopNode") def m_bytes_unsigned(unsigned char a, bytes bytes_string): """ >>> m_bytes(ord('f'), py_bytes_string) 1 >>> m_bytes(ord('X'), py_bytes_string) 0 >>> 'f'.encode('ASCII') in None # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... >>> m_bytes(ord('f'), None) Traceback (most recent call last): TypeError: argument of type 'NoneType' is not iterable """ cdef int result = a in bytes_string return result @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_bytes_literal(char a): """ >>> m_bytes_literal(ord('f')) 1 >>> m_bytes_literal(ord('X')) 0 """ cdef int result = a in b'ab\0cde\0f\0g' return result @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_bytes_literal_unsigned(unsigned char a): """ >>> m_bytes_literal(ord('f')) 1 >>> m_bytes_literal(ord('X')) 0 """ cdef int result = a in b'ab\0cde\0f\0g' return result cdef unicode unicode_string = u'abc\0defg\u1234\uF8D2' py_unicode_string = unicode_string @cython.test_assert_path_exists("//PrimaryCmpNode") @cython.test_fail_if_path_exists("//SwitchStatNode", "//BoolBinopNode") def m_unicode(Py_UNICODE a, unicode unicode_string): """ >>> m_unicode(ord('f'), py_unicode_string) 1 >>> m_unicode(ord('X'), py_unicode_string) 0 >>> m_unicode(ord(py_klingon_character), py_unicode_string) 1 >>> 'f' in None # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... >>> m_unicode(ord('f'), None) Traceback (most recent call last): TypeError: argument of type 'NoneType' is not iterable """ cdef int result = a in unicode_string return result cdef unicode klingon_character = u'\uF8D2' py_klingon_character = klingon_character @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def m_unicode_literal(Py_UNICODE a): """ >>> m_unicode_literal(ord('f')) 1 >>> m_unicode_literal(ord('X')) 0 >>> m_unicode_literal(ord(py_klingon_character)) 1 """ cdef int result = a in u'abc\0defg\u1234\uF8D2' return result cdef unicode wide_unicode_character = u'\U0010FEDC' py_wide_unicode_character = wide_unicode_character wide_unicode_character_surrogate1 = 0xDBFF wide_unicode_character_surrogate2 = 0xDEDC @cython.test_fail_if_path_exists("//SwitchStatNode") @cython.test_assert_path_exists("//PrimaryCmpNode") def m_wide_unicode_literal(Py_UCS4 a): """ >>> m_unicode_literal(ord('f')) 1 >>> m_unicode_literal(ord('X')) 0 >>> import sys >>> if sys.maxunicode == 65535: ... 
m_wide_unicode_literal(wide_unicode_character_surrogate1) ... m_wide_unicode_literal(wide_unicode_character_surrogate2) ... else: ... m_wide_unicode_literal(ord(py_wide_unicode_character)) ... 1 1 1 """ cdef int result = a in u'abc\0defg\u1234\uF8D2\U0010FEDC' return result @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def conditional_int(int a): """ >>> conditional_int(1) 1 >>> conditional_int(0) 2 >>> conditional_int(5) 2 """ return 1 if a in (1,2,3,4) else 2 @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def conditional_object(int a): """ >>> conditional_object(1) 1 >>> conditional_object(0) '2' >>> conditional_object(5) '2' """ return 1 if a in (1,2,3,4) else '2' @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def conditional_bytes(char a): """ >>> conditional_bytes(ord('a')) 1 >>> conditional_bytes(ord('X')) '2' >>> conditional_bytes(0) '2' """ return 1 if a in b'abc' else '2' @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def conditional_unicode(Py_UNICODE a): """ >>> conditional_unicode(ord('a')) 1 >>> conditional_unicode(ord('X')) '2' >>> conditional_unicode(0) '2' """ return 1 if a in u'abc' else '2' @cython.test_assert_path_exists("//SwitchStatNode") @cython.test_fail_if_path_exists("//BoolBinopNode", "//PrimaryCmpNode") def conditional_none(int a): """ >>> conditional_none(1) >>> conditional_none(0) 1 >>> conditional_none(5) 1 """ return None if a in {1,2,3,4} else 1 @cython.test_assert_path_exists( "//BoolBinopNode", "//BoolBinopNode//PrimaryCmpNode" ) @cython.test_fail_if_path_exists("//ListNode") def n(a): """ >>> n('d *') 1 >>> n('xxx') 0 """ cdef int result = a.lower() in [u'a *',u'b *',u'c *',u'd *'] return result def p(a): """ >>> p(1) 0 >>> p('a') 1 """ cdef dict d = {u'a': 1, u'b': 2} cdef int result = a in d return result def q(a): """ >>> q(1) Traceback (most recent call last): TypeError: 'NoneType' object is not iterable >>> l = [1,2,3,4] >>> l2 = [l[1:],l[:-1],l] >>> 2 in l in l2 True """ cdef dict d = None cdef int result = a in d # should fail with a TypeError return result def r(a): """ >>> r(2) 1 """ cdef object l = [1,2,3,4] cdef object l2 = [l[1:],l[:-1],l] cdef int result = a in l in l2 return result def s(a): """ >>> s(2) 1 """ cdef int result = a in [1,2,3,4] in [[1,2,3],[2,3,4],[1,2,3,4]] return result #@cython.test_assert_path_exists("//ReturnStatNode//BoolNode") #@cython.test_fail_if_path_exists("//SwitchStatNode") def constant_empty_sequence(a): """ >>> constant_empty_sequence(1) False >>> constant_empty_sequence(5) False """ return a in () @cython.test_fail_if_path_exists("//ReturnStatNode//BoolNode") @cython.test_assert_path_exists("//PrimaryCmpNode") def constant_empty_sequence_side_effect(a): """ >>> l =[] >>> def a(): ... l.append(1) ... return 1 >>> constant_empty_sequence_side_effect(a) False >>> l [1] """ return a() in () def test_error_non_iterable(x): """ >>> test_error_non_iterable(1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... """ return x in 42 def test_error_non_iterable_cascaded(x): """ >>> test_error_non_iterable_cascaded(1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... 
""" return 1 == x in 42 def test_inop_cascaded(x): """ >>> test_inop_cascaded(1) False >>> test_inop_cascaded(2) True >>> test_inop_cascaded(3) False """ return 1 != x in [2] ### The following tests are copied from CPython's test_grammar.py. ### They look stupid, but the nice thing about them is that Cython ### treats '1' as a C integer constant that triggers Python object ### coercion for the 'in' operator here, whereas the left side of ### the cascade can be evaluated entirely in C space. def test_inop_cascaded_one(): """ >>> test_inop_cascaded_one() False """ return 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1 def test_inop_cascaded_int_orig(int x): """ >>> test_inop_cascaded_int_orig(1) False """ return 1 < 1 > 1 == 1 >= 1 <= 1 != x in 1 not in 1 is 1 is not 1 def test_inop_cascaded_one_err(): """ >>> test_inop_cascaded_one_err() # doctest: +ELLIPSIS Traceback (most recent call last): TypeError:... itera... """ return 1 == 1 >= 1 <= 1 in 1 not in 1 is 1 is not 1 def test_inop_cascaded_int_orig_err(int x): """ >>> test_inop_cascaded_int_orig_err(1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError:... itera... """ return 1 == 1 >= 1 <= 1 == x in 1 not in 1 is 1 is not 1 ### def test_inop_cascaded_int(int x): """ >>> test_inop_cascaded_int(1) False >>> test_inop_cascaded_int(2) True >>> test_inop_cascaded_int(3) False """ return 1 != x in [1,2] Cython-0.26.1/tests/run/unpack.pyx0000664000175000017500000002265413023021033017615 0ustar stefanstefan00000000000000# mode: run # tag: sequence_unpacking import cython def _it(N): for i in range(N): yield i cdef class ItCount(object): cdef object values cdef readonly count def __init__(self, values): self.values = iter(values) self.count = 0 def __iter__(self): return self def __next__(self): self.count += 1 return next(self.values) def kunterbunt(obj1, obj2, obj3, obj4, obj5): """ >>> kunterbunt(1, (2,), (3,4,5), (6,(7,(8,9))), 0) (8, 9, (8, 9), (6, (7, (8, 9))), 0) """ obj1, = obj2 obj1, obj2 = obj2 + obj2 obj1, obj2, obj3 = obj3 obj1, (obj2, obj3) = obj4 [obj1, obj2] = obj3 return obj1, obj2, obj3, obj4, obj5 def unpack_tuple(tuple it): """ >>> unpack_tuple((1,2,3)) (1, 2, 3) >>> a,b,c = None # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> unpack_tuple(None) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... """ a,b,c = it return a,b,c def unpack_list(list it): """ >>> unpack_list([1,2,3]) (1, 2, 3) >>> a,b,c = None # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> unpack_list(None) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... """ a,b,c = it return a,b,c def unpack_to_itself(it): """ >>> it = _it(2) >>> it, it = it >>> it 1 >>> unpack_to_itself([1,2]) 2 >>> unpack_to_itself((1,2)) 2 >>> unpack_to_itself(_it(2)) 1 >>> unpack_to_itself((1,2,3)) Traceback (most recent call last): ValueError: too many values to unpack (expected 2) >>> unpack_to_itself(_it(3)) Traceback (most recent call last): ValueError: too many values to unpack (expected 2) """ it, it = it return it def unpack_partial(it): """ >>> it = _it(2) >>> a = b = c = 0 >>> try: a,b,c = it ... except ValueError: pass ... else: print("DID NOT FAIL!") >>> a, b, c (0, 0, 0) >>> unpack_partial([1,2]) (0, 0, 0) >>> unpack_partial((1,2)) (0, 0, 0) >>> unpack_partial(_it(2)) (0, 0, 0) >>> it = ItCount([1,2]) >>> a = b = c = 0 >>> try: a,b,c = it ... except ValueError: pass ... 
else: print("DID NOT FAIL!") >>> a, b, c (0, 0, 0) >>> it.count 3 >>> it = ItCount([1,2]) >>> unpack_partial(it) (0, 0, 0) >>> it.count 3 """ a = b = c = 0 try: a, b, c = it except ValueError: pass return a, b, c def unpack_fail_assignment(it): """ >>> it = ItCount([1, 2, 3]) >>> a = b = c = 0 >>> try: a, b[0], c = it ... except TypeError: pass >>> a,b,c (1, 0, 0) >>> it.count 4 >>> it = ItCount([1, 2, 3]) >>> unpack_fail_assignment(it) (1, 0, 0) >>> it.count 4 """ cdef object a,b,c a = b = c = 0 try: a, b[0], c = it except TypeError: pass return a, b, c def unpack_partial_typed(it): """ >>> unpack_partial_typed([1, 2, 'abc']) (0, 0, 0) >>> unpack_partial_typed((1, 'abc', 3)) (0, 0, 0) >>> unpack_partial_typed(set([1, 'abc', 3])) (0, 0, 0) >>> it = ItCount([1, 'abc', 3]) >>> unpack_partial_typed(it) (0, 0, 0) >>> it.count 4 """ cdef int a,b,c a = b = c = 0 try: a, b, c = it except TypeError: pass return a, b, c def unpack_typed(it): """ >>> unpack_typed((1, 2.0, [1])) (1, 2.0, [1]) >>> unpack_typed([1, 2.0, [1]]) (1, 2.0, [1]) >>> it = ItCount([1, 2.0, [1]]) >>> unpack_typed(it) (1, 2.0, [1]) >>> it.count 4 >>> try: unpack_typed((1, None, [1])) ... except TypeError: pass >>> try: unpack_typed([1, None, [1]]) ... except TypeError: pass >>> it = ItCount([1, None, [1]]) >>> try: unpack_typed(it) ... except TypeError: pass >>> it.count 4 >>> unpack_typed((1, 2.0, (1,))) Traceback (most recent call last): TypeError: Expected list, got tuple >>> it = ItCount([1, 2.0, (1,)]) >>> unpack_typed(it) Traceback (most recent call last): TypeError: Expected list, got tuple >>> it.count 4 """ cdef int a cdef float b cdef list c a,b,c = it return a,b,c def failure_too_many(it): """ >>> try: a,b,c = [1,2,3,4] ... except ValueError: pass >>> failure_too_many([1,2,3,4]) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) >>> try: a,b,c = [1,2,3,4] ... except ValueError: pass >>> failure_too_many((1,2,3,4)) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) >>> a,b,c = set([1,2,3,4]) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: too many values to unpack... >>> failure_too_many(set([1,2,3,4])) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) >>> a,b,c = _it(4) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: too many values to unpack... >>> failure_too_many(_it(4)) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) """ a,b,c = it return a,b,c def failure_too_few(it): """ >>> try: a,b,c = [1,2] ... except ValueError: pass >>> failure_too_few([1,2]) Traceback (most recent call last): ValueError: need more than 2 values to unpack >>> try: a,b,c = (1,2) ... except ValueError: pass >>> failure_too_few((1,2)) Traceback (most recent call last): ValueError: need more than 2 values to unpack >>> try: a,b,c = set([1,2]) ... except ValueError: pass ... else: print("DID NOT FAIL!") >>> failure_too_few(set([1,2])) Traceback (most recent call last): ValueError: need more than 2 values to unpack >>> try: a,b,c = _it(2) ... except ValueError: pass ... 
else: print("DID NOT FAIL!") >>> failure_too_few(_it(2)) Traceback (most recent call last): ValueError: need more than 2 values to unpack """ a,b,c = it return a,b,c def _it_failure(N): for i in range(N): yield i raise ValueError("huhu") def failure_while_unpacking(it): """ >>> a,b,c = _it_failure(0) Traceback (most recent call last): ValueError: huhu >>> failure_while_unpacking(_it_failure(0)) Traceback (most recent call last): ValueError: huhu >>> a,b,c = _it_failure(1) Traceback (most recent call last): ValueError: huhu >>> failure_while_unpacking(_it_failure(1)) Traceback (most recent call last): ValueError: huhu >>> a,b,c = _it_failure(2) Traceback (most recent call last): ValueError: huhu >>> failure_while_unpacking(_it_failure(2)) Traceback (most recent call last): ValueError: huhu >>> a,b,c = _it_failure(3) Traceback (most recent call last): ValueError: huhu >>> failure_while_unpacking(_it_failure(3)) Traceback (most recent call last): ValueError: huhu >>> a,b,c = _it_failure(4) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: too many values to unpack... >>> failure_while_unpacking(_it_failure(4)) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) """ a,b,c = it return a,b,c def unpack_many(it): """ >>> items = range(1,13) >>> unpack_many(items) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many(iter(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many(list(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many(tuple(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) """ a,b,c,d,e,f,g,h,i,j,k,l = it return a,b,c,d,e,f,g,h,i,j,k,l def unpack_many_tuple(tuple it): """ >>> items = range(1,13) >>> unpack_many_tuple(tuple(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) """ a,b,c,d,e,f,g,h,i,j,k,l = it return a,b,c,d,e,f,g,h,i,j,k,l def unpack_many_list(list it): """ >>> items = range(1,13) >>> unpack_many_list(list(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) """ a,b,c,d,e,f,g,h,i,j,k,l = it return a,b,c,d,e,f,g,h,i,j,k,l def unpack_many_int(it): """ >>> items = range(1,13) >>> unpack_many_int(items) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many_int(iter(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many_int(list(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) >>> unpack_many_int(tuple(items)) (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) """ cdef int b cdef long f cdef Py_ssize_t h a,b,c,d,e,f,g,h,i,j,k,l = it return a,b,c,d,e,f,g,h,i,j,k,l @cython.test_fail_if_path_exists('//PyTypeTestNode') def unpack_literal_none_to_builtin_type(): """ >>> unpack_literal_none_to_builtin_type() (None, None, None, None) """ cdef list a,b,c,d a, b = c, d = None, None return a,b,c,d cdef class ExtType: pass @cython.test_fail_if_path_exists('//PyTypeTestNode') def unpack_literal_none_to_exttype(): """ >>> unpack_literal_none_to_exttype() (None, None, None, None) """ cdef ExtType a,b,c,d a, b = c, d = None, None return a,b,c,d # Github issue #1523 def test_unpack_resultref(): """ >>> test_unpack_resultref() == ((1, set()), 1, set()) True """ a = b, c = 1, set() return a, b, c Cython-0.26.1/tests/run/cdef_decorator_directives_T183.pyx0000664000175000017500000000045112542002467024245 0ustar stefanstefan00000000000000# ticket: 183 cimport cython @cython.cdivision(True) cpdef cdiv_decorator(int a, int b): """ >>> cdiv_decorator(-12, 5) -2 """ return a / b @cython.cdivision(False) cpdef pydiv_decorator(int a, int b): """ >>> pydiv_decorator(-12, 5) -3 """ return a / b 
Cython-0.26.1/tests/run/cpp_stl_vector.pyx0000664000175000017500000001026113150045407021365 0ustar stefanstefan00000000000000# mode: run # tag: cpp, werror from cython.operator cimport dereference as d from cython.operator cimport preincrement as incr from libcpp.vector cimport vector from libcpp cimport bool as cbool def simple_test(double x): """ >>> simple_test(55) 3 """ v = new vector[double]() try: v.push_back(1.0) v.push_back(x) from math import pi v.push_back(pi) return v.size() finally: del v def list_test(L): """ >>> list_test([1,2,4,8]) (4, 4) >>> list_test([]) (0, 0) >>> list_test([-1] * 1000) (1000, 1000) """ v = new vector[int]() try: for a in L: v.push_back(a) return len(L), v.size() finally: del v def index_test(L): """ >>> index_test([1,2,4,8]) (1.0, 8.0) >>> index_test([1.25]) (1.25, 1.25) """ v = new vector[double]() try: for a in L: v.push_back(a) return v[0][0], v[0][len(L)-1] finally: del v def index_set_test(L): """ >>> index_set_test([1,2,4,8]) (-1.0, -8.0) >>> index_set_test([1.25]) (-1.25, -1.25) """ v = new vector[double]() try: for a in L: v.push_back(a) for i in range(v.size()): d(v)[i] = -d(v)[i] return d(v)[0], d(v)[v.size()-1] finally: del v def iteration_test(L): """ >>> iteration_test([1,2,4,8]) 1 2 4 8 """ v = new vector[int]() try: for a in L: v.push_back(a) it = v.begin() while it != v.end(): a = d(it) incr(it) print(a) finally: del v def reverse_iteration_test(L): """ >>> reverse_iteration_test([1,2,4,8]) 8 4 2 1 """ v = new vector[int]() try: for a in L: v.push_back(a) it = v.rbegin() while it != v.rend(): a = d(it) incr(it) print(a) finally: del v def nogil_test(L): """ >>> nogil_test([1,2,3]) 3 """ cdef int a with nogil: v = new vector[int]() try: for a in L: with nogil: v.push_back(a) return v.size() finally: del v def item_ptr_test(L, int i, int x): """ >>> item_ptr_test(range(10), 7, 100) [0, 1, 2, 3, 4, 5, 6, 100, 8, 9] """ cdef vector[int] v = L cdef int* vi_ptr = &v[i] vi_ptr[0] = x return v def test_value_type(x): """ >>> test_value_type(2) 2.0 >>> test_value_type(2.5) 2.5 """ cdef vector[double].value_type val = x return val def test_value_type_complex(x): """ >>> test_value_type_complex(2) (2+0j) """ cdef vector[double complex].value_type val = x return val def test_bool_vector_convert(o): """ >>> test_bool_vector_convert([True, False, None, 3]) [True, False, False, True] """ cdef vector[cbool] v = o return v def test_bool_vector_get_set(): """ >>> test_bool_vector_get_set() """ cdef vector[cbool] v = range(5) # Test access. assert not v[0], v assert v[1], v assert not v.at(0), v assert v.at(1), v v[0] = True v[1] = False assert v == [True, False, True, True, True] ctypedef vector[cbool] vector_bool ctypedef vector[int] vector_int def test_typedef_vector(L): """ >>> test_typedef_vector([0, 1, True]) ([0, 1, 1, 0, 1, 1], 0, [False, True, True, False, True, True], False) """ cdef vector_int vi = L cdef vector_int vi2 = vi vi.insert(vi.begin(), vi2.begin(), vi2.end()) cdef vector_bool vb = L cdef vector_bool vb2 = vb vb.insert(vb.begin(), vb2.begin(), vb2.end()) return vi, vi.at(0), vb, vb.at(0) def test_insert(): """ >>> test_insert() """ cdef vector[int] v cdef vector[int].size_type count = 5 cdef int value = 0 v.insert(v.end(), count, value) assert v.size() == count for element in v: assert element == value, '%s != %s' % (element, count) # Tests GitHub issue #1788. 
cdef cppclass MyVector[T](vector): pass cdef cppclass Ints(MyVector[int]): pass Cython-0.26.1/tests/run/list_pop.pyx0000664000175000017500000001226312574327400020201 0ustar stefanstefan00000000000000cimport cython from libc.stdint cimport uint64_t class A: def pop(self, *args): print args return None cdef class B: """ >>> B().call_pop() 'B' """ cdef pop(self): return "B" def call_pop(self): return self.pop() @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def simple_pop(L): """ >>> L = list(range(10)) >>> simple_pop(L) 9 >>> simple_pop(L) 8 >>> L [0, 1, 2, 3, 4, 5, 6, 7] >>> while L: ... _ = simple_pop(L) >>> L [] >>> simple_pop(L) Traceback (most recent call last): IndexError: pop from empty list >>> simple_pop(A()) () """ return L.pop() @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def simple_pop_typed(list L): """ >>> L = list(range(10)) >>> simple_pop_typed(L) 9 >>> simple_pop_typed(L) 8 >>> L [0, 1, 2, 3, 4, 5, 6, 7] >>> while L: ... _ = simple_pop_typed(L) >>> L [] >>> simple_pop_typed(L) Traceback (most recent call last): IndexError: pop from empty list """ return L.pop() @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def index_pop(L, int i): """ >>> L = list(range(10)) >>> index_pop(L, 2) 2 >>> index_pop(L, -10) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop(L, -2) 8 >>> L [0, 1, 3, 4, 5, 6, 7, 9] >>> index_pop(L, 100) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop(L, -100) Traceback (most recent call last): IndexError: pop index out of range >>> while L: ... _ = index_pop(L, 0) >>> L [] >>> index_pop(L, 0) Traceback (most recent call last): IndexError: pop from empty list >>> index_pop(A(), 3) (3,) """ return L.pop(i) @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def index_pop_typed(list L, int i): """ >>> L = list(range(10)) >>> index_pop_typed(L, 2) 2 >>> index_pop_typed(L, -2) 8 >>> L [0, 1, 3, 4, 5, 6, 7, 9] >>> index_pop_typed(L, 100) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop_typed(L, -100) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop_typed(None, 0) Traceback (most recent call last): AttributeError: 'NoneType' object has no attribute 'pop' >>> while L: ... _ = index_pop_typed(L, 0) >>> L [] >>> index_pop_typed(L, 0) Traceback (most recent call last): IndexError: pop from empty list """ return L.pop(i) @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def index_pop_list_object_index(list L, i): """ >>> L = list(range(10)) >>> index_pop_list_object_index(L, 2) 2 >>> index_pop_list_object_index(L, -2) 8 >>> L [0, 1, 3, 4, 5, 6, 7, 9] >>> index_pop_list_object_index(L, 100) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop_list_object_index(L, -100) Traceback (most recent call last): IndexError: pop index out of range >>> index_pop_list_object_index(None, 0) Traceback (most recent call last): AttributeError: 'NoneType' object has no attribute 'pop' >>> index_pop_list_object_index([1], None) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... 
>>> index_pop_list_object_index([1], 'abc') # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> while L: ... _ = index_pop_list_object_index(L, 0) >>> L [] >>> index_pop_list_object_index(L, 0) Traceback (most recent call last): IndexError: pop from empty list """ return L.pop(i) @cython.test_assert_path_exists('//PythonCapiCallNode') @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode') def index_pop_literal(list L): """ >>> L = list(range(10)) >>> index_pop_literal(L) 0 >>> L [1, 2, 3, 4, 5, 6, 7, 8, 9] >>> while L: ... _ = index_pop_literal(L) >>> L [] >>> index_pop_literal(L) Traceback (most recent call last): IndexError: pop from empty list """ return L.pop(0) @cython.test_fail_if_path_exists('//PythonCapiCallNode') def crazy_pop(L): """ >>> crazy_pop(list(range(10))) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: pop... at most ... argument... >>> crazy_pop(A()) (1, 2, 3) """ return L.pop(1, 2, 3) def method_name(): """ >>> method_name() 'pop' """ return [].pop.__name__ def object_pop_large_int(): """ >>> object_pop_large_int() {} """ cdef object foo = {} cdef uint64_t bar = 201213467776703617ULL foo[bar] = None assert (bar) in foo foo.pop(bar) return foo Cython-0.26.1/tests/run/line_trace.pyx0000664000175000017500000001124313023021033020431 0ustar stefanstefan00000000000000# cython: linetrace=True # distutils: define_macros=CYTHON_TRACE_NOGIL=1 # mode: run # tag: trace cdef extern from "frameobject.h": ctypedef struct PyFrameObject: pass from cpython.ref cimport PyObject from cpython.pystate cimport ( Py_tracefunc, PyTrace_CALL, PyTrace_EXCEPTION, PyTrace_LINE, PyTrace_RETURN, PyTrace_C_CALL, PyTrace_C_EXCEPTION, PyTrace_C_RETURN) cdef extern from *: void PyEval_SetProfile(Py_tracefunc cfunc, object obj) void PyEval_SetTrace(Py_tracefunc cfunc, object obj) map_trace_types = { PyTrace_CALL: 'call', PyTrace_EXCEPTION: 'exc', PyTrace_LINE: 'line', PyTrace_RETURN: 'return', PyTrace_C_CALL: 'ccall', PyTrace_C_EXCEPTION: 'cexc', PyTrace_C_RETURN: 'cret', }.get cdef int _trace_func(PyObject* _traceobj, PyFrameObject* _frame, int what, PyObject* arg) except -1: frame, traceobj = _frame, _traceobj traceobj.append((map_trace_types(what), frame.f_lineno - frame.f_code.co_firstlineno)) return 0 cdef int _failing_call_trace_func(PyObject* _traceobj, PyFrameObject* _frame, int what, PyObject* arg) except -1: if what == PyTrace_CALL: raise ValueError("failing call trace!") return _trace_func(_traceobj, _frame, what, arg) cdef int _failing_line_trace_func(PyObject* _traceobj, PyFrameObject* _frame, int what, PyObject* arg) except -1: if what == PyTrace_LINE and _traceobj: frame, traceobj = _frame, _traceobj if traceobj and traceobj[0] == frame.f_code.co_name: # first line in the right function => fail! raise ValueError("failing line trace!") return _trace_func(_traceobj, _frame, what, arg) def cy_add(a,b): x = a + b # 1 return x # 2 def cy_add_with_nogil(a,b): cdef int z, x=a, y=b # 1 with nogil: # 2 z = 0 # 3 z += cy_add_nogil(x, y) # 4 return z # 5 cdef int cy_add_nogil(int a, int b) nogil except -1: x = a + b # 1 return x # 2 def run_trace(func, *args): """ >>> def py_add(a,b): ... x = a+b ... 
return x >>> run_trace(py_add, 1, 2) [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> run_trace(cy_add, 1, 2) [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> result = run_trace(cy_add_with_nogil, 1, 2) >>> result[:5] [('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)] >>> result[5:9] [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> result[9:] [('line', 2), ('line', 5), ('return', 5)] """ trace = [] PyEval_SetTrace(_trace_func, trace) try: func(*args) finally: PyEval_SetTrace(NULL, None) return trace def fail_on_call_trace(func, *args): """ >>> def py_add(a,b): ... x = a+b ... return x >>> fail_on_call_trace(py_add, 1, 2) Traceback (most recent call last): ValueError: failing call trace! """ trace = [] PyEval_SetTrace(_failing_call_trace_func, trace) try: func(*args) finally: PyEval_SetTrace(NULL, None) assert not trace def fail_on_line_trace(fail_func): """ >>> result = fail_on_line_trace(None) >>> len(result) 17 >>> result[:5] ['NO ERROR', ('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> result[5:10] [('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)] >>> result[10:14] [('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> result[14:] [('line', 2), ('line', 5), ('return', 5)] >>> result = fail_on_line_trace('cy_add_with_nogil') failing line trace! >>> result ['cy_add_with_nogil', ('call', 0), ('line', 1), ('line', 2), ('return', 2), ('call', 0), ('return', 1)] >>> result = fail_on_line_trace('cy_add_nogil') failing line trace! >>> result[:5] ['cy_add_nogil', ('call', 0), ('line', 1), ('line', 2), ('return', 2)] >>> result[5:] [('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4), ('call', 0), ('return', 1), ('return', 4)] """ cdef int x = 1 trace = ['NO ERROR'] exception = None PyEval_SetTrace(_failing_line_trace_func, trace) try: x += 1 cy_add(1, 2) x += 1 if fail_func: trace[0] = fail_func # trigger error on first line x += 1 cy_add_with_nogil(3, 4) x += 1 except Exception as exc: exception = str(exc) finally: PyEval_SetTrace(NULL, None) if exception: print(exception) else: assert x == 5 return trace Cython-0.26.1/tests/run/cstruct.pyx0000664000175000017500000000143612542002467020035 0ustar stefanstefan00000000000000cdef struct Grail cdef struct Spam: int i char c float *p[42] Grail *g cdef struct Grail: Spam *s cdef Spam spam, ham cdef void eggs_i(Spam s): cdef int j j = s.i s.i = j cdef void eggs_c(Spam s): cdef char c c = s.c s.c = c cdef void eggs_p(Spam s): cdef float *p p = s.p[0] s.p[0] = p cdef void eggs_g(Spam s): cdef float *p p = s.p[0] s.p[0] = p spam = ham def test_i(): """ >>> test_i() """ spam.i = 1 eggs_i(spam) def test_c(): """ >>> test_c() """ spam.c = c'a' eggs_c(spam) def test_p(): """ >>> test_p() """ cdef float f spam.p[0] = &f eggs_p(spam) def test_g(): """ >>> test_g() """ cdef Grail l spam.g = &l eggs_g(spam) Cython-0.26.1/tests/run/varargdecl.pyx0000664000175000017500000000020312542002467020447 0ustar stefanstefan00000000000000cdef grail(char *blarg, ...): pass def test(): """ >>> test() """ grail(b"test") grail(b"test", b"toast") Cython-0.26.1/tests/run/cdefassign.pyx0000664000175000017500000000047112542002467020452 0ustar stefanstefan00000000000000cdef int g = 7 def test(x, int y): """ >>> test(1, 2) 4 1 2 2 0 7 8 """ if True: before = 0 cdef int a = 4, b = x, c = y cdef int *p = &y cdef object o = int(8) print a, b, c, p[0], before, g, o # Also test that pruning cdefs doesn't hurt def empty(): cdef int i 
Cython-0.26.1/tests/run/coverage_api.srctree0000664000175000017500000000736013023021033021604 0ustar stefanstefan00000000000000# mode: run # tag: coverage,trace """ PYTHON -c 'import shutil; shutil.copy("pkg/coverage_test_pyx.pyx", "pkg/coverage_test_pyx.pxi")' PYTHON setup.py build_ext -i PYTHON coverage_test.py """ ######## setup.py ######## from distutils.core import setup from Cython.Build import cythonize setup(ext_modules = cythonize([ 'coverage_test_*.py*', 'pkg/coverage_test_*.py*' ])) ######## .coveragerc ######## [run] plugins = Cython.Coverage ######## pkg/__init__.py ######## ######## pkg/coverage_test_py.py ######## # cython: linetrace=True # distutils: define_macros=CYTHON_TRACE=1 def func1(a, b): x = 1 # 5 c = func2(a) + b # 6 return x + c # 7 def func2(a): return a * 2 # 11 ######## pkg/coverage_test_pyx.pyx ######## # cython: linetrace=True # distutils: define_macros=CYTHON_TRACE=1 def func1(int a, int b): cdef int x = 1 # 5 c = func2(a) + b # 6 return x + c # 7 def func2(int a): return a * 2 # 11 ######## coverage_test_include_pyx.pyx ######## # cython: linetrace=True # distutils: define_macros=CYTHON_TRACE=1 cdef int x = 5 # 4 cdef int cfunc1(int x): # 6 return x * 3 # 7 include "pkg/coverage_test_pyx.pxi" # 9 def main_func(int x): # 11 return cfunc1(x) + func1(x, 4) + func2(x) # 12 ######## coverage_test.py ######## import re import os.path try: # io.StringIO in Py2.x cannot handle str ... from StringIO import StringIO except ImportError: from io import StringIO from coverage import coverage from pkg import coverage_test_py from pkg import coverage_test_pyx import coverage_test_include_pyx for module in [coverage_test_py, coverage_test_pyx, coverage_test_include_pyx]: assert not any(module.__file__.endswith(ext) for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()), \ module.__file__ def source_file_for(module): module_name = module.__name__ path, ext = os.path.splitext(module.__file__) platform_suffix = re.search(r'[.](?:cpython|pypy)-[0-9]+[^.]*$', path, re.I) if platform_suffix: path = path[:platform_suffix.start()] return path + '.' + module_name.rsplit('_', 1)[-1] def run_coverage(module): module_name = module.__name__ module_path = module_name.replace('.', os.path.sep) + '.' 
+ module_name.rsplit('_', 1)[-1] cov = coverage() cov.start() assert module.func1(1, 2) == (1 * 2) + 2 + 1 assert module.func2(2) == 2 * 2 if '_include_' in module_name: assert module.main_func(2) == (2 * 3) + ((2 * 2) + 4 + 1) + (2 * 2) cov.stop() out = StringIO() cov.report(file=out) #cov.report([module], file=out) lines = out.getvalue().splitlines() assert any(module_path in line for line in lines), "'%s' not found in coverage report:\n\n%s" % ( module_path, out.getvalue()) mod_file, exec_lines, excl_lines, missing_lines, _ = cov.analysis2(source_file_for(module)) assert module_path in mod_file if '_include_' in module_name: executed = set(exec_lines) - set(missing_lines) assert all(line in executed for line in [7, 12]), '%s / %s' % (exec_lines, missing_lines) # rest of test if for include file mod_file, exec_lines, excl_lines, missing_lines, _ = cov.analysis2( os.path.join(os.path.dirname(module.__file__), "pkg", "coverage_test_pyx.pxi")) executed = set(exec_lines) - set(missing_lines) assert all(line in executed for line in [5, 6, 7, 11]), '%s / %s' % (exec_lines, missing_lines) if __name__ == '__main__': run_coverage(coverage_test_py) run_coverage(coverage_test_pyx) run_coverage(coverage_test_include_pyx) Cython-0.26.1/tests/run/unicode_slicing.pyx0000664000175000017500000001130312542002467021476 0ustar stefanstefan00000000000000# coding: utf-8 __doc__ = u""" >>> slice_start_end(u'abcdef', 2, 3) c >>> slice_start(u'abcdef', 2, 3) cdef >>> slice_end(u'abcdef', 2, 3) ab >>> slice_all(u'abcdef', 2, 3) abcdef >>> slice_start_none(u'abcdef', 2, 3) cdef >>> slice_none_end(u'abcdef', 2, 3) ab >>> slice_none_none(u'abcdef', 2, 3) abcdef >>> slice_start_end(u'abcdef', 2, 10) cdef >>> slice_start(u'abcdef', 2, 10) cdef >>> slice_end(u'abcdef', 2, 10) ab >>> slice_all(u'abcdef', 2, 10) abcdef >>> slice_start_end(u'abcdef', 0, 5) abcde >>> slice_start(u'abcdef', 0, 5) abcdef >>> slice_end(u'abcdef', 0, 5) >>> slice_all(u'abcdef', 0, 5) abcdef >>> slice_start_none(u'abcdef', 0, 5) abcdef >>> slice_none_end(u'abcdef', 0, 5) >>> slice_none_none(u'abcdef', 0, 5) abcdef >>> slice_start_end(u'abcdef', -6, -1) abcde >>> slice_start(u'abcdef', -6, -1) abcdef >>> slice_end(u'abcdef', -6, -1) >>> slice_all(u'abcdef', -6, -1) abcdef >>> slice_start_none(u'abcdef', -6, -1) abcdef >>> slice_none_end(u'abcdef', -6, -1) >>> slice_none_none(u'abcdef', -6, -1) abcdef >>> slice_start_end(u'abcdef', -6, -7) >>> slice_start(u'abcdef', -6, -7) abcdef >>> slice_end(u'abcdef', -6, -7) >>> slice_all(u'abcdef', -6, -7) abcdef >>> slice_start_none(u'abcdef', -6, -7) abcdef >>> slice_none_end(u'abcdef', -6, -7) >>> slice_none_none(u'abcdef', -6, -7) abcdef >>> slice_start_end(u'abcdef', -7, -7) >>> slice_start(u'abcdef', -7, -7) abcdef >>> slice_end(u'abcdef', -7, -7) >>> slice_all(u'abcdef', -7, -7) abcdef >>> slice_start_none(u'abcdef', -7, -7) abcdef >>> slice_none_end(u'abcdef', -7, -7) >>> slice_none_none(u'abcdef', -7, -7) abcdef >>> slice_start_end(u'aÐbБcСdДeЕfФ', 2, 8) bБcСdД >>> slice_start(u'aÐbБcСdДeЕfФ', 2, 8) bБcСdДeЕfФ >>> slice_end(u'aÐbБcСdДeЕfФ', 2, 8) aÐ >>> slice_all(u'aÐbБcСdДeЕfФ', 2, 8) aÐbБcСdДeЕfФ >>> slice_start_none(u'aÐbБcСdДeЕfФ', 2, 8) bБcСdДeЕfФ >>> slice_none_end(u'aÐbБcСdДeЕfФ', 2, 8) aÐ >>> slice_none_none(u'aÐbБcСdДeЕfФ', 2, 8) aÐbБcСdДeЕfФ >>> slice_start_end(u'ÐБСДЕФ', 2, 4) СД >>> slice_start(u'ÐБСДЕФ', 2, 4) СДЕФ >>> slice_end(u'ÐБСДЕФ', 2, 4) ÐБ >>> slice_all(u'ÐБСДЕФ', 2, 4) ÐБСДЕФ >>> slice_start_none(u'ÐБСДЕФ', 2, 4) СДЕФ >>> slice_none_end(u'ÐБСДЕФ', 2, 4) ÐБ >>> 
slice_none_none(u'ÐБСДЕФ', 2, 4) ÐБСДЕФ >>> slice_start_end(u'ÐБСДЕФ', -4, -2) СД >>> slice_start(u'ÐБСДЕФ', -4, -2) СДЕФ >>> slice_end(u'ÐБСДЕФ', -4, -2) ÐБ >>> slice_all(u'ÐБСДЕФ', -4, -2) ÐБСДЕФ >>> slice_start_none(u'ÐБСДЕФ', -4, -2) СДЕФ >>> slice_none_end(u'ÐБСДЕФ', -4, -2) ÐБ >>> slice_none_none(u'ÐБСДЕФ', -4, -2) ÐБСДЕФ >>> slice_start_end(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_start(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_end(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_all(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_start_none(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_none_end(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable >>> slice_none_none(None, 2, 4) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable """ import sys if sys.version_info[0] >= 3: __doc__ = __doc__.replace(u"(u'", u"('").replace(u" u'", u" '") def slice_start_end(unicode s, int i, int j): print(s[i:j]) def slice_start(unicode s, int i, int j): print(s[i:]) def slice_end(unicode s, int i, int j): print(s[:i]) def slice_all(unicode s, int i, int j): print(s[:]) def slice_start_none(unicode s, int i, int j): print(s[i:None]) def slice_none_end(unicode s, int i, int j): print(s[None:i]) def slice_none_none(unicode s, int i, int j): print(s[None:None]) Cython-0.26.1/tests/run/trybreak.pyx0000664000175000017500000000122212542002467020162 0ustar stefanstefan00000000000000# Indirectly makes sure the cleanup happens correctly on breaking. 
def try_except_break(): """ >>> print(try_except_break()) a """ for x in list("abc"): try: x() except: break return x def try_break_except(): """ >>> print(try_break_except()) a """ for x in list("abc"): try: break except: pass return x def try_no_break_except_return(): """ >>> print(try_no_break_except_return()) a """ for x in list("abc"): try: x() break except: return x return x Cython-0.26.1/tests/run/tryexcept.pyx0000664000175000017500000002303412542002467020373 0ustar stefanstefan00000000000000def single_except(a, x): """ >>> single_except(ValueError, None) 2 >>> single_except(ValueError, ValueError('test')) 3 >>> single_except(ValueError, TypeError('test')) Traceback (most recent call last): TypeError: test """ cdef int i try: i = 1 if x: raise x i = 2 except a: i = 3 return i def single_except_builtin(a, x): """ >>> single_except_builtin(ValueError, None) 2 >>> single_except_builtin(ValueError, ValueError('test')) 3 >>> single_except_builtin(ValueError, TypeError('test')) Traceback (most recent call last): TypeError: test """ cdef int i try: i = 1 if x: raise x i = 2 except ValueError: i = 3 return i def single_except_expression(a, x): """ >>> single_except_expression([[ValueError]], None) 2 >>> single_except_expression([[ValueError]], ValueError('test')) 3 >>> single_except_expression([[ValueError]], TypeError('test')) Traceback (most recent call last): TypeError: test """ cdef int i try: i = 1 if x: raise x i = 2 except a[0][0]: i = 3 return i def double_except_no_raise(a,b): """ >>> double_except_no_raise(TypeError, ValueError) 1 """ d = a or b # mark used cdef int i try: i = 1 except a: i = 2 except b: i = 3 return i def double_except_raise(x, a, b): """ >>> double_except_raise(None, TypeError, ValueError) 1 >>> double_except_raise(TypeError('test'), TypeError, ValueError) 2 >>> double_except_raise(ValueError('test'), TypeError, ValueError) 3 >>> double_except_raise(None, TypeError, ValueError) 1 """ cdef int i try: i = 1 if x: raise x except a: i = 2 except b: i = 3 return i def target_except_no_raise(a): """ >>> target_except_no_raise(TypeError) 1 """ d = a # mark used cdef int i try: i = 1 except a, b: i = 2 return i def target_except_raise(x, a): """ >>> target_except_raise(None, TypeError) 1 >>> target_except_raise(TypeError('test'), TypeError) 2 >>> target_except_raise(ValueError('test'), TypeError) Traceback (most recent call last): ValueError: test >>> target_except_raise(None, TypeError) 1 """ cdef int i try: i = 1 if x: raise x except a, b: i = 2 assert isinstance(b, a) return i def tuple_except_builtin(x): """ >>> tuple_except_builtin(None) 1 >>> tuple_except_builtin(TypeError('test')) 2 >>> tuple_except_builtin(ValueError('test')) 2 >>> tuple_except_builtin(IndexError('5')) Traceback (most recent call last): IndexError: 5 >>> tuple_except_builtin(None) 1 """ cdef int i try: i = 1 if x: raise x except (TypeError, ValueError): i = 2 return i def normal_and_bare_except_no_raise(a): """ >>> normal_and_bare_except_no_raise(TypeError) 1 """ d = a # mark used cdef int i try: i = 1 except a: i = 2 except: i = 3 return i def normal_and_bare_except_raise(x, a): """ >>> normal_and_bare_except_raise(None, TypeError) 1 >>> normal_and_bare_except_raise(TypeError('test'), TypeError) 2 >>> normal_and_bare_except_raise(ValueError('test'), TypeError) 3 >>> normal_and_bare_except_raise(None, TypeError) 1 """ cdef int i try: i = 1 if x: raise x except a: i = 2 except: i = 3 return i def tuple_except_index_target_no_raise(a, b, c): """ >>> l = [None, None] >>> 
tuple_except_index_target_no_raise(TypeError, ValueError, l) 1 >>> l [None, None] """ d = a or b or c # mark used cdef int i try: i = 1 except (a, b), c[1]: i = 2 return i def tuple_except_index_target_raise(x, a, b, c): """ >>> l = [None, None] >>> tuple_except_index_target_raise(None, TypeError, ValueError, l) 1 >>> l [None, None] >>> tuple_except_index_target_raise(TypeError('test'), TypeError, ValueError, l) 2 >>> l[0] is None, isinstance(l[1], TypeError) (True, True) >>> tuple_except_index_target_raise(ValueError('test'), TypeError, ValueError, l) 2 >>> l[0] is None, isinstance(l[1], ValueError) (True, True) >>> tuple_except_index_target_raise(IndexError('5'), TypeError, ValueError, l) Traceback (most recent call last): IndexError: 5 >>> tuple_except_index_target_raise(None, TypeError, ValueError, l) 1 >>> l[0] is None, isinstance(l[1], ValueError) (True, True) """ cdef int i try: i = 1 if x: raise x except (a, b), c[1]: i = 2 assert isinstance(c[1], (a,b)) return i def loop_bare_except_no_raise(a, b, int c): """ >>> loop_bare_except_no_raise(TypeError, range(2), 2) (1, 3528) """ cdef int i = 1 for a in b: try: c = c * 42 except: i = 17 return i,c def loop_bare_except_raise(a, b, int c): """ >>> loop_bare_except_raise(TypeError, range(2), 2) (1, 3528) >>> loop_bare_except_raise(TypeError, range(3), 2) (17, 148176) >>> loop_bare_except_raise(TypeError, range(4), 2) (17, 6223392) """ cdef int i = 1 for a in b: try: c = c * 42 if a == 2: raise TypeError('test') except: i = 17 return i,c def bare_except_reraise_no_raise(l): """ >>> l = [None] >>> bare_except_reraise_no_raise(l) 1 >>> l [None] """ d = l # mark used cdef int i try: i = 1 except: l[0] = 2 raise return i def bare_except_reraise_raise(x, l): """ >>> l = [None] >>> bare_except_reraise_raise(None, l) 1 >>> l [None] >>> bare_except_reraise_raise(TypeError('test'), l) Traceback (most recent call last): TypeError: test >>> l [2] >>> l = [None] >>> bare_except_reraise_raise(None, l) 1 >>> l [None] """ cdef int i try: i = 1 if x: raise x except: l[0] = 2 raise return i def except_as_no_raise(a): """ >>> except_as_no_raise(TypeError) 1 """ d = a # mark used try: i = 1 except a as b: i = 2 return i def except_as_raise(x, a): """ >>> except_as_raise(None, TypeError) 1 >>> except_as_raise(TypeError('test'), TypeError) 2 >>> except_as_raise(ValueError('test'), TypeError) Traceback (most recent call last): ValueError: test >>> except_as_raise(None, TypeError) 1 """ try: i = 1 if x: raise x except a as b: i = 2 assert isinstance(b, a) return i def except_as_no_raise_does_not_touch_target(a): """ >>> i,b = except_as_no_raise_does_not_touch_target(TypeError) >>> i 1 >>> b 1 """ d = a # mark used b = 1 try: i = 1 except a as b: i = 2 return i, b def except_as_raise_does_not_delete_target(x, a): """ >>> except_as_raise_does_not_delete_target(None, TypeError) 1 >>> except_as_raise_does_not_delete_target(TypeError('test'), TypeError) 2 >>> except_as_raise_does_not_delete_target(ValueError('test'), TypeError) Traceback (most recent call last): ValueError: test >>> except_as_raise_does_not_delete_target(None, TypeError) 1 """ b = 1 try: i = 1 if x: raise x except a as b: i = 2 assert isinstance(b, a) # exception variable leaks with Py2 except-as semantics if x: assert isinstance(b, a) else: assert b == 1 return i def except_as_raise_with_empty_except(x, a): """ >>> except_as_raise_with_empty_except(None, TypeError) >>> except_as_raise_with_empty_except(TypeError('test'), TypeError) >>> except_as_raise_with_empty_except(ValueError('test'), 
TypeError) Traceback (most recent call last): ValueError: test >>> except_as_raise_with_empty_except(None, TypeError) """ try: if x: raise x b = 1 except a as b: pass if x: assert isinstance(b, a) else: assert b == 1 def complete_except_as_no_raise(a, b): """ >>> complete_except_as_no_raise(TypeError, ValueError) 5 """ d = a or b # mark used try: i = 1 except (a, b) as c: i = 2 except (b, a) as c: i = 3 except: i = 4 else: i = 5 return i def complete_except_as_raise(x, a, b): """ >>> complete_except_as_raise(None, TypeError, ValueError) 5 >>> complete_except_as_raise(TypeError('test'), TypeError, ValueError) 2 >>> complete_except_as_raise(ValueError('test'), TypeError, ValueError) 2 >>> complete_except_as_raise(IndexError('5'), TypeError, ValueError) 4 >>> complete_except_as_raise(None, TypeError, ValueError) 5 """ try: i = 1 if x: raise x except (a, b) as c: i = 2 assert isinstance(c, (a, b)) except (b, a) as c: i = 3 assert isinstance(c, (a, b)) except: i = 4 else: i = 5 return i Cython-0.26.1/tests/run/int_float_builtins_as_casts_T400.pyx0000664000175000017500000001366113143605603024627 0ustar stefanstefan00000000000000# ticket: 400 cimport cython @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def double_to_short_int(double x): """ >>> double_to_short_int(4.1) 4 >>> double_to_short_int(4) 4 """ cdef short r = int(x) return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def double_to_pyssizet_int(double x): """ >>> double_to_pyssizet_int(4.1) 4 >>> double_to_pyssizet_int(4) 4 """ cdef Py_ssize_t r = int(x) return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def int_to_pyssizet_int(int x): """ >>> int_to_pyssizet_int(4.1) 4 >>> int_to_pyssizet_int(4) 4 """ cdef Py_ssize_t r = int(x) return r ## @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") ## @cython.test_fail_if_path_exists("//SimpleCallNode") ## def double_to_pyssizet_float(double x): ## """ ## >>> double_to_pyssizet_float(4.1) ## 4 ## >>> double_to_pyssizet_float(4) ## 4 ## """ ## cdef Py_ssize_t r = float(x) ## return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def int_to_short_int(int x): """ >>> int_to_short_int(4) 4 """ cdef short r = int(x) return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def short_to_float_float(short x): """ >>> short_to_float_float(4) 4.0 """ cdef float r = float(x) return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def short_to_double_float(short x): """ >>> short_to_double_float(4) 4.0 """ cdef double r = float(x) return r @cython.test_assert_path_exists("//SingleAssignmentNode/TypecastNode") @cython.test_fail_if_path_exists("//SimpleCallNode") def short_to_double_int(short x): """ >>> short_to_double_int(4) 4.0 """ cdef double r = int(x) return r @cython.test_fail_if_path_exists("//SimpleCallNode") def float_to_float_float(float x): """ >>> 4.05 < float_to_float_float(4.1) < 4.15 True >>> float_to_float_float(4) 4.0 """ cdef float r = float(x) return r @cython.test_fail_if_path_exists("//SimpleCallNode", "//SingleAssignmentNode//TypecastNode") def double_to_double_float(double x): """ >>> 4.05 < 
double_to_double_float(4.1) < 4.15 True >>> double_to_double_float(4) 4.0 """ cdef double r = float(x) return r # tests that cannot be optimised @cython.test_fail_if_path_exists("//TypecastNode") @cython.test_assert_path_exists("//PythonCapiCallNode") def double_to_py_int(double x): """ >>> double_to_py_int(4.1) 4 >>> double_to_py_int(4) 4 """ return int(x) @cython.test_fail_if_path_exists("//SingleAssignmentNode//TypecastNode") @cython.test_assert_path_exists("//PythonCapiCallNode") def double_to_double_int(double x): """ >>> double_to_double_int(4.1) 4.0 >>> double_to_double_int(4) 4.0 """ cdef double r = int(x) return r @cython.test_fail_if_path_exists("//SingleAssignmentNode//TypecastNode") @cython.test_assert_path_exists( "//PythonCapiCallNode", "//PythonCapiCallNode/PythonCapiFunctionNode/@cname = 'truncf'", ) def float_to_float_int(float x): """ >>> float_to_float_int(4.1) 4.0 >>> float_to_float_int(4) 4.0 """ cdef float r = int(x) return r @cython.test_fail_if_path_exists("//SingleAssignmentNode//TypecastNode") @cython.test_assert_path_exists( "//PythonCapiCallNode", "//PythonCapiCallNode/PythonCapiFunctionNode/@cname = 'truncf'", ) def float_to_double_int(float x): """ >>> float_to_double_int(4.1) 4.0 >>> float_to_double_int(4) 4.0 """ cdef double r = int(x) return r @cython.test_fail_if_path_exists("//SingleAssignmentNode//TypecastNode") @cython.test_assert_path_exists( "//PythonCapiCallNode", "//PythonCapiCallNode/PythonCapiFunctionNode/@cname = 'trunc'", ) def double_to_float_int(double x): """ >>> double_to_float_int(4.1) 4.0 >>> double_to_float_int(4) 4.0 """ cdef float r = int(x) return r @cython.test_fail_if_path_exists("//SimpleCallNode") @cython.test_assert_path_exists("//PythonCapiCallNode") def object_float(x): """ >>> 4.05 < object_float(4.1) < 4.15 True >>> object_float(2**100) == float(2**100) True >>> object_float(2.5**100) == float(2.5**100) True >>> object_float(4) 4.0 >>> object_float('4') 4.0 >>> object_float('4.0') 4.0 >>> object_float('4'.encode('ascii')) 4.0 >>> object_float('4.0'.encode('ascii')) 4.0 """ return float(x) @cython.test_fail_if_path_exists("//SimpleCallNode") @cython.test_assert_path_exists("//PythonCapiCallNode") def object_int(x): """ >>> object_int(4) 4 >>> object_int(2**100) == 2**100 or object_int(2**100) True >>> object_int(-(2**100)) == -(2**100) or object_int(-(2**100)) True >>> object_int(4.1) 4 >>> object_int(4.0) 4 >>> object_int('4') 4 >>> object_int('4'.encode('ascii')) 4 """ return int(x) @cython.test_fail_if_path_exists("//SimpleCallNode", "//CoerceFromPyTypeNode") def no_args_int_cint(): """ >>> no_args_int_cint() 0 """ cdef int x = int() return x @cython.test_fail_if_path_exists("//SimpleCallNode", "//CoerceFromPyTypeNode") def no_args_float_cdouble(): """ >>> no_args_float_cdouble() (0.0, 0.0) """ cdef double xd = float() cdef float xf = float() return xd, xf Cython-0.26.1/tests/run/includes/0000775000175000017500000000000013151203436017402 5ustar stefanstefan00000000000000Cython-0.26.1/tests/run/includes/b.pxd0000664000175000017500000000004712542002467020345 0ustar stefanstefan00000000000000cdef extern from "b.h": cdef int b Cython-0.26.1/tests/run/includes/all.pyx0000664000175000017500000000045412542002467020723 0ustar stefanstefan00000000000000__doc__ = """ >>> test() 1, 2, 3, 4, 5 """ # Make sure all of these happen in order. 
cdef extern from "a.h": cdef int a from b cimport b cdef extern from "c.h": cdef int c cimport indirect_d cdef extern from "e.h": cdef int e def test(): print a, b, c, indirect_d.d, e Cython-0.26.1/tests/run/includes/b.h0000664000175000017500000000001412542002467017773 0ustar stefanstefan00000000000000int b = a+1;Cython-0.26.1/tests/run/includes/includefile.pxi0000664000175000017500000000004612542002467022413 0ustar stefanstefan00000000000000# this file will be included XYZ = 5 Cython-0.26.1/tests/run/includes/c.h0000664000175000017500000000001412542002467017774 0ustar stefanstefan00000000000000int c = b+1;Cython-0.26.1/tests/run/includes/a.h0000664000175000017500000000001212542002467017770 0ustar stefanstefan00000000000000int a = 1;Cython-0.26.1/tests/run/includes/d.h0000664000175000017500000000001412542002467017775 0ustar stefanstefan00000000000000int d = c+1;Cython-0.26.1/tests/run/includes/indirect_d.pxd0000664000175000017500000000002112542002467022220 0ustar stefanstefan00000000000000from d cimport d Cython-0.26.1/tests/run/includes/d.pxd0000664000175000017500000000004712542002467020347 0ustar stefanstefan00000000000000cdef extern from "d.h": cdef int d Cython-0.26.1/tests/run/includes/e.h0000664000175000017500000000001412542002467017776 0ustar stefanstefan00000000000000int e = d+1;Cython-0.26.1/tests/run/pass.pyx0000664000175000017500000000005612542002467017311 0ustar stefanstefan00000000000000def f(): """ >>> f() """ pass Cython-0.26.1/tests/run/literalslice.pyx0000664000175000017500000000156212542002467021022 0ustar stefanstefan00000000000000__doc__ = u""" >>> test_str(1) 'b' >>> test_unicode_ascii(2) u'c' >>> test_unicode(2) == u'\u00e4' True >>> test_int_list(2) 3 >>> test_str_list(1) 'bcd' >>> test_int_tuple(2) 3 >>> test_str_tuple(0) 'a' >>> test_mix_tuple(1) 'abc' >>> test_mix_tuple(0) 1 """ import sys IS_PY3 = sys.version_info[0] >= 3 if IS_PY3: __doc__ = __doc__.replace(u" u'", u" '") else: __doc__ = __doc__.replace(u" b'", u" '") def test_str(n): return "abcd"[n] def test_unicode_ascii(n): return u"abcd"[n] def test_unicode(n): return u"\u00fc\u00f6\u00e4"[n] def test_int_list(n): return [1,2,3,4][n] def test_str_list(n): return ["a","bcd","efg","xyz"][n] def test_int_tuple(n): return (1,2,3,4)[n] def test_str_tuple(n): return ("a","bcd","efg","xyz")[n] def test_mix_tuple(n): return (1, "abc", u"\u00fc", 1.1)[n] Cython-0.26.1/tests/run/short_circuit_T404.pyx0000664000175000017500000000107712542002467021743 0ustar stefanstefan00000000000000# ticket: 404 cdef long foo(long x): print "foo(%s)" % x return x def test_or(long a, long b): """ >>> test_or(1,2) foo(1) 1 >>> test_or(1,0) foo(1) 1 >>> test_or(0,2) foo(0) foo(2) 2 >>> test_or(0,0) foo(0) foo(0) 0 """ print foo(a) or foo(b) def test_and(long a, long b): """ >>> test_and(1,2) foo(1) foo(2) 2 >>> test_and(1,0) foo(1) foo(0) 0 >>> test_and(0,2) foo(0) 0 >>> test_and(0,0) foo(0) 0 """ print foo(a) and foo(b) Cython-0.26.1/tests/run/reimport_from_package.srctree0000664000175000017500000000137212542002467023533 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules = cythonize("**/*.pyx"), ) ######## a.pyx ######## import sys import a assert a in sys.modules.values(), list(sys.modules) assert sys.modules['a'] is a, list(sys.modules) from atest.package import module ######## atest/__init__.py ######## ######## atest/package/__init__.py ######## ######## 
atest/package/module.pyx ######## import sys assert 'atest.package.module' in sys.modules import a import atest.package.module as module assert module in sys.modules.values(), list(sys.modules) assert sys.modules['atest.package.module'] is module, list(sys.modules) Cython-0.26.1/tests/run/for_in_range_T372.pyx0000664000175000017500000000401612542002467021512 0ustar stefanstefan00000000000000# ticket: 372 cimport cython @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_modify(): """ >>> test_modify() 0 1 2 3 4 (4, 0) """ cdef int i, n = 5 for i in range(n): print i n = 0 print return i,n @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_negindex(): """ >>> test_negindex() 6 5 4 3 2 (2, 0) """ cdef int i, n = 5 for i in range(n+1, 1, -1): print i n = 0 return i,n @cython.test_assert_path_exists("//ForFromStatNode", "//ForFromStatNode//PrintStatNode//CoerceToPyTypeNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_negindex_inferred(): """ >>> test_negindex_inferred() 5 4 3 2 (2, 0) """ cdef int n = 5 for i in range(n, 1, -1): print i n = 0 return i,n @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_fix(): """ >>> test_fix() 0 1 2 3 4 4 """ cdef int i for i in range(5): print i print return i @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_break(): """ >>> test_break() 0 1 2 (2, 0) """ cdef int i, n = 5 for i in range(n): print i n = 0 if i == 2: break else: print "FAILED!" print return i,n @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def test_return(): """ >>> test_return() 0 1 2 (2, 0) """ cdef int i, n = 5 for i in range(n): print i n = 0 if i == 2: return i,n print return "FAILED!" 
Cython-0.26.1/tests/run/eval.pyx0000664000175000017500000000070112542002467017267 0ustar stefanstefan00000000000000# mode: run # tag: eval GLOBAL = 123 def eval_simple(local): """ >>> eval_simple(321) (123, 321) """ return eval('GLOBAL, local') def eval_class_scope(): """ >>> eval_class_scope().c 3 """ class TestClassScope: a = 1 b = 2 c = eval('a + b') return TestClassScope def eval_locals(a, b): """ >>> eval_locals(1, 2) (1, 2) """ return eval('a, b', {}, locals()) Cython-0.26.1/tests/run/tandemstats.pyx0000664000175000017500000000026212542002467020671 0ustar stefanstefan00000000000000cdef int i, j, k i = 17; j = 42; k = i * j if j > k: i = 88 else: i = 99; j = k def result(): """ >>> result() == (99, 17*42, 17*42) True """ return (i,j,k) Cython-0.26.1/tests/run/pure_pxd.srctree0000664000175000017500000000244113023021033021001 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import a; a.test()" ######## setup.py ######## from Cython.Build.Dependencies import cythonize from distutils.core import setup setup( ext_modules=cythonize("a.py"), ) ######## a.py ######## class ExtTypePass(object): pass class ExtTypePxdDocstring(object): pass class ExtTypeDocstring(object): """huhu!""" # this should override the .pxd docstring class ExtTypeAttributes(object): """ >>> x = ExtTypeAttributes() >>> x.b [1, 2, 3] """ def __init__(self): self.a = 123 self.b = [1, 2, 3] def test(): import os.path assert not os.path.basename(__file__).endswith('.py'), __file__ assert not os.path.basename(__file__).endswith('.pyc'), __file__ assert not os.path.basename(__file__).endswith('.pyo'), __file__ assert not ExtTypePass().__doc__, ExtTypePass().__doc__ assert ExtTypeDocstring().__doc__ == "huhu!", ExtTypeDocstring().__doc__ assert ExtTypePxdDocstring().__doc__ == "ho, ho, ho!", ExtTypePxdDocstring().__doc__ import doctest doctest.testmod(verbose=True) ######## a.pxd ######## cdef class ExtTypePass: pass cdef class ExtTypePxdDocstring: """ho, ho, ho!""" cdef class ExtTypeAttributes: cdef int a cdef readonly list b Cython-0.26.1/tests/run/slice_ptr.pyx0000664000175000017500000000436312542002467020334 0ustar stefanstefan00000000000000from libc.stdlib cimport malloc, free from cpython.object cimport Py_EQ, Py_NE def double_ptr_slice(x, L, int a, int b): """ >>> L = list(range(10)) >>> double_ptr_slice(5, L, 0, 10) >>> double_ptr_slice(6, L, 0, 10) >>> double_ptr_slice(None, L, 0, 10) >>> double_ptr_slice(0, L, 3, 7) >>> double_ptr_slice(5, L, 3, 7) >>> double_ptr_slice(9, L, 3, 7) >>> double_ptr_slice(EqualsEvens(), L, 0, 10) >>> double_ptr_slice(EqualsEvens(), L, 1, 10) """ cdef double *L_c = NULL try: L_c = malloc(len(L) * sizeof(double)) for i, a in enumerate(L): L_c[i] = L[i] assert (x in L_c[:b]) == (x in L[:b]) assert (x not in L_c[:b]) == (x not in L[:b]) assert (x in L_c[a:b]) == (x in L[a:b]) assert (x not in L_c[a:b]) == (x not in L[a:b]) assert (x in L_c[a:b:2]) == (x in L[a:b:2]) assert (x not in L_c[a:b:2]) == (x not in L[a:b:2]) finally: free(L_c) def void_ptr_slice(py_x, L, int a, int b): """ >>> L = list(range(10)) >>> void_ptr_slice(5, L, 0, 10) >>> void_ptr_slice(6, L, 0, 10) >>> void_ptr_slice(None, L, 0, 10) >>> void_ptr_slice(0, L, 3, 7) >>> void_ptr_slice(5, L, 3, 7) >>> void_ptr_slice(9, L, 3, 7) """ # I'm using the fact that small Python ints are cached. 
cdef void **L_c = NULL cdef void *x = py_x try: L_c = malloc(len(L) * sizeof(void*)) for i, a in enumerate(L): L_c[i] = L[i] assert (x in L_c[:b]) == (py_x in L[:b]) assert (x not in L_c[:b]) == (py_x not in L[:b]) assert (x in L_c[a:b]) == (py_x in L[a:b]) assert (x not in L_c[a:b]) == (py_x not in L[a:b]) assert (x in L_c[a:b:2]) == (py_x in L[a:b:2]) assert (x not in L_c[a:b:2]) == (py_x not in L[a:b:2]) finally: free(L_c) cdef class EqualsEvens: """ >>> e = EqualsEvens() >>> e == 2 True >>> e == 5 False >>> [e == k for k in range(4)] [True, False, True, False] """ def __richcmp__(self, other, int op): if op == Py_EQ: return other % 2 == 0 elif op == Py_NE: return other % 2 == 1 else: return False Cython-0.26.1/tests/run/cpp_template_ref_args.h0000664000175000017500000000053712574327400022305 0ustar stefanstefan00000000000000#ifndef _TEMPLATE_ARGS_H_ #define _TEMPLATE_ARGS_H_ template struct Bar { Bar & ref() { return *this; } const Bar & const_ref() { return *this; } const Bar & const_ref_const() const { return *this; } T value; }; template struct Foo { int bar_value(const Bar & bar) { return bar.value; } }; #endif Cython-0.26.1/tests/run/r_jeff_epler_1.pyx0000664000175000017500000000017212542002467021204 0ustar stefanstefan00000000000000def blowup(p): """ >>> blowup([2, 3, 5]) 1 """ cdef int n, i n = 10 i = 1 return n % p[i] Cython-0.26.1/tests/run/auto_cpdef_closures.py0000664000175000017500000000050012542002467022175 0ustar stefanstefan00000000000000# cython: auto_cpdef=True # mode:run # tag: directive,auto_cpdef,closures def closure_func(x): """ >>> c = closure_func(2) >>> c() 2 """ def c(): return x return c def generator_func(): """ >>> for i in generator_func(): print(i) 1 2 """ yield 1 yield 2 Cython-0.26.1/tests/run/generator_expressions_and_locals.pyx0000664000175000017500000000035112542002467025150 0ustar stefanstefan00000000000000# mode: run # tag: genexpr, locals # ticket: 715 def genexpr_not_in_locals(): """ >>> genexpr_not_in_locals() {'t': (0, 1, 4, 9, 16, 25, 36, 49, 64, 81)} """ t = tuple(x*x for x in range(10)) return locals() Cython-0.26.1/tests/run/reraise.py0000664000175000017500000000132412542002467017604 0ustar stefanstefan00000000000000 def reraise(): raise def test_reraise(): """ >>> test_reraise() Traceback (most recent call last): ValueError: TEST """ try: raise ValueError("TEST") except ValueError: raise def test_reraise_indirect(): """ >>> test_reraise_indirect() Traceback (most recent call last): ValueError: TEST INDIRECT """ try: raise ValueError("TEST INDIRECT") except ValueError: reraise() def test_reraise_error(): """ >>> try: test_reraise_error() ... except (RuntimeError, TypeError): pass # Py2, Py3, ... ... 
else: print("FAILED") """ import sys if hasattr(sys, 'exc_clear'): # Py2 sys.exc_clear() raise Cython-0.26.1/tests/run/constant_folding_cy.pyx0000664000175000017500000000444412542002467022376 0ustar stefanstefan00000000000000# coding=utf8 # mode: run # tag: constant_folding cimport cython bstring = b'abc\xE9def' ustring = u'abc\xE9def' surrogates_ustring = u'abc\U00010000def' @cython.test_fail_if_path_exists( "//SliceIndexNode", ) def bytes_slicing2(): """ >>> a,b,c,d = bytes_slicing2() >>> a == bstring[:] True >>> b == bstring[2:] True >>> c == bstring[:4] True >>> d == bstring[2:4] True """ str0 = b'abc\xE9def'[:] str1 = b'abc\xE9def'[2:] str2 = b'abc\xE9def'[:4] str3 = b'abc\xE9def'[2:4] return str0, str1, str2, str3 @cython.test_fail_if_path_exists( "//SliceIndexNode", ) def unicode_slicing2(): """ >>> a,b,c,d = unicode_slicing2() >>> a == ustring[:] True >>> b == ustring[2:] True >>> c == ustring[:4] True >>> d == ustring[2:4] True """ str0 = u'abc\xE9def'[:] str1 = u'abc\xE9def'[2:] str2 = u'abc\xE9def'[:4] str3 = u'abc\xE9def'[2:4] return str0, str1, str2, str3 @cython.test_assert_path_exists( "//SliceIndexNode", ) def unicode_slicing_unsafe_surrogates2(): """ >>> unicode_slicing_unsafe_surrogates2() == surrogates_ustring[2:] True """ ustring = u'abc\U00010000def'[2:] return ustring @cython.test_fail_if_path_exists( "//SliceIndexNode", ) def unicode_slicing_safe_surrogates2(): """ >>> unicode_slicing_safe_surrogates2() == surrogates_ustring[:2] True >>> print(unicode_slicing_safe_surrogates2()) ab """ ustring = u'abc\U00010000def'[:2] return ustring @cython.test_fail_if_path_exists( "//ComprehensionNode", "//ForInStatNode", ) @cython.test_assert_path_exists( "//SetNode", ) def for_in_empty_setcomp(): """ >>> s = for_in_empty_setcomp() >>> isinstance(s, set) True >>> len(s) 0 """ return {i for i in []} @cython.test_fail_if_path_exists( "//ReturnStatNode//AddNode", ) @cython.test_assert_path_exists( "//ListNode//AddNode", ) def add_strings(): """ >>> u, b, rest = add_strings() >>> u == 'abcdef' or u True >>> b == b'abcdef' or b True >>> rest 1 """ a = ["abc" + "def"] # not currently optimised # FIXME: test encodings and unicode escapes return u"abc" + u"def", b"abc" + b"def", a[0] and 1 Cython-0.26.1/tests/run/knuth_man_or_boy_test.pyx0000664000175000017500000000223112542002467022734 0ustar stefanstefan00000000000000# mode: run # tag: closures # Cython version of Knuth's "man or boy" test -- "It separates the man # Algol 60 compilers from the boy Algol 60 compilers." Here's the # original (from wikipedia): # # begin # real procedure A (k, x1, x2, x3, x4, x5); # value k; integer k; # begin # real procedure B; # begin k:= k - 1; # B:= A := A (k, B, x1, x2, x3, x4); # end; # if k <= 0 then A:= x4 + x5 else B; # end; # outreal (A (10, 1, -1, -1, 1, 0)); # end; # # and a table of values: # # k A # 0 1 # 1 0 # 2 -2 # 3 0 # 4 1 # 5 0 # 6 1 # 7 -1 # 8 -10 # 9 -30 # 10 -67 # # Past 10 or so, we blow the C stack -- can't just set a higher recursion limit # to get around that one. 
# def compute(val): if isinstance(val, int): return val else: return val() def a(in_k, x1, x2, x3, x4, x5): """ >>> import sys >>> sys.setrecursionlimit(1350) >>> a(10, 1, -1, -1, 1, 0) -67 """ k = [in_k] def b(): k[0] -= 1 return a(k[0], b, x1, x2, x3, x4) return compute(x4) + compute(x5) if k[0] <= 0 else b() Cython-0.26.1/tests/run/cstringmeth.pyx0000664000175000017500000000014212542002467020666 0ustar stefanstefan00000000000000__doc__ = u""" >>> y ('1', '2', '3') >>> x '1foo2foo3' """ y = ('1','2','3') x = 'foo'.join(y) Cython-0.26.1/tests/run/inlined_generator_expressions.pyx0000664000175000017500000001471112542002467024500 0ustar stefanstefan00000000000000 cimport cython ## def range_tuple_genexp(int N): ## """ ## >>> range_tuple_genexp(5) ## (0, 1, 2, 3, 4) ## """ ## return tuple(i for i in range(N)) @cython.test_assert_path_exists('//ForFromStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode', '//ForInStatNode') def range_sum(int N): """ >>> sum(range(10)) 45 >>> range_sum(10) 45 """ result = sum(i for i in range(N)) return result @cython.test_assert_path_exists('//ForFromStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode', '//CoerceFromPyTypeNode//InlinedGeneratorExpressionNode', '//ForInStatNode') def range_sum_typed(int N): """ >>> sum(range(10)) 45 >>> range_sum_typed(10) 45 """ cdef int result = sum(i for i in range(N)) return result @cython.test_assert_path_exists('//ForFromStatNode', "//InlinedGeneratorExpressionNode", "//ReturnStatNode//InlinedGeneratorExpressionNode", "//ReturnStatNode//CoerceToPyTypeNode//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode', '//CoerceFromPyTypeNode//InlinedGeneratorExpressionNode', '//TypecastNode//InlinedGeneratorExpressionNode', '//ForInStatNode') def return_range_sum_cast(int N): """ >>> sum(range(10)) 45 >>> return_range_sum_cast(10) 45 """ return sum(i for i in range(N)) @cython.test_assert_path_exists('//ForFromStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode', '//ForInStatNode') def return_range_sum(int N): """ >>> sum(range(10)) 45 >>> return_range_sum(10) 45 """ return sum(i for i in range(N)) @cython.test_assert_path_exists('//ForFromStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode', '//ForInStatNode') def return_range_sum_squares(int N): """ >>> sum([i*i for i in range(10)]) 285 >>> return_range_sum_squares(10) 285 >>> print(sum([i*i for i in range(10000)])) 333283335000 >>> print(return_range_sum_squares(10000)) 333283335000 """ return sum(i*i for i in range(N)) @cython.test_assert_path_exists('//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode') def return_sum_squares(seq): """ >>> sum([i*i for i in range(10)]) 285 >>> return_sum_squares(range(10)) 285 >>> print(sum([i*i for i in range(10000)])) 333283335000 >>> print(return_sum_squares(range(10000))) 333283335000 """ return sum(i*i for i in seq) @cython.test_assert_path_exists('//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode') def return_sum_squares_start(seq, int start): """ >>> sum([i*i for i in range(10)], -1) 284 >>> return_sum_squares_start(range(10), -1) 284 >>> print(sum([i*i for i in range(10000)], 9)) 333283335009 >>> print(return_sum_squares_start(range(10000), 9)) 333283335009 """ return sum((i*i for i in seq), start) 
@cython.test_assert_path_exists( '//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists( '//SimpleCallNode', "//InlinedGeneratorExpressionNode//CoerceToPyTypeNode") def return_typed_sum_squares_start(seq, int start): """ >>> sum([i*i for i in range(10)], -1) 284 >>> return_typed_sum_squares_start(range(10), -1) 284 >>> print(sum([i*i for i in range(1000)], 9)) 332833509 >>> print(return_typed_sum_squares_start(range(1000), 9)) 332833509 """ cdef int i return sum((i*i for i in seq), start) @cython.test_assert_path_exists('//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode') def return_sum_of_listcomp_consts_start(seq, int start): """ >>> sum([1 for i in range(10) if i > 3], -1) 5 >>> return_sum_of_listcomp_consts_start(range(10), -1) 5 >>> print(sum([1 for i in range(10000) if i > 3], 9)) 10005 >>> print(return_sum_of_listcomp_consts_start(range(10000), 9)) 10005 """ return sum([1 for i in seq if i > 3], start) @cython.test_assert_path_exists('//ForInStatNode', "//InlinedGeneratorExpressionNode", # the next test is for a deficiency # (see InlinedGeneratorExpressionNode.coerce_to()), # hope this breaks one day "//CoerceFromPyTypeNode//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists('//SimpleCallNode') def return_typed_sum_of_listcomp_consts_start(seq, int start): """ >>> sum([1 for i in range(10) if i > 3], -1) 5 >>> return_typed_sum_of_listcomp_consts_start(range(10), -1) 5 >>> print(sum([1 for i in range(10000) if i > 3], 9)) 10005 >>> print(return_typed_sum_of_listcomp_consts_start(range(10000), 9)) 10005 """ return sum([1 for i in seq if i > 3], start) @cython.test_assert_path_exists( '//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists( '//SimpleCallNode', "//InlinedGeneratorExpressionNode//CoerceToPyTypeNode") def return_typed_sum_cond_exp(seq): """ >>> return_typed_sum_cond_exp([1,2,3,4]) 2 """ cdef int i return sum( 0 if i%2 else 1 for i in seq ) @cython.test_assert_path_exists( '//ForInStatNode', "//InlinedGeneratorExpressionNode") @cython.test_fail_if_path_exists( '//SimpleCallNode', "//InlinedGeneratorExpressionNode//CoerceToPyTypeNode") def return_typed_sum_cond_exp_in(seq): """ >>> return_typed_sum_cond_exp_in([1,2,3,4,5,6,7,8,9]) 3 """ cdef int i return sum( 0 if i%3 in (0,1) else 1 for i in seq ) Cython-0.26.1/tests/run/cpdef_void_return.pyx0000664000175000017500000000110413023021033022020 0ustar stefanstefan00000000000000cpdef void unraisable(): """ >>> unraisable() here """ print('here') raise RuntimeError() cpdef void raisable() except *: """ >>> raisable() Traceback (most recent call last): ... 
RuntimeError """ print('here') raise RuntimeError() cdef class A: """ >>> A().foo() A """ cpdef void foo(self): print "A" cdef class B(A): """ >>> B().foo() B """ cpdef void foo(self): print "B" class C(B): """ >>> C().foo() C """ def foo(self): print "C" Cython-0.26.1/tests/run/assigned_builtin_methods.pyx0000664000175000017500000000320412542002467023407 0ustar stefanstefan00000000000000# mode: run # tag: builtins cimport cython @cython.test_assert_path_exists( '//ReturnStatNode//PythonCapiCallNode') def unbound_dict_get(d): """ >>> unbound_dict_get({}) >>> unbound_dict_get({1:2}) 2 """ get = dict.get return get(d, 1) @cython.test_assert_path_exists( '//ReturnStatNode//PythonCapiCallNode') def bound_dict_get(dict d): """ >>> bound_dict_get({}) >>> bound_dict_get({1:2}) 2 """ get = d.get return get(1) @cython.test_fail_if_path_exists( '//ReturnStatNode//PythonCapiCallNode') @cython.test_assert_path_exists( '//ReturnStatNode//SimpleCallNode') def bound_dict_get_reassign(dict d): """ >>> bound_dict_get_reassign({}) >>> bound_dict_get_reassign({1:2}) 2 """ get = d.get d = {1: 3} return get(1) @cython.test_assert_path_exists( '//PythonCapiCallNode//NameNode[@name="l"]') def unbound_list_sort(list l): """ >>> unbound_list_sort([1, 3, 2]) [1, 2, 3] >>> unbound_list_sort([1, 3, 2]) [1, 2, 3] """ sort = list.sort sort(l) return l @cython.test_assert_path_exists( '//PythonCapiCallNode//NameNode[@name="l"]') def bound_list_sort(list l): """ >>> bound_list_sort([1, 3, 2]) [1, 2, 3] >>> bound_list_sort([1, 3, 2]) [1, 2, 3] """ sort = l.sort sort() return l @cython.test_fail_if_path_exists( '//PythonCapiCallNode') def bound_list_sort_reassign(list l): """ >>> bound_list_sort_reassign([1, 3, 2]) [3, 2, 1] >>> bound_list_sort_reassign([1, 3, 2]) [3, 2, 1] """ sort = l.sort l = [3, 2, 1] sort() return l Cython-0.26.1/tests/run/with_gil.pyx0000664000175000017500000002536713143605603020164 0ustar stefanstefan00000000000000""" Test the 'with gil:' statement. """ cimport cython from cpython.ref cimport PyObject import sys def redirect_stderr(func, *args, **kwargs): """ Helper function that redirects stderr to stdout for doctest. """ stderr, sys.stderr = sys.stderr, sys.stdout func(*args, **kwargs) sys.stderr = stderr cdef void puts(char *string) with gil: """ We need this for doctest, used from nogil sections. """ print string.decode('ascii') class ExceptionWithMsg(Exception): """ In python2.4 Exception is formatted as when swallowed. """ def __repr__(self): return "ExceptionWithMsg(%r)" % self.args # Start with some normal Python functions def test_simple(): """ >>> test_simple() ['spam', 'ham'] """ with nogil: with gil: print ['spam', 'ham'] def test_nested_gil_blocks(): """ >>> test_nested_gil_blocks() entered outer nogil section entered outer gil section entered inner nogil section entered inner gil section leaving inner gil section leaving inner nogil section leaving outer gil section leaving outer nogil section """ with nogil: puts("entered outer nogil section") with gil: print 'entered outer gil section' with nogil: puts("entered inner nogil section") with gil: print 'entered inner gil section' print 'leaving inner gil section' puts("leaving inner nogil section") print "leaving outer gil section" puts("leaving outer nogil section") def test_propagate_exception(): """ >>> test_propagate_exception() Traceback (most recent call last): ... Exception: This exception propagates! 
""" # Note, doctest doesn't support both output and exceptions with nogil: with gil: raise Exception("This exception propagates!") def test_catch_exception(): """ >>> test_catch_exception() This is executed Exception value This is also executed """ try: with nogil: with gil: print "This is executed" raise Exception("Exception value") print "This is not executed" puts("This is also not executed") except Exception, e: print e print "This is also executed" def test_try_finally_and_outer_except(): """ >>> test_try_finally_and_outer_except() First finally clause Second finally clause Caught: Some Exception End of function """ try: with nogil: with gil: try: with nogil: with gil: try: raise Exception("Some Exception") finally: puts("First finally clause") finally: puts("Second finally clause") puts("This is not executed") except Exception, e: print "Caught:", e print "End of function" def test_restore_exception(): """ >>> test_restore_exception() Traceback (most recent call last): ... Exception: Override the raised exception """ with nogil: with gil: try: with nogil: with gil: raise Exception("Override this please") finally: raise Exception("Override the raised exception") ### DISABLED: this cannot work with flow control analysis ## ## def test_declared_variables(): ## """ ## >>> test_declared_variables() ## None ## None ## ['s', 'p', 'a', 'm'] ## ['s', 'p', 'a', 'm'] ## """ ## cdef object somevar ## ## print somevar ## ## with nogil: ## with gil: ## print somevar ## somevar = list("spam") ## print somevar ## ## print somevar ### DISABLED: this cannot work with flow control analysis ## ## def test_undeclared_variables(): ## """ ## >>> test_undeclared_variables() ## None ## None ## ['s', 'p', 'a', 'm'] ## ['s', 'p', 'a', 'm'] ## """ ## print somevar ## with nogil: ## with gil: ## print somevar ## somevar = list("spam") ## print somevar ## ## print somevar def test_loops_and_boxing(): """ >>> test_loops_and_boxing() spamham h a m done looping """ cdef char c, *string = "spamham" with nogil: with gil: print string.decode('ascii') for c in string[4:]: print "%c" % c else: print "done looping" cdef class SomeExtClass(object): cdef int some_attribute @cython.infer_types(True) def test_infer_types(): """ >>> test_infer_types() 10 """ with nogil: with gil: obj = SomeExtClass() obj.some_attribute = 10 print obj.some_attribute def test_closure(): """ >>> test_closure() Traceback (most recent call last): ... Exception: {'twinkle': 'little star'} """ a = dict(twinkle='little star') def inner_function(): with nogil: with gil: raise Exception(a) with nogil: with gil: inner_function() raise Exception("This should not be raised!") cpdef test_cpdef(): """ >>> test_cpdef() Seems to work! Or does it? """ with nogil: with gil: print "Seems to work!" puts("Or does it?") # Now test some cdef functions with different return types cdef void void_nogil_ignore_exception() nogil: with gil: raise ExceptionWithMsg("This is swallowed") puts("unreachable") with gil: print "unreachable" cdef void void_nogil_nested_gil() nogil: with gil: with nogil: with gil: print 'Inner gil section' puts("nogil section") raise ExceptionWithMsg("Swallow this") puts("Don't print this") def test_nogil_void_funcs_with_gil(): """ >>> redirect_stderr(test_nogil_void_funcs_with_gil) # doctest: +ELLIPSIS with_gil.ExceptionWithMsg: This is swallowed Exception... ignored... Inner gil section nogil section ... Exception... ignored... 
""" void_nogil_ignore_exception() void_nogil_nested_gil() def test_nogil_void_funcs_with_nogil(): """ >>> redirect_stderr(test_nogil_void_funcs_with_nogil) # doctest: +ELLIPSIS with_gil.ExceptionWithMsg: This is swallowed Exception... ignored... Inner gil section nogil section with_gil.ExceptionWithMsg: Swallow this Exception... ignored... """ with nogil: void_nogil_ignore_exception() void_nogil_nested_gil() cdef PyObject *nogil_propagate_exception() nogil except NULL: with nogil: with gil: raise Exception("This exception propagates!") return 1 def test_nogil_propagate_exception(): """ >>> test_nogil_propagate_exception() Traceback (most recent call last): ... Exception: This exception propagates! """ nogil_propagate_exception() cdef with_gil_raise() with gil: raise Exception("This exception propagates!") def test_release_gil_call_gil_func(): """ >>> test_release_gil_call_gil_func() Traceback (most recent call last): ... Exception: This exception propagates! """ with nogil: with gil: with_gil_raise() # Test try/finally in nogil blocks def test_try_finally_in_nogil(): """ >>> test_try_finally_in_nogil() Traceback (most recent call last): ... Exception: Override exception! """ with nogil: try: with gil: raise Exception("This will be overridden") finally: with gil: raise Exception("Override exception!") with gil: raise Exception("This code should not be executed!") def test_nogil_try_finally_no_exception(): """ >>> test_nogil_try_finally_no_exception() first nogil try nogil try gil second nogil try nogil finally ------ First with gil block Second with gil block finally block """ with nogil: try: puts("first nogil try") with gil: print "nogil try gil" puts("second nogil try") finally: puts("nogil finally") print '------' with nogil: try: with gil: print "First with gil block" with gil: print "Second with gil block" finally: puts("finally block") def test_nogil_try_finally_propagate_exception(): """ >>> test_nogil_try_finally_propagate_exception() Execute finally clause Propagate this! """ try: with nogil: try: with gil: raise Exception("Propagate this!") with gil: raise Exception("Don't reach this section!") finally: puts("Execute finally clause") except Exception, e: print e def test_nogil_try_finally_return_in_with_gil(x): """ >>> test_nogil_try_finally_return_in_with_gil(10) print me 10 """ with nogil: try: with gil: raise Exception("Swallow me!") finally: with gil: print "print me" return x print "I am not executed" cdef void nogil_try_finally_return() nogil: try: with gil: raise Exception("I am swallowed in nogil code... right?") finally: with gil: print "print me first" return with gil: print "I am not executed" def test_nogil_try_finally_return(): """ >>> test_nogil_try_finally_return() print me first """ with nogil: nogil_try_finally_return() cdef int error_func() except -1 with gil: raise Exception("propagate this") def test_nogil_try_finally_error_label(): """ >>> test_nogil_try_finally_error_label() print me first propagate this """ try: with nogil: try: error_func() finally: with gil: print "print me first" except Exception, e: print e.args[0] cdef void test_timing_callback() with gil: pass def test_timing(long N): """ >>> sorted([test_timing(10000) for _ in range(10)]) # doctest: +ELLIPSIS [...] 
""" import time t = time.time() with nogil: for _ in range(N): test_timing_callback() return time.time() - t Cython-0.26.1/tests/run/cython_includes.pyx0000664000175000017500000000117712542002467021542 0ustar stefanstefan00000000000000 from libc.stdio cimport sprintf from cpython cimport PyType_Check from cpython cimport PyType_Check as PyType_Check2 from cpython.type cimport PyType_Check as PyType_Check3 def libc_cimports(): """ >>> libc_cimports() hello """ cdef char[10] buf sprintf(buf, "%s", b'hello') print (buf).decode('ASCII') def cpython_cimports(): """ >>> cpython_cimports() True False True False True False """ print PyType_Check(list) print PyType_Check([]) print PyType_Check2(list) print PyType_Check2([]) print PyType_Check3(list) print PyType_Check3([]) Cython-0.26.1/tests/run/closure_decorators_T478.pyx0000664000175000017500000000206512542002467022774 0ustar stefanstefan00000000000000# mode: run # tag: closures # ticket: 478 __doc__ = """ >>> Num(13).is_prime() args (Num(13),) kwds {} True >>> Num(13).is_prime(True) args (Num(13), True) kwds {} True >>> Num(15).is_prime(print_factors=True) args (Num(15),) kwds {'print_factors': True} 3 5 False """ def print_args(func): def f(*args, **kwds): print "args", args, "kwds", kwds return func(*args, **kwds) return f cdef class Num: cdef int n def __init__(self, n): self.n = n def __repr__(self): return "Num(%s)" % self.n @print_args def is_prime(self, bint print_factors=False): if self.n == 2: return True elif self.n < 2: return False elif self.n % 2 == 0: if print_factors: print 2, self.n // 2 cdef int i = 3 while i*i <= self.n: if self.n % i == 0: if print_factors: print i, self.n // i return False i += 2 return True Cython-0.26.1/tests/run/inlined_context_manager.pyx0000664000175000017500000000061712542002467023226 0ustar stefanstefan00000000000000# mode: run cimport cython @cython.final cdef class TypedContextManager(object): cdef double __enter__(self): # not callable from Python ! return 2.0 # FIXME: inline __exit__() as well def __exit__(self, exc_type, exc_value, exc_tb): return 0 def with_statement(): """ >>> with_statement() 2.0 """ with TypedContextManager() as x: return x Cython-0.26.1/tests/run/closure_arg_type_error.pyx0000664000175000017500000000205612542002467023124 0ustar stefanstefan00000000000000# mode: run # tag: closures # The arguments in f() are put into the closure one after the other, # so the reference of 'o' is filled in before the type errors are # found. This leaves a reference in the closure instance on error # return, which must be properly ref-counted to facilitate generic # closure deallocation. In the case of an argument type error, it's # actually best to just Py_CLEAR() the already handled references, as # this frees them as early as possible. # This test doesn't really check the ref-counting itself, it just # reproduces the problem. 
def func_with_typed_args(object o, int i, tuple t, double d): """ >>> g = func_with_typed_args(1, 2, (), 3.0) >>> g() (1, 2, (), 3.0) >>> g = func_with_typed_args(1, 'x', (), 3.0) Traceback (most recent call last): TypeError: an integer is required >>> g = func_with_typed_args(1, 2, 3, 3.0) Traceback (most recent call last): TypeError: Argument 't' has incorrect type (expected tuple, got int) """ def g(): return o, i, t, d return g Cython-0.26.1/tests/run/tupleunpack_T712.pyx0000664000175000017500000000037512542002467021417 0ustar stefanstefan00000000000000# mode: run # ticket: 712 def single_from_string(): """ >>> print(single_from_string()) a """ (a,) = 'a' return a def single_from_set(): """ >>> print(single_from_set()) a """ (a,) = set(["a"]) return a Cython-0.26.1/tests/run/ishimoto3.pyx0000664000175000017500000000036412542002467020263 0ustar stefanstefan00000000000000__doc__ = u""" >>> c1 = C1() >>> c2 = C2(c1) >>> c1 is c2.getc1() True """ cdef class C1: pass cdef class C2: cdef C1 c1 def __init__(self, arg): self.c1 = arg def getc1(self): return self.c1 Cython-0.26.1/tests/run/generators_in_refcycles.pyx0000664000175000017500000000134212542002467023240 0ustar stefanstefan00000000000000 import sys def _next(it): if sys.version_info[0] >= 3: return next(it) else: return it.next() def test_reference_cycle_cleanup(): """ >>> import gc >>> delegator, gen, deleted = test_reference_cycle_cleanup() >>> _next(delegator(gen())) 123 >>> _ = gc.collect(); print(sorted(deleted)) ['bar', 'foo'] """ deleted = [] class Destructed(object): def __init__(self, name): self.name = name def __del__(self): deleted.append(self.name) def delegator(c): d = Destructed('foo') yield from c def gen(): d = Destructed('bar') while True: yield 123 return delegator, gen, deleted Cython-0.26.1/tests/run/pure.pyx0000664000175000017500000001066013023021033017301 0ustar stefanstefan00000000000000import cython def test_sizeof(): """ >>> test_sizeof() True True True True True """ x = cython.declare(cython.bint) print sizeof(x) == sizeof(cython.bint) print sizeof(cython.char) <= sizeof(cython.short) <= sizeof(cython.int) <= sizeof(cython.long) <= sizeof(cython.longlong) print sizeof(cython.uint) == sizeof(cython.int) print sizeof(cython.p_int) == sizeof(cython.p_double) if cython.compiled: print sizeof(cython.char) < sizeof(cython.longlong) else: print sizeof(cython.char) == 1 def test_declare(n): """ >>> test_declare(100) (100, 100) >>> test_declare(100.5) (100, 100) >>> test_declare(None) Traceback (most recent call last): ... TypeError: an integer is required """ x = cython.declare(cython.int) y = cython.declare(cython.int, n) if cython.compiled: cython.declare(xx=cython.int, yy=cython.long) i = sizeof(xx) ptr = cython.declare(cython.p_int, cython.address(y)) return y, ptr[0] @cython.locals(x=cython.double, n=cython.int) def test_cast(x): """ >>> test_cast(1.5) 1 >>> try: test_cast(None) ... except TypeError: pass """ n = cython.cast(cython.int, x) return n @cython.locals(as_list=list) def test_cast_object(x, typecheck): """ >>> test_cast_object([1, 2, 3], True) [1, 2, 3] >>> test_cast_object([1, 2, 3], False) [1, 2, 3] >>> test_cast_object((1, 2, 3), True) Traceback (most recent call last): ... 
TypeError: Expected list, got tuple >>> test_cast_object((1, 2, 3), False) (1, 2, 3) """ if typecheck: as_list = cython.cast(list, x, typecheck=True) else: as_list = cython.cast(list, x, typecheck=False) return as_list @cython.locals(x=cython.int, y=cython.p_int) def test_address(x): """ >>> test_address(39) 39 """ y = cython.address(x) return y[0] @cython.locals(x=cython.int) @cython.locals(y=cython.bint) def test_locals(x): """ >>> test_locals(5) True """ y = x return y MyUnion = cython.union(n=cython.int, x=cython.double) MyStruct = cython.struct(is_integral=cython.bint, data=MyUnion) MyStruct2 = cython.typedef(MyStruct[2]) def test_struct(n, x): """ >>> test_struct(389, 1.64493) (389, 1.64493) """ a = cython.declare(MyStruct2) a[0] = MyStruct(True, data=MyUnion(n=n)) a[1] = MyStruct(is_integral=False, data={'x': x}) return a[0].data.n, a[1].data.x import cython as cy from cython import declare, cast, locals, address, typedef, p_void, compiled from cython import declare as my_declare, locals as my_locals, p_void as my_void_star, typedef as my_typedef, compiled as my_compiled @my_locals(a=cython.p_void) def test_imports(): """ >>> test_imports() True """ a = cython.NULL b = declare(p_void, cython.NULL) c = my_declare(my_void_star, cython.NULL) d = cy.declare(cy.p_void, cython.NULL) return a == d and compiled and my_compiled MyStruct3 = typedef(MyStruct[3]) MyStruct4 = my_typedef(MyStruct[4]) MyStruct5 = cy.typedef(MyStruct[5]) def test_declare_c_types(n): """ >>> test_declare_c_types(0) >>> test_declare_c_types(1) >>> test_declare_c_types(2) """ # b00 = cython.declare(cython.bint, 0) b01 = cython.declare(cython.bint, 1) b02 = cython.declare(cython.bint, 2) # i00 = cython.declare(cython.uchar, n) i01 = cython.declare(cython.char, n) i02 = cython.declare(cython.schar, n) i03 = cython.declare(cython.ushort, n) i04 = cython.declare(cython.short, n) i05 = cython.declare(cython.sshort, n) i06 = cython.declare(cython.uint, n) i07 = cython.declare(cython.int, n) i08 = cython.declare(cython.sint, n) i09 = cython.declare(cython.slong, n) i10 = cython.declare(cython.long, n) i11 = cython.declare(cython.ulong, n) i12 = cython.declare(cython.slonglong, n) i13 = cython.declare(cython.longlong, n) i14 = cython.declare(cython.ulonglong, n) i20 = cython.declare(cython.Py_ssize_t, n) i21 = cython.declare(cython.size_t, n) # f00 = cython.declare(cython.float, n) f01 = cython.declare(cython.double, n) f02 = cython.declare(cython.longdouble, n) # #z00 = cython.declare(cython.complex, n+1j) #z01 = cython.declare(cython.floatcomplex, n+1j) #z02 = cython.declare(cython.doublecomplex, n+1j) #z03 = cython.declare(cython.longdoublecomplex, n+1j) Cython-0.26.1/tests/run/__debug__.srctree0000664000175000017500000000311112542002467021047 0ustar stefanstefan00000000000000""" PYTHON setup.py build_ext -i PYTHON debug_test.py PYTHON -O debug_test.py PYTHON -OO debug_test.py """ ######## setup.py ######## from distutils.core import setup from Cython.Build import cythonize setup(ext_modules = cythonize('debug_test_cython.pyx')) ######## debug_test.py ######## if __debug__: DBG = True else: DBG = False import sys try: optimised = bool(sys.flags.optimize) except AttributeError: # Py2.[45] optimised = eval('not __debug__') if DBG == optimised: raise RuntimeError( "PYTHON: unexpected debug value %s, expected %s" % ( DBG, optimised)) ASSERT_CALLED = False def sideeffect(): global ASSERT_CALLED ASSERT_CALLED = True return True assert sideeffect() if ASSERT_CALLED == optimised: raise RuntimeError("Assert called in optimised 
Python run") import debug_test_cython if debug_test_cython.DBG == optimised: raise RuntimeError( "CYTHON: unexpected debug value %s, expected %s" % ( debug_test_cython.DBG, optimised)) ######## debug_test_cython.pyx ######## if __debug__: DBG = True else: DBG = False import sys try: optimised = bool(sys.flags.optimize) except AttributeError: # Py2.[45] optimised = eval('not __debug__') ASSERT_CALLED = False def sideeffect(): global ASSERT_CALLED ASSERT_CALLED = True return True if DBG == optimised: raise RuntimeError("Unexpected debug value %s, expected %s" % ( DBG, optimised)) assert sideeffect() if ASSERT_CALLED == optimised: raise RuntimeError("Assert called in optimised Python run") Cython-0.26.1/tests/run/test_fstring.pyx0000664000175000017500000007502113143605603021061 0ustar stefanstefan00000000000000# cython: language_level=3 # mode: run # tag: allow_unknown_names, f_strings, pep498 import ast import types import decimal import unittest import contextlib import sys IS_PY2 = sys.version_info[0] < 3 IS_PY26 = sys.version_info[:2] < (2, 7) from Cython.Build.Inline import cython_inline from Cython.TestUtils import CythonTest from Cython.Compiler.Errors import CompileError, hold_errors, release_errors, error_stack, held_errors def cy_eval(s, **kwargs): return cython_inline('return ' + s, force=True, **kwargs) a_global = 'global variable' # You could argue that I'm too strict in looking for specific error # values with assertRaisesRegex, but without it it's way too easy to # make a syntax error in the test strings. Especially with all of the # triple quotes, raw strings, backslashes, etc. I think it's a # worthwhile tradeoff. When I switched to this method, I found many # examples where I wasn't testing what I thought I was. class TestCase(CythonTest): def assertAllRaise(self, exception_type, regex, error_strings): for str in error_strings: hold_errors() if exception_type is SyntaxError: try: self.fragment(str) assert held_errors(), "Invalid Cython code failed to raise SyntaxError: %s" % str except CompileError: assert True finally: release_errors(ignore=True) else: try: cython_inline(str, quiet=True) except exception_type: assert True else: assert False, "Invalid Cython code failed to raise %s: %s" % (exception_type, str) finally: if error_stack: release_errors(ignore=True) if IS_PY2: def assertEqual(self, first, second, msg=None): # strip u'' string prefixes in Py2 if first != second and isinstance(first, unicode): stripped_first = first.replace("u'", "'").replace('u"', '"') if stripped_first == second: first = stripped_first elif stripped_first.decode('unicode_escape') == second: first = stripped_first.decode('unicode_escape') super(TestCase, self).assertEqual(first, second, msg) if IS_PY26: @contextlib.contextmanager def assertRaises(self, exc): try: yield except exc: pass else: assert False, "exception '%s' not raised" % exc def assertIn(self, value, collection): self.assertTrue(value in collection) def test__format__lookup(self): if IS_PY26: return elif IS_PY2: raise unittest.SkipTest("Py3-only") # Make sure __format__ is looked up on the type, not the instance. class X: def __format__(self, spec): return 'class' x = X() # Add a bound __format__ method to the 'y' instance, but not # the 'x' instance. 
y = X() y.__format__ = types.MethodType(lambda self, spec: 'instance', y) self.assertEqual(f'{y}', format(y)) self.assertEqual(f'{y}', 'class') self.assertEqual(format(x), format(y)) # __format__ is not called this way, but still make sure it # returns what we expect (so we can make sure we're bypassing # it). self.assertEqual(x.__format__(''), 'class') self.assertEqual(y.__format__(''), 'instance') # This is how __format__ is actually called. self.assertEqual(type(x).__format__(x, ''), 'class') self.assertEqual(type(y).__format__(y, ''), 'class') def __test_ast(self): # Inspired by http://bugs.python.org/issue24975 class X: def __init__(self): self.called = False def __call__(self): self.called = True return 4 x = X() expr = """ a = 10 f'{a * x()}'""" t = ast.parse(expr) c = compile(t, '', 'exec') # Make sure x was not called. self.assertFalse(x.called) # Actually run the code. exec(c) # Make sure x was called. self.assertTrue(x.called) def test_docstring(self): def f(): f'''Not a docstring''' self.assertTrue(f.__doc__ is None) def g(): '''Not a docstring''' \ f'' self.assertTrue(g.__doc__ is None) def __test_literal_eval(self): # With no expressions, an f-string is okay. self.assertEqual(ast.literal_eval("f'x'"), 'x') self.assertEqual(ast.literal_eval("f'x' 'y'"), 'xy') # But this should raise an error. with self.assertRaisesRegex(ValueError, 'malformed node or string'): ast.literal_eval("f'x'") # As should this, which uses a different ast node with self.assertRaisesRegex(ValueError, 'malformed node or string'): ast.literal_eval("f'{3}'") def __test_ast_compile_time_concat(self): x = [''] expr = """x[0] = 'foo' f'{3}'""" t = ast.parse(expr) c = compile(t, '', 'exec') exec(c) self.assertEqual(x[0], 'foo3') def test_compile_time_concat_errors(self): self.assertAllRaise(SyntaxError, 'cannot mix bytes and nonbytes literals', [r"""f'' b''""", r"""b'' f''""", ]) def test_literal(self): self.assertEqual(f'', '') self.assertEqual(f'a', 'a') self.assertEqual(f' ', ' ') def test_unterminated_string(self): self.assertAllRaise(SyntaxError, 'f-string: unterminated string', [r"""f'{"x'""", r"""f'{"x}'""", r"""f'{("x'""", r"""f'{("x}'""", ]) def test_mismatched_parens(self): self.assertAllRaise(SyntaxError, 'f-string: mismatched', ["f'{((}'", ]) def test_double_braces(self): self.assertEqual(f'{{', '{') self.assertEqual(f'a{{', 'a{') self.assertEqual(f'{{b', '{b') self.assertEqual(f'a{{b', 'a{b') self.assertEqual(f'}}', '}') self.assertEqual(f'a}}', 'a}') self.assertEqual(f'}}b', '}b') self.assertEqual(f'a}}b', 'a}b') self.assertEqual(f'{{}}', '{}') self.assertEqual(f'a{{}}', 'a{}') self.assertEqual(f'{{b}}', '{b}') self.assertEqual(f'{{}}c', '{}c') self.assertEqual(f'a{{b}}', 'a{b}') self.assertEqual(f'a{{}}c', 'a{}c') self.assertEqual(f'{{b}}c', '{b}c') self.assertEqual(f'a{{b}}c', 'a{b}c') self.assertEqual(f'{{{10}', '{10') self.assertEqual(f'}}{10}', '}10') self.assertEqual(f'}}{{{10}', '}{10') self.assertEqual(f'}}a{{{10}', '}a{10') self.assertEqual(f'{10}{{', '10{') self.assertEqual(f'{10}}}', '10}') self.assertEqual(f'{10}}}{{', '10}{') self.assertEqual(f'{10}}}a{{' '}', '10}a{}') # Inside of strings, don't interpret doubled brackets. 
self.assertEqual(f'{"{{}}"}', '{{}}') self.assertAllRaise(TypeError, 'unhashable type', ["f'{ {{}} }'", # dict in a set ]) def test_compile_time_concat(self): x = 'def' self.assertEqual('abc' f'## {x}ghi', 'abc## defghi') self.assertEqual('abc' f'{x}' 'ghi', 'abcdefghi') self.assertEqual('abc' f'{x}' 'gh' f'i{x:4}', 'abcdefghidef ') self.assertEqual('{x}' f'{x}', '{x}def') self.assertEqual('{x' f'{x}', '{xdef') self.assertEqual('{x}' f'{x}', '{x}def') self.assertEqual('{{x}}' f'{x}', '{{x}}def') self.assertEqual('{{x' f'{x}', '{{xdef') self.assertEqual('x}}' f'{x}', 'x}}def') self.assertEqual(f'{x}' 'x}}', 'defx}}') self.assertEqual(f'{x}' '', 'def') self.assertEqual('' f'{x}' '', 'def') self.assertEqual('' f'{x}', 'def') self.assertEqual(f'{x}' '2', 'def2') self.assertEqual('1' f'{x}' '2', '1def2') self.assertEqual('1' f'{x}', '1def') self.assertEqual(f'{x}' f'-{x}', 'def-def') self.assertEqual('' f'', '') self.assertEqual('' f'' '', '') self.assertEqual('' f'' '' f'', '') self.assertEqual(f'', '') self.assertEqual(f'' '', '') self.assertEqual(f'' '' f'', '') self.assertEqual(f'' '' f'' '', '') self.assertAllRaise(SyntaxError, "f-string: expecting '}'", ["f'{3' f'}'", # can't concat to get a valid f-string ]) def test_comments(self): # These aren't comments, since they're in strings. d = {'#': 'hash'} self.assertEqual(f'{"#"}', '#') self.assertEqual(f'{d["#"]}', 'hash') self.assertAllRaise(SyntaxError, "f-string expression part cannot include '#'", ["f'{1#}'", # error because the expression becomes "(1#)" "f'{3(#)}'", "f'{#}'", "f'{)#}'", # When wrapped in parens, this becomes # '()#)'. Make sure that doesn't compile. ]) def test_many_expressions(self): # Create a string with many expressions in it. Note that # because we have a space in here as a literal, we're actually # going to use twice as many ast nodes: one for each literal # plus one for each expression. def build_fstr(n, extra=''): return "f'" + ('{x} ' * n) + extra + "'" x = 'X' width = 1 # Test around 256. for i in range(250, 260): self.assertEqual(cy_eval(build_fstr(i), x=x, width=width), (x+' ')*i) # Test concatenating 2 largs fstrings. self.assertEqual(cy_eval(build_fstr(255)*3, x=x, width=width), (x+' ')*(255*3)) # CPython uses 255*256 s = build_fstr(253, '{x:{width}} ') self.assertEqual(cy_eval(s, x=x, width=width), (x+' ')*254) # Test lots of expressions and constants, concatenated. s = "f'{1}' 'x' 'y'" * 1024 self.assertEqual(cy_eval(s, x=x, width=width), '1xy' * 1024) def test_format_specifier_expressions(self): width = 10 precision = 4 value = decimal.Decimal('12.34567') if not IS_PY26: self.assertEqual(f'result: {value:{width}.{precision}}', 'result: 12.35') self.assertEqual(f'result: {value:{width!r}.{precision}}', 'result: 12.35') self.assertEqual(f'result: {value:{width:0}.{precision:1}}', 'result: 12.35') self.assertEqual(f'result: {value:{1}{0:0}.{precision:1}}', 'result: 12.35') self.assertEqual(f'result: {value:{ 1}{ 0:0}.{ precision:1}}', 'result: 12.35') self.assertEqual(f'{10:#{1}0x}', ' 0xa') self.assertEqual(f'{10:{"#"}1{0}{"x"}}', ' 0xa') self.assertEqual(f'{-10:-{"#"}1{0}x}', ' -0xa') self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', ' -0xa') self.assertEqual(f'{10:#{3 != {4:5} and width}x}', ' 0xa') self.assertAllRaise(SyntaxError, "f-string: expecting '}'", ["""f'{"s"!r{":10"}}'""", # This looks like a nested format spec. ]) self.assertAllRaise(SyntaxError, "invalid syntax", [# Invalid syntax inside a nested spec. "f'{4:{/5}}'", ]) # CYTHON: The nesting restriction seems rather arbitrary. 
Ignoring it for now and instead test that it works. if not IS_PY26: self.assertEqual(f'result: {value:{width:{0}}.{precision:1}}', 'result: 12.35') #self.assertAllRaise(SyntaxError, "f-string: expressions nested too deeply", # [# Can't nest format specifiers. # "f'result: {value:{width:{0}}.{precision:1}}'", # ]) self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character', [# No expansion inside conversion or for # the : or ! itself. """f'{"s"!{"r"}}'""", ]) def test_side_effect_order(self): class X: def __init__(self): self.i = 0 def __format__(self, spec): self.i += 1 return str(self.i) x = X() self.assertEqual(f'{x} {x}', '1 2') def test_missing_expression(self): self.assertAllRaise(SyntaxError, 'f-string: empty expression not allowed', ["f'{}'", "f'{ }'" "f' {} '", "f'{!r}'", "f'{ !r}'", "f'{10:{ }}'", "f' { } '", # Catch the empty expression before the # invalid conversion. "f'{!x}'", "f'{ !xr}'", "f'{!x:}'", "f'{!x:a}'", "f'{ !xr:}'", "f'{ !xr:a}'", "f'{!}'", "f'{:}'", # We find the empty expression before the # missing closing brace. "f'{!'", "f'{!s:'", "f'{:'", "f'{:x'", ]) def test_parens_in_expressions(self): self.assertEqual(f'{3,}', '(3,)') # Add these because when an expression is evaluated, parens # are added around it. But we shouldn't go from an invalid # expression to a valid one. The added parens are just # supposed to allow whitespace (including newlines). self.assertAllRaise(SyntaxError, 'invalid syntax', ["f'{,}'", "f'{,}'", # this is (,), which is an error ]) self.assertAllRaise(SyntaxError, "f-string: expecting '}'", ["f'{3)+(4}'", ]) self.assertAllRaise(SyntaxError, 'EOL while scanning string literal', ["f'{\n}'", ]) def test_backslashes_in_string_part(self): self.assertEqual(f'\t', '\t') self.assertEqual(r'\t', '\\t') self.assertEqual(rf'\t', '\\t') self.assertEqual(f'{2}\t', '2\t') self.assertEqual(f'{2}\t{3}', '2\t3') self.assertEqual(f'\t{3}', '\t3') self.assertEqual(f'\u0394', '\u0394') self.assertEqual(r'\u0394', '\\u0394') self.assertEqual(rf'\u0394', '\\u0394') self.assertEqual(f'{2}\u0394', '2\u0394') self.assertEqual(f'{2}\u0394{3}', '2\u03943') self.assertEqual(f'\u0394{3}', '\u03943') self.assertEqual(f'\U00000394', '\u0394') self.assertEqual(r'\U00000394', '\\U00000394') self.assertEqual(rf'\U00000394', '\\U00000394') self.assertEqual(f'{2}\U00000394', '2\u0394') self.assertEqual(f'{2}\U00000394{3}', '2\u03943') self.assertEqual(f'\U00000394{3}', '\u03943') self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}', '\u0394') self.assertEqual(f'{2}\N{GREEK CAPITAL LETTER DELTA}', '2\u0394') self.assertEqual(f'{2}\N{GREEK CAPITAL LETTER DELTA}{3}', '2\u03943') self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}{3}', '\u03943') self.assertEqual(f'2\N{GREEK CAPITAL LETTER DELTA}', '2\u0394') self.assertEqual(f'2\N{GREEK CAPITAL LETTER DELTA}3', '2\u03943') self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}3', '\u03943') self.assertEqual(f'\x20', ' ') self.assertEqual(r'\x20', '\\x20') self.assertEqual(rf'\x20', '\\x20') self.assertEqual(f'{2}\x20', '2 ') self.assertEqual(f'{2}\x20{3}', '2 3') self.assertEqual(f'\x20{3}', ' 3') self.assertEqual(f'2\x20', '2 ') self.assertEqual(f'2\x203', '2 3') self.assertEqual(f'\x203', ' 3') def test_misformed_unicode_character_name(self): # These test are needed because unicode names are parsed # differently inside f-strings. 
self.assertAllRaise(SyntaxError, r"\(unicode error\) 'unicodeescape' codec can't decode bytes in position .*: malformed \\N character escape", [r"f'\N'", r"f'\N{'", r"f'\N{GREEK CAPITAL LETTER DELTA'", # Here are the non-f-string versions, # which should give the same errors. r"'\N'", r"'\N{'", r"'\N{GREEK CAPITAL LETTER DELTA'", ]) def test_no_backslashes_in_expression_part(self): self.assertAllRaise(SyntaxError, 'f-string expression part cannot include a backslash', [r"f'{\'a\'}'", r"f'{\t3}'", r"f'{\}'", r"rf'{\'a\'}'", r"rf'{\t3}'", r"rf'{\}'", r"""rf'{"\N{LEFT CURLY BRACKET}"}'""", r"f'{\n}'", ]) def test_no_escapes_for_braces(self): """ Only literal curly braces begin an expression. """ # \x7b is '{'. self.assertEqual(f'\x7b1+1}}', '{1+1}') self.assertEqual(f'\x7b1+1', '{1+1') self.assertEqual(f'\u007b1+1', '{1+1') self.assertEqual(f'\N{LEFT CURLY BRACKET}1+1\N{RIGHT CURLY BRACKET}', '{1+1}') def test_newlines_in_expressions(self): self.assertEqual(f'{0}', '0') self.assertEqual(rf'''{3+ 4}''', '7') def test_lambda(self): x = 5 self.assertEqual(f'{(lambda y:x*y)("8")!r}', "'88888'") if not IS_PY2: self.assertEqual(f'{(lambda y:x*y)("8")!r:10}', "'88888' ") self.assertEqual(f'{(lambda y:x*y)("8"):10}', "88888 ") # lambda doesn't work without parens, because the colon # makes the parser think it's a format_spec self.assertAllRaise(SyntaxError, 'unexpected EOF while parsing', ["f'{lambda x:x}'", ]) def test_yield(self): # Not terribly useful, but make sure the yield turns # a function into a generator def fn(y): f'y:{yield y*2}' g = fn(4) self.assertEqual(next(g), 8) def test_yield_send(self): def fn(x): yield f'x:{yield (lambda i: x * i)}' g = fn(10) the_lambda = next(g) self.assertEqual(the_lambda(4), 40) self.assertEqual(g.send('string'), 'x:string') def test_expressions_with_triple_quoted_strings(self): self.assertEqual(f"{'''x'''}", 'x') self.assertEqual(f"{'''eric's'''}", "eric's") # Test concatenation within an expression self.assertEqual(f'{"x" """eric"s""" "y"}', 'xeric"sy') self.assertEqual(f'{"x" """eric"s"""}', 'xeric"s') self.assertEqual(f'{"""eric"s""" "y"}', 'eric"sy') self.assertEqual(f'{"""x""" """eric"s""" "y"}', 'xeric"sy') self.assertEqual(f'{"""x""" """eric"s""" """y"""}', 'xeric"sy') self.assertEqual(f'{r"""x""" """eric"s""" """y"""}', 'xeric"sy') def test_multiple_vars(self): x = 98 y = 'abc' self.assertEqual(f'{x}{y}', '98abc') self.assertEqual(f'X{x}{y}', 'X98abc') self.assertEqual(f'{x}X{y}', '98Xabc') self.assertEqual(f'{x}{y}X', '98abcX') self.assertEqual(f'X{x}Y{y}', 'X98Yabc') self.assertEqual(f'X{x}{y}Y', 'X98abcY') self.assertEqual(f'{x}X{y}Y', '98XabcY') self.assertEqual(f'X{x}Y{y}Z', 'X98YabcZ') def test_closure(self): def outer(x): def inner(): return f'x:{x}' return inner self.assertEqual(outer('987')(), 'x:987') self.assertEqual(outer(7)(), 'x:7') def test_arguments(self): y = 2 def f(x, width): return f'x={x*y:{width}}' self.assertEqual(f('foo', 10), 'x=foofoo ') x = 'bar' self.assertEqual(f(10, 10), 'x= 20') def test_locals(self): value = 123 self.assertEqual(f'v:{value}', 'v:123') def test_missing_variable(self): with self.assertRaises(NameError): f'v:{value}' def test_missing_format_spec(self): class O: def __format__(self, spec): if not spec: return '*' return spec self.assertEqual(f'{O():x}', 'x') self.assertEqual(f'{O()}', '*') self.assertEqual(f'{O():}', '*') self.assertEqual(f'{3:}', '3') self.assertEqual(f'{3!s:}', '3') def test_global(self): self.assertEqual(f'g:{a_global}', 'g:global variable') self.assertEqual(f'g:{a_global!r}', 
"g:'global variable'") a_local = 'local variable' self.assertEqual(f'g:{a_global} l:{a_local}', 'g:global variable l:local variable') self.assertEqual(f'g:{a_global!r}', "g:'global variable'") self.assertEqual(f'g:{a_global} l:{a_local!r}', "g:global variable l:'local variable'") self.assertIn("module 'unittest' from", f'{unittest}') def test_shadowed_global(self): a_global = 'really a local' self.assertEqual(f'g:{a_global}', 'g:really a local') self.assertEqual(f'g:{a_global!r}', "g:'really a local'") a_local = 'local variable' self.assertEqual(f'g:{a_global} l:{a_local}', 'g:really a local l:local variable') self.assertEqual(f'g:{a_global!r}', "g:'really a local'") self.assertEqual(f'g:{a_global} l:{a_local!r}', "g:really a local l:'local variable'") def test_call(self): def foo(x): return 'x=' + str(x) self.assertEqual(f'{foo(10)}', 'x=10') def test_nested_fstrings(self): y = 5 self.assertEqual(f'{f"{0}"*3}', '000') self.assertEqual(f'{f"{y}"*3}', '555') def test_invalid_string_prefixes(self): self.assertAllRaise(SyntaxError, 'unexpected EOF while parsing', ["fu''", "uf''", "Fu''", "fU''", "Uf''", "uF''", "ufr''", "urf''", "fur''", "fru''", "rfu''", "ruf''", "FUR''", "Fur''", "fb''", "fB''", "Fb''", "FB''", "bf''", "bF''", "Bf''", "BF''", ]) def test_leading_trailing_spaces(self): self.assertEqual(f'{ 3}', '3') self.assertEqual(f'{ 3}', '3') self.assertEqual(f'{3 }', '3') self.assertEqual(f'{3 }', '3') self.assertEqual(f'expr={ {x: y for x, y in [(1, 2), ]}}', 'expr={1: 2}') self.assertEqual(f'expr={ {x: y for x, y in [(1, 2), ]} }', 'expr={1: 2}') def test_not_equal(self): # There's a special test for this because there's a special # case in the f-string parser to look for != as not ending an # expression. Normally it would, while looking for !s or !r. self.assertEqual(f'{3!=4}', 'True') self.assertEqual(f'{3!=4:}', 'True') self.assertEqual(f'{3!=4!s}', 'True') self.assertEqual(f'{3!=4!s:.3}', 'Tru') def test_conversions(self): self.assertEqual(f'{3.14:10.10}', ' 3.14') if not IS_PY26: self.assertEqual(f'{3.14!s:10.10}', '3.14 ') self.assertEqual(f'{3.14!r:10.10}', '3.14 ') self.assertEqual(f'{3.14!a:10.10}', '3.14 ') self.assertEqual(f'{"a"}', 'a') self.assertEqual(f'{"a"!r}', "'a'") self.assertEqual(f'{"a"!a}', "'a'") # Not a conversion. self.assertEqual(f'{"a!r"}', "a!r") # Not a conversion, but show that ! is allowed in a format spec. self.assertEqual(f'{3.14:!<10.10}', '3.14!!!!!!') self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character', ["f'{3!g}'", "f'{3!A}'", "f'{3!3}'", "f'{3!G}'", "f'{3!!}'", "f'{3!:}'", "f'{3! s}'", # no space before conversion char ]) self.assertAllRaise(SyntaxError, "f-string: expecting '}'", ["f'{x!s{y}}'", "f'{3!ss}'", "f'{3!ss:}'", "f'{3!ss:s}'", ]) def test_assignment(self): self.assertAllRaise(SyntaxError, 'invalid syntax', ["f'' = 3", "f'{0}' = x", "f'{x}' = x", ]) def test_del(self): self.assertAllRaise(CompileError, 'invalid syntax', # CPython raises SyntaxError ["del f''", "del '' f''", ]) def test_mismatched_braces(self): self.assertAllRaise(SyntaxError, "f-string: single '}' is not allowed", ["f'{{}'", "f'{{}}}'", "f'}'", "f'x}'", "f'x}x'", r"f'\u007b}'", # Can't have { or } in a format spec. "f'{3:}>10}'", "f'{3:}}>10}'", ]) self.assertAllRaise(SyntaxError, "f-string: expecting '}'", ["f'{3:{{>10}'", "f'{3'", "f'{3!'", "f'{3:'", "f'{3!s'", "f'{3!s:'", "f'{3!s:3'", "f'x{'", "f'x{x'", "f'{x'", "f'{3:s'", "f'{{{'", "f'{{}}{'", "f'{'", ]) # But these are just normal strings. 
self.assertEqual(f'{"{"}', '{') self.assertEqual(f'{"}"}', '}') self.assertEqual(f'{3:{"}"}>10}', '}}}}}}}}}3') self.assertEqual(f'{2:{"{"}>10}', '{{{{{{{{{2') def test_if_conditional(self): # There's special logic in compile.c to test if the # conditional for an if (and while) are constants. Exercise # that code. def test_fstring(x, expected): flag = 0 if f'{x}': flag = 1 else: flag = 2 self.assertEqual(flag, expected) def test_concat_empty(x, expected): flag = 0 if '' f'{x}': flag = 1 else: flag = 2 self.assertEqual(flag, expected) def test_concat_non_empty(x, expected): flag = 0 if ' ' f'{x}': flag = 1 else: flag = 2 self.assertEqual(flag, expected) test_fstring('', 2) test_fstring(' ', 1) test_concat_empty('', 2) test_concat_empty(' ', 1) test_concat_non_empty('', 1) test_concat_non_empty(' ', 1) def test_empty_format_specifier(self): x = 'test' self.assertEqual(f'{x}', 'test') self.assertEqual(f'{x:}', 'test') self.assertEqual(f'{x!s:}', 'test') self.assertEqual(f'{x!r:}', "'test'") def test_str_format_differences(self): d = {'a': 'string', 0: 'integer', } a = 0 self.assertEqual(f'{d[0]}', 'integer') self.assertEqual(f'{d["a"]}', 'string') self.assertEqual(f'{d[a]}', 'integer') self.assertEqual('{d[a]}'.format(d=d), 'string') self.assertEqual('{d[0]}'.format(d=d), 'integer') def test_invalid_expressions(self): self.assertAllRaise(SyntaxError, 'invalid syntax', [r"f'{a[4)}'", r"f'{a(4]}'", ]) def test_errors(self): # see issue 26287 self.assertAllRaise((TypeError, ValueError), 'non-empty', # TypeError in Py3.4+ [r"f'{(lambda: 0):x}'", r"f'{(0,):x}'", ]) self.assertAllRaise(ValueError, 'Unknown format code', [r"f'{1000:j}'", r"f'{1000:j}'", ]) def test_loop(self): for i in range(1000): self.assertEqual(f'i:{i}', 'i:' + str(i)) def test_dict(self): d = {'"': 'dquote', "'": 'squote', 'foo': 'bar', } self.assertEqual(f'''{d["'"]}''', 'squote') self.assertEqual(f"""{d['"']}""", 'dquote') self.assertEqual(f'{d["foo"]}', 'bar') self.assertEqual(f"{d['foo']}", 'bar') if __name__ == '__main__': unittest.main() Cython-0.26.1/tests/run/special_methods_T561_py3.pyx0000664000175000017500000000554212542002467023025 0ustar stefanstefan00000000000000# ticket: 561 # tag: py3 # This file tests the behavior of special methods under Python 3 # after #561. (Only methods whose behavior differs between Python 2 and 3 # are tested here; see special_methods_T561.pyx for the rest of the tests.) __doc__ = u""" >>> vs0 = VerySpecial(0) VS __init__ 0 >>> # Python 3 does not use __cmp__, so any provided __cmp__ method is >>> # discarded under Python 3. >>> vs0_cmp = vs0.__cmp__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__cmp__' >>> # Python 3 does not use __div__ or __idiv__, so these methods are >>> # discarded under Python 3. >>> vs0_div = vs0.__div__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__div__' >>> vs0_rdiv = vs0.__rdiv__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__rdiv__' >>> vs0_idiv = vs0.__idiv__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__idiv__' >>> # Python 3 does not use __oct__ or __hex__, so these methods are >>> # discarded under Python 3. >>> vs0_oct = vs0.__oct__ Traceback (most recent call last): ... 
AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__oct__' >>> vs0_hex = vs0.__hex__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.VerySpecial' object has no attribute '__hex__' >>> # Python 3 does not use __long__; if you define __long__ but not >>> # __int__, the __long__ definition will be used for __int__. >>> Ll = Long().__long__ Traceback (most recent call last): ... AttributeError: 'special_methods_T561_py3.Long' object has no attribute '__long__' >>> Li = Long().__int__ >>> Li() Long __long__ >>> # As of Python 3, defining __nonzero__ gives you a __bool__ method >>> # instead. >>> vs0_bool = vs0.__bool__ >>> vs0_bool() VS __nonzero__ 0 False """ cdef class VerySpecial: cdef readonly int value def __init__(self, v): self.value = v print "VS __init__ %d" % self.value def __nonzero__(self): print "VS __nonzero__ %d" % self.value def __oct__(self): print "VS __oct__ %d" % self.value def __hex__(self): print "VS __hex__ %d" % self.value def __cmp__(self, other): print "VS __cmp__ %d %d" % (self.value, other.value) def __div__(self, other): print "VS __div__ %d %d" % (self.value, other.value) def __idiv__(self, other): print "VS __idiv__ %d /= %d" % (self.value, other.value) cdef class Long: def __long__(self): print "Long __long__" Cython-0.26.1/tests/run/test_call.py0000664000175000017500000000610313023021033020105 0ustar stefanstefan00000000000000import unittest # The test cases here cover several paths through the function calling # code. They depend on the METH_XXX flag that is used to define a C # function, which can't be verified from Python. If the METH_XXX decl # for a C function changes, these tests may not cover the right paths. class CFunctionCalls(unittest.TestCase): def test_varargs0(self): self.assertRaises(TypeError, {}.__contains__) def test_varargs1(self): {}.__contains__(0) def test_varargs2(self): self.assertRaises(TypeError, {}.__contains__, 0, 1) def test_varargs0_ext(self): try: {}.__contains__(*()) except TypeError: pass def test_varargs1_ext(self): {}.__contains__(*(0,)) def test_varargs2_ext(self): try: {}.__contains__(*(1, 2)) except TypeError: pass else: raise RuntimeError def test_varargs0_kw(self): self.assertRaises(TypeError, {}.__contains__, x=2) def test_varargs1_kw(self): self.assertRaises(TypeError, {}.__contains__, x=2) def test_varargs2_kw(self): self.assertRaises(TypeError, {}.__contains__, x=2, y=2) def test_oldargs0_0(self): {}.keys() def test_oldargs0_1(self): self.assertRaises(TypeError, {}.keys, 0) def test_oldargs0_2(self): self.assertRaises(TypeError, {}.keys, 0, 1) def test_oldargs0_0_ext(self): {}.keys(*()) def test_oldargs0_1_ext(self): try: {}.keys(*(0,)) except TypeError: pass else: raise RuntimeError def test_oldargs0_2_ext(self): try: {}.keys(*(1, 2)) except TypeError: pass else: raise RuntimeError ### Cython makes this a compile time error # def test_oldargs0_0_kw(self): # try: # {}.keys(x=2) # except TypeError: # pass # else: # raise RuntimeError def test_oldargs0_1_kw(self): self.assertRaises(TypeError, {}.keys, x=2) def test_oldargs0_2_kw(self): self.assertRaises(TypeError, {}.keys, x=2, y=2) def test_oldargs1_0(self): self.assertRaises(TypeError, [].count) def test_oldargs1_1(self): [].count(1) def test_oldargs1_2(self): self.assertRaises(TypeError, [].count, 1, 2) def test_oldargs1_0_ext(self): try: [].count(*()) except TypeError: pass else: raise RuntimeError def test_oldargs1_1_ext(self): [].count(*(1,)) def test_oldargs1_2_ext(self): try: [].count(*(1, 2)) except 
TypeError: pass else: raise RuntimeError def test_oldargs1_0_kw(self): self.assertRaises(TypeError, [].count, x=2) def test_oldargs1_1_kw(self): self.assertRaises(TypeError, [].count, {}, x=2) def test_oldargs1_2_kw(self): self.assertRaises(TypeError, [].count, x=2, y=2) if __name__ == "__main__": unittest.main() Cython-0.26.1/tests/run/for_in_string.pyx0000664000175000017500000001342312542002467021207 0ustar stefanstefan00000000000000cimport cython bytes_abc = b'abc' bytes_ABC = b'ABC' bytes_abc_null = b'a\0b\0c' bytes_ABC_null = b'A\0B\0C' unicode_abc = u'abc' unicode_ABC = u'ABC' unicode_abc_null = u'a\0b\0c' unicode_ABC_null = u'A\0B\0C' def for_in_bytes(bytes s): """ >>> for_in_bytes(bytes_abc) 'X' >>> for_in_bytes(bytes_ABC) 'C' >>> for_in_bytes(bytes_abc_null) 'X' >>> for_in_bytes(bytes_ABC_null) 'C' """ for c in s: # Py2/Py3 if c == b'C' or c == c'C': return 'C' else: return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_char_in_bytes(bytes s): """ >>> for_char_in_bytes(bytes_abc) 'X' >>> for_char_in_bytes(bytes_ABC) 'C' >>> for_char_in_bytes(bytes_abc_null) 'X' >>> for_char_in_bytes(bytes_ABC_null) 'C' """ cdef char c for c in s: if c == b'C': return 'C' else: return 'X' #### Py2 and Py3 behave differently here: Py2->bytes, Py3->integer ## ## @cython.test_assert_path_exists("//ForFromStatNode") ## @cython.test_fail_if_path_exists("//ForInStatNode") ## def for_obj_in_bytes_slice(bytes s): ## """ ## >>> for_obj_in_bytes_slice(bytes_abc) ## 'X' ## >>> for_obj_in_bytes_slice(bytes_ABC) ## 'B' ## >>> for_obj_in_bytes_slice(bytes_abc_null) ## 'X' ## >>> for_obj_in_bytes_slice(bytes_ABC_null) ## 'B' ## """ ## for c in s[1:-1]: ## if c == b'B': ## return 'B' ## else: ## return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_char_in_bytes_slice(bytes s): """ >>> for_char_in_bytes_slice(bytes_abc) 'X' >>> for_char_in_bytes_slice(bytes_ABC) 'B' >>> for_char_in_bytes_slice(bytes_abc_null) 'X' >>> for_char_in_bytes_slice(bytes_ABC_null) 'B' """ cdef char c for c in s[1:-1]: if c == c'B': return 'B' else: return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_char_in_enumerate_bytes(bytes s): """ >>> for_char_in_enumerate_bytes(bytes_abc) 'X' >>> for_char_in_enumerate_bytes(bytes_ABC) 2 >>> for_char_in_enumerate_bytes(bytes_abc_null) 'X' >>> for_char_in_enumerate_bytes(bytes_ABC_null) 4 """ cdef char c cdef Py_ssize_t i for i, c in enumerate(s): if c == b'C': return i else: return 'X' #### Py2 and Py3 behave differently here: Py2->bytes, Py3->integer ## ## @cython.test_assert_path_exists("//ForFromStatNode") ## @cython.test_fail_if_path_exists("//ForInStatNode") ## def for_pyvar_in_char_ptr(char* c_string): ## """ ## >>> for_pyvar_in_char_ptr( (bytes_abc+bytes_ABC) * 2 ) ## [True, True, True, False, False, False, True, True, True, False] ## >>> for_pyvar_in_char_ptr( bytes_abc_null * 2 ) ## [True, False, True, False, True, True, False, True, False, True] ## """ ## in_test = [] ## cdef object c ## for c in c_string[:10]: ## in_test.append( c in b'abc' ) ## return in_test @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_char_in_char_ptr(char* c_string): """ >>> for_char_in_char_ptr( (bytes_abc+bytes_ABC) * 2 ) [True, True, True, False, False, False, True, True, True, False] >>> for_char_in_char_ptr( bytes_abc_null * 2 ) [True, 
False, True, False, True, True, False, True, False, True] """ in_test = [] cdef char c for c in c_string[:10]: in_test.append( c in b'abc' ) return in_test @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_pyunicode_in_unicode(unicode s): """ >>> for_pyunicode_in_unicode(unicode_abc) 'X' >>> for_pyunicode_in_unicode(unicode_ABC) 'C' >>> for_pyunicode_in_unicode(unicode_abc_null) 'X' >>> for_pyunicode_in_unicode(unicode_ABC_null) 'C' """ cdef Py_UNICODE c for c in s: if c == u'C': return 'C' else: return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_pyunicode_in_enumerate_unicode(unicode s): """ >>> for_pyunicode_in_enumerate_unicode(unicode_abc) 'X' >>> for_pyunicode_in_enumerate_unicode(unicode_ABC) 2 >>> for_pyunicode_in_enumerate_unicode(unicode_abc_null) 'X' >>> for_pyunicode_in_enumerate_unicode(unicode_ABC_null) 4 """ cdef Py_UNICODE c cdef Py_ssize_t i for i, c in enumerate(s): if c == u'C': return i else: return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_pyucs4_in_unicode(unicode s): """ >>> for_pyucs4_in_unicode(unicode_abc) 'X' >>> for_pyucs4_in_unicode(unicode_ABC) 'C' >>> for_pyucs4_in_unicode(unicode_abc_null) 'X' >>> for_pyucs4_in_unicode(unicode_ABC_null) 'C' """ cdef Py_UCS4 c for c in s: if c == u'C': return 'C' else: return 'X' @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") def for_pyucs4_in_enumerate_unicode(unicode s): """ >>> for_pyucs4_in_enumerate_unicode(unicode_abc) 'X' >>> for_pyucs4_in_enumerate_unicode(unicode_ABC) 2 >>> for_pyucs4_in_enumerate_unicode(unicode_abc_null) 'X' >>> for_pyucs4_in_enumerate_unicode(unicode_ABC_null) 4 """ cdef Py_UCS4 c cdef Py_ssize_t i for i, c in enumerate(s): if c == u'C': return i else: return 'X' Cython-0.26.1/tests/run/builtin_slice.pyx0000664000175000017500000000251212542002467021167 0ustar stefanstefan00000000000000# mode: run def slice1(stop): """ >>> list(range(8)) [0, 1, 2, 3, 4, 5, 6, 7] >>> list(range(10))[slice1(8)] [0, 1, 2, 3, 4, 5, 6, 7] >>> slice1(1) slice(None, 1, None) >>> slice1(10) slice(None, 10, None) >>> slice1(None) slice(None, None, None) >>> slice1(1) == slice(1) True >>> slice1(None) == slice(None) True """ return slice(stop) def slice1_const(): """ >>> slice1_const() == slice(12) True """ return slice(12) def slice2(start, stop): """ >>> list(range(2, 8)) [2, 3, 4, 5, 6, 7] >>> list(range(10))[slice2(2, 8)] [2, 3, 4, 5, 6, 7] >>> slice2(1, 10) slice(1, 10, None) >>> slice2(None, 10) slice(None, 10, None) >>> slice2(4, None) slice(4, None, None) """ return slice(start, stop) def slice2_const(): """ >>> slice2_const() == slice(None, 12) True """ return slice(None, 12) def slice3(start, stop, step): """ >>> list(range(2, 8, 3)) [2, 5] >>> list(range(10))[slice3(2, 8, 3)] [2, 5] >>> slice3(2, None, 3) slice(2, None, 3) >>> slice3(None, 3, 2) slice(None, 3, 2) """ return slice(start, stop, step) def slice3_const(): """ >>> slice3_const() == slice(12, None, 34) True """ return slice(12, None, 34) Cython-0.26.1/tests/run/cpdef_temps_T411.pyx0000664000175000017500000000053612542002467021350 0ustar stefanstefan00000000000000# ticket: 411 cdef class A: """ >>> A().is_True() True >>> A().is_False() False """ cpdef is_True(self): return True cpdef is_False(self): return not self.is_True() class B(A): """ >>> B().is_True() True >>> B().is_False() False """ 
def is_True(self): return True Cython-0.26.1/tests/run/refcount_in_meth.pyx0000664000175000017500000000236512542002467021700 0ustar stefanstefan00000000000000#!/usr/bin/env python __doc__=u""" >>> t = RefCountInMeth() >>> t.chk_meth() True >>> t.chk_nogil() True >>> t.chk_meth_if() True >>> t.chk_nogil_if() True """ from cpython.ref cimport PyObject def get_refcount(obj): return (obj).ob_refcnt cdef class RefCountInMeth(object): cdef double value def __cinit__(self): self.value = 1.5 cdef double c_get_value(self) nogil: return self.value cdef double c_get_value_if(self) nogil: cdef double v if 9>4: v = 2.3 return self.value cdef int c_meth(self): cdef int v v = get_refcount(self) return v cdef int c_meth_if(self): cdef int v if 5>6: v = 7 v = get_refcount(self) return v def chk_meth(self): cdef int a,b a = get_refcount(self) b = self.c_meth() return a==b def chk_meth_if(self): cdef int a,b a = get_refcount(self) b = self.c_meth_if() return a==b def chk_nogil(self): cdef double v v = self.c_get_value() return v==self.value def chk_nogil_if(self): cdef double v v = self.c_get_value_if() return v==self.value Cython-0.26.1/tests/run/powop.pyx0000664000175000017500000000657312542002467017521 0ustar stefanstefan00000000000000def f(obj2, obj3): """ >>> f(1.0, 2.95)[0] == f(1.0, 2.95)[1] True """ cdef float flt1, flt2, flt3 flt2, flt3 = obj2, obj3 flt1 = flt2 ** flt3 obj1 = obj2 ** obj3 return flt1, obj1 def g(i): """ >>> g(4) 1024 """ return i ** 5 def h(i): """ >>> h(4) 625 """ return 5 ** i def constant_py(): """ >>> constant_py() == 2 ** 10 True """ result = (2) ** 10 return result def constant_long(): """ >>> constant_long() == 2 ** 36 True """ result = (2L) ** 36 return result def small_int_pow(long s): """ >>> small_int_pow(3) (1, 3, 9, 27, 81) >>> small_int_pow(-5) (1, -5, 25, -125, 625) """ return s**0, s**1, s**2, s**3, s**4 def int_pow(short a, short b): """ >>> int_pow(7, 2) 49 >>> int_pow(5, 3) 125 >>> int_pow(2, 10) 1024 """ return a**b class I(int): """ Copied from CPython's test_descr.py >>> I(2) ** I(3) I(8) >>> 2 ** I(3) I(8) >>> I(3).pow2() I(8) """ def __repr__(self): return 'I(%s)' % int(self) def __pow__(self, other, mod=None): if mod is None: return I(pow(int(self), int(other))) else: return I(pow(int(self), int(other), int(mod))) def __rpow__(self, other, mod=None): if mod is None: return I(pow(int(other), int(self), mod)) else: return I(pow(int(other), int(self), int(mod))) def pow2(self): return 2 ** self def optimised_pow2(n): """ >>> optimised_pow2(0) 1 >>> optimised_pow2(1) 2 >>> optimised_pow2(10) 1024 >>> optimised_pow2(30) 1073741824 >>> print(repr(optimised_pow2(31)).rstrip('L')) 2147483648 >>> print(repr(optimised_pow2(32)).rstrip('L')) 4294967296 >>> print(repr(optimised_pow2(60)).rstrip('L')) 1152921504606846976 >>> print(repr(optimised_pow2(63)).rstrip('L')) 9223372036854775808 >>> print(repr(optimised_pow2(64)).rstrip('L')) 18446744073709551616 >>> print(repr(optimised_pow2(100)).rstrip('L')) 1267650600228229401496703205376 >>> optimised_pow2(30000) == 2 ** 30000 True >>> optimised_pow2(-1) 0.5 >>> optimised_pow2(0.5) == 2 ** 0.5 True >>> optimised_pow2('test') Traceback (most recent call last): TypeError: unsupported operand type(s) for ** or pow(): 'int' and 'str' """ if isinstance(n, (int, long)) and 0 <= n < 1000: assert isinstance(2.0 ** n, float), 'float %s' % n assert isinstance(2 ** n, (int, long)), 'int %s' % n return 2 ** n def optimised_pow2_inplace(n): """ >>> optimised_pow2_inplace(0) 1 >>> optimised_pow2_inplace(1) 2 >>> 
optimised_pow2_inplace(10) 1024 >>> optimised_pow2_inplace(30) 1073741824 >>> print(repr(optimised_pow2_inplace(32)).rstrip('L')) 4294967296 >>> print(repr(optimised_pow2_inplace(100)).rstrip('L')) 1267650600228229401496703205376 >>> optimised_pow2_inplace(30000) == 2 ** 30000 True >>> optimised_pow2_inplace(-1) 0.5 >>> optimised_pow2_inplace(0.5) == 2 ** 0.5 True >>> optimised_pow2_inplace('test') Traceback (most recent call last): TypeError: unsupported operand type(s) for ** or pow(): 'int' and 'str' """ x = 2 x **= n return x Cython-0.26.1/tests/run/ass2cglobal.pyx0000664000175000017500000000033312542002467020535 0ustar stefanstefan00000000000000__doc__ = u""" >>> what() 0 5 >>> f(5) >>> what() 42 5 >>> f(6) >>> what() 42 6 >>> f("spam") >>> what() 42 spam """ cdef int i = 0 cdef x = 5 def f(a): global i, x i = 42 x = a def what(): print i,x Cython-0.26.1/tests/run/charescape.pyx0000664000175000017500000000245712542002467020450 0ustar stefanstefan00000000000000import sys if sys.version_info[0] < 3: __doc__ = u""" >>> s = test() >>> assert s == ''.join(map(chr, range(1,49))), repr(s) """ else: __doc__ = u""" >>> s = test() >>> assert s == bytes(range(1,49)), repr(s) """ def test(): cdef char[50] s s[ 0] = c'\0' s[ 1] = c'\x01' s[ 2] = c'\x02' s[ 3] = c'\x03' s[ 4] = c'\x04' s[ 5] = c'\x05' s[ 6] = c'\x06' s[ 7] = c'\x07' s[ 8] = c'\x08' s[ 9] = c'\x09' s[10] = c'\x0A' s[11] = c'\x0B' s[12] = c'\x0C' s[13] = c'\x0D' s[14] = c'\x0E' s[15] = c'\x0F' s[16] = c'\x10' s[17] = c'\x11' s[18] = c'\x12' s[19] = c'\x13' s[20] = c'\x14' s[21] = c'\x15' s[22] = c'\x16' s[23] = c'\x17' s[24] = c'\x18' s[25] = c'\x19' s[26] = c'\x1A' s[27] = c'\x1B' s[28] = c'\x1C' s[29] = c'\x1D' s[30] = c'\x1E' s[31] = c'\x1F' s[32] = c'\x20' s[33] = c'\x21' s[34] = c'\x22' s[35] = c'\x23' s[36] = c'\x24' s[37] = c'\x25' s[38] = c'\x26' s[39] = c'\x27' s[40] = c'\x28' s[41] = c'\x29' s[42] = c'\x2A' s[43] = c'\x2B' s[44] = c'\x2C' s[45] = c'\x2D' s[46] = c'\x2E' s[47] = c'\x2F' s[48] = c'\x30' s[49] = c'\x00' assert s[ 0] == c'\x00' assert s[49] == c'\0' return &s[1] Cython-0.26.1/tests/run/tupleassign.pyx0000664000175000017500000000603612542002467020705 0ustar stefanstefan00000000000000t = (1,2,3) l = [1,2,3] def assign3(t): """ >>> assign3(l) (1, 2, 3) >>> assign3(t) (1, 2, 3) >>> assign3((1,)) Traceback (most recent call last): ValueError: need more than 1 value to unpack >>> assign3((1,2)) Traceback (most recent call last): ValueError: need more than 2 values to unpack >>> assign3((1,2,3,4)) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) """ a,b,c = t return (a,b,c) def assign3_typed(tuple t): """ >>> assign3_typed(t) (1, 2, 3) >>> assign3_typed(l) Traceback (most recent call last): TypeError: Argument 't' has incorrect type (expected tuple, got list) >>> a,b,c = (1,) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: ... >>> assign3_typed((1,)) Traceback (most recent call last): ValueError: need more than 1 value to unpack >>> a,b,c = (1,2) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: ... >>> assign3_typed((1,2)) Traceback (most recent call last): ValueError: need more than 2 values to unpack >>> a,b,c = (1,2,3,4) # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: ... >>> assign3_typed((1,2,3,4)) Traceback (most recent call last): ValueError: too many values to unpack (expected 3) >>> a,b = 99,98 >>> a,b = t # doctest: +ELLIPSIS Traceback (most recent call last): ValueError: ... 
>>> a,b (99, 98) """ a,b,c = t return (a,b,c) def assign3_int(t): """ >>> assign3_int(l) (1, 2, 3) """ cdef int a,b,c a,b,c = t return (a,b,c) def assign3_mixed1(t): """ >>> assign3_mixed1(l) (1, 2, 3) """ cdef int a a,b,c = t return (a,b,c) def assign3_mixed2(t): """ >>> assign3_mixed2(l) (1, 2, 3) """ cdef int b a,b,c = t return (a,b,c) def assign3_mixed3(t): """ >>> assign3_mixed3(l) (1, 2, 3) """ cdef int c a,b,c = t return (a,b,c) def assign3_mixed4(t): cdef int b,c a,b,c = t return (a,b,c) def test_overwrite(t): """ >>> test_overwrite(l) (99, 98) >>> test_overwrite(t) (99, 98) """ a,b = 99,98 try: a,b = t except ValueError: pass return (a,b) def test_overwrite_int(t): """ >>> test_overwrite_int(l) (99, 98) >>> test_overwrite_int(t) (99, 98) """ cdef int a,b a,b = 99,98 try: a,b = t except ValueError: pass return (a,b) def test_overwrite_mixed(t): """ >>> test_overwrite_mixed(l) (99, 98) >>> test_overwrite_mixed(t) (99, 98) """ cdef int b a,b = 99,98 try: a,b = t except ValueError: pass return (a,b) def test_overwrite_mixed2(t): """ >>> test_overwrite_mixed2(l) (99, 98) >>> test_overwrite_mixed2(t) (99, 98) """ cdef int a a,b = 99,98 try: a,b = t except ValueError: pass return (a,b) Cython-0.26.1/tests/run/enumerate_T316.pyx0000664000175000017500000001116412542002467021047 0ustar stefanstefan00000000000000# ticket: 316 cimport cython @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def go_py_enumerate(): """ >>> go_py_enumerate() 0 1 1 2 2 3 3 4 """ for i,k in enumerate(range(1,5)): print i, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def py_enumerate_list_index_target(): """ >>> py_enumerate_list_index_target() [0] 1 [1] 2 [2] 3 [3] 4 """ target = [None] for target[0],k in enumerate(range(1,5)): print target, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def go_py_enumerate_start(): """ >>> go_py_enumerate_start() 5 1 6 2 7 3 8 4 """ for i,k in enumerate(range(1,5), 5): print i, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def go_c_enumerate(): """ >>> go_c_enumerate() 0 1 1 2 2 3 3 4 """ cdef int i,k for i,k in enumerate(range(1,5)): print i, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def c_enumerate_carray_target(): """ >>> c_enumerate_carray_target() 0 1 1 2 2 3 3 4 """ cdef int k cdef int[1] i for i[0],k in enumerate(range(1,5)): print i[0], k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def go_c_enumerate_step(): """ >>> go_c_enumerate_step() 0 1 1 3 2 5 """ cdef int i,k for i,k in enumerate(range(1,7,2)): print i, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def py_enumerate_dict(dict d): """ >>> py_enumerate_dict({}) :: 55 99 >>> py_enumerate_dict(dict(a=1, b=2, c=3)) 0 True 1 True 2 True :: 2 True """ cdef int i = 55 k = 99 keys = list(d.keys()) for i,k in enumerate(d): k = keys[i] == k print i, k print u"::", i, k @cython.test_fail_if_path_exists("//SimpleCallNode") def py_enumerate_break(*t): """ >>> py_enumerate_break(1,2,3,4) 0 1 :: 0 1 """ i,k = 55,99 for i,k in enumerate(t): print i, k break print u"::", i, k @cython.test_fail_if_path_exists("//SimpleCallNode") def py_enumerate_return(*t): """ >>> py_enumerate_return() :: 55 99 >>> py_enumerate_return(1,2,3,4) 0 1 """ i,k = 55,99 for i,k in enumerate(t): print i, k return print u"::", i, k @cython.test_fail_if_path_exists("//SimpleCallNode") 
def py_enumerate_continue(*t): """ >>> py_enumerate_continue(1,2,3,4) 0 1 1 2 2 3 3 4 :: 3 4 """ i,k = 55,99 for i,k in enumerate(t): print i, k continue print u"::", i, k @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def empty_c_enumerate(): """ >>> empty_c_enumerate() (55, 99) """ cdef int i = 55, k = 99 for i,k in enumerate(range(0)): print i, k return i, k # not currently optimised def single_target_enumerate(): """ >>> single_target_enumerate() 0 1 1 2 2 3 3 4 """ for t in enumerate(range(1,5)): print t[0], t[1] @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def multi_enumerate(): """ >>> multi_enumerate() 0 0 0 1 1 1 1 2 2 2 2 3 3 3 3 4 """ for a,(b,(c,d)) in enumerate(enumerate(enumerate(range(1,5)))): print a,b,c,d @cython.test_fail_if_path_exists("//SimpleCallNode//NameNode[@name = 'enumerate']") def multi_enumerate_start(): """ >>> multi_enumerate_start() 0 2 0 1 1 3 1 2 2 4 2 3 3 5 3 4 """ for a,(b,(c,d)) in enumerate(enumerate(enumerate(range(1,5)), 2)): print a,b,c,d @cython.test_fail_if_path_exists("//SimpleCallNode") def multi_c_enumerate(): """ >>> multi_c_enumerate() 0 0 0 1 1 1 1 2 2 2 2 3 3 3 3 4 """ cdef int a,b,c,d for a,(b,(c,d)) in enumerate(enumerate(enumerate(range(1,5)))): print a,b,c,d @cython.test_fail_if_path_exists("//SimpleCallNode") def convert_target_enumerate(L): """ >>> convert_target_enumerate([2,3,5]) 0 2 1 3 2 5 """ cdef int a,b for a, b in enumerate(L): print a,b @cython.test_fail_if_path_exists("//SimpleCallNode") def convert_target_enumerate_start(L, int n): """ >>> convert_target_enumerate_start([2,3,5], 3) 3 2 4 3 5 5 """ cdef int a,b for a, b in enumerate(L, n): print a,b Cython-0.26.1/tests/run/py_unicode_strings.pyx0000664000175000017500000000507712542002467022262 0ustar stefanstefan00000000000000# tag: py_unicode_strings import sys cimport cython from libc.string cimport memcpy, strcpy cdef bint Py_UNICODE_equal(const Py_UNICODE* u1, const Py_UNICODE* u2): while u1[0] != 0 and u2[0] != 0 and u1[0] == u2[0]: u1 += 1 u2 += 1 return u1[0] == u2[0] ctypedef Py_UNICODE* LPWSTR cdef unicode uobj = u'unicode\u1234' cdef unicode uobj1 = u'u' cdef Py_UNICODE* c_pu_str = u"unicode\u1234" cdef Py_UNICODE[42] c_pu_arr cdef LPWSTR c_wstr = u"unicode\u1234" cdef Py_UNICODE* c_pu_empty = u"" cdef char* c_empty = "" cdef unicode uwide_literal = u'\U00020000\U00020001' cdef Py_UNICODE* c_pu_wide_literal = u'\U00020000\U00020001' memcpy(c_pu_arr, c_pu_str, sizeof(Py_UNICODE) * (len(uobj) + 1)) def test_c_to_python(): """ >>> test_c_to_python() """ assert c_pu_arr == uobj assert c_pu_str == uobj assert c_wstr == uobj assert c_pu_arr[1:] == uobj[1:] assert c_pu_str[1:] == uobj[1:] assert c_wstr[1:] == uobj[1:] assert c_pu_arr[:1] == uobj[:1] assert c_pu_arr[:1] == uobj[:1] assert c_pu_str[:1] == uobj[:1] assert c_wstr[:1] == uobj[:1] assert c_pu_arr[1:7] == uobj[1:7] assert c_pu_str[1:7] == uobj[1:7] assert c_wstr[1:7] == uobj[1:7] assert c_pu_arr[1] == uobj[1] assert c_pu_str[1] == uobj[1] assert c_wstr[1] == uobj[1] assert len(c_pu_str) == 8 assert len(c_pu_arr) == 8 assert len(c_wstr) == 8 assert sizeof(c_pu_arr) == sizeof(Py_UNICODE) * 42 assert sizeof(c_pu_str) == sizeof(void*) assert c_pu_wide_literal == uwide_literal if sizeof(Py_UNICODE) >= 4: assert len(c_pu_wide_literal) == 2 else: assert len(c_pu_wide_literal) == 4 if sys.version_info >= (3, 3): # Make sure len(unicode) is not reverted to pre-3.3 behavior assert len(uwide_literal) == 2 assert u'unicode' assert not u'' assert 
c_pu_str assert c_pu_empty def test_python_to_c(): """ >>> test_python_to_c() """ cdef unicode u assert Py_UNICODE_equal(c_pu_arr, uobj) assert Py_UNICODE_equal(c_pu_str, uobj) assert Py_UNICODE_equal(c_pu_str, uobj) u = uobj[1:] assert Py_UNICODE_equal(c_pu_str + 1, u) assert Py_UNICODE_equal(c_wstr + 1, u) u = uobj[:1] assert Py_UNICODE_equal(u"u", u) u = uobj[1:7] assert Py_UNICODE_equal(u"nicode", u) u = uobj[1] assert Py_UNICODE_equal(u"n", u) assert Py_UNICODE_equal(uwide_literal, c_pu_wide_literal) assert len(u"abc\0") == 4 assert len(u"abc\0") == 3 Cython-0.26.1/tests/run/cdef_function_kwargs.pyx0000664000175000017500000002106612542002467022533 0ustar stefanstefan00000000000000cimport cython from libc.string cimport strstr cdef cfunc(a,b,c,d): return (a,b,c,d) cpdef cpfunc(a,b,c,d): return (a,b,c,d) cdef optargs(a, b=2, c=3): return (a,b,c) ctypedef int (*cfuncptr_type)(int a, int b) cdef int cfuncptr(int a, int b): print a, b cdef cfuncptr_type get_cfuncptr(): return cfuncptr sideeffect = [] cdef side_effect(x): sideeffect.append(x) return x @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cfunc_all_keywords(): """ >>> cfunc_all_keywords() (1, 2, 3, 4) """ return cfunc(a=1, b=2, c=3, d=4) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cfunc_some_keywords(): """ >>> cfunc_some_keywords() (1, 2, 3, 4) """ return cfunc(1, 2, c=3, d=4) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cfunc_some_keywords_unordered(): """ >>> cfunc_some_keywords_unordered() (1, 2, 3, 4) """ return cfunc(1, 2, d=4, c=3) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cfunc_some_keywords_unordered_sideeffect(): """ >>> del sideeffect[:] >>> cfunc_some_keywords_unordered_sideeffect() (1, 2, 3, 4) >>> sideeffect [4, 3] """ return cfunc(1, 2, d=side_effect(4), c=side_effect(3)) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cpfunc_all_keywords(): """ >>> cpfunc_all_keywords() (1, 2, 3, 4) """ return cpfunc(a=1, b=2, c=3, d=4) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cpfunc_some_keywords(): """ >>> cpfunc_some_keywords() (1, 2, 3, 4) """ return cpfunc(1, 2, c=3, d=4) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cpfunc_some_keywords_unordered(): """ >>> cpfunc_some_keywords_unordered() (1, 2, 3, 4) """ return cpfunc(1, 2, d=4, c=3) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cpfunc_some_keywords_unordered_sideeffect(): """ >>> del sideeffect[:] >>> cpfunc_some_keywords_unordered_sideeffect() (1, 2, 3, 4) >>> sideeffect [4, 3] """ return cpfunc(1, 2, d=side_effect(4), c=side_effect(3)) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def libc_strstr(): """ >>> libc_strstr() (True, True, True, True, True) """ return ( strstr("xabcy", "abc") is not NULL, strstr("abc", "xabcy") is NULL, strstr(needle="abc", haystack="xabcz") is not NULL, strstr(needle="xabcz", haystack="abc") is NULL, strstr(haystack="abc", needle="xabcz") is NULL, ) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cdef_optargs(): 
""" >>> cdef_optargs() (11, 2, 3) (11, 2, 3) (11, 12, 3) (11, 12, 3) (11, 12, 3) (11, 12, 3) (11, 12, 3) (11, 12, 13) (11, 12, 13) (11, 12, 13) (11, 12, 13) (11, 12, 13) (11, 12, 13) (11, 12, 13) """ print(optargs(11)) print(optargs(a=11)) print(optargs(11, 12)) print(optargs(11, b=12)) print(optargs(a=11, b=12)) print(optargs(b=12, a=11)) print(optargs(a=11, b=12)) print(optargs(11, 12, 13)) print(optargs(11, 12, c=13)) print(optargs(11, c=13, b=12)) print(optargs(a=11, b=12, c=13)) print(optargs(b=12, a=11, c=13)) print(optargs(b=12, c=13, a=11)) print(optargs(c=13, a=11, b=12)) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def cdef_funcptr(): """ >>> cdef_funcptr() 1 2 1 2 1 2 1 2 """ cdef cfuncptr_type cfunc_ptr = get_cfuncptr() cfunc_ptr(1, 2) cfunc_ptr(1, b=2) cfunc_ptr(a=1, b=2) cfunc_ptr(b=2, a=1) ''' # This works but currently brings up C compiler warnings # because the format string is not a literal C string. from libc.stdio cimport snprintf @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def varargs(): """ >>> print(varargs()) abc """ cdef char[10] buffer retval = snprintf(buffer, template="abc", size=10) if retval < 0: raise MemoryError() return buffer[:retval].decode('ascii') ''' cdef class ExtType: cdef cmeth(self, a, b, c, d): return (a,b,c,d) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def call_cmeth(self, ExtType ext): """ >>> x = ExtType() >>> x.call_cmeth(x) (1, 2, 3, 4) (1, 2, 3, 4) (1, 2, 3, 4) EXT (1, 2, 3, 4) (1, 2, 3, 4) (1, 2, 3, 4) """ print self.cmeth(1,2,3,4) print self.cmeth(1,2,c=3,d=4) print self.cmeth(a=1,b=2,c=3,d=4) print "EXT" print ext.cmeth(1,2,3,4) print ext.cmeth(1,2,c=3,d=4) print ext.cmeth(a=1,b=2,c=3,d=4) cpdef cpmeth(self, a, b, c, d): return (a,b,c,d) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def call_cpmeth(self, ExtType ext): """ >>> x = ExtType() >>> x.call_cpmeth(x) (1, 2, 3, 4) (1, 2, 3, 4) (1, 2, 3, 4) EXT (1, 2, 3, 4) (1, 2, 3, 4) (1, 2, 3, 4) """ print self.cpmeth(1,2,3,4) print self.cpmeth(1,2,c=3,d=4) print self.cpmeth(a=1,b=2,c=3,d=4) print "EXT" print ext.cpmeth(1,2,3,4) print ext.cpmeth(1,2,c=3,d=4) print ext.cpmeth(a=1,b=2,c=3,d=4) cdef optargs(self, a=1, b=2): return (a,b) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def call_optargs(self, ExtType ext): """ >>> x = ExtType() >>> x.call_optargs(x) (3, 4) (3, 4) (3, 4) (1, 2) (3, 2) (3, 2) EXT (3, 4) (3, 4) (3, 4) (1, 2) (3, 2) (3, 2) """ print self.optargs(3,4) print self.optargs(3,b=4) print self.optargs(a=3,b=4) print self.optargs() print self.optargs(3) print self.optargs(a=3) #print self.optargs(b=4) print "EXT" print ext.optargs(3,4) print ext.optargs(3,b=4) print ext.optargs(a=3,b=4) print ext.optargs() print ext.optargs(3) print ext.optargs(a=3) #print ext.optargs(b=4) cpdef cpmeth_optargs(self, a=1, b=2): return (a,b) @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def call_cpmeth_optargs(self, ExtType ext): """ >>> x = ExtType() >>> x.call_cpmeth_optargs(x) (3, 4) (3, 4) (3, 4) (1, 2) (3, 2) (3, 2) EXT (3, 4) (3, 4) (3, 4) (1, 2) (3, 2) (3, 2) """ print self.cpmeth_optargs(3,4) print self.cpmeth_optargs(3,b=4) print self.cpmeth_optargs(a=3,b=4) print self.cpmeth_optargs() print self.cpmeth_optargs(3) print 
self.cpmeth_optargs(a=3) #print self.cpmeth_optargs(b=4) print "EXT" print ext.cpmeth_optargs(3,4) print ext.cpmeth_optargs(3,b=4) print ext.cpmeth_optargs(a=3,b=4) print ext.cpmeth_optargs() print ext.cpmeth_optargs(3) print ext.cpmeth_optargs(a=3) #print ext.cpmeth_optargs(b=4) cpdef cpmeth_optargs1(self, a=1): return a @cython.test_fail_if_path_exists('//GeneralCallNode') @cython.test_assert_path_exists('//SimpleCallNode') def call_cpmeth_optargs1(self, ExtType ext): """ >>> x = ExtType() >>> x.call_cpmeth_optargs1(x) 1 3 3 EXT 1 3 3 """ print self.cpmeth_optargs1() print self.cpmeth_optargs1(3) print self.cpmeth_optargs1(a=3) print "EXT" print ext.cpmeth_optargs1() print ext.cpmeth_optargs1(3) print ext.cpmeth_optargs1(a=3) Cython-0.26.1/tests/run/iteratorexception.pyx0000664000175000017500000000061312542002467022112 0ustar stefanstefan00000000000000class IteratorAndIterateable: def next(self): raise ValueError def __next__(self): raise ValueError def __iter__(self): return self def f(): """ >>> f() """ try: for x in IteratorAndIterateable(): pass assert False, u"Should not reach this point, iterator has thrown exception" except ValueError: pass Cython-0.26.1/tests/run/index.pyx0000664000175000017500000002111513143605603017450 0ustar stefanstefan00000000000000__doc__ = u""" >>> index_object(100, 100) # doctest: +ELLIPSIS Traceback (most recent call last): ... TypeError: 'int' object ... """ cdef Py_ssize_t maxsize import sys if sys.version_info < (2,5): __doc__ = __doc__.replace(u"'int' object ...", u'unsubscriptable object') maxsize = min(sys.maxint, 2**31-1) else: maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None)) py_maxsize = maxsize import cython def index_tuple(tuple t, int i): """ >>> index_tuple((1,1,2,3,5), 0) 1 >>> index_tuple((1,1,2,3,5), 3) 3 >>> index_tuple((1,1,2,3,5), -1) 5 >>> index_tuple((1,1,2,3,5), 100) Traceback (most recent call last): IndexError: tuple index out of range >>> index_tuple((1,1,2,3,5), -7) Traceback (most recent call last): IndexError: tuple index out of range >>> index_tuple(None, 0) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable """ return t[i] def index_list(list L, int i): """ >>> index_list([2,3,5,7,11,13,17,19], 0) 2 >>> index_list([2,3,5,7,11,13,17,19], 5) 13 >>> index_list([2,3,5,7,11,13,17,19], -1) 19 >>> index_list([2,3,5,7,11,13,17,19], 100) Traceback (most recent call last): IndexError: list index out of range >>> index_list([2,3,5,7,11,13,17,19], -10) Traceback (most recent call last): IndexError: list index out of range >>> index_list(None, 0) Traceback (most recent call last): TypeError: 'NoneType' object is not subscriptable """ return L[i] def index_object(object o, int i): """ >>> index_object([2,3,5,7,11,13,17,19], 1) 3 >>> index_object([2,3,5,7,11,13,17,19], -1) 19 >>> index_object((1,1,2,3,5), 2) 2 >>> index_object((1,1,2,3,5), -2) 3 >>> index_object("abcdef...z", 0) 'a' >>> index_object("abcdef...z", -1) 'z' >>> index_object("abcdef...z", 100) Traceback (most recent call last): IndexError: string index out of range >>> try: index_object(None, 0) ... except TypeError: pass """ return o[i] def del_index_list(list L, Py_ssize_t index): """ >>> del_index_list(list(range(4)), 0) [1, 2, 3] >>> del_index_list(list(range(4)), 1) [0, 2, 3] >>> del_index_list(list(range(4)), -1) [0, 1, 2] >>> del_index_list(list(range(4)), py_maxsize) # doctest: +ELLIPSIS Traceback (most recent call last): IndexError: list... 
index out of range >>> del_index_list(list(range(4)), -py_maxsize) # doctest: +ELLIPSIS Traceback (most recent call last): IndexError: list... index out of range """ del L[index] return L def set_index_list(list L, Py_ssize_t index): """ >>> set_index_list(list(range(4)), 0) [5, 1, 2, 3] >>> set_index_list(list(range(4)), 1) [0, 5, 2, 3] >>> set_index_list(list(range(4)), -1) [0, 1, 2, 5] >>> set_index_list(list(range(4)), py_maxsize) # doctest: +ELLIPSIS Traceback (most recent call last): IndexError: list... index out of range >>> set_index_list(list(range(4)), -py_maxsize) # doctest: +ELLIPSIS Traceback (most recent call last): IndexError: list... index out of range """ L[index] = 5 return L # These make sure that our fast indexing works with large and unsigned types. def test_unsigned_long(): """ >>> test_unsigned_long() """ cdef int i cdef unsigned long ix cdef D = {} for i from 0 <= i < sizeof(unsigned long) * 8: ix = (1) << i D[ix] = True for i from 0 <= i < sizeof(unsigned long) * 8: ix = (1) << i assert D[ix] is True del D[ix] assert len(D) == 0 def test_unsigned_short(): """ >>> test_unsigned_short() """ cdef int i cdef unsigned short ix cdef D = {} for i from 0 <= i < sizeof(unsigned short) * 8: ix = (1) << i D[ix] = True for i from 0 <= i < sizeof(unsigned short) * 8: ix = (1) << i assert D[ix] is True del D[ix] assert len(D) == 0 def test_long_long(): """ >>> test_long_long() """ cdef int i cdef long long ix cdef D = {} for i from 0 <= i < sizeof(long long) * 8: ix = (1) << i D[ix] = True for i from 0 <= i < sizeof(long long) * 8: ix = (1) << i assert D[ix] is True del D[ix] L = [1, 2, 3] try: ix = py_maxsize + 1 except OverflowError: pass # can't test this here else: try: L[ix] = 5 except IndexError: pass else: assert False, "setting large index failed to raise IndexError" try: del L[ix] except IndexError: pass else: assert False, "deleting large index failed to raise IndexError" try: ix = -py_maxsize - 2 except OverflowError: pass # can't test this here else: try: L[ix] = 5 except IndexError: pass else: assert False, "setting large index failed to raise IndexError" try: del L[ix] except IndexError: pass else: assert False, "deleting large index failed to raise IndexError" assert len(D) == 0 def test_ulong_long(): """ >>> test_ulong_long() """ cdef unsigned long long ix L = [1, 2, 3] try: ix = py_maxsize + 1 except OverflowError: pass # can't test this here else: try: L[ix] = 5 except IndexError: pass else: assert False, "setting large index failed to raise IndexError" try: del L[ix] except IndexError: pass else: assert False, "deleting large index failed to raise IndexError" @cython.boundscheck(False) def test_boundscheck_unsigned(list L, tuple t, object o, unsigned long ix): """ >>> test_boundscheck_unsigned([1, 2, 4], (1, 2, 4), [1, 2, 4], 2) (4, 4, 4) >>> test_boundscheck_unsigned([1, 2, 4], (1, 2, 4), "", 2) Traceback (most recent call last): ... IndexError: string index out of range """ return L[ix], t[ix], o[ix] @cython.boundscheck(False) def test_boundscheck_signed(list L, tuple t, object o, long ix): """ >>> test_boundscheck_signed([1, 2, 4], (1, 2, 4), [1, 2, 4], 2) (4, 4, 4) >>> test_boundscheck_signed([1, 2, 4], (1, 2, 4), "", 2) Traceback (most recent call last): ... 
IndexError: string index out of range """ return L[ix], t[ix], o[ix] @cython.wraparound(False) def test_wraparound_signed(list L, tuple t, object o, long ix): """ >>> test_wraparound_signed([1, 2, 4], (1, 2, 4), [1, 2, 4], 2) (4, 4, 4) >>> test_wraparound_signed([1, 2, 4], (1, 2, 4), "", 2) Traceback (most recent call last): ... IndexError: string index out of range """ return L[ix], t[ix], o[ix] def large_literal_index(object o): """ >>> large_literal_index({1000000000000000000000000000000: True}) True """ return o[1000000000000000000000000000000] class LargeIndexable(object): expected = None def __len__(self): raise OverflowError def __getitem__(self, index): return index def __setitem__(self, index, value): assert index == value == self.expected self.expected = None def __delitem__(self, index): assert self.expected == index self.expected = None def test_large_indexing(obj): """ >>> obj = LargeIndexable() >>> zero, pone, none, pmaxsize, nmaxsize = test_large_indexing(obj) >>> # , p2maxsize, n2maxsize >>> zero 0 >>> pone 1 >>> none -1 >>> pmaxsize == py_maxsize True >>> nmaxsize == -py_maxsize True #>>> p2maxsize == py_maxsize*2 #True #>>> n2maxsize == -py_maxsize*2 #True """ return ( obj[0], obj[1], obj[-1], obj[maxsize], obj[-maxsize], #obj[maxsize*2], obj[-maxsize*2] # FIXME! ) def del_large_index(obj, Py_ssize_t index): """ >>> obj = LargeIndexable() >>> del_large_index(obj, 0) >>> del_large_index(obj, 1) >>> del_large_index(obj, -1) >>> del_large_index(obj, py_maxsize) >>> del_large_index(obj, -py_maxsize) """ obj.expected = index del obj[index] assert obj.expected is None def set_large_index(obj, Py_ssize_t index): """ >>> obj = LargeIndexable() >>> set_large_index(obj, 0) >>> set_large_index(obj, 1) >>> set_large_index(obj, -1) >>> set_large_index(obj, py_maxsize) >>> set_large_index(obj, -py_maxsize) """ obj.expected = index obj[index] = index assert obj.expected is None Cython-0.26.1/tests/run/inlinepxd.pxd0000664000175000017500000000007012542002467020304 0ustar stefanstefan00000000000000 cdef inline int my_add(int a, int b): return a + b Cython-0.26.1/tests/run/tracebacks.pyx0000664000175000017500000000136313143605603020446 0ustar stefanstefan00000000000000import traceback def foo1(): foo2() cdef foo2(): foo3() cdef int foo3() except -1: raise RuntimeError('my_message') def test_traceback(cline_in_traceback=None): """ >>> test_traceback() >>> test_traceback(True) >>> test_traceback(False) """ if cline_in_traceback is not None: import cython_runtime cython_runtime.cline_in_traceback = cline_in_traceback try: foo1() except: tb_string = traceback.format_exc() expected = ( 'tracebacks.pyx', 'foo1', 'foo2', 'foo3', 'line 4', 'line 7', 'line 10', 'my_message') for s in expected: assert s in tb_string, s if cline_in_traceback: assert 'tracebacks.c' in tb_string else: assert 'tracebacks.c' not in tb_string Cython-0.26.1/tests/run/cimport_alias_subclass_helper.pxd0000664000175000017500000000005112542002467024375 0ustar stefanstefan00000000000000cdef class Base: cdef bint foo(self) Cython-0.26.1/tests/run/unicode_default_auto_encoding.pyx0000664000175000017500000000047312542002467024376 0ustar stefanstefan00000000000000# cython: c_string_type = unicode # cython: c_string_encoding = default import sys if sys.version_info[0] >= 3: __doc__ = r""" >>> as_objects("ab\xff") == "ab\xff" True >>> slice_as_objects("ab\xffd", 1, 4) == "b\xff" True """ include "unicode_ascii_auto_encoding.pyx" Cython-0.26.1/tests/run/cimport_cython_T505.pyx0000664000175000017500000000040612542002467022120 0ustar 
stefanstefan00000000000000# ticket: 505 cimport cython cdef extern from "Python.h": cdef cython.unicode PyUnicode_DecodeUTF8(char* s, Py_ssize_t size, char* errors) def test_capi(): """ >>> print(test_capi()) abc """ return PyUnicode_DecodeUTF8("abc", 3, NULL) Cython-0.26.1/tests/run/bytearray_default_auto_encoding.pyx0000664000175000017500000000077512542002467024757 0ustar stefanstefan00000000000000# cython: c_string_type = bytearray # cython: c_string_encoding = default import sys if sys.version_info[0] >= 3: __doc__ = r""" >>> isinstance(as_objects("ab\xff"), bytearray) True >>> as_objects("ab\xff") == bytearray("ab\xff".encode()) True >>> isinstance(slice_as_objects("ab\xff", 1, 4), bytearray) True >>> slice_as_objects("ab\xffd", 1, 4) == bytearray("b\xff".encode()) True """ include "bytearray_ascii_auto_encoding.pyx" Cython-0.26.1/tests/run/lambda_tests.pyx0000664000175000017500000002562212542002467021013 0ustar stefanstefan00000000000000# mode: run # tag: closures, lambda # Battery of tests for closures in Cython. Based on the collection of # compiler tests from P423/B629 at Indiana University, Spring 1999 and # Fall 2000. Special thanks to R. Kent Dybvig, Dan Friedman, Kevin # Millikin, and everyone else who helped to generate the original # tests. Converted into a collection of Python/Cython tests by Craig # Citro. # # Note: This set of tests is split (somewhat randomly) into several # files, simply because putting all the tests in a single file causes # gcc and g++ to buckle under the load. # def g0(): """ >>> g0() 4000 """ return (lambda y_1: y_1)(4000) def g1(): """ >>> g1() 1 """ f_3 = (lambda x_2: x_2) return (f_3(0)+1) def g2(): """ >>> g2() 4 """ f_5 = (lambda y_4: y_4) return f_5(f_5(4)) def g3(): """ >>> g3() 4 """ return (lambda f_7: f_7(f_7(4)))((lambda y_6: y_6)) def g5(): """ >>> g5() 9000 """ def g4(): a_8 = 4000 return lambda b_9: ((a_8)+(b_9)) return g4()(5000) def g6(): """ >>> g6() 9000 """ return (lambda a_10: (lambda b_11: (a_10)+(b_11)))(4000)(5000) def g7(): """ >>> g7() 2 """ return (lambda f_13: f_13(f_13(0)))((lambda x_12: (x_12+1))) def g8(): """ >>> g8() 0 """ f_16 = (lambda x_15, y_14: x_15) a_17 = f_16(0, 1) return f_16(a_17, a_17) def g10(): """ >>> g10() 3 """ f_19 = (lambda x_18: x_18) def g9(): a_22 = 0 b_21 = 1 c_20 = 2 return (f_19(a_22))+((f_19(b_21))+(f_19(c_20))) return (f_19(0))+(g9()) def g12(): """ >>> g12() 2 """ def g11(): x_23 = 1 return lambda y_24: ((x_23)+(y_24)) f_25 = g11() x_26 = 0 return f_25(f_25(x_26)) def g14(): """ >>> g14() 3050 """ def g13(): t_29 = (lambda x_28: (x_28)+(50)) return lambda f_30: (t_29(f_30(1000))) return g13()((lambda y_27: (y_27)+(2000))) def g15(): """ >>> g15() 3050 """ return (lambda t_33: (lambda f_34: t_33(f_34(1000))))((lambda x_32: (x_32)+(50)))((lambda y_31: (y_31)+(2000))) def g17(): """ >>> g17() 2050 """ def g16(): t_35 = 50 return lambda f_36: ((t_35)+(f_36())) return g16()((lambda : 2000)) def g18(): """ >>> g18() 2050 """ return (lambda t_37: (lambda f_38: (t_37)+(f_38())))(50)((lambda : 2000)) def g20(): """ >>> g20() 700 """ def g19(): x_39 = 300 return lambda y_40: ((x_39)+(y_40)) return g19()(400) def g21(): """ >>> g21() 0 """ x_44 = 3 f_43 = (lambda x_42, y_41: x_42) if (f_43(0, 0)): return f_43(f_43(0, 0), x_44) else: return 0 def g22(): """ >>> g22() False """ f_46 = (lambda x_45: (x_45) if (((not ((x_45[0]) == 0))) if (isinstance(x_45, list)) else (False)) else (False)) return f_46([0,[0,[]]]) def g23(): """ >>> g23() False """ f_48 = (lambda x_47: (x_47) if (((not ((not ((x_47[0]) == 
0))) if (isinstance(x_47, list)) else (False))) if (x_47) else (False)) else (False)) return f_48(0) def g24(): """ >>> g24() [] """ f_50 = (lambda x_49: (x_49) if ((True) if (isinstance(x_49, list)) else ((x_49 == []))) else ([])) return f_50(0) def g25(): """ >>> g25() 0 """ y_51 = 4 f_54 = (lambda x_53, y_52: 0) return f_54(f_54(y_51, y_51), f_54(y_51, y_51)) def g26(): """ >>> g26() 0 """ y_55 = 4 f_58 = (lambda x_57, y_56: 0) return f_58(f_58(y_55, f_58(y_55, y_55)), f_58(y_55, f_58(y_55, y_55))) def g27(): """ >>> g27() 4 """ return (lambda y_59: (lambda f_61: f_61(f_61(y_59)))((lambda y_60: y_60)))(4) def g28(): """ >>> g28() 23 """ f_63 = (lambda x_62: x_62) return ((1) if (False) else (f_63(22))+1) def g29(): """ >>> g29() 5061 """ f_68 = (lambda x_65: ((not x_65)) if (x_65) else (x_65)) f2_67 = (lambda x_64: (10)*(x_64)) x_66 = 23 return ((1) if (f_68(x_66 == 0)) else ((x_66)*(f2_67((x_66-1))))+1) def g30(): """ >>> g30() 1 """ one_69 = (lambda n_70: (1) if (n_70 == 0) else (one_69((n_70-1)))) return one_69(13) def g31(): """ >>> g31() True """ even_72 = (lambda x_74: (True) if (x_74 == 0) else (odd_71((x_74-1)))) odd_71 = (lambda x_73: (False) if (x_73 == 0) else (even_72((x_73-1)))) return odd_71(13) def g32(): """ >>> g32() False """ even_76 = (lambda x_78: (True) if (x_78 == 0) else (odd_75((x_78-1)))) odd_75 = (lambda x_77: (False) if (x_77 == 0) else (even_76((x_77-1)))) return even_76(13) def g34(): """ >>> g34() True """ even_80 = (lambda x_79: x_79) def g33(): even_82 = (lambda x_84: (True) if (x_84 == 0) else (odd_81((x_84-1)))) odd_81 = (lambda x_83: (False) if (x_83 == 0) else (even_82((x_83-1)))) return odd_81(13) return even_80(g33()) def g35(): """ >>> g35() 120 """ fact_85 = (lambda n_86: (1) if (n_86 == 0) else ((n_86)*(fact_85((n_86-1))))) return fact_85(5) def g38(): """ >>> g38() 10 """ x_87 = 5 a_90 = (lambda u_101, v_100, w_99: (b_89(v_100, w_99)) if (u_101 == 0) else (a_90((u_101)-(1), v_100, w_99))) def g37(): def g36(q_93, r_92): p_94 = (q_93)*(r_92) e_96 = (lambda n_98: (c_88(p_94)) if (n_98 == 0) else (o_95((n_98)-(1)))) o_95 = (lambda n_97: (c_88(x_87)) if (n_97 == 0) else (e_96((n_97)-(1)))) return e_96((q_93)*(r_92)) return g36 b_89 = g37() c_88 = (lambda x_91: (5)*(x_91)) return a_90(3, 2, 1) def g39(): """ >>> g39() 120 """ fact_104 = (lambda fact_103, n_102: (1) if (n_102 == 0) else ((fact_103(fact_103, (n_102-1)))*(n_102))) return fact_104(fact_104, 5) def g40(): """ >>> g40() 35 """ return (lambda x_105: (lambda y_106: (lambda z_107: (lambda w_108: (lambda u_109: (x_105)+((y_106)+((z_107)+((w_108)+(u_109)))))))))(5)(6)(7)(8)(9) def g41(): """ >>> g41() 6 """ sum_112 = (lambda sum_111, ls_110: (0) if ((ls_110 == [])) else (((ls_110[0]))+(sum_111(sum_111, (ls_110[1]))))) return sum_112(sum_112, [1,[2,[3,[]]]]) def g46(): """ >>> g46() 1500 """ def g45(): def g44(): def g42(a_113): def g43(): (a_113)+(200 if True else None) return 1500 return g43 return g42 return g44()(1000) return g45()() def g53(): """ >>> g53() 2600 """ def g52(): def g51(): def g50(): def g47(a_114): def g48(b_115): a_114 = 200 if b_115 else None def g49(c_116): c_116 = 400 if 300 else None return (a_114)+((b_115)+(c_116)) return g49 return g48 return g47 return g50()(1000) return g51()(2000) return g52()(3000) def g54(): """ >>> g54() 5 """ return (lambda f_118: f_118(f_118(5)))((lambda x_117: x_117)) def g56(): """ >>> g56() 8000 """ def g55(): f_120 = (lambda x_119: (x_119)+(3000)) return lambda y_121: (f_120(f_120(y_121))) return g55()(2000) def g57(): """ >>> g57() 120 
""" fact_125 = (lambda fact_124, n_123, acc_122: (acc_122) if (n_123 == 0) else (fact_124(fact_124, (n_123-1), (n_123)*(acc_122)))) return fact_125(fact_125, 5, 1) def g58(): """ >>> g58() 3 """ f_127 = (lambda x_126: (lambda : x_126())) return f_127((lambda : 3))() def g59(): """ >>> g59() 22 """ f_129 = (lambda x_132: (x_132)+((lambda y_133: (lambda z_134: (y_133)+(z_134)))(6)(7))) g_128 = (5)+((lambda w_131, u_130: (w_131)+(u_130))(8, 9)) return g_128 def g60(): """ >>> g60() 0 """ loop_135 = (lambda : (lambda : loop_135())) loop_135() return 0 def g63(): """ >>> g63() 668 """ def g62(): def g61(): loop_137 = (lambda link_138: (lambda : link_138())) return loop_137((lambda : 668)) return g61 f_136 = g62() return f_136()() def g64(): """ >>> g64() 17 """ k_141 = (lambda x_140, y_139: x_140) b_142 = 17 return k_141(k_141(k_141, 37), 37)(b_142, (b_142)*(b_142)) def g65(): """ >>> g65() 37 """ f_145 = (lambda g_144, u_143: g_144((g_144(37)) if (u_143) else (u_143))) return f_145((lambda x_146: x_146), 75) def g66(): """ >>> g66() 4687 """ f_150 = (lambda h_148, u_147: h_148((h_148((u_147)+(37))) if (u_147) else (u_147))) w_149 = 62 return f_150((lambda x_151: (w_149)-(x_151)), (75)*(w_149)) def g67(): """ >>> g67() True """ t_153 = True f_152 = False bools_156 = [t_153,f_152] id_155 = (lambda x_154: (f_152) if ((not x_154)) else (t_153)) even_158 = (lambda x_160: ((bools_156[0])) if (id_155(x_160 == 0)) else (odd_157((x_160)-(1)))) odd_157 = (lambda y_159: (id_155((bools_156[1]))) if (y_159 == 0) else (even_158((y_159)-(1)))) return odd_157(5) def g68(): """ >>> g68() 5 """ f_162 = (lambda x_164: (x_164)+(1)) g_161 = (lambda y_163: f_162(f_162(y_163))) return (f_162(1))+(g_161(1)) def g69(): """ >>> g69() 1521 """ y_165 = 3 f_168 = (lambda x_171: (g_167((x_171)+(1))) if (x_171 == 0) else (f_168((x_171)-(y_165)))) g_167 = (lambda x_170: h_166((x_170)*(x_170))) h_166 = (lambda x_169: x_169) return g_167(39) def g70(): """ >>> g70() -1 """ f_173 = (lambda x_175: (x_175)+(1)) g_172 = (lambda y_174: f_173(f_173(y_174))) f_173 = (lambda x_176: (x_176)-(1)) return (f_173(1))+(g_172(1)) def g71(): """ >>> g71() [52, [17, [35, [17, 35]]]] """ f_180 = (lambda : (a_179)+(b_178)) a_179 = 17 b_178 = 35 h_177 = [(lambda : a_179),(lambda : b_178)] return [f_180(),[a_179,[b_178,[(h_177[0])(),(h_177[1])()]]]] def g73(): """ >>> g73() 120 """ x_183 = 5 def g72(): a_181 = 1 return lambda : (a_181) th_182 = g72() fact_184 = (lambda n_186, th_185: (th_185()) if (n_186 == 0) else ((n_186)*(fact_184((n_186)-(1), th_185)))) return fact_184(x_183, th_182) def g74(): """ >>> g74() [120, -120] """ negative_188 = (lambda n_187: (n_187 < 0)) fact_190 = (lambda n_192: (1) if (n_192 == 0) else ((n_192)*(fact_190((n_192)-(1))))) call_fact_189 = (lambda n_191: (fact_190(n_191)) if ((not negative_188(n_191))) else ((0)-(fact_190((0)-(n_191))))) return [call_fact_189(5),call_fact_189(-5)] def g75(): """ >>> g75() [[33, 55], [77, 99]] """ return (lambda a_193: (lambda b_194: (lambda c_195: (lambda d_196: [[a_193,b_194],[c_195,d_196]]))))(33)(55)(77)(99) Cython-0.26.1/tests/run/menten1.pyx0000664000175000017500000000023712542002467017713 0ustar stefanstefan00000000000000def loops(): """ >>> loops() 5 """ cdef int k for i from 0 <= i < 5: for j from 0 <= j < 2: k = i + j return k Cython-0.26.1/tests/run/complex_numbers_c89_T398.pyx0000664000175000017500000000014612542002467022757 0ustar stefanstefan00000000000000# ticket: 398 cdef extern from "complex_numbers_c89_T398.h": pass include "complex_numbers_T305.pyx" 
Cython-0.26.1/tests/run/contains_T455.pyx0000664000175000017500000000370112542002467020702 0ustar stefanstefan00000000000000# ticket: 455 def in_sequence(x, seq): """ >>> in_sequence(1, []) False >>> in_sequence(1, ()) False >>> in_sequence(1, {}) False >>> in_sequence(1, [1]) True >>> in_sequence(1, (1,)) True >>> in_sequence(1, {1:None}) True >>> in_sequence(1, None) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... >>> in_sequence(1, 1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... """ return x in seq def not_in_sequence(x, seq): """ >>> not_in_sequence(1, []) True >>> not_in_sequence(1, ()) True >>> not_in_sequence(1, {}) True >>> not_in_sequence(1, [1]) False >>> not_in_sequence(1, (1,)) False >>> not_in_sequence(1, {1:None}) False >>> not_in_sequence(1, None) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... >>> not_in_sequence(1, 1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...iterable... """ return x not in seq def in_dict(k, dict dct): """ >>> in_dict(1, {}) False >>> in_dict(1, {1:None}) True >>> in_dict(1, None) Traceback (most recent call last): ... TypeError: 'NoneType' object is not iterable """ return k in dct def not_in_dict(k, dict dct): """ >>> not_in_dict(1, {}) True >>> not_in_dict(1, {1:None}) False >>> not_in_dict(1, None) Traceback (most recent call last): ... TypeError: 'NoneType' object is not iterable """ return k not in dct def cascaded(a, b, c): """ >>> cascaded(1, 2, 3) # doctest: +ELLIPSIS Traceback (most recent call last): ... TypeError: ...iterable... >>> cascaded(-1, (1,2), (1,3)) True >>> cascaded(1, (1,2), (1,3)) False >>> cascaded(-1, (1,2), (1,0)) False """ return a not in b < c Cython-0.26.1/tests/run/external_ref_reassignment.pyx0000664000175000017500000000104012542002467023572 0ustar stefanstefan00000000000000# Test that variable visible outside of the local scope (e.g. closure, cglobals) # is set before original value is decrefed. 
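# Roughly, an assignment such as ``g = new_value`` is expected to behave like
# the following sketch (illustrative pseudo-code only, not the actual generated C):
#
#     tmp = new_value     # evaluate the right-hand side first
#     old = g             # keep the currently bound object alive
#     g = tmp             # rebind the global / closure variable
#     decref(old)         # only now drop the old reference, so a __del__
#                         # running here already observes the new binding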
cdef object g def test_cglobals_reassignment(): """ >>> test_cglobals_reassignment() 1234 """ global g class Special: def __del__(self): print g g = (Special(),) g = 1234 def test_closure_reassignment(): """ >>> test_closure_reassignment() 4321 """ class Special: def __del__(self): print c c = (Special(),) c = 4321 Cython-0.26.1/tests/run/genexpr_T715.pyx0000664000175000017500000000045412542002467020535 0ustar stefanstefan00000000000000# mode: run # ticket: 715 # tag: genexpr, comprehension def t715(*items): """ # Blocked by T724 # >>> [list(i) for i in t715([1, 2, 3], [4, 5, 6])] # [[1, 2, 3], [4, 5, 6]] >>> [list(i) for i in t715([1, 2, 3])] [[1, 2, 3]] """ return [(j for j in i) for i in items] Cython-0.26.1/tests/run/locals_rebind_T429.pyx0000664000175000017500000000100012542002467021653 0ustar stefanstefan00000000000000# ticket: 429 __doc__ = u""" >>> sorted( get_locals(1,2,3, k=5) .items()) [('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)] """ def get_locals(x, *args, **kwds): cdef int z = 5 y = "hi" return locals() def get_locals_rebound(x, *args, **kwds): """ >>> get_locals_rebound(1,2,3) 'REBOUND' """ cdef int z = 5 locals = _locals y = "hi" return locals() def _locals(): return "REBOUND" def sorted(it): l = list(it) l.sort() return l Cython-0.26.1/tests/run/cpp_iterators.pyx0000664000175000017500000000424512542002467021225 0ustar stefanstefan00000000000000# mode: run # tag: cpp, werror from libcpp.vector cimport vector from cython.operator cimport dereference as deref cdef extern from "cpp_iterators_simple.h": cdef cppclass DoublePointerIter: DoublePointerIter(double* start, int len) double* begin() double* end() def test_vector(py_v): """ >>> test_vector([1, 2, 3]) [1, 2, 3] """ cdef vector[int] v = py_v cdef vector[int] result with nogil: for item in v: result.push_back(item) return result def test_ptrs(): """ >>> test_ptrs() [1.0, 2.0, 3.0] """ cdef double a = 1 cdef double b = 2 cdef double c = 3 cdef vector[double*] v v.push_back(&a) v.push_back(&b) v.push_back(&c) return [item[0] for item in v] def test_custom(): """ >>> test_custom() [1.0, 2.0, 3.0] """ cdef double* values = [1, 2, 3] cdef DoublePointerIter* iter try: iter = new DoublePointerIter(values, 3) # TODO: It'd be nice to automatically dereference this in a way that # would not conflict with the pointer slicing iteration. 
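        # ``iter[0]`` dereferences the C++ pointer, so the comprehension below
        # iterates over the DoublePointerIter object itself, i.e. over the
        # half-open range [begin(), end()) of doubles it wraps.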
return [x for x in iter[0]] finally: del iter def test_iteration_over_heap_vector(L): """ >>> test_iteration_over_heap_vector([1,2]) [1, 2] """ cdef int i cdef vector[int] *vint = new vector[int]() try: for i in L: vint.push_back(i) return [ i for i in deref(vint) ] finally: del vint def test_iteration_in_generator(vector[int] vint): """ >>> list( test_iteration_in_generator([1,2]) ) [1, 2] """ for i in vint: yield i def test_iteration_in_generator_reassigned(): """ >>> list( test_iteration_in_generator_reassigned() ) [1] """ cdef vector[int] *vint = new vector[int]() cdef vector[int] *orig_vint = vint vint.push_back(1) reassign = True try: for i in deref(vint): yield i if reassign: reassign = False vint = new vector[int]() vint.push_back(2) finally: if vint is not orig_vint: del vint del orig_vint Cython-0.26.1/tests/run/print.pyx0000664000175000017500000000135112542002467017476 0ustar stefanstefan00000000000000def print_to_stdout(a, b): """ >>> print_to_stdout(1, 'test') 1 1 test 1 test 1 test 42 spam """ print print a print a, print b print a, b print a, b, print 42, u"spam" try: from StringIO import StringIO except ImportError: from io import StringIO def print_to_stringio(stream, a, b): """ >>> stream = StringIO() >>> print_to_stringio(stream, 1, 'test') >>> print(stream.getvalue()) 1 1 test 1 test 1 test 42 spam """ print >> stream print >> stream, a print >> stream, a, print >> stream, b print >> stream, a, b print >> stream, a, b, print >> stream, 42, u"spam" Cython-0.26.1/tests/run/r_mcintyre1.pyx0000664000175000017500000000041412542002467020575 0ustar stefanstefan00000000000000__doc__ = u""" >>> b = Bicycle() >>> b.fall_off() Falling off extremely hard >>> b.fall_off("somewhat") Falling off somewhat hard """ class Bicycle: def fall_off(self, how_hard = u"extremely"): print u"Falling off", how_hard, u"hard" Cython-0.26.1/tests/run/dynamic_attributes.pyx0000664000175000017500000000227513023021033022223 0ustar stefanstefan00000000000000# mode: run cimport cython cdef class Spam: cdef dict __dict__ cdef class SuperSpam(Spam): pass cdef class MegaSpam: pass cdef public class UltraSpam [type UltraSpam_Type, object UltraSpam_Object]: cdef dict __dict__ cdef class OwnProperty1: """ >>> obj = OwnProperty1() >>> assert obj.__dict__ == {'a': 123} """ @property def __dict__(self): return {'a': 123} cdef class OwnProperty2: """ >>> obj = OwnProperty2() >>> assert obj.__dict__ == {'a': 123} """ property __dict__: def __get__(self): return {'a': 123} def test_class_attributes(): """ >>> test_class_attributes() 'bar' """ o = Spam() o.foo = "bar" return o.foo def test_subclass_attributes(): """ >>> test_subclass_attributes() 'bar' """ o = SuperSpam() o.foo = "bar" return o.foo def test_defined_class_attributes(): """ >>> test_defined_class_attributes() 'bar' """ o = MegaSpam() o.foo = "bar" return o.foo def test_public_class_attributes(): """ >>> test_public_class_attributes() 'bar' """ o = UltraSpam() o.foo = "bar" return o.foo Cython-0.26.1/tests/run/function_as_method_py_T494.py0000664000175000017500000000021712542002467023256 0ustar stefanstefan00000000000000# ticket: 494 __doc__ = """ >>> A.foo = foo >>> A().foo() True """ class A: pass def foo(self): return self is not None Cython-0.26.1/tests/run/fastcall.pyx0000664000175000017500000000176413143605603020142 0ustar stefanstefan00000000000000# mode: run # tag: METH_FASTCALL import sys import struct from collections import deque pack = struct.pack def deque_methods(v): """ >>> deque_methods(2) [1, 2, 3, 4] """ d = deque([1, 3, 4]) assert 
list(d) == [1,3,4] if sys.version_info >= (3, 5): d.insert(1, v) else: # deque has no 2-args methods in older Python versions d.rotate(-1) d.appendleft(2) d.rotate(1) assert list(d) == [1,2,3,4] d.rotate(len(d) // 2) assert list(d) == [3,4,1,2] d.rotate(len(d) // 2) assert list(d) == [1,2,3,4] return list(d) def struct_methods(v): """ >>> i, lf, i2, f = struct_methods(2) >>> struct.unpack('i', i) (2,) >>> struct.unpack('i', i2) (2,) >>> struct.unpack('lf', lf) (2, 4.0) >>> struct.unpack('f', f) (2.0,) """ local_pack = pack return [ struct.pack('i', v), struct.pack('lf', v, v*2), pack('i', v), local_pack('f', v), ] Cython-0.26.1/tests/run/withstat_py27.py0000664000175000017500000001101112542002467020674 0ustar stefanstefan00000000000000import sys def typename(t): name = type(t).__name__ if sys.version_info < (2,5): if name == 'classobj' and issubclass(t, MyException): name = 'type' elif name == 'instance' and isinstance(t, MyException): name = 'MyException' return "" % name class MyException(Exception): pass class ContextManager(object): def __init__(self, value, exit_ret = None): self.value = value self.exit_ret = exit_ret def __exit__(self, a, b, tb): print("exit %s %s %s" % (typename(a), typename(b), typename(tb))) return self.exit_ret def __enter__(self): print("enter") return self.value def multimanager(): """ >>> multimanager() enter enter enter enter enter enter 2 value 1 2 3 4 5 nested exit exit exit exit exit exit """ with ContextManager(1), ContextManager(2) as x, ContextManager('value') as y,\ ContextManager(3), ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))): with ContextManager('nested') as nested: print(x) print(y) print('%s %s %s %s %s' % (a, b, c, d, e)) print(nested) class GetManager(object): def get(self, *args): return ContextManager(*args) def manager_from_expression(): """ >>> manager_from_expression() enter 1 exit enter 2 exit """ with GetManager().get(1) as x: print(x) g = GetManager() with g.get(2) as x: print(x) # Tests borrowed from pyregr test_with.py, # modified to follow the constraints of Cython. 
import unittest class Dummy(object): def __init__(self, value=None, gobble=False): if value is None: value = self self.value = value self.gobble = gobble self.enter_called = False self.exit_called = False def __enter__(self): self.enter_called = True return self.value def __exit__(self, *exc_info): self.exit_called = True self.exc_info = exc_info if self.gobble: return True class InitRaises(object): def __init__(self): raise RuntimeError() class EnterRaises(object): def __enter__(self): raise RuntimeError() def __exit__(self, *exc_info): pass class ExitRaises(object): def __enter__(self): pass def __exit__(self, *exc_info): raise RuntimeError() class NestedWith(unittest.TestCase): """ >>> NestedWith().runTest() """ def runTest(self): self.testNoExceptions() self.testExceptionInExprList() self.testExceptionInEnter() self.testExceptionInExit() self.testEnterReturnsTuple() def testNoExceptions(self): with Dummy() as a, Dummy() as b: self.assertTrue(a.enter_called) self.assertTrue(b.enter_called) self.assertTrue(a.exit_called) self.assertTrue(b.exit_called) def testExceptionInExprList(self): try: with Dummy() as a, InitRaises(): pass except: pass self.assertTrue(a.enter_called) self.assertTrue(a.exit_called) def testExceptionInEnter(self): try: with Dummy() as a, EnterRaises(): self.fail('body of bad with executed') except RuntimeError: pass else: self.fail('RuntimeError not reraised') self.assertTrue(a.enter_called) self.assertTrue(a.exit_called) def testExceptionInExit(self): body_executed = False with Dummy(gobble=True) as a, ExitRaises(): body_executed = True self.assertTrue(a.enter_called) self.assertTrue(a.exit_called) self.assertTrue(body_executed) self.assertNotEqual(a.exc_info[0], None) def testEnterReturnsTuple(self): with Dummy(value=(1,2)) as (a1, a2), \ Dummy(value=(10, 20)) as (b1, b2): self.assertEqual(1, a1) self.assertEqual(2, a2) self.assertEqual(10, b1) self.assertEqual(20, b2) Cython-0.26.1/tests/run/funcexc_iter_T228.pyx0000664000175000017500000000271112542002467021540 0ustar stefanstefan00000000000000# ticket: 228 __doc__ = u""" >>> def py_iterator(): ... if True: return ... yield None >>> list(py_iterator()) [] >>> list(cy_iterator()) [] >>> try: ... raise ValueError ... except: ... print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) ... a = list(py_iterator()) ... print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) True True >>> print(sys.exc_info()[0] is None or sys.exc_info()[0]) True >>> try: ... raise ValueError ... except: ... print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) ... a = list(py_iterator()) ... print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) ... a = list(cy_iterator()) ... 
print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) True True True >>> print(sys.exc_info()[0] is None or sys.exc_info()[0]) True >>> double_raise(py_iterator) True True True >>> print(sys.exc_info()[0] is None or sys.exc_info()[0]) True """ import sys if sys.version_info[0] < 3: sys.exc_clear() cdef class cy_iterator(object): def __iter__(self): return self def __next__(self): raise StopIteration def double_raise(py_iterator): try: raise ValueError except: print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) a = list(py_iterator()) print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) a = list(cy_iterator()) print(sys.exc_info()[0] is ValueError or sys.exc_info()[0]) Cython-0.26.1/tests/run/inline.pyx0000664000175000017500000000020312542002467017613 0ustar stefanstefan00000000000000def test(x): """ >>> test(3) 3 """ return retinput(x) cdef inline int retinput(int x): o = x return o Cython-0.26.1/tests/run/extern_impl.srctree0000664000175000017500000000067012542002467021522 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import foo" PYTHON -c "import a" ######## setup.py ######## from Cython.Build import cythonize from distutils.core import setup setup( ext_modules = cythonize("*.pyx"), ) ######## foo.pxd ######## cdef void bar() ######## foo.pyx ######## cdef extern from "bar_impl.c": void bar() ######## bar_impl.c ######## static void bar() {} ######## a.pyx ######## from foo cimport bar Cython-0.26.1/tests/run/builtin_abs.pyx0000664000175000017500000000761013143605603020640 0ustar stefanstefan00000000000000# mode: run # ticket: 698 cdef extern from *: int INT_MAX long LONG_MAX max_int = INT_MAX max_long = LONG_MAX max_long_long = 2 ** (sizeof(long long) * 8 - 1) - 1 cimport cython def abs_as_name(): """ >>> _abs = abs_as_name() >>> _abs(-5) 5 """ x = abs return x def py_abs(a): """ >>> py_abs(-5) 5 >>> py_abs(-5.5) 5.5 """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_int']") def int_abs(int a): """ >>> int_abs(-5) == 5 True >>> int_abs(-5.1) == 5 True >>> int_abs(-max_int-1) > 0 True >>> int_abs(-max_int-1) == abs(-max_int-1) or (max_int, int_abs(-max_int-1), abs(-max_int-1)) True >>> int_abs(max_int) == abs(max_int) or (max_int, int_abs(max_int), abs(max_int)) True """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']") @cython.test_fail_if_path_exists("//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_int']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_long']") def uint_abs(unsigned int a): """ >>> uint_abs(max_int) == abs(max_int) or (max_int, uint_abs(max_int), abs(max_int)) True """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_long']") def long_abs(long a): """ >>> long_abs(-5) == 5 True >>> long_abs(-5.1) == 5 True >>> long_abs(-max_long-1) > 0 True >>> long_abs(-max_long-1) == abs(-max_long-1) or (max_long, long_abs(-max_long-1), abs(-max_long-1)) True >>> long_abs(max_long) == abs(max_long) or (max_long, long_abs(max_long), abs(max_long)) True """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']") @cython.test_fail_if_path_exists("//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_int']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_long']") def ulong_abs(unsigned long a): """ >>> ulong_abs(max_long) == 
abs(max_long) or (max_int, ulong_abs(max_long), abs(max_long)) True """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_longlong']") def long_long_abs(long long a): """ >>> long_long_abs(-(2**33)) == 2**33 True >>> long_long_abs(-max_long_long-1) > 0 True >>> long_long_abs(-max_long_long-1) == abs(-max_long_long-1) or (max_long_long, long_long_abs(-max_long_long-1), abs(-max_long_long-1)) True >>> long_long_abs(max_long_long) == abs(max_long_long) or (max_long_long, long_long_abs(max_long_long), abs(max_long_long)) True """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = 'fabs']") def double_abs(double a): """ >>> double_abs(-5) 5.0 >>> double_abs(-5.5) 5.5 """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = 'fabsf']") def float_abs(float a): """ >>> float_abs(-5) 5.0 >>> float_abs(-5.5) 5.5 """ return abs(a) @cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']", "//ReturnStatNode//NameNode[@entry.cname = '__Pyx_c_abs_double']") def complex_abs(complex a): """ >>> complex_abs(-5j) 5.0 >>> complex_abs(-5.5j) 5.5 """ return abs(a) Cython-0.26.1/tests/run/unicodefunction.pyx0000664000175000017500000000126412542002467021541 0ustar stefanstefan00000000000000__doc__ = u""" >>> u('test') u'test' >>> z u'test' >>> c('testing') u'testing' >>> subu('testing a Python subtype') u'testing a Python subtype' >>> sub('testing a Python subtype') u'testing a Python subtype' # >>> csubu('testing a C subtype') # u'testing a C subtype' # >>> csub('testing a C subtype') # u'testing a C subtype' """ import sys if sys.version_info[0] >= 3: __doc__ = __doc__.replace(u" u'", u" '") u = unicode z = unicode(u'test') def c(string): return unicode(string) class subu(unicode): pass def sub(string): return subu(string) #cdef class csubu(unicode): # pass #def csub(string): # return csubu(string) Cython-0.26.1/tests/run/builtins_truth_test.pyx0000664000175000017500000001061612542002467022464 0ustar stefanstefan00000000000000 def bool_list(list obj): """ >>> bool_list( [] ) False >>> bool_list( [1] ) True >>> bool_list(None) False """ return bool(obj) def if_list(list obj): """ >>> if_list( [] ) False >>> if_list( [1] ) True >>> if_list(None) False """ if obj: return True else: return False def if_list_nogil(list obj): """ >>> if_list_nogil( [] ) False >>> if_list_nogil( [1] ) True >>> if_list_nogil(None) False """ cdef bint result with nogil: if obj: result = True else: result = False return result def if_list_literal(t): """ >>> if_list_literal(True) True >>> if_list_literal(False) False """ if t: if [1,2,3]: return True else: return False else: if []: return True else: return False def bool_tuple(tuple obj): """ >>> bool_tuple( () ) False >>> bool_tuple( (1,) ) True >>> bool_tuple(None) False """ return bool(obj) def if_tuple(tuple obj): """ >>> if_tuple( () ) False >>> if_tuple( (1,) ) True >>> if_tuple(None) False """ if obj: return True else: return False def if_tuple_literal(t): """ >>> if_tuple_literal(True) True >>> if_tuple_literal(False) False """ if t: if (1,2,3): return True else: return False else: if (): return True else: return False def bool_set(set obj): """ >>> bool_set( set() ) False >>> bool_set( set([1]) ) True >>> bool_set(None) False """ return bool(obj) def if_set(set obj): """ >>> if_set( 
set() ) False >>> if_set( set([1]) ) True >>> if_set(None) False """ if obj: return True else: return False def if_set_nogil(set obj): """ >>> if_set_nogil( set() ) False >>> if_set_nogil( set([1]) ) True >>> if_set_nogil(None) False """ cdef bint result with nogil: if obj: result = True else: result = False return result def if_set_literal(t): """ >>> if_set_literal(True) True >>> if_set_literal(False) False """ if t: if {1,2,3}: return True else: return False else: if set(): return True else: return False def bool_frozenset(frozenset obj): """ >>> bool_frozenset( frozenset() ) False >>> bool_frozenset( frozenset([1]) ) True >>> bool_frozenset(None) False """ return bool(obj) def if_frozenset(frozenset obj): """ >>> if_frozenset( frozenset() ) False >>> if_frozenset( frozenset([1]) ) True >>> if_frozenset(None) False """ if obj: return True else: return False b0 = b'' b1 = b'abc' def bool_bytes(bytes obj): """ >>> bool_bytes(b0) False >>> bool_bytes(b1) True >>> bool_bytes(None) False """ return bool(obj) def if_bytes(bytes obj): """ >>> if_bytes(b0) False >>> if_bytes(b1) True >>> if_bytes(None) False """ if obj: return True else: return False def if_bytes_literal(t): """ >>> if_bytes_literal(True) True >>> if_bytes_literal(False) False """ if t: if b'abc': return True else: return False else: if b'': return True else: return False u0 = u'' u1 = u'abc' def bool_unicode(unicode obj): """ >>> bool_unicode(u0) False >>> bool_unicode(u1) True >>> bool_unicode(None) False """ return bool(obj) def if_unicode(unicode obj): """ >>> if_unicode(u0) False >>> if_unicode(u1) True >>> if_unicode(None) False """ if obj: return True else: return False def if_unicode_literal(t): """ >>> if_unicode_literal(True) True >>> if_unicode_literal(False) False """ if t: if u'abc': return True else: return False else: if u'': return True else: return False Cython-0.26.1/tests/run/pyfunction_redefine_T489.pyx0000664000175000017500000000176012542002467023135 0ustar stefanstefan00000000000000# ticket: 489 """ >>> xxx [0, 1, 2, 3] """ xxx = [] foo = 0 xxx.append(foo) def foo(): return 1 xxx.append(foo()) def foo(): return 2 xxx.append(foo()) foo = 3 xxx.append(foo) def closure_scope(a): """ >>> closure_scope(0) [0, 1, 'X', -4, 3] """ ret = [] foo = a + 0 ret.append(foo) def foo(): return a + 1 ret.append(foo()) def foo(): return 'X' ret.append(foo()) def foo(b): return a - b ret.append(foo(4)) foo = a + 3 ret.append(foo) return ret class ClassScope(object): """ >>> obj = ClassScope() [0, 1, 2, 3] """ x = [] def __init__(self): r = [] for x in self.x: if isinstance(x, int): r.append(x) else: r.append(x(self)) print r foo = 0 x.append(foo) def foo(self): return 1 x.append(foo) def foo(self): return 2 x.append(foo) foo = 3 x.append(foo) Cython-0.26.1/tests/run/cpp_function_lib.pxd0000664000175000017500000000125613143605603021635 0ustar stefanstefan00000000000000from libcpp.functional cimport function cdef extern from "cpp_function_lib.cpp": # CPP is include here so that it doesn't need to be compiled externally pass cdef extern from "cpp_function_lib.h": double add_one(double, int) double add_two(double a, int b) cdef cppclass AddAnotherFunctor: AddAnotherFunctor(double to_add) double call "operator()"(double a, int b) cdef cppclass FunctionKeeper: FunctionKeeper(function[double(double, int)] user_function) void set_function(function[double(double, int)] user_function) function[double(double, int)] get_function() double call_function(double a, int b) except + 
Cython-0.26.1/tests/run/cascaded_list_unpacking_T467.pyx0000664000175000017500000000436312542002467023715 0ustar stefanstefan00000000000000# ticket: 467 def simple_parallel_assignment_from_call(): """ >>> simple_parallel_assignment_from_call() (2, 1, 2, 1, 2, 1, 2, [1, 2], [1, 2]) """ cdef int ai, bi cdef long al, bl cdef object ao, bo reset() ai, bi = al, bl = ao, bo = c = d = [intval(1), intval(2)] return call_count, ao, bo, ai, bi, al, bl, c, d def recursive_parallel_assignment_from_call_left(): """ >>> recursive_parallel_assignment_from_call_left() (3, 1, 2, 3, 1, 2, 3, (1, 2), 3, [(1, 2), 3]) """ cdef int ai, bi, ci cdef object ao, bo, co reset() (ai, bi), ci = (ao, bo), co = t,o = d = [(intval(1), intval(2)), intval(3)] return call_count, ao, bo, co, ai, bi, ci, t, o, d def recursive_parallel_assignment_from_call_right(): """ >>> recursive_parallel_assignment_from_call_right() (3, 1, 2, 3, 1, 2, 3, 1, (2, 3), [1, (2, 3)]) """ cdef int ai, bi, ci cdef object ao, bo, co reset() ai, (bi, ci) = ao, (bo, co) = o,t = d = [intval(1), (intval(2), intval(3))] return call_count, ao, bo, co, ai, bi, ci, o, t, d def recursive_parallel_assignment_from_call_left_reversed(): """ >>> recursive_parallel_assignment_from_call_left_reversed() (3, 1, 2, 3, 1, 2, 3, (1, 2), 3, [(1, 2), 3]) """ cdef int ai, bi, ci cdef object ao, bo, co reset() d = t,o = (ao, bo), co = (ai, bi), ci = [(intval(1), intval(2)), intval(3)] return call_count, ao, bo, co, ai, bi, ci, t, o, d def recursive_parallel_assignment_from_call_right_reversed(): """ >>> recursive_parallel_assignment_from_call_right_reversed() (3, 1, 2, 3, 1, 2, 3, 1, (2, 3), [1, (2, 3)]) """ cdef int ai, bi, ci cdef object ao, bo, co reset() d = o,t = ao, (bo, co) = ai, (bi, ci) = [intval(1), (intval(2), intval(3))] return call_count, ao, bo, co, ai, bi, ci, o, t, d cdef int call_count = 0 cdef int next_expected_arg = 1 cdef reset(): global call_count, next_expected_arg call_count = 0 next_expected_arg = 1 cdef int intval(int x) except -1: global call_count, next_expected_arg call_count += 1 assert next_expected_arg == x, "calls not in source code order: expected %d, found %d" % (next_expected_arg, x) next_expected_arg += 1 return x Cython-0.26.1/tests/run/bytearray_coercion.pyx0000664000175000017500000000742112542002467022231 0ustar stefanstefan00000000000000# mode: run # NOTE: Py2.6+ only cimport cython cpdef bytearray coerce_to_charptr(char* b): """ >>> b = bytearray(b'abc') >>> coerced = coerce_to_charptr(b) >>> coerced == b or coerced True >>> isinstance(coerced, bytearray) or type(coerced) True """ return b def coerce_to_charptrs(bytearray b): """ >>> b = bytearray(b'abc') >>> coerce_to_charptrs(b) True """ cdef char* cs = b cdef unsigned char* ucs = b cdef signed char* scs = b return b == cs == ucs == scs cpdef bytearray coerce_charptr_slice(char* b): """ >>> b = bytearray(b'abc') >>> coerced = coerce_charptr_slice(b) >>> coerced == b[:2] or coerced True >>> isinstance(coerced, bytearray) or type(coerced) True """ return b[:2] def infer_index_types(bytearray b): """ >>> b = bytearray(b'a\\xFEc') >>> print(infer_index_types(b)) (254, 254, 254, 'unsigned char', 'unsigned char', 'unsigned char', 'int') """ c = b[1] with cython.wraparound(False): d = b[1] with cython.boundscheck(False): e = b[1] return c, d, e, cython.typeof(c), cython.typeof(d), cython.typeof(e), cython.typeof(b[1]) def infer_slice_types(bytearray b): """ >>> b = bytearray(b'abc') >>> print(infer_slice_types(b)) (bytearray(b'bc'), bytearray(b'bc'), bytearray(b'bc'), 'Python object', 
'Python object', 'Python object', 'bytearray object') """ c = b[1:] with cython.boundscheck(False): d = b[1:] with cython.boundscheck(False), cython.wraparound(False): e = b[1:] return c, d, e, cython.typeof(c), cython.typeof(d), cython.typeof(e), cython.typeof(b[1:]) def assign_to_index(bytearray b, value): """ >>> b = bytearray(b'0abcdefg') >>> assign_to_index(b, 1) bytearray(b'xyzee\\x01h') >>> b bytearray(b'xyzee\\x01h') >>> assign_to_index(bytearray(b'0ABCDEFG'), 40) bytearray(b'xyzEE(o') >>> assign_to_index(bytearray(b'0abcdefg'), -1) Traceback (most recent call last): OverflowError: can't convert negative value to unsigned char >>> assign_to_index(bytearray(b'0abcdef\\x00'), 255) bytearray(b'xyzee\\xff\\xff') >>> assign_to_index(bytearray(b'0abcdef\\x01'), 255) Traceback (most recent call last): OverflowError: value too large to convert to unsigned char >>> assign_to_index(bytearray(b'0abcdef\\x00'), 256) Traceback (most recent call last): OverflowError: value too large to convert to unsigned char """ b[1] = 'x' b[2] = b'y' b[3] = c'z' b[4] += 1 b[5] |= 1 b[6] = value b[7] += value del b[0] try: b[7] = 1 except IndexError: pass else: assert False, "IndexError not raised" try: b[int(str(len(b)))] = 1 # test non-int-index assignment except IndexError: pass else: assert False, "IndexError not raised" return b def check_bounds(int cvalue): """ >>> check_bounds(0) 0 >>> check_bounds(255) 255 >>> check_bounds(256) Traceback (most recent call last): ValueError: byte must be in range(0, 256) >>> check_bounds(-1) Traceback (most recent call last): ValueError: byte must be in range(0, 256) """ b = bytearray(b'x') try: b[0] = 256 except ValueError: pass else: assert False, "ValueError not raised" try: b[0] = -1 except ValueError: pass else: assert False, "ValueError not raised" b[0] = cvalue return b[0] def nogil_assignment(bytearray x, int value): """ >>> b = bytearray(b'abc') >>> nogil_assignment(b, ord('y')) >>> b bytearray(b'xyc') """ with nogil: x[0] = 'x' x[1] = value Cython-0.26.1/tests/run/extstarargs.pyx0000664000175000017500000000661513143605603020720 0ustar stefanstefan00000000000000__doc__ = u""" >>> s = Silly(1,2,3, 'test') >>> (spam,grail,swallow,creosote,onlyt,onlyk,tk) = ( ... 
s.spam,s.grail,s.swallow,s.creosote,s.onlyt,s.onlyk,s.tk) >>> spam(1,2,3) (1, 2, 3) >>> spam(1,2) Traceback (most recent call last): TypeError: spam() takes exactly 3 positional arguments (2 given) >>> spam(1,2,3,4) Traceback (most recent call last): TypeError: spam() takes exactly 3 positional arguments (4 given) >>> spam(1,2,3, a=1) #doctest: +ELLIPSIS Traceback (most recent call last): TypeError: spam() got an unexpected keyword argument 'a' >>> grail(1,2,3) (1, 2, 3, ()) >>> grail(1,2,3,4) (1, 2, 3, (4,)) >>> grail(1,2,3,4,5,6,7,8,9) (1, 2, 3, (4, 5, 6, 7, 8, 9)) >>> grail(1,2) Traceback (most recent call last): TypeError: grail() takes at least 3 positional arguments (2 given) >>> grail(1,2,3, a=1) #doctest: +ELLIPSIS Traceback (most recent call last): TypeError: grail() got an unexpected keyword argument 'a' >>> swallow(1,2,3) (1, 2, 3, ()) >>> swallow(1,2,3,4) Traceback (most recent call last): TypeError: swallow() takes exactly 3 positional arguments (4 given) >>> swallow(1,2,3, a=1, b=2) (1, 2, 3, (('a', 1), ('b', 2))) >>> swallow(1,2,3, x=1) Traceback (most recent call last): TypeError: swallow() got multiple values for keyword argument 'x' >>> creosote(1,2,3) (1, 2, 3, (), ()) >>> creosote(1,2,3,4) (1, 2, 3, (4,), ()) >>> creosote(1,2,3, a=1) (1, 2, 3, (), (('a', 1),)) >>> creosote(1,2,3,4, a=1, b=2) (1, 2, 3, (4,), (('a', 1), ('b', 2))) >>> creosote(1,2,3,4, x=1) Traceback (most recent call last): TypeError: creosote() got multiple values for keyword argument 'x' >>> onlyt(1) (1,) >>> onlyt(1,2) (1, 2) >>> onlyt(a=1) Traceback (most recent call last): TypeError: onlyt() got an unexpected keyword argument 'a' >>> onlyt(1, a=2) Traceback (most recent call last): TypeError: onlyt() got an unexpected keyword argument 'a' >>> onlyk(a=1) (('a', 1),) >>> onlyk(a=1, b=2) (('a', 1), ('b', 2)) >>> onlyk(1) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (1 given) >>> onlyk(1, 2) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (2 given) >>> onlyk(1, a=1, b=2) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (1 given) >>> tk(a=1) (('a', 1),) >>> tk(a=1, b=2) (('a', 1), ('b', 2)) >>> tk(1) (1,) >>> tk(1, 2) (1, 2) >>> tk(1, a=1, b=2) (1, ('a', 1), ('b', 2)) """ import sys, re if sys.version_info >= (2,6): __doc__ = re.sub(u"(ELLIPSIS[^>]*Error: )[^\n]*\n", u"\\1...\n", __doc__) cdef sorteditems(d): l = list(d.items()) l.sort() return tuple(l) cdef class Silly: def __init__(self, *a): pass def spam(self, x, y, z): return (x, y, z) def grail(self, x, y, z, *a): return (x, y, z, a) def swallow(self, x, y, z, **k): return (x, y, z, sorteditems(k)) def creosote(self, x, y, z, *a, **k): return (x, y, z, a, sorteditems(k)) def onlyt(self, *a): return a def onlyk(self, **k): return sorteditems(k) def tk(self, *a, **k): return a + sorteditems(k) Cython-0.26.1/tests/run/cstringmul.pyx0000664000175000017500000000025212542002467020530 0ustar stefanstefan00000000000000__doc__ = u""" >>> print(spam) eggseggseggseggs >>> print(grail) tomatotomatotomatotomatotomatotomatotomato """ spam = u"eggs" * 4 grail = 7 * u"tomato" Cython-0.26.1/tests/run/complex_extern_GH1433.pyx0000664000175000017500000000046113023021033022251 0ustar stefanstefan00000000000000# tag: numpy cimport numpy as np def divide(np.float64_t x, np.complex128_t y): """ >>> divide(2, 1+1j) (1-1j) """ return x / y def pow(np.complex128_t x, np.complex128_t y): """ >>> pow(1 + 1j, 2j) # doctest: +ELLIPSIS 
(0.1599...+0.1328...j) """ return x ** y Cython-0.26.1/tests/run/kostyrka2.pyx0000664000175000017500000000013512542002467020272 0ustar stefanstefan00000000000000__doc__ = u""" >>> x = X() >>> x.slots [''] """ class X: slots = ["", ] Cython-0.26.1/tests/run/cimport_alias_subclass.pyx0000664000175000017500000000034112542002467023065 0ustar stefanstefan00000000000000# mode: compile cimport cimport_alias_subclass_helper as cash cdef class Derived(cash.Base): cdef bint foo(self): print "Hello" def run(): """ >>> run() Hello """ d = Derived() d.foo() Cython-0.26.1/tests/run/bytes_indexing.pyx0000664000175000017500000001221612542002467021357 0ustar stefanstefan00000000000000 cimport cython cdef bytes b12345 = b'12345' def index_literal(int i): """ Python 3 returns integer values on indexing, Py2 returns byte string literals... >>> index_literal(0) in (ord('1'), '1') True >>> index_literal(-5) in (ord('1'), '1') True >>> index_literal(2) in (ord('3'), '3') True >>> index_literal(4) in (ord('5'), '5') True """ return b"12345"[i] @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_literal_char_cast(int i): """ >>> index_literal_char_cast(0) == ord('1') True >>> index_literal_char_cast(-5) == ord('1') True >>> index_literal_char_cast(2) == ord('3') True >>> index_literal_char_cast(4) == ord('5') True >>> index_literal_char_cast(6) Traceback (most recent call last): IndexError: string index out of range """ return (b"12345"[i]) @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_nonliteral_char_cast(int i): """ >>> index_nonliteral_char_cast(0) == ord('1') True >>> index_nonliteral_char_cast(-5) == ord('1') True >>> index_nonliteral_char_cast(2) == ord('3') True >>> index_nonliteral_char_cast(4) == ord('5') True >>> index_nonliteral_char_cast(6) Traceback (most recent call last): IndexError: string index out of range """ return (b12345[i]) @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_literal_uchar_cast(int i): """ >>> index_literal_uchar_cast(0) == ord('1') True >>> index_literal_uchar_cast(-5) == ord('1') True >>> index_literal_uchar_cast(2) == ord('3') True >>> index_literal_uchar_cast(4) == ord('5') True >>> index_literal_uchar_cast(6) Traceback (most recent call last): IndexError: string index out of range """ return (b"12345"[i]) @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_nonliteral_uchar_cast(int i): """ >>> index_nonliteral_uchar_cast(0) == ord('1') True >>> index_nonliteral_uchar_cast(-5) == ord('1') True >>> index_nonliteral_uchar_cast(2) == ord('3') True >>> index_nonliteral_uchar_cast(4) == ord('5') True >>> index_nonliteral_uchar_cast(6) Traceback (most recent call last): IndexError: string index out of range """ return (b12345[i]) @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_literal_char_coerce(int i): """ >>> index_literal_char_coerce(0) == ord('1') True >>> index_literal_char_coerce(-5) == ord('1') True >>> index_literal_char_coerce(2) == ord('3') True >>> index_literal_char_coerce(4) == ord('5') True >>> index_literal_char_coerce(6) Traceback (most recent call last): IndexError: string index out of range """ cdef char result = 
b"12345"[i] return result @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") def index_nonliteral_char_coerce(int i): """ >>> index_nonliteral_char_coerce(0) == ord('1') True >>> index_nonliteral_char_coerce(-5) == ord('1') True >>> index_nonliteral_char_coerce(2) == ord('3') True >>> index_nonliteral_char_coerce(4) == ord('5') True >>> index_nonliteral_char_coerce(6) Traceback (most recent call last): IndexError: string index out of range """ cdef char result = b12345[i] return result @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") @cython.boundscheck(False) def index_literal_char_coerce_no_check(int i): """ >>> index_literal_char_coerce_no_check(0) == ord('1') True >>> index_literal_char_coerce_no_check(-5) == ord('1') True >>> index_literal_char_coerce_no_check(2) == ord('3') True >>> index_literal_char_coerce_no_check(4) == ord('5') True """ cdef char result = b"12345"[i] return result @cython.test_assert_path_exists("//PythonCapiCallNode") @cython.test_fail_if_path_exists("//IndexNode", "//CoerceFromPyTypeNode") @cython.boundscheck(False) def index_nonliteral_char_coerce_no_check(int i): """ >>> index_nonliteral_char_coerce_no_check(0) == ord('1') True >>> index_nonliteral_char_coerce_no_check(-5) == ord('1') True >>> index_nonliteral_char_coerce_no_check(2) == ord('3') True >>> index_nonliteral_char_coerce_no_check(4) == ord('5') True """ cdef char result = b12345[i] return result Cython-0.26.1/tests/run/voidstarcast.pyx0000664000175000017500000000066012542002467021052 0ustar stefanstefan00000000000000cdef class C: cdef int i def foo(self): self.i = 42 def get_i(self): return self.i def cast_cast_cast(arg): """ >>> x = C() >>> x.foo() >>> cast_cast_cast(x) == x True >>> x.get_i() 42 """ cdef object x cdef void *p = arg cdef int i x = p p = x x = (p).foo i = (p).i (p).i = i return p Cython-0.26.1/tests/run/cdivision_CEP_516.pyx0000664000175000017500000001127113150045407021412 0ustar stefanstefan00000000000000__doc__ = u""" >>> v = [(17, 10), (-17, 10), (-17, -10), (17, -10)] >>> standard = [(a % b) for a, b in v] >>> standard [7, 3, -7, -3] >>> [mod_int_py(a, b) for a, b in v] == standard True >>> [mod_short_py(a, b) for a, b in v] == standard True >>> [mod_float_py(a, b) for a, b in v] == standard True >>> [mod_double_py(a, b) for a, b in v] == standard True >>> [mod_int_c(a, b) for a, b in v] [7, -7, -7, 7] >>> [mod_float_c(a, b) for a, b in v] [7.0, -7.0, -7.0, 7.0] >>> [mod_double_c(a, b) for a, b in v] [7.0, -7.0, -7.0, 7.0] >>> [div_int_py(a, b) for a, b in v] [1, -2, 1, -2] >>> [div_int_c(a, b) for a, b in v] [1, -1, 1, -1] >>> [test_cdiv_cmod(a, b) for a, b in v] [(1, 7), (-1, -7), (1, -7), (-1, 7)] >>> all([mod_int_py(a,b) == a % b for a in range(-10, 10) for b in range(-10, 10) if b != 0]) True >>> all([div_int_py(a,b) == a // b for a in range(-10, 10) for b in range(-10, 10) if b != 0]) True """ import warnings orig_showwarning = warnings.showwarning true_py_functions = {} exec "def simple_warn(msg, *args): print(msg)" in true_py_functions simple_warn = true_py_functions['simple_warn'] del true_py_functions def _all(seq): for x in seq: if not x: return False return True try: all except NameError: all = _all cimport cython @cython.cdivision(False) def mod_int_py(int a, int b): return a % b @cython.cdivision(False) def mod_short_py(short a, short b): return a % b @cython.cdivision(False) def mod_double_py(double a, 
double b): return a % b @cython.cdivision(False) def mod_float_py(float a, float b): return a % b @cython.cdivision(True) def mod_int_c(int a, int b): return a % b @cython.cdivision(True) def mod_float_c(float a, float b): return a % b @cython.cdivision(True) def mod_double_c(double a, double b): return a % b @cython.cdivision(False) def div_int_py(int a, int b): return a // b @cython.cdivision(True) def div_int_c(int a, int b): return a // b @cython.cdivision(False) def test_cdiv_cmod(short a, short b): cdef short q = cython.cdiv(a, b) cdef short r = cython.cmod(a, b) return q, r @cython.cdivision(True) @cython.cdivision_warnings(True) def mod_int_c_warn(int a, int b): """ >>> warnings.showwarning = simple_warn >>> mod_int_c_warn(-17, 10) division with oppositely signed operands, C and Python semantics differ -7 >>> warnings.showwarning = orig_showwarning """ return a % b @cython.cdivision(True) @cython.cdivision_warnings(True) def div_int_c_warn(int a, int b): """ >>> warnings.showwarning = simple_warn >>> div_int_c_warn(-17, 10) division with oppositely signed operands, C and Python semantics differ -1 >>> warnings.showwarning = orig_showwarning """ return a // b @cython.cdivision(False) @cython.cdivision_warnings(True) def complex_expression(int a, int b, int c, int d): """ >>> warnings.showwarning = simple_warn >>> complex_expression(-150, 20, 19, -7) verbose_call(20) division with oppositely signed operands, C and Python semantics differ verbose_call(19) division with oppositely signed operands, C and Python semantics differ -2 >>> warnings.showwarning = orig_showwarning """ return (a // verbose_call(b)) % (verbose_call(c) // d) cdef int verbose_call(int x): print u"verbose_call(%s)" % x return x # These may segfault with cdivision @cython.cdivision(False) def mod_div_zero_int(int a, int b, int c): """ >>> mod_div_zero_int(25, 10, 2) verbose_call(5) 2 >>> print(mod_div_zero_int(25, 10, 0)) verbose_call(5) integer division or modulo by zero >>> print(mod_div_zero_int(25, 0, 0)) integer division or modulo by zero """ try: return verbose_call(a % b) / c except ZeroDivisionError, ex: return unicode(ex) @cython.cdivision(False) def mod_div_zero_float(float a, float b, float c): """ >>> mod_div_zero_float(25, 10, 2) 2.5 >>> print(mod_div_zero_float(25, 10, 0)) float division >>> print(mod_div_zero_float(25, 0, 0)) float divmod() """ try: return (a % b) / c except ZeroDivisionError, ex: return unicode(ex) @cython.cdivision(False) def py_div_long(long a, long b): """ >>> py_div_long(-5, -1) 5 >>> import sys >>> maxint = getattr(sys, ((sys.version_info[0] >= 3) and 'maxsize' or 'maxint')) >>> py_div_long(-maxint-1, -1) # doctest: +ELLIPSIS Traceback (most recent call last): ... OverflowError: ... 
""" return a / b def c_div_const_test(a, b): """ >>> c_div_const_test(5, 3) 1 """ return c_div_const(a, b) cdef long c_div_const(const long a, int b): cdef long c = a / b return c Cython-0.26.1/tests/run/builtin_type.pyx0000664000175000017500000000243212542002467021052 0ustar stefanstefan00000000000000cimport cython @cython.test_assert_path_exists( '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]') def get_type_of(a): """ >>> get_type_of(object()) is object True """ return type(a) @cython.test_assert_path_exists( '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]') def get_type_through_local(a): """ >>> get_type_of(object()) is object True """ t = type(a) return t @cython.test_assert_path_exists( '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="Py_TYPE"]') @cython.test_fail_if_path_exists( '//PythonCapiCallNode/PythonCapiFunctionNode[@cname="__Pyx_Type"]', '//NameNode[@name="type"]') def test_type(a, t): """ >>> test_type(object(), object) True """ return type(a) and type(a) is t and type(a) == t @cython.test_assert_path_exists('//NameNode[@name="type"]') def type_type(): """ >>> type_type()(object()) is object True """ return type cpdef type pass_type(type x): """ >>> pass_type(int) == int True >>> class MyType(object): pass >>> pass_type(MyType) == MyType True >>> pass_type(object()) Traceback (most recent call last): TypeError: Argument 'x' has incorrect type (expected type, got object) """ return x Cython-0.26.1/tests/run/charcomparisonT412.pyx0000664000175000017500000000022212542002467021721 0ustar stefanstefan00000000000000# ticket: 412 def f(): """ >>> f() True True """ cdef char a a = 62 print (a == '>') print (a == '>') Cython-0.26.1/tests/run/backquote.pyx0000664000175000017500000000030412542002467020315 0ustar stefanstefan00000000000000def f(obj2): """ >>> f(20) '20' >>> f('test') "'test'" """ obj1 = `obj2` return obj1 def g(): """ >>> g() '42' """ obj1 = `42` return obj1 Cython-0.26.1/tests/run/posix_test.pyx0000664000175000017500000000477413023021023020537 0ustar stefanstefan00000000000000# tag: posix from libc.stdio cimport * from posix.unistd cimport * from posix.fcntl cimport * cdef int noisy_function() except -1: cdef int ret = 0 ret = printf(b"012%s6789\n", "345") assert ret == 11 # printf() ret = printf(b"012%d6789\n", 345) assert ret == 11 # printf() ret = printf(b"0123456789\n") assert ret == 11 # printf() ret = fflush(stdout) assert ret == 0 # fflush() ret = fprintf(stdout, b"012%d6789\n", 345) assert ret == 11 # fprintf() ret = fflush(stdout) assert ret == 0 # fflush() ret = write(STDOUT_FILENO, b"0123456789\n", 11) assert ret == 11 # write() return 0 def test_silent_stdout(): """ >>> test_silent_stdout() """ cdef int ret cdef int stdout_save, dev_null stdout_save = dup(STDOUT_FILENO) assert stdout_save != -1 dev_null = open(b"/dev/null", O_WRONLY, 0) assert dev_null != -1 ret = dup2(dev_null, STDOUT_FILENO) assert ret == STDOUT_FILENO ret = close(dev_null) assert ret == 0 try: noisy_function() finally: ret = dup2(stdout_save, STDOUT_FILENO) assert ret == STDOUT_FILENO ret = close(stdout_save) assert ret == 0 cdef class silent_fd: cdef int fd_save, fd def __cinit__(self, int fd=-1): self.fd_save = -1 self.fd = STDOUT_FILENO if fd != -1: self.fd = fd def __enter__(self): cdef int ret = 0, dev_null = -1 assert self.fd_save == -1 dev_null = open(b"/dev/null", O_WRONLY, 0) assert dev_null != -1 try: self.fd_save = dup(self.fd) assert self.fd_save != -1 try: ret = dup2(dev_null, self.fd) assert ret != -1 except: ret = close(self.fd_save) 
self.fd_save = -1 finally: ret = close(dev_null) def __exit__(self, t, v, tb): cdef int ret = 0 if self.fd_save != -1: ret = dup2(self.fd_save, self.fd) assert ret == self.fd ret = close(self.fd_save) assert ret == 0 self.fd_save = -1 return None def test_silent_stdout_ctxmanager(): """ >> test_silent_stdout_ctxmanager() """ with silent_fd(): noisy_function() try: with silent_fd(): noisy_function() raise RuntimeError except RuntimeError: pass with silent_fd(STDOUT_FILENO): noisy_function() Cython-0.26.1/tests/run/cpp_nested_templates.pyx0000664000175000017500000000240512542002467022545 0ustar stefanstefan00000000000000# mode: run # tag: cpp, werror from cython.operator cimport dereference as deref cdef extern from "cpp_templates_helper.h": cdef cppclass Wrap[T]: Wrap(T) void set(T) T get() bint operator==(Wrap[T]) cdef cppclass Pair[T1,T2]: Pair(T1,T2) T1 first() T2 second() bint operator==(Pair[T1,T2]) bint operator!=(Pair[T1,T2]) def test_wrap_pair(int i, double x): """ >>> test_wrap_pair(1, 1.5) (1, 1.5, True) >>> test_wrap_pair(2, 2.25) (2, 2.25, True) """ try: wrap = new Wrap[Pair[int, double]](Pair[int, double](i, x)) return wrap.get().first(), wrap.get().second(), deref(wrap) == deref(wrap) finally: del wrap def test_wrap_pair_pair(int i, int j, double x): """ >>> test_wrap_pair_pair(1, 3, 1.5) (1, 3, 1.5, True) >>> test_wrap_pair_pair(2, 5, 2.25) (2, 5, 2.25, True) """ try: wrap = new Wrap[Pair[int, Pair[int, double]]]( Pair[int, Pair[int, double]](i,Pair[int, double](j, x))) return (wrap.get().first(), wrap.get().second().first(), wrap.get().second().second(), deref(wrap) == deref(wrap)) finally: del wrap Cython-0.26.1/tests/run/methodmangling_T5.py0000664000175000017500000000346712542002467021531 0ustar stefanstefan00000000000000# mode: run # ticket: 5 class CyTest(object): """ >>> cy = CyTest() >>> '_CyTest__private' in dir(cy) True >>> cy._CyTest__private() 8 >>> '__private' in dir(cy) False >>> '_CyTest__x' in dir(cy) True >>> '__x' in dir(cy) False """ __x = 1 def __private(self): return 8 def get(self): """ >>> CyTest().get() (1, 1, 8) """ return self._CyTest__x, self.__x, self.__private() def get_inner(self): """ >>> CyTest().get_inner() (1, 1, 8) """ def get(o): return o._CyTest__x, o.__x, o.__private() return get(self) class CyTestSub(CyTest): """ >>> cy = CyTestSub() >>> '_CyTestSub__private' in dir(cy) True >>> cy._CyTestSub__private() 9 >>> '_CyTest__private' in dir(cy) True >>> cy._CyTest__private() 8 >>> '__private' in dir(cy) False >>> '_CyTestSub__x' in dir(cy) False >>> '_CyTestSub__y' in dir(cy) True >>> '_CyTest__x' in dir(cy) True >>> '__x' in dir(cy) False """ __y = 2 def __private(self): return 9 def get(self): """ >>> CyTestSub().get() (1, 2, 2, 9) """ return self._CyTest__x, self._CyTestSub__y, self.__y, self.__private() def get_inner(self): """ >>> CyTestSub().get_inner() (1, 2, 2, 9) """ def get(o): return o._CyTest__x, o._CyTestSub__y, o.__y, o.__private() return get(self) class _UnderscoreTest(object): """ >>> ut = _UnderscoreTest() >>> '__x' in dir(ut) False >>> '_UnderscoreTest__x' in dir(ut) True >>> ut._UnderscoreTest__x 1 >>> ut.get() 1 """ __x = 1 def get(self): return self.__x Cython-0.26.1/tests/run/args_unpacking_in_closure_T658.pyx0000664000175000017500000000053312542002467024306 0ustar stefanstefan00000000000000# mode: run # tag: closures # ticket: 658 def outer(int x, *args, **kwargs): """ >>> inner = outer(1, 2, a=3) >>> inner() (1, (2,), {'a': 3}) >>> inner = outer('abc', 2, a=3) Traceback (most recent call last): TypeError: an integer is 
required """ def inner(): return x, args, kwargs return inner Cython-0.26.1/tests/run/duplicate_keyword_in_call.py0000664000175000017500000000116112542002467023350 0ustar stefanstefan00000000000000# mode: run # tag: kwargs, call # ticket: 717 def f(**kwargs): return sorted(kwargs.items()) def test_call(kwargs): """ >>> kwargs = {'b' : 2} >>> f(a=1, **kwargs) [('a', 1), ('b', 2)] >>> test_call(kwargs) [('a', 1), ('b', 2)] >>> kwargs = {'a' : 2} >>> f(a=1, **kwargs) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...got multiple values for keyword argument 'a' >>> test_call(kwargs) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ...got multiple values for keyword argument 'a' """ return f(a=1, **kwargs) Cython-0.26.1/tests/run/list_comp_in_closure_T598.pyx0000664000175000017500000000457112542002467023315 0ustar stefanstefan00000000000000# mode: run # tag: closures # ticket: 598 # cython: language_level=3 def list_comp_in_closure(): """ >>> list_comp_in_closure() [0, 4, 8] """ x = 'abc' def f(): return x result = [x*2 for x in range(5) if x % 2 == 0] assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def pytyped_list_comp_in_closure(): """ >>> pytyped_list_comp_in_closure() [0, 4, 8] """ cdef object x x = 'abc' def f(): return x result = [x*2 for x in range(5) if x % 2 == 0] assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def pytyped_list_comp_in_closure_repeated(): """ >>> pytyped_list_comp_in_closure_repeated() [0, 4, 8] """ cdef object x x = 'abc' def f(): return x for i in range(3): result = [x*2 for x in range(5) if x % 2 == 0] assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def genexpr_in_closure(): """ >>> genexpr_in_closure() [0, 4, 8] """ x = 'abc' def f(): return x result = list( x*2 for x in range(5) if x % 2 == 0 ) assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def pytyped_genexpr_in_closure(): """ >>> pytyped_genexpr_in_closure() [0, 4, 8] """ cdef object x x = 'abc' def f(): return x result = list( x*2 for x in range(5) if x % 2 == 0 ) assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def pytyped_genexpr_in_closure_repeated(): """ >>> pytyped_genexpr_in_closure_repeated() [0, 4, 8] """ cdef object x x = 'abc' def f(): return x for i in range(3): result = list( x*2 for x in range(5) if x % 2 == 0 ) assert x == 'abc' # don't leak in Py3 code assert f() == 'abc' # don't leak in Py3 code return result def genexpr_scope_in_closure(): """ >>> genexpr_scope_in_closure() [0, 4, 8] """ i = 2 x = 'abc' def f(): return i, x result = list( x*i for x in range(5) if x % 2 == 0 ) assert x == 'abc' # don't leak in Py3 code assert f() == (2,'abc') # don't leak in Py3 code return result Cython-0.26.1/tests/run/inplace.pyx0000664000175000017500000001157012542002467017761 0ustar stefanstefan00000000000000cimport cython def f(a,b): """ >>> str(f(5, 7)) '29509034655744' """ a += b a *= b a **= b return a def g(int a, int b): """ >>> g(13, 4) 32 """ a -= b a /= b a <<= b return a def h(double a, double b): """ >>> h(56, 7) 105.0 """ a /= b a += b a *= b return a from libc cimport stdlib def arrays(): """ >>> arrays() 19 """ cdef char* buf = stdlib.malloc(10) cdef int i = 2 cdef object j = 2 buf[2] = 0 buf[i] += 2 buf[2] *= 10 buf[j] -= 1 print buf[2] stdlib.free(buf) cdef class A: cdef attr cdef int 
attr2 cdef char* buf def __init__(self): self.attr = 3 self.attr2 = 3 class B: attr = 3 def attributes(): """ >>> attributes() 26 26 26 """ cdef A a = A() b = B() a.attr += 10 a.attr *= 2 a.attr2 += 10 a.attr2 *= 2 b.attr += 10 b.attr *= 2 print a.attr, a.attr2, b.attr def get_2(): return 2 cdef int identity(int value): return value def smoketest(): """ >>> smoketest() 10 """ cdef char* buf = stdlib.malloc(10) cdef A a = A() a.buf = buf a.buf[identity(1)] = 0 (a.buf + identity(4) - (2*get_2() - 1))[get_2() - 2*identity(1)] += 10 print a.buf[1] stdlib.free(buf) def side_effect(x): print u"side effect", x return x cdef int c_side_effect(int x): print u"c side effect", x return x def test_side_effects(): """ >>> test_side_effects() side effect 1 c side effect 2 side effect 3 c side effect 4 ([0, 11, 102, 3, 4], [0, 1, 2, 13, 104]) """ cdef object a = list(range(5)) a[side_effect(1)] += 10 a[c_side_effect(2)] += 100 cdef int i cdef int[5] b for i from 0 <= i < 5: b[i] = i b[side_effect(3)] += 10 b[c_side_effect(4)] += 100 return a, [b[i] for i from 0 <= i < 5] @cython.cdivision(True) def test_inplace_cdivision(int a, int b): """ >>> test_inplace_cdivision(13, 10) 3 >>> test_inplace_cdivision(13, -10) 3 >>> test_inplace_cdivision(-13, 10) -3 >>> test_inplace_cdivision(-13, -10) -3 """ a %= b return a @cython.cdivision(False) def test_inplace_pydivision(int a, int b): """ >>> test_inplace_pydivision(13, 10) 3 >>> test_inplace_pydivision(13, -10) -7 >>> test_inplace_pydivision(-13, 10) 7 >>> test_inplace_pydivision(-13, -10) -3 """ a %= b return a def test_complex_inplace(double complex x, double complex y): """ >>> test_complex_inplace(1, 1) (2+0j) >>> test_complex_inplace(2, 3) (15+0j) >>> test_complex_inplace(2+3j, 4+5j) (-16+62j) """ x += y x *= y return x # The following is more subtle than one might expect. 
cdef struct Inner: int x cdef struct Aa: int value Inner inner cdef struct NestedA: Aa a cdef struct ArrayOfA: Aa[10] a def nested_struct_assignment(): """ >>> nested_struct_assignment() """ cdef NestedA nested nested.a.value = 2 nested.a.value += 3 assert nested.a.value == 5 nested.a.inner.x = 5 nested.a.inner.x += 10 assert nested.a.inner.x == 15 def nested_array_assignment(): """ >>> nested_array_assignment() c side effect 0 c side effect 1 """ cdef ArrayOfA array array.a[0].value = 2 array.a[c_side_effect(0)].value += 3 assert array.a[0].value == 5 array.a[1].inner.x = 5 array.a[c_side_effect(1)].inner.x += 10 assert array.a[1].inner.x == 15 cdef class VerboseDict(object): cdef name cdef dict dict def __init__(self, name, **kwds): self.name = name self.dict = kwds def __getitem__(self, key): print self.name, "__getitem__", key return self.dict[key] def __setitem__(self, key, value): print self.name, "__setitem__", key, value self.dict[key] = value def __repr__(self): return repr(self.name) def deref_and_increment(o, key): """ >>> deref_and_increment({'a': 1}, 'a') side effect a >>> v = VerboseDict('v', a=10) >>> deref_and_increment(v, 'a') side effect a v __getitem__ a v __setitem__ a 11 """ o[side_effect(key)] += 1 def double_deref_and_increment(o, key1, key2): """ >>> v = VerboseDict('v', a=10) >>> w = VerboseDict('w', vkey=v) >>> double_deref_and_increment(w, 'vkey', 'a') side effect vkey w __getitem__ vkey side effect a v __getitem__ a v __setitem__ a 11 """ o[side_effect(key1)][side_effect(key2)] += 1 def conditional_inplace(value, a, condition, b): """ >>> conditional_inplace([1, 2, 3], [100], True, [200]) [1, 2, 3, 100] >>> conditional_inplace([1, 2, 3], [100], False, [200]) [1, 2, 3, 200] """ value += a if condition else b return value Cython-0.26.1/tests/run/cpdef_optargs_pure.py0000664000175000017500000000161613023021033022012 0ustar stefanstefan00000000000000# mode: run # tag: cyfunction # cython: binding=True import cython class PyClass(object): a = 2 class PyClass99(object): a = 99 def pymethod(self, x, y=1, z=PyClass): """ >>> obj = PyClass99() >>> obj.pymethod(0) (0, 1, 2) """ return x, y, z.a def func(x, y=1, z=PyClass): """ >>> func(0) (0, 1, 2) >>> func(0, 3) (0, 3, 2) >>> func(0, 3, PyClass) (0, 3, 2) >>> func(0, 3, 5) Traceback (most recent call last): AttributeError: 'int' object has no attribute 'a' """ return x, y, z.a @cython.ccall def pyfunc(x, y=1, z=PyClass): """ >>> pyfunc(0) (0, 1, 2) >>> pyfunc(0, 3) (0, 3, 2) >>> pyfunc(0, 3, PyClass) (0, 3, 2) >>> pyfunc(0, 3, 5) Traceback (most recent call last): AttributeError: 'int' object has no attribute 'a' """ return x, y, z.a Cython-0.26.1/tests/run/r_uintindex.pyx0000664000175000017500000000043112542002467020670 0ustar stefanstefan00000000000000__doc__ = u""" >>> print(idx_uint( ["buckle", "my", "shoe"], 2)) shoe >>> print(idx_ulong(["buckle", "my", "shoe"], 2)) shoe """ def idx_ulong(seq, i): cdef unsigned long u u = i return seq[u] def idx_uint(seq, i): cdef unsigned int u u = i return seq[u] Cython-0.26.1/tests/run/sequential_parallel.pyx0000664000175000017500000004027713143605603022401 0ustar stefanstefan00000000000000# tag: run cimport cython.parallel from cython.parallel import prange, threadid from cython.view cimport array from libc.stdlib cimport malloc, calloc, free, abort from libc.stdio cimport puts import os import sys try: from builtins import next # Py3k except ImportError: def next(it): return it.next() #@cython.test_assert_path_exists( # "//ParallelWithBlockNode//ParallelRangeNode[@schedule = 
'dynamic']", # "//GILStatNode[@state = 'nogil]//ParallelRangeNode") def test_prange(): """ >>> test_prange() (9, 9, 45, 45) """ cdef Py_ssize_t i, j, sum1 = 0, sum2 = 0 with nogil, cython.parallel.parallel(): for i in prange(10, schedule='dynamic'): sum1 += i for j in prange(10, nogil=True): sum2 += j return i, j, sum1, sum2 def test_descending_prange(): """ >>> test_descending_prange() 5 """ cdef int i, start = 5, stop = -5, step = -2 cdef int sum = 0 for i in prange(start, stop, step, nogil=True): sum += i return sum def test_prange_matches_range(int start, int stop, int step): """ >>> test_prange_matches_range(0, 8, 3) >>> test_prange_matches_range(0, 9, 3) >>> test_prange_matches_range(0, 10, 3) >>> test_prange_matches_range(0, 10, -3) >>> test_prange_matches_range(0, -10, -3) >>> test_prange_matches_range(1, -10, -3) >>> test_prange_matches_range(2, -10, -3) >>> test_prange_matches_range(3, -10, -3) """ cdef int i, range_last, prange_last prange_set = set() for i in prange(start, stop, step, nogil=True, num_threads=3): prange_last = i with gil: prange_set.add(i) range_set = set(range(start, stop, step)) assert range_set == prange_set, "missing: %s extra %s" % (sorted(range_set-prange_set), sorted(prange_set - range_set)) for ii in range(start, stop, step): range_last = ii if range_set: assert prange_last == i assert range_last == prange_last def test_propagation(): """ >>> test_propagation() (9, 9, 9, 9, 450, 450) """ cdef int i = 0, j = 0, x = 0, y = 0 cdef int sum1 = 0, sum2 = 0 for i in prange(10, nogil=True): for j in prange(10): sum1 += i with nogil, cython.parallel.parallel(): for x in prange(10): for y in prange(10): sum2 += y return i, j, x, y, sum1, sum2 # DISABLED, not allowed in OpenMP 3.0 (fails on Windows) #def test_unsigned_operands(): # """ # >>> test_unsigned_operands() # 10 # """ # cdef int i # cdef int start = -5 # cdef unsigned int stop = 5 # cdef int step = 1 # # cdef int steps_taken = 0 # cdef int *steps_takenp = &steps_taken # # for i in prange(start, stop, step, nogil=True): # steps_taken += 1 # if steps_takenp[0] > 10: # abort() # # return steps_taken def test_reassign_start_stop_step(): """ >>> test_reassign_start_stop_step() 20 """ cdef int start = 0, stop = 10, step = 2 cdef int i cdef int sum = 0 for i in prange(start, stop, step, nogil=True): start = -2 stop = 2 step = 0 sum += i return sum def test_closure_parallel_privates(): """ >>> test_closure_parallel_privates() 9 9 45 45 0 0 9 9 """ cdef int x def test_target(): nonlocal x for x in prange(10, nogil=True): pass return x print test_target(), x def test_reduction(): nonlocal x cdef int i x = 0 for i in prange(10, nogil=True): x += i return x print test_reduction(), x def test_generator(): nonlocal x cdef int i x = 0 yield x x = 2 for i in prange(10, nogil=True): x = i yield x g = test_generator() print next(g), x, next(g), x def test_closure_parallel_with_gil(): """ >>> test_closure_parallel_with_gil() 45 45 """ cdef int sum = 0 temp1 = 5 temp2 = -5 def test_reduction(): nonlocal sum, temp1, temp2 cdef int i for i in prange(10, nogil=True): with gil: sum += temp1 + temp2 + i # assert abs(sum - sum) == 0 return sum print test_reduction() print sum def test_pure_mode(): """ >>> test_pure_mode() 0 1 2 3 4 4 3 2 1 0 0 """ import Cython.Shadow pure_parallel = sys.modules['cython.parallel'] for i in pure_parallel.prange(5): print i for i in pure_parallel.prange(4, -1, -1, schedule='dynamic', nogil=True): print i with pure_parallel.parallel(): print pure_parallel.threadid() cdef extern from "types.h": 
ctypedef short actually_long_t ctypedef long actually_short_t ctypedef int myint_t def test_nan_init(): """ >>> test_nan_init() """ cdef int mybool = 0 cdef int err = 0 cdef int *errp = &err cdef signed char a1 = 10 cdef unsigned char a2 = 10 cdef short b1 = 10 cdef unsigned short b2 = 10 cdef int c1 = 10 cdef unsigned int c2 = 10 cdef long d1 = 10 cdef unsigned long d2 = 10 cdef long long e1 = 10 cdef unsigned long long e2 = 10 cdef actually_long_t miss1 = 10 cdef actually_short_t miss2 = 10 cdef myint_t typedef1 = 10 cdef float f = 10.0 cdef double g = 10.0 cdef long double h = 10.0 cdef void *p = 10 with nogil, cython.parallel.parallel(): # First, trick the error checking to make it believe these variables # are initialized after this if if mybool: # mybool is always false! a1 = a2 = b1 = b2 = c1 = c2 = d1 = d2 = e1 = e2 = 0 f = g = h = 0.0 p = NULL miss1 = miss2 = typedef1 = 0 if (a1 == 10 or a2 == 10 or b1 == 10 or b2 == 10 or c1 == 10 or c2 == 10 or d1 == 10 or d2 == 10 or e1 == 10 or e2 == 10 or f == 10.0 or g == 10.0 or h == 10.0 or p == 10 or miss1 == 10 or miss2 == 10 or typedef1 == 10): errp[0] = 1 cdef int i for i in prange(10, nogil=True): # First, trick the error checking to make it believe these variables # are initialized after this if if mybool: # mybool is always false! a1 = a2 = b1 = b2 = c1 = c2 = d1 = d2 = e1 = e2 = 0 f = g = h = 0.0 p = NULL miss1 = miss2 = typedef1 = 0 if (a1 == 10 or a2 == 10 or b1 == 10 or b2 == 10 or c1 == 10 or c2 == 10 or d1 == 10 or d2 == 10 or e1 == 10 or e2 == 10 or f == 10.0 or g == 10.0 or h == 10.0 or p == 10 or miss1 == 10 or miss2 == 10 or typedef1 == 10): errp[0] = 1 if err: raise Exception("One of the values was not initialized to a maximum " "or NaN value") c1 = 20 with nogil, cython.parallel.parallel(): c1 = 16 cdef void nogil_print(char *s) with gil: print s.decode('ascii') def test_else_clause(): """ >>> test_else_clause() else clause executed """ cdef int i for i in prange(5, nogil=True): pass else: nogil_print('else clause executed') def test_prange_break(): """ >>> test_prange_break() """ cdef int i for i in prange(10, nogil=True): if i == 8: break else: nogil_print('else clause executed') def test_prange_continue(): """ >>> test_prange_continue() else clause executed 0 0 1 0 2 2 3 0 4 4 5 0 6 6 7 0 8 8 9 0 """ cdef int i cdef int *p = calloc(10, sizeof(int)) if p == NULL: raise MemoryError for i in prange(10, nogil=True): if i % 2 != 0: continue p[i] = i else: nogil_print('else clause executed') for i in range(10): print i, p[i] free(p) def test_nested_break_continue(): """ DISABLED. For some reason this fails intermittently on jenkins, with the first line of output being '0 0 0 0'. The generated code looks awfully correct though... 
needs investigation >> test_nested_break_continue() 6 7 6 7 8 """ cdef int i, j, result1 = 0, result2 = 0 for i in prange(10, nogil=True, num_threads=2, schedule='static'): for j in prange(10, num_threads=2, schedule='static'): if i == 6 and j == 7: result1 = i result2 = j break else: continue break print i, j, result1, result2 with nogil, cython.parallel.parallel(num_threads=2): for i in prange(10, schedule='static'): if i == 8: break else: continue print i cdef int parallel_return() nogil: cdef int i for i in prange(10): if i == 8: return i else: return 1 return 2 def test_return(): """ >>> test_return() 8 """ print parallel_return() def test_parallel_exceptions(): """ >>> test_parallel_exceptions() I am executed first ('propagate me',) 0 """ cdef int i, j, sum = 0 mylist = [] try: for i in prange(10, nogil=True): try: for j in prange(10): with gil: raise Exception("propagate me") sum += i * j sum += i finally: with gil: mylist.append("I am executed first") except Exception, e: print mylist[0] print e.args, sum def test_parallel_exceptions_unnested(): """ >>> test_parallel_exceptions_unnested() ('I am executed first', 0) ('propagate me',) 0 """ cdef int i, sum = 0 mylist = [] try: with nogil, cython.parallel.parallel(): try: for i in prange(10): with gil: raise Exception("propagate me") sum += i finally: with gil: mylist.append(("I am executed first", sum)) except Exception, e: print mylist[0] print e.args, sum cdef int parallel_exc_cdef() except -3: cdef int i, j for i in prange(10, nogil=True): for j in prange(10, num_threads=6): with gil: raise Exception("propagate me") return 0 cdef int parallel_exc_cdef_unnested() except -3: cdef int i for i in prange(10, nogil=True): with gil: raise Exception("propagate me") return 0 def test_parallel_exc_cdef(): """ >>> test_parallel_exc_cdef() Traceback (most recent call last): ... Exception: propagate me """ parallel_exc_cdef_unnested() parallel_exc_cdef() cpdef int parallel_exc_cpdef() except -3: cdef int i, j for i in prange(10, nogil=True): for j in prange(10, num_threads=6): with gil: raise Exception("propagate me") return 0 cpdef int parallel_exc_cpdef_unnested() except -3: cdef int i, j for i in prange(10, nogil=True): with gil: raise Exception("propagate me") return 0 def test_parallel_exc_cpdef(): """ >>> test_parallel_exc_cpdef() Traceback (most recent call last): ... Exception: propagate me """ parallel_exc_cpdef_unnested() parallel_exc_cpdef() cdef int parallel_exc_nogil_swallow() except -1: cdef int i, j for i in prange(10, nogil=True): try: for j in prange(10): with gil: raise Exception("propagate me") finally: return i return 0 cdef int parallel_exc_nogil_swallow_unnested() except -1: cdef int i with nogil: try: for i in prange(10): with gil: raise Exception("propagate me") finally: return i return 0 def test_parallel_exc_nogil_swallow(): """ >>> test_parallel_exc_nogil_swallow() execute me execute me """ parallel_exc_nogil_swallow_unnested() print 'execute me' parallel_exc_nogil_swallow() print 'execute me' def parallel_exc_replace(): """ >>> parallel_exc_replace() Traceback (most recent call last): ... Exception: propagate me instead """ cdef int i, j for i in prange(10, nogil=True): with gil: try: for j in prange(10, nogil=True): with gil: raise Exception("propagate me") except Exception, e: raise Exception("propagate me instead") return 0 def parallel_exceptions2(): """ >>> parallel_exceptions2() Traceback (most recent call last): ... 
Exception: propagate me """ cdef int i, j, k for i in prange(10, nogil=True): for j in prange(10): for k in prange(10): if i + j + k > 20: with gil: raise Exception("propagate me") break continue return def test_parallel_with_gil_return(): """ >>> test_parallel_with_gil_return() True 45 """ cdef int i, sum = 0 for i in prange(10, nogil=True): with gil: obj = i sum += obj print obj in range(10) with nogil, cython.parallel.parallel(): with gil: return sum def test_parallel_with_gil_continue_unnested(): """ >>> test_parallel_with_gil_continue_unnested() 20 """ cdef int i, sum = 0 for i in prange(10, nogil=True): with gil: if i % 2: continue sum += i print sum cdef int inner_parallel_section() nogil: cdef int j, sum = 0 for j in prange(10): sum += j return sum def outer_parallel_section(): """ >>> outer_parallel_section() 450 """ cdef int i, sum = 0 for i in prange(10, nogil=True): sum += inner_parallel_section() return sum cdef int nogil_cdef_except_clause() nogil except 0: return 1 cdef void nogil_cdef_except_star() nogil except *: pass def test_nogil_cdef_except_clause(): """ >>> test_nogil_cdef_except_clause() """ cdef int i for i in prange(10, nogil=True): nogil_cdef_except_clause() nogil_cdef_except_star() def test_num_threads_compile(): cdef int i for i in prange(10, nogil=True, num_threads=2): pass with nogil, cython.parallel.parallel(num_threads=2): pass with nogil, cython.parallel.parallel(num_threads=2): for i in prange(10): pass cdef int chunksize() nogil: return 3 def test_chunksize(): """ >>> test_chunksize() 45 45 45 """ cdef int i, sum sum = 0 for i in prange(10, nogil=True, num_threads=2, schedule='static', chunksize=chunksize()): sum += i print sum sum = 0 for i in prange(10, nogil=True, num_threads=6, schedule='dynamic', chunksize=chunksize()): sum += i print sum sum = 0 with nogil, cython.parallel.parallel(): for i in prange(10, schedule='guided', chunksize=chunksize()): sum += i print sum cdef class PrintOnDealloc(object): def __dealloc__(self): print "deallocating..." def error(): raise Exception("propagate me") def test_clean_temps(): """ >>> test_clean_temps() deallocating... 
propagate me """ cdef Py_ssize_t i try: for i in prange(100, nogil=True, num_threads=1): with gil: x = PrintOnDealloc() + error() except Exception, e: print e.args[0] def test_pointer_temps(double x): """ >>> test_pointer_temps(1.0) 4.0 """ cdef Py_ssize_t i cdef double* f cdef double[:] arr = array(format="d", shape=(10,), itemsize=sizeof(double)) arr[0] = 4.0 arr[1] = 3.0 for i in prange(10, nogil=True, num_threads=1): f = &arr[0] return f[0] Cython-0.26.1/tests/run/with_statement_module_level_T536.pyx0000664000175000017500000000134512542002467024661 0ustar stefanstefan00000000000000# ticket: 536 __doc__ = """ >>> inner_result ['ENTER'] >>> result # doctest: +ELLIPSIS ['ENTER', ...EXIT (<...ValueError...>,...ValueError..., >> inner_result_no_exc ['ENTER'] >>> result_no_exc ['ENTER', 'EXIT (None, None, None)'] """ class ContextManager(object): def __init__(self, result): self.result = result def __enter__(self): self.result.append("ENTER") def __exit__(self, *values): self.result.append("EXIT %r" % (values,)) return True result_no_exc = [] with ContextManager(result_no_exc) as c: inner_result_no_exc = result_no_exc[:] result = [] with ContextManager(result) as c: inner_result = result[:] raise ValueError('TEST') Cython-0.26.1/tests/run/tupleunpack_T298.pyx0000664000175000017500000000043012542002467021420 0ustar stefanstefan00000000000000# ticket: 298 """ >>> func() 0 0 0 0 1 1 1 1 2 2 2 2 >>> func2() """ def g(): return ((3, 2), 1, 0) def func2(): (a, b), c, d = g() def func(): for (a, b),c ,d in zip(zip(range(3), range(3)), range(3), range(3)): print a, b print c print d Cython-0.26.1/tests/run/consts.pyx0000664000175000017500000001452512542002467017662 0ustar stefanstefan00000000000000import sys IS_PY3 = sys.version_info[0] >= 3 cimport cython DEF INT_VAL = 1 def _func(a,b,c): return a+b+c @cython.test_fail_if_path_exists("//AddNode") def add(): """ >>> add() == 1+2+3+4 True """ return 1+2+3+4 #@cython.test_fail_if_path_exists("//AddNode") def add_var(a): """ >>> add_var(10) == 1+2+10+3+4 True """ return 1+2 +a+ 3+4 @cython.test_fail_if_path_exists("//AddNode", "//SubNode") def neg(): """ >>> neg() == -1 -2 - (-3+4) True """ return -1 -2 - (-3+4) @cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode") def long_int_mix(): """ >>> long_int_mix() == 1 + (2 * 3) // 2 True >>> if IS_PY3: type(long_int_mix()) is int or type(long_int_mix()) ... 
else: type(long_int_mix()) is long or type(long_int_mix()) True """ return 1L + (2 * 3L) // 2 @cython.test_fail_if_path_exists("//AddNode", "//MulNode", "//DivNode") def char_int_mix(): """ >>> char_int_mix() == 1 + (ord(' ') * 3) // 2 + ord('A') True """ return 1L + (c' ' * 3L) // 2 + c'A' @cython.test_fail_if_path_exists("//AddNode", "//MulNode") def int_cast(): """ >>> int_cast() == 1 + 2 * 6000 True """ return (1 + 2 * 6000) @cython.test_fail_if_path_exists("//MulNode") def mul(): """ >>> mul() == 1*60*1000 True """ return 1*60*1000 @cython.test_fail_if_path_exists("//AddNode", "//MulNode") def arithm(): """ >>> arithm() == 9*2+3*8//6-10 True """ return 9*2+3*8//6-10 @cython.test_fail_if_path_exists("//AddNode", "//MulNode") def parameters(): """ >>> parameters() == _func(-1 -2, - (-3+4), 1*2*3) True """ return _func(-1 -2, - (-3+4), 1*2*3) #@cython.test_fail_if_path_exists("//AddNode") def lists(): """ >>> lists() == [1,2,3] + [4,5,6] True """ return [1,2,3] + [4,5,6] @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_right_len1(): """ >>> multiplied_lists_right_len1() == [1] * 5 True """ return [1] * 5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_right(): """ >>> multiplied_lists_right() == [1,2,3] * 5 True """ return [1,2,3] * 5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_left(): """ >>> multiplied_lists_left() == [1,2,3] * 5 True """ return 5 * [1,2,3] @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_neg(): """ >>> multiplied_lists_neg() == [1,2,3] * -5 True """ return [1,2,3] * -5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_nonconst(x): """ >>> multiplied_lists_nonconst(5) == [1,2,3] * 5 True >>> multiplied_lists_nonconst(-5) == [1,2,3] * -5 True >>> multiplied_lists_nonconst(0) == [1,2,3] * 0 True >>> try: [1,2,3] * 'abc' ... except TypeError: pass >>> try: multiplied_nonconst_tuple_arg('abc') ... except TypeError: pass >>> try: [1,2,3] * 1.0 ... except TypeError: pass >>> try: multiplied_nonconst_tuple_arg(1.0) ... 
except TypeError: pass """ return [1,2,3] * x @cython.test_assert_path_exists("//MulNode") def multiplied_lists_nonconst_left(x): """ >>> multiplied_lists_nonconst_left(5) == 5 * [1,2,3] True >>> multiplied_lists_nonconst_left(-5) == -5 * [1,2,3] True >>> multiplied_lists_nonconst_left(0) == 0 * [1,2,3] True """ return x * [1,2,3] @cython.test_fail_if_path_exists("//MulNode//ListNode") @cython.test_assert_path_exists("//MulNode") def multiplied_lists_nonconst_expression(x): """ >>> multiplied_lists_nonconst_expression(5) == [1,2,3] * (5 * 2) True >>> multiplied_lists_nonconst_expression(-5) == [1,2,3] * (-5 * 2) True >>> multiplied_lists_nonconst_expression(0) == [1,2,3] * (0 * 2) True """ return [1,2,3] * (x*2) cdef side_effect(int x): print x return x @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_with_side_effects(): """ >>> multiplied_lists_with_side_effects() == [1,2,3] * 5 1 2 3 True """ return [side_effect(1), side_effect(2), side_effect(3)] * 5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_lists_nonconst_with_side_effects(x): """ >>> multiplied_lists_nonconst_with_side_effects(5) == [1,2,3] * 5 1 2 3 True """ return [side_effect(1), side_effect(2), side_effect(3)] * x @cython.test_fail_if_path_exists("//MulNode") def multiplied_nonconst_tuple_arg(x): """ >>> multiplied_nonconst_tuple_arg(5) == (1,2) * 5 True >>> multiplied_nonconst_tuple_arg(-5) == (1,2) * -5 True >>> multiplied_nonconst_tuple_arg(0) == (1,2) * 0 True >>> try: (1,2) * 'abc' ... except TypeError: pass >>> try: multiplied_nonconst_tuple_arg('abc') ... except TypeError: pass >>> try: (1,2) * 1.0 ... except TypeError: pass >>> try: multiplied_nonconst_tuple_arg(1.0) ... except TypeError: pass """ return (1,2) * x @cython.test_fail_if_path_exists("//MulNode") def multiplied_nonconst_tuple_int_arg(int x): """ >>> multiplied_nonconst_tuple_int_arg(5) == (1,2) * 5 True """ return (1,2) * x @cython.test_fail_if_path_exists("//MulNode") def multiplied_nonconst_tuple(x): """ >>> multiplied_nonconst_tuple(5) == (1,2) * (5+1) True """ return (1,2) * (x + 1) MULT = 5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_global_nonconst_tuple(): """ >>> multiplied_global_nonconst_tuple() == (1,2,3) * 5 1 2 3 True """ return (side_effect(1), side_effect(2), side_effect(3)) * MULT @cython.test_fail_if_path_exists("//MulNode") def multiplied_const_tuple(): """ >>> multiplied_const_tuple() == (1,2) * 5 True """ return (1,2) * 5 @cython.test_fail_if_path_exists("//MulNode") def multiplied_const_tuple_len1(): """ >>> multiplied_const_tuple_len1() == (1,) * 5 True """ return (1,) * 5 @cython.test_fail_if_path_exists("//PrimaryCmpNode") def compile_time_DEF(): """ >>> compile_time_DEF() (1, False, True, True, False) """ return INT_VAL, INT_VAL == 0, INT_VAL != 0, INT_VAL == 1, INT_VAL != 1 @cython.test_fail_if_path_exists("//PrimaryCmpNode") def cascaded_compare(): """ >>> cascaded_compare() True """ return 1 < 2 < 3 < 4 Cython-0.26.1/tests/run/float_floor_division_T260.pyx0000664000175000017500000000041612542002467023270 0ustar stefanstefan00000000000000# ticket: 260 def floor_div_float(double a, double b): """ >>> floor_div_float(2, 1.5) 1.0 >>> floor_div_float(2, -1.5) -2.0 >>> floor_div_float(-2.3, 1.5) -2.0 >>> floor_div_float(1e10, 1e-10) == 1e20 True """ return a // b Cython-0.26.1/tests/run/starimport_cimport.srctree0000664000175000017500000000147312542002467023137 0ustar stefanstefan00000000000000PYTHON setup.py build_ext --inplace PYTHON -c "import star_cimport_test" ######## setup.py 
######## from distutils.core import setup from Cython.Distutils import build_ext from Cython.Distutils.extension import Extension setup( ext_modules = [ Extension("star_cimport_ext", ["star_cimport_ext.pyx"]), Extension("star_cimport_test", ["star_cimport_test.pyx"]), ], cmdclass={'build_ext': build_ext}, ) ######## star_cimport_ext.pyx ######## cdef class test_pxd: pass ######## star_cimport_ext.pxd ######## cdef class test_pxd: pass ######## star_cimport.py ######## class test_py: pass ######## star_cimport_test.pyx ######## # Tests a Python star import followed by a cimport from star_cimport import * from star_cimport_ext cimport test_pxd cdef test_pxd t = test_pxd() Cython-0.26.1/tests/run/property_decorator_T593.py0000664000175000017500000000202113023021033022600 0ustar stefanstefan00000000000000# mode: run # ticket: 593 # tag: property, decorator my_property = property class Prop(object): """ >>> p = Prop() >>> p.prop GETTING 'None' >>> p.prop = 1 SETTING '1' (previously: 'None') >>> p.prop GETTING '1' 1 >>> p.prop = 2 SETTING '2' (previously: '1') >>> p.prop GETTING '2' 2 >>> del p.prop DELETING (previously: '2') >>> p.my_prop GETTING 'my_prop' 389 >>> list(p.generator_prop) [42] """ _value = None @property def prop(self): print("GETTING '%s'" % self._value) return self._value @prop.setter def prop(self, value): print("SETTING '%s' (previously: '%s')" % (value, self._value)) self._value = value @prop.deleter def prop(self): print("DELETING (previously: '%s')" % self._value) self._value = None @my_property def my_prop(self): print("GETTING 'my_prop'") return 389 @property def generator_prop(self): yield 42 Cython-0.26.1/tests/run/check_fused_types.pyx0000664000175000017500000001005512542002467022032 0ustar stefanstefan00000000000000cimport cython cimport check_fused_types_pxd import math ctypedef char *string_t fused_t = cython.fused_type(int, long, float, string_t) other_t = cython.fused_type(int, long) base_t = cython.fused_type(short, int) # complex_t = cython.fused_type(cython.floatcomplex, cython.doublecomplex) cdef fused complex_t: float complex double complex ctypedef base_t **base_t_p_p # ctypedef cython.fused_type(char, base_t_p_p, fused_t, complex_t) composed_t cdef fused composed_t: char int float string_t cython.pp_int float complex double complex int complex long complex cdef func(fused_t a, other_t b): cdef int int_a cdef string_t string_a cdef other_t other_a if fused_t is other_t: print 'fused_t is other_t' other_a = a if fused_t is int: print 'fused_t is int' int_a = a if fused_t is string_t: print 'fused_t is string_t' string_a = a if fused_t in check_fused_types_pxd.unresolved_t: print 'fused_t in unresolved_t' if int in check_fused_types_pxd.unresolved_t: print 'int in unresolved_t' if string_t in check_fused_types_pxd.unresolved_t: print 'string_t in unresolved_t' def test_int_int(): """ >>> test_int_int() fused_t is other_t fused_t is int fused_t in unresolved_t int in unresolved_t """ cdef int x = 1 cdef int y = 2 func(x, y) def test_int_long(): """ >>> test_int_long() fused_t is int fused_t in unresolved_t int in unresolved_t """ cdef int x = 1 cdef long y = 2 func(x, y) def test_float_int(): """ >>> test_float_int() fused_t in unresolved_t int in unresolved_t """ cdef float x = 1 cdef int y = 2 func(x, y) def test_string_int(): """ >>> test_string_int() fused_t is string_t int in unresolved_t """ cdef string_t x = b"spam" cdef int y = 2 func(x, y) cdef if_then_else(fused_t a, other_t b): cdef other_t other_a cdef string_t string_a cdef fused_t specific_a if 
fused_t is other_t: print 'fused_t is other_t' other_a = a elif fused_t is string_t: print 'fused_t is string_t' string_a = a else: print 'none of the above' specific_a = a def test_if_then_else_long_long(): """ >>> test_if_then_else_long_long() fused_t is other_t """ cdef long x = 0, y = 0 if_then_else(x, y) def test_if_then_else_string_int(): """ >>> test_if_then_else_string_int() fused_t is string_t """ cdef string_t x = b"spam" cdef int y = 0 if_then_else(x, y) def test_if_then_else_float_int(): """ >>> test_if_then_else_float_int() none of the above """ cdef float x = 0.0 cdef int y = 1 if_then_else(x, y) cdef composed_t composed(composed_t x, composed_t y): if composed_t in base_t_p_p or composed_t is string_t: if string_t == composed_t: print x.decode('ascii'), y.decode('ascii') else: print x[0][0], y[0][0] return x elif composed_t == string_t: print 'this is never executed' elif list(): print 'neither is this one' else: if composed_t not in complex_t: print 'not a complex number' print x, y else: print 'it is a complex number' print x.real, x.imag return x + y def test_composed_types(): """ >>> test_composed_types() it is a complex number 0.5 0.6 9 4 not a complex number 7 8 15 7 8 spam eggs spam """ cdef double complex a = 0.5 + 0.6j, b = 0.4 -0.2j, result cdef int c = 7, d = 8 cdef int *cp = &c, *dp = &d cdef string_t e = "spam", f = "eggs" result = composed(a, b) print int(math.ceil(result.real * 10)), int(math.ceil(result.imag * 10)) print print composed(c, d) print composed(&cp, &dp) print print composed(e, f).decode('ascii') Cython-0.26.1/tests/run/curiously_recurring_template_pattern_GH1458_suport.h0000664000175000017500000000112113023021033030007 0ustar stefanstefan00000000000000template class Base { public: Base(T x) : x_(x) { }; Derived half() { Derived d(x_ / 2); return d; }; virtual T calculate() = 0; virtual ~Base() { }; protected: T x_; }; template class Square : public Base > { public: Square(T x) : Base >(x) { }; T calculate() { return this->x_ * this->x_; } }; template class Cube : public Base > { public: Cube(T x) : Base >(x) { }; T calculate() { return this->x_ * this->x_ * this->x_; } }; Cython-0.26.1/tests/run/r_forloop.pyx0000664000175000017500000000546513023021033020336 0ustar stefanstefan00000000000000def go_py(): """ >>> go_py() Spam! Spam! Spam! Spam! """ for i in range(4): print u"Spam!" def go_py_ret(): """ >>> go_py_ret() 2 """ for i in range(4): if i > 1: return i def go_c(): """ >>> go_c() Spam! Spam! Spam! Spam! """ cdef int i for i in range(4): print u"Spam!" def go_c_enumerate(): """ >>> go_c_enumerate() True True True True """ cdef int i,k for i,k in enumerate(range(4)): print i == k def go_c_int(int a, int b): """ >>> go_c_int(1,5) Spam! Spam! """ cdef int i for i in range(a,b,2): print u"Spam!" def go_c_all(): """ >>> go_c_all() Spam! Spam! Spam! """ cdef int i for i in range(8,2,-2): print u"Spam!" def go_c_all_exprs(x): """ >>> go_c_all_exprs(1) Spam! >>> go_c_all_exprs(3) Spam! Spam! """ cdef long i for i in range(4*x,2*x,-3): print u"Spam!" def go_c_const_exprs(): """ >>> go_c_const_exprs() Spam! Spam! """ cdef int i for i in range(4*2+1,2*2,-2-1): print u"Spam!" def f(x): return 2*x def go_c_calc(x): """ >>> go_c_calc(2) Spam! Spam! """ cdef long i for i in range(2*f(x),f(x), -2): print u"Spam!" 
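# Descriptive note (added comment, not part of the original test file): the next
# test reuses the same C-typed backwards range but returns from inside the loop
# body. For go_c_calc_ret(2), f(2) == 4, so the loop iterates over
# range(8, 4, -2); the first value below 2*f(x) == 8 is 6, matching the doctest.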
def go_c_calc_ret(x): """ >>> go_c_calc_ret(2) 6 """ cdef long i for i in range(2*f(x),f(x), -2): if i < 2*f(x): return i def go_c_ret(): """ >>> go_c_ret() 2 """ cdef int i for i in range(4): if i > 1: return i def go_list(): """ >>> go_list() Spam! Spam! Spam! Spam! """ cdef list l = list(range(4)) for i in l: print u"Spam!" def go_list_ret(): """ >>> go_list_ret() 2 """ cdef list l = list(range(4)) for i in l: if i > 1: return i def go_tuple(): """ >>> go_tuple() Spam! Spam! Spam! Spam! """ cdef tuple t = tuple(range(4)) for i in t: print u"Spam!" def go_tuple_ret(): """ >>> go_tuple_ret() 2 """ cdef tuple t = tuple(range(4)) for i in t: if i > 1: return i def go_dict(): """ >>> go_dict() Spam! Spam! Spam! Spam! """ cdef dict d = dict(zip(range(4), range(4))) for i in d: print u"Spam!" def go_dict_ret(): """ >>> go_dict_ret() 2 >>> global_result 6 """ cdef dict d = dict(zip(range(4), range(4))) for i in d: if i > 1 and i < 3: return i # test global scope also global_result = None cdef int i for i in range(4*2+1,2*2,-2-1): if i < 7: global_result = i break Cython-0.26.1/tests/run/r_jiba1.pxd0000664000175000017500000000012212542002467017617 0ustar stefanstefan00000000000000cdef class Parrot: cdef void describe(self) cdef class Norwegian(Parrot): pass Cython-0.26.1/tests/run/all.pyx0000664000175000017500000001215413023021033017076 0ustar stefanstefan00000000000000# mode: run # tag: all, builtins, werror cdef class VerboseGetItem(object): cdef object sequence def __init__(self, seq): self.sequence = seq def __getitem__(self, i): print i return self.sequence[i] # may raise IndexError cimport cython @cython.test_assert_path_exists("//SimpleCallNode") @cython.test_fail_if_path_exists("//ForInStatNode") def all_item(x): """ >>> all_item([1,1,1,1,1]) True >>> all_item([1,1,1,1,0]) False >>> all_item([0,1,1,1,0]) False >>> all(VerboseGetItem([1,1,1,0,0])) 0 1 2 3 False >>> all_item(VerboseGetItem([1,1,1,0,0])) 0 1 2 3 False >>> all(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True >>> all_item(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True """ return all(x) @cython.test_assert_path_exists( "//ForInStatNode", "//InlinedGeneratorExpressionNode" ) @cython.test_fail_if_path_exists( "//SimpleCallNode", "//YieldExprNode" ) def all_in_simple_gen(seq): """ >>> all_in_simple_gen([1,1,1]) True >>> all_in_simple_gen([1,1,0]) False >>> all_in_simple_gen([1,0,1]) False >>> all_in_simple_gen(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True >>> all_in_simple_gen(VerboseGetItem([1,1,0,1,1])) 0 1 2 False """ return all(x for x in seq) @cython.test_assert_path_exists( "//ForInStatNode", "//InlinedGeneratorExpressionNode" ) @cython.test_fail_if_path_exists( "//SimpleCallNode", "//YieldExprNode" ) def all_in_simple_gen_scope(seq): """ >>> all_in_simple_gen_scope([1,1,1]) True >>> all_in_simple_gen_scope([1,1,0]) False >>> all_in_simple_gen_scope([1,0,1]) False >>> all_in_simple_gen_scope(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True >>> all_in_simple_gen_scope(VerboseGetItem([1,1,0,1,1])) 0 1 2 False """ x = 'abc' result = all(x for x in seq) assert x == 'abc' return result @cython.test_assert_path_exists( "//ForInStatNode", "//InlinedGeneratorExpressionNode" ) @cython.test_fail_if_path_exists( "//SimpleCallNode", "//YieldExprNode" ) def all_in_conditional_gen(seq): """ >>> all_in_conditional_gen([3,6,9]) False >>> all_in_conditional_gen([0,3,7]) False >>> all_in_conditional_gen([1,0,1]) True >>> all_in_conditional_gen(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True >>> all_in_conditional_gen(VerboseGetItem([1,1,0,1,1])) 0 1 
2 3 4 5 True """ return all(x%3 for x in seq if x%2 == 1) mixed_ustring = u'AbcDefGhIjKlmnoP' lower_ustring = mixed_ustring.lower() upper_ustring = mixed_ustring.upper() @cython.test_assert_path_exists( '//PythonCapiCallNode', '//ForFromStatNode' ) @cython.test_fail_if_path_exists( '//SimpleCallNode', '//ForInStatNode' ) def all_lower_case_characters(unicode ustring): """ >>> all_lower_case_characters(mixed_ustring) False >>> all_lower_case_characters(upper_ustring) False >>> all_lower_case_characters(lower_ustring) True """ return all(uchar.islower() for uchar in ustring) @cython.test_assert_path_exists( "//ForInStatNode", "//InlinedGeneratorExpressionNode", "//InlinedGeneratorExpressionNode//IfStatNode" ) @cython.test_fail_if_path_exists( "//SimpleCallNode", "//YieldExprNode", # "//IfStatNode//CoerceToBooleanNode" ) def all_in_typed_gen(seq): """ >>> all_in_typed_gen([1,1,1]) True >>> all_in_typed_gen([1,0,0]) False >>> all_in_typed_gen(VerboseGetItem([1,1,1,1,1])) 0 1 2 3 4 5 True >>> all_in_typed_gen(VerboseGetItem([1,1,1,1,0])) 0 1 2 3 4 False """ cdef int x return all(x for x in seq) @cython.test_assert_path_exists( "//ForInStatNode", "//InlinedGeneratorExpressionNode", "//InlinedGeneratorExpressionNode//IfStatNode" ) @cython.test_fail_if_path_exists( "//SimpleCallNode", "//YieldExprNode", # "//IfStatNode//CoerceToBooleanNode" ) def all_in_double_gen(seq): """ >>> all(x for L in [[1,1,1],[1,1,1],[1,1,1]] for x in L) True >>> all_in_double_gen([[1,1,1],[1,1,1],[1,1,1]]) True >>> all(x for L in [[1,1,1],[1,1,1],[1,1,0]] for x in L) False >>> all_in_double_gen([[1,1,1],[1,1,1],[1,1,0]]) False >>> all(x for L in [[1,1,1],[0,1,1],[1,1,1]] for x in L) False >>> all_in_double_gen([[1,1,1],[0,1,1],[1,1,1]]) False >>> all_in_double_gen([VerboseGetItem([1,1,1]), VerboseGetItem([1,1,1,1,1])]) 0 1 2 3 0 1 2 3 4 5 True >>> all_in_double_gen([VerboseGetItem([1,1,1]),VerboseGetItem([1,1]),VerboseGetItem([1,1,0])]) 0 1 2 3 0 1 2 0 1 2 False >>> all_in_double_gen([VerboseGetItem([1,1,1]),VerboseGetItem([1,0,1]),VerboseGetItem([1,1])]) 0 1 2 3 0 1 False """ cdef int x return all(x for L in seq for x in L) Cython-0.26.1/tests/run/funcexceptcypy.pyx0000664000175000017500000000250312542002467021413 0ustar stefanstefan00000000000000__doc__ = u""" >>> import sys >>> if not IS_PY3: sys.exc_clear() >>> def test_py(): ... old_exc = sys.exc_info()[0] ... try: ... raise AttributeError("test") ... except AttributeError: ... test_c(error=AttributeError) ... print(sys.exc_info()[0] is AttributeError or sys.exc_info()[0]) ... print((IS_PY3 and sys.exc_info()[0] is old_exc) or ... (not IS_PY3 and sys.exc_info()[0] is AttributeError) or ... sys.exc_info()[0]) >>> print(sys.exc_info()[0]) # 0 None >>> test_py() True True True True >>> print(sys.exc_info()[0]) # test_py() None >>> test_c(test_py) True True True True True True >>> print(sys.exc_info()[0]) # test_c() None >>> def test_raise(): ... 
raise TestException("test") >>> test_catch(test_raise, TestException) True None """ import sys IS_PY3 = sys.version_info[0] >= 3 class TestException(Exception): pass def test_c(func=None, error=None): try: raise TestException(u"test") except TestException: if func: func() print(sys.exc_info()[0] is TestException or sys.exc_info()[0]) print(sys.exc_info()[0] is error or sys.exc_info()[0]) def test_catch(func, error): try: func() except error: print(sys.exc_info()[0] is error or sys.exc_info()[0]) print(sys.exc_info()[0] is error or sys.exc_info()[0]) Cython-0.26.1/tests/run/r_bishop3.pyx0000664000175000017500000000051312542002467020231 0ustar stefanstefan00000000000000__doc__ = u""" >>> foo = Foo() >>> fee = Fee() >>> faa = Faa() >>> fee.bof() Fee bof 0 >>> faa.bof() Foo bof 0 """ cdef class Foo: cdef int val def __init__(self): self.val = 0 cdef class Fee(Foo): def bof(self): print u'Fee bof', self.val cdef class Faa(Fee): def bof(self): print u'Foo bof', self.val Cython-0.26.1/tests/run/clone_type.pyx0000664000175000017500000000047212542002467020506 0ustar stefanstefan00000000000000cdef class MyType: def dup(self): """ >>> x1 = MyType() >>> isinstance(x1, MyType) True >>> x2 = x1.dup() >>> isinstance(x2, MyType) True >>> x1 != x2 True """ cdef MyType clone = type(self)() return clone Cython-0.26.1/tests/run/extmember.pyx0000664000175000017500000000077212542002467020340 0ustar stefanstefan00000000000000__doc__ = u""" >>> s = Spam() >>> s.e = s >>> s.e = 1 Traceback (most recent call last): TypeError: Cannot convert int to extmember.Spam >>> s.e is s True >>> s.e = None >>> s = Bot() >>> s.e = s >>> s.e = 1 Traceback (most recent call last): TypeError: Cannot convert int to extmember.Bot >>> s.e is s True >>> s.e = None """ # declared in the pxd cdef class Spam: pass # not declared in the pxd cdef class Bot: cdef public Bot e Cython-0.26.1/tests/run/generator_type_inference.pyx0000664000175000017500000000174612542002467023417 0ustar stefanstefan00000000000000# mode: run # tag: typeinference, generators cimport cython def test_type_inference(): """ >>> list(test_type_inference()) [(2.0, 'double'), (2.0, 'double'), (2.0, 'double')] """ x = 1.0 for i in range(3): yield x * 2.0, cython.typeof(x) def test_unicode_loop(): """ >>> chars = list(test_unicode_loop()) 1 Py_UCS4 2 Py_UCS4 2 Py_UCS4 2 Py_UCS4 2 Py_UCS4 >>> len(chars) 4 >>> ''.join(chars) == 'abcd' True """ ustr = u'abcd' print 1, cython.typeof(ustr[0]) for c in ustr: print 2, cython.typeof(c) yield c def test_with_nonlocal(): """ >>> chars = list(test_with_nonlocal()) 1 Py_UCS4 2 Py_UCS4 2 Py_UCS4 >>> len(chars) 2 >>> ''.join(chars) == 'ab' True """ ustr = u'ab' print 1, cython.typeof(ustr[0]) def gen(): nonlocal ustr for c in ustr: print 2, cython.typeof(c) yield c return gen() Cython-0.26.1/tests/run/argerrors.py0000664000175000017500000000504212542002467020161 0ustar stefanstefan00000000000000# mode: run # tag: kwargs, argument unpacking # This test validates the error handling in the different specialised # code paths of the argument unpacking code. The have-kwargs and # no-kwargs branches take different paths, so we always test with and # without a keyword dict (even if it's empty). def test_single_arg(a): """ >>> test_single_arg(1) 1 >>> test_single_arg(1, **{}) 1 >>> test_single_arg() # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_single_arg(1,2) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... 
>>> test_single_arg(1,2, **{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_single_arg(**{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_single_arg(*(), **{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_single_arg(**{'b':2}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_single_arg(**{'a':1, 'b':2}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... """ return a def test_two_args(a,b): """ >>> test_two_args(1,2) (1, 2) >>> test_two_args(1,2, **{}) (1, 2) >>> test_two_args(1,**{'b':2}) (1, 2) >>> test_two_args(**{'a':1, 'b':2}) (1, 2) >>> test_two_args() # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(1) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(1, **{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(1,2,3) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(1,2,3, **{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(**{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(*(), **{}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(**{'a':1}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... >>> test_two_args(**{'a':1, 'b':2, 'c':3}) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... """ return a,b Cython-0.26.1/tests/run/future_division.pyx0000664000175000017500000000651013023021033021543 0ustar stefanstefan00000000000000from __future__ import division cimport cython def bigints(values): for x in values: print(repr(x).rstrip('L')) def doit(x,y): """ >>> doit(1,2) (0.5, 0) >>> doit(4,3) (1.3333333333333333, 1) >>> doit(4,3.0) (1.3333333333333333, 1.0) >>> doit(4,2) (2.0, 2) """ return x/y, x//y def doit_inplace(x,y): """ >>> doit_inplace(1,2) 0.5 """ x /= y return x def doit_inplace_floor(x,y): """ >>> doit_inplace_floor(1,2) 0 """ x //= y return x def constants(): """ >>> constants() (0.5, 0, 2.5, 2.0, 2.5, 2) """ return 1/2, 1//2, 5/2.0, 5//2.0, 5/2, 5//2 def py_mix(a): """ >>> py_mix(1) (0.5, 0, 0.5, 0.0, 0.5, 0) >>> py_mix(1.0) (0.5, 0.0, 0.5, 0.0, 0.5, 0.0) >>> 2**53 / 2.0 4503599627370496.0 >>> bigints(py_mix(2**53)) 4503599627370496.0 4503599627370496 4503599627370496.0 4503599627370496.0 4503599627370496.0 4503599627370496 >>> bigints(py_mix(2**53 + 1)) 4503599627370496.0 4503599627370496 4503599627370496.0 4503599627370496.0 4503599627370496.0 4503599627370496 >>> py_mix(2**53 + 1.0) (4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0) """ return a/2, a//2, a/2.0, a//2.0, a/2, a//2 def py_mix_by_neg1(a): """ >>> py_mix_by_neg1(0) (-0.0, 0, -0.0, -0.0, -0.0, 0) >>> py_mix_by_neg1(-1) (1.0, 1, 1.0, 1.0, 1.0, 1) >>> py_mix_by_neg1(int(2**31-1)) (-2147483647.0, -2147483647, -2147483647.0, -2147483647.0, -2147483647.0, -2147483647) >>> bigints(py_mix_by_neg1(int(-2**31-1))) 2147483649.0 2147483649 2147483649.0 2147483649.0 2147483649.0 2147483649 >>> results = py_mix_by_neg1(int(2**63-1)) >>> results[0] == results[2] == results[3] == results[4] == float(2**63-1) / -1.0 or results True >>> results[1] == results[5] == (2**63-1) // -1 or results True >>> results = py_mix_by_neg1(int(-2**63-1)) >>> results[0] == results[2] == results[3] == 
results[4] == float(-2**63-1) / -1.0 or results True >>> results[1] == results[5] == (-2**63-1) // -1 or results True """ return a/-1, a//-1, a/-1.0, a//-1.0, a/-1, a//-1 def py_mix_rev(a): """ >>> py_mix_rev(4) (0.25, 0, 1.25, 1.0, 1.25, 1) >>> py_mix_rev(4.0) (0.25, 0.0, 1.25, 1.0, 1.25, 1.0) """ return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a def int_mix(int a): """ >>> int_mix(1) (0.5, 0, 0.5, 0.0, 0.5, 0) """ return a/2, a//2, a/2.0, a//2.0, a/2, a//2 def int_mix_rev(int a): """ >>> int_mix_rev(4) (0.25, 0, 1.25, 1.0, 1.25, 1) """ return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a def float_mix(float a): """ >>> float_mix(1.0) (0.5, 0.0, 0.5, 0.0, 0.5, 0.0) """ return a/2, a//2, a/2.0, a//2.0, a/2, a//2 def float_mix_rev(float a): """ >>> float_mix_rev(4.0) (0.25, 0.0, 1.25, 1.0, 1.25, 1.0) """ return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a def infer_division_type(): """ >>> v = infer_division_type() double >>> v 8333333.25 """ v = (10000**2 - 1) / 12 print(cython.typeof(v)) return v def int_int(int a, int b): """ >>> int_int(1, 2) (0.5, 2.0) """ return a/b, b/a Cython-0.26.1/tests/run/fused_types.pyx0000664000175000017500000002305412542002467020700 0ustar stefanstefan00000000000000# mode: run cimport cython from cython.view cimport array from cython cimport integral from cpython cimport Py_INCREF from Cython import Shadow as pure_cython ctypedef char * string_t # floating = cython.fused_type(float, double) floating # integral = cython.fused_type(int, long) integral ctypedef cython.floating floating fused_type1 = cython.fused_type(int, long, float, double, string_t) fused_type2 = cython.fused_type(string_t) ctypedef fused_type1 *composed_t other_t = cython.fused_type(int, double) ctypedef double *p_double ctypedef int *p_int fused_type3 = cython.fused_type(int, double) fused_composite = cython.fused_type(fused_type2, fused_type3) def test_pure(): """ >>> test_pure() 10 """ mytype = pure_cython.typedef(pure_cython.fused_type(int, long, complex)) print mytype(10) cdef cdef_func_with_fused_args(fused_type1 x, fused_type1 y, fused_type2 z): if fused_type1 is string_t: print x.decode('ascii'), y.decode('ascii'), z.decode('ascii') else: print x, y, z.decode('ascii') return x + y def test_cdef_func_with_fused_args(): """ >>> test_cdef_func_with_fused_args() spam ham eggs spamham 10 20 butter 30 4.2 8.6 bunny 12.8 """ print cdef_func_with_fused_args(b'spam', b'ham', b'eggs').decode('ascii') print cdef_func_with_fused_args(10, 20, b'butter') print cdef_func_with_fused_args(4.2, 8.6, b'bunny') cdef fused_type1 fused_with_pointer(fused_type1 *array): for i in range(5): if fused_type1 is string_t: print array[i].decode('ascii') else: print array[i] obj = array[0] + array[1] + array[2] + array[3] + array[4] # if cython.typeof(fused_type1) is string_t: Py_INCREF(obj) return obj def test_fused_with_pointer(): """ >>> test_fused_with_pointer() 0 1 2 3 4 10 0 1 2 3 4 10 0.0 1.0 2.0 3.0 4.0 10.0 humpty dumpty fall splatch breakfast humptydumptyfallsplatchbreakfast """ cdef int[5] int_array cdef long[5] long_array cdef float[5] float_array cdef string_t[5] string_array cdef char *s strings = [b"humpty", b"dumpty", b"fall", b"splatch", b"breakfast"] for i in range(5): int_array[i] = i long_array[i] = i float_array[i] = i s = strings[i] string_array[i] = s print fused_with_pointer(int_array) print print fused_with_pointer(long_array) print print fused_with_pointer(float_array) print print fused_with_pointer(string_array).decode('ascii') include "cythonarrayutil.pxi" cpdef cython.integral 
test_fused_memoryviews(cython.integral[:, ::1] a): """ >>> import cython >>> a = create_array((3, 5), mode="c") >>> test_fused_memoryviews[cython.int](a) 7 """ return a[1, 2] ctypedef int[:, ::1] memview_int ctypedef long[:, ::1] memview_long memview_t = cython.fused_type(memview_int, memview_long) def test_fused_memoryview_def(memview_t a): """ >>> a = create_array((3, 5), mode="c") >>> test_fused_memoryview_def["memview_int"](a) 7 """ return a[1, 2] cdef test_specialize(fused_type1 x, fused_type1 *y, composed_t z, other_t *a): cdef fused_type1 result if composed_t is p_double: print "double pointer" if fused_type1 in floating: result = x + y[0] + z[0] + a[0] return result def test_specializations(): """ >>> test_specializations() double pointer double pointer double pointer double pointer double pointer """ cdef object (*f)(double, double *, double *, int *) cdef double somedouble = 2.2 cdef double otherdouble = 3.3 cdef int someint = 4 cdef p_double somedouble_p = &somedouble cdef p_double otherdouble_p = &otherdouble cdef p_int someint_p = &someint f = test_specialize assert f(1.1, somedouble_p, otherdouble_p, someint_p) == 10.6 f = test_specialize assert f(1.1, somedouble_p, otherdouble_p, someint_p) == 10.6 assert ( test_specialize)(1.1, somedouble_p, otherdouble_p, someint_p) == 10.6 f = test_specialize[double, int] assert f(1.1, somedouble_p, otherdouble_p, someint_p) == 10.6 assert test_specialize[double, int](1.1, somedouble_p, otherdouble_p, someint_p) == 10.6 # The following cases are not supported # f = test_specialize[double][p_int] # print f(1.1, somedouble_p, otherdouble_p) # print # print test_specialize[double][p_int](1.1, somedouble_p, otherdouble_p) # print # print test_specialize[double](1.1, somedouble_p, otherdouble_p) # print cdef opt_args(integral x, floating y = 4.0): print x, y def test_opt_args(): """ >>> test_opt_args() 3 4.0 3 4.0 3 4.0 3 4.0 """ opt_args[int, float](3) opt_args[int, double](3) opt_args[int, float](3, 4.0) opt_args[int, double](3, 4.0) class NormalClass(object): def method(self, cython.integral i): print cython.typeof(i), i def test_normal_class(): """ >>> test_normal_class() short 10 """ NormalClass().method[pure_cython.short](10) def test_normal_class_refcount(): """ >>> test_normal_class_refcount() short 10 0 """ import sys x = NormalClass() c = sys.getrefcount(x) x.method[pure_cython.short](10) print sys.getrefcount(x) - c def test_fused_declarations(cython.integral i, cython.floating f): """ >>> test_fused_declarations[pure_cython.short, pure_cython.float](5, 6.6) short float 25 43.56 >>> test_fused_declarations[pure_cython.long, pure_cython.double](5, 6.6) long double 25 43.56 """ cdef cython.integral squared_int = i * i cdef cython.floating squared_float = f * f assert cython.typeof(squared_int) == cython.typeof(i) assert cython.typeof(squared_float) == cython.typeof(f) print cython.typeof(squared_int) print cython.typeof(squared_float) print '%d %.2f' % (squared_int, squared_float) def test_sizeof_fused_type(fused_type1 b): """ >>> test_sizeof_fused_type[pure_cython.double](11.1) """ t = sizeof(b), sizeof(fused_type1), sizeof(double) assert t[0] == t[1] == t[2], t def get_array(itemsize, format): result = array((10,), itemsize, format) result[5] = 5.0 result[6] = 6.0 return result def get_intc_array(): result = array((10,), sizeof(int), 'i') result[5] = 5 result[6] = 6 return result def test_fused_memslice_dtype(cython.floating[:] array): """ Note: the np.ndarray dtype test is in numpy_test >>> import cython >>> 
sorted(test_fused_memslice_dtype.__signatures__) ['double', 'float'] >>> test_fused_memslice_dtype[cython.double](get_array(8, 'd')) double[:] double[:] 5.0 6.0 >>> test_fused_memslice_dtype[cython.float](get_array(4, 'f')) float[:] float[:] 5.0 6.0 """ cdef cython.floating[:] otherarray = array[0:100:1] print cython.typeof(array), cython.typeof(otherarray), \ array[5], otherarray[6] def test_fused_memslice_dtype_repeated(cython.floating[:] array1, cython.floating[:] array2): """ Note: the np.ndarray dtype test is in numpy_test >>> sorted(test_fused_memslice_dtype_repeated.__signatures__) ['double', 'float'] >>> test_fused_memslice_dtype_repeated(get_array(8, 'd'), get_array(8, 'd')) double[:] double[:] >>> test_fused_memslice_dtype_repeated(get_array(4, 'f'), get_array(4, 'f')) float[:] float[:] >>> test_fused_memslice_dtype_repeated(get_array(8, 'd'), get_array(4, 'f')) Traceback (most recent call last): ValueError: Buffer dtype mismatch, expected 'double' but got 'float' """ print cython.typeof(array1), cython.typeof(array2) def test_fused_memslice_dtype_repeated_2(cython.floating[:] array1, cython.floating[:] array2, fused_type3[:] array3): """ Note: the np.ndarray dtype test is in numpy_test >>> sorted(test_fused_memslice_dtype_repeated_2.__signatures__) ['double|double', 'double|int', 'float|double', 'float|int'] >>> test_fused_memslice_dtype_repeated_2(get_array(8, 'd'), get_array(8, 'd'), get_array(8, 'd')) double[:] double[:] double[:] >>> test_fused_memslice_dtype_repeated_2(get_array(8, 'd'), get_array(8, 'd'), get_intc_array()) double[:] double[:] int[:] >>> test_fused_memslice_dtype_repeated_2(get_array(4, 'f'), get_array(4, 'f'), get_intc_array()) float[:] float[:] int[:] """ print cython.typeof(array1), cython.typeof(array2), cython.typeof(array3) def test_cython_numeric(cython.numeric arg): """ Test to see whether complex numbers have their utility code declared properly. 
>>> test_cython_numeric(10.0 + 1j) double complex (10+1j) """ print cython.typeof(arg), arg cdef fused ints_t: int long cdef _test_index_fused_args(cython.floating f, ints_t i): print cython.typeof(f), cython.typeof(i) def test_index_fused_args(cython.floating f, ints_t i): """ >>> import cython >>> test_index_fused_args[cython.double, cython.int](2.0, 3) double int """ _test_index_fused_args[cython.floating, ints_t](f, i) def test_composite(fused_composite x): """ >>> print(test_composite(b'a').decode('ascii')) a >>> test_composite(3) 6 >>> test_composite(3.0) 6.0 """ if fused_composite is string_t: return x else: return 2 * x Cython-0.26.1/tests/run/specialfloat.pyx0000664000175000017500000000542212542002467021013 0ustar stefanstefan00000000000000DEF FLOAT = 12.5 DEF EMFLOAT = 5e-1 DEF EPFLOAT = 5e+1 DEF FLOAT_NAN = float('nan') DEF FLOAT_INFP = float('+inf') DEF FLOAT_INFN = float('-inf') cdef double cdef_float_nan = float('nan') cdef double cdef_float_infp = float('+inf') cdef double cdef_float_infn = float('-inf') float_nan = FLOAT_NAN float_infp = FLOAT_INFP float_infn = FLOAT_INFN def f(): """ >>> f() 12.5 """ cdef float f = FLOAT cdef object o = FLOAT assert f == o return f def emfloat(): """ >>> emfloat() 0.5 """ cdef float f = EMFLOAT assert f == 5e-1 cdef object o = EMFLOAT assert o == 5e-1 assert f == o return f def epfloat(): """ >>> epfloat() 50.0 """ cdef float f = EPFLOAT assert f == 5e+1 cdef object o = EPFLOAT assert o == 5e+1 assert f == o return f def nan1(): """ >>> nan1() nan """ cdef double f = FLOAT_NAN cdef object o = FLOAT_NAN assert str(f) == str(o) return f def nan2(): """ >>> nan2() nan """ cdef double f = float('nan') cdef object o = float('nan') assert str(f) == str(o) return f def nan3(): """ >>> nan3() nan >>> float_nan nan """ cdef float f = FLOAT_NAN cdef object o = FLOAT_NAN assert str(f) == str(o) return f def infp1(): """ >>> infp1() inf >>> infp1() == float('inf') True """ cdef double f = FLOAT_INFP cdef object o = FLOAT_INFP assert f == o return f def infp2(): """ >>> infp2() inf >>> infp2() == float('inf') True """ cdef double f = float('+inf') cdef object o = float('+inf') assert f == o return f def infp3(): """ >>> infp3() inf >>> infp3() == float('inf') True >>> float_infp inf >>> float_infp == float('inf') True """ cdef float f = FLOAT_INFP cdef object o = FLOAT_INFP assert f == o return f def infn1(): """ >>> infn1() -inf >>> infn1() == float('-inf') True """ cdef double f = FLOAT_INFN cdef object o = FLOAT_INFN assert f == o return f def infn2(): """ >>> infn2() -inf >>> infn2() == float('-inf') True """ cdef double f = float('-inf') cdef object o = float('-inf') assert f == o return f def infn3(): """ >>> infn3() -inf >>> infn3() == float('-inf') True >>> float_infn -inf >>> float_infn == float('-inf') True """ cdef float f = FLOAT_INFN cdef object o = FLOAT_INFN assert f == o return f def global_floats(): """ >>> global_floats()[1:] == (float('+inf'), float('-inf')) True >>> global_floats()[0] nan """ return (cdef_float_nan, cdef_float_infp, cdef_float_infn) Cython-0.26.1/tests/run/cmethod_inline_T474.pxd0000664000175000017500000000014412542002467022017 0ustar stefanstefan00000000000000cdef class TestInlineMethod(object): cdef inline int cdef_inline_method(self): return 0 Cython-0.26.1/tests/run/builtinslice.pyx0000664000175000017500000000161412542002467021032 0ustar stefanstefan00000000000000cimport cython def unbound_method_lookup(): """ >>> unbound_method_lookup() """ ignore = slice.indices 
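# Descriptive note (added comment, not part of the original test file): the
# decorated test below checks that attribute access on a variable declared as
# 'cdef slice' compiles to a direct C-level attribute lookup. The tree-path
# assertions require an AttributeNode with is_py_attr == False and forbid the
# generic Python attribute lookup (is_py_attr == True) inside the assignments.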
@cython.test_assert_path_exists('//SingleAssignmentNode//AttributeNode[@is_py_attr = False]') @cython.test_fail_if_path_exists('//SingleAssignmentNode//AttributeNode[@is_py_attr = True]') def typed_slice(): """ >>> typed_slice() (1, 2, 3) """ cdef slice s cdef object z cdef Py_ssize_t a,b,c z = slice s = slice(1, 2, 3) s.indices a = s.start b = s.stop c = s.step return (a,b,c) @cython.test_fail_if_path_exists('//SingleAssignmentNode//AttributeNode[@is_py_attr = False]') def plain_object_slice(): """ >>> plain_object_slice() (1, 2, 3) """ cdef object s cdef object z cdef Py_ssize_t a,b,c s = slice(1, 2, 3) s.indices a = s.start b = s.stop c = s.step return (a,b,c) Cython-0.26.1/tests/run/boolean_context.pyx0000664000175000017500000000034012542002467021522 0ustar stefanstefan00000000000000 def test(): """ >>> test() True """ cdef int x = 5 return bool(x) def test_bool_and_int(): """ >>> test_bool_and_int() 1 """ cdef int x = 5 cdef int b = bool(x) return b Cython-0.26.1/tests/run/funcexceptraise.pyx0000664000175000017500000000047512542002467021540 0ustar stefanstefan00000000000000__doc__ = u""" >>> def bar(): ... try: ... foo() ... except ValueError: ... pass >>> bar() >>> print(sys.exc_info()) (None, None, None) """ import sys if sys.version_info[0] < 3: sys.exc_clear() def foo(): try: raise TypeError except TypeError: raise ValueError Cython-0.26.1/tests/run/cpp_nested_classes_support.h0000664000175000017500000000110613143605603023403 0ustar stefanstefan00000000000000class A { public: class B { public: int square(int x) { return x * x; } class C { public: int cube(int x) { return x * x * x; } }; }; B* createB() { return new B(); } typedef int my_int; static my_int negate(my_int x) { return -x; } }; template class TypedClass { public: enum MyEnum { value = 39 }; union MyUnion { T typed_value; int int_value; }; struct MyStruct { T typed_value; int int_value; }; typedef T MyType; }; class SpecializedTypedClass : public TypedClass {}; Cython-0.26.1/tests/run/closure_tests_1.pyx0000664000175000017500000001552312542002467021466 0ustar stefanstefan00000000000000# mode: run # tag: closures # preparse: id # preparse: def_to_cdef # # closure_tests_1.pyx # # Battery of tests for closures in Cython. Based on the collection of # compiler tests from P423/B629 at Indiana University, Spring 1999 and # Fall 2000. Special thanks to R. Kent Dybvig, Dan Friedman, Kevin # Millikin, and everyone else who helped to generate the original # tests. Converted into a collection of Python/Cython tests by Craig # Citro. # # Note: This set of tests is split (somewhat randomly) into several # files, simply because putting all the tests in a single file causes # gcc and g++ to buckle under the load. 
# def g1425(): """ >>> g1425() 142 """ if (True): def g1424(): if (True): return 122 return (20)+(g1424()) else: return 10000 def g1432(): """ >>> g1432() [0, []] """ def g1431(): return [0,[]] x_1056 = g1431() if (x_1056): def g1430(): def g1429(): return (x_1056[0]) def g1428(): return (x_1056[0]) return (g1429())+(g1428()) x_1056[0] = g1430() return x_1056 def g1435(): """ >>> g1435() 4000 """ def g1434(): def g1433(y_1057): return y_1057 return g1433 return g1434()(4000) def g1438(): """ >>> g1438() 1 """ def g1437(): def g1436(x_1058): return x_1058 return g1436 f_1059 = g1437() return (f_1059(0)+1) def g1441(): """ >>> g1441() 4 """ def g1440(): def g1439(y_1060): return y_1060 return g1439 f_1061 = g1440() return f_1061(f_1061(4)) def g1446(): """ >>> g1446() 4 """ def g1445(): def g1444(f_1063): return f_1063(f_1063(4)) return g1444 def g1443(): def g1442(y_1062): return y_1062 return g1442 return g1445()(g1443()) def g1449(): """ >>> g1449() 9000 """ def g1448(): a_1064 = 4000 def g1447(b_1065): return (a_1064)+(b_1065) return g1447 return g1448()(5000) def g1454(): """ >>> g1454() 9000 """ def g1453(): def g1452(): def g1450(a_1066): def g1451(b_1067): return (a_1066)+(b_1067) return g1451 return g1450 return g1452()(4000) return g1453()(5000) def g1459(): """ >>> g1459() 2 """ def g1458(): def g1457(f_1069): return f_1069(f_1069(0)) return g1457 def g1456(): def g1455(x_1068): return (x_1068+1) return g1455 return g1458()(g1456()) def g1462(): """ >>> g1462() 0 """ x_1072 = 0 def g1461(): def g1460(x_1070): return x_1070 return g1460 f_1071 = g1461() a_1075 = f_1071(x_1072) b_1074 = f_1071(x_1072) c_1073 = f_1071(x_1072) return ((a_1075)+(b_1074))+(c_1073) def g1465(): """ >>> g1465() 3 """ x_1080 = 0 y_1079 = 1 z_1078 = 2 def g1464(): def g1463(x_1076): return x_1076 return g1463 f_1077 = g1464() a_1083 = f_1077(x_1080) b_1082 = f_1077(y_1079) c_1081 = f_1077(z_1078) return ((a_1083)+(b_1082))+(c_1081) def g1468(): """ >>> g1468() 0 """ def g1467(): def g1466(x_1085, y_1084): return x_1085 return g1466 f_1086 = g1467() a_1087 = f_1086(0, 1) return f_1086(a_1087, a_1087) def g1471(): """ >>> g1471() 0 """ x_1094 = 0 y_1093 = 1 z_1092 = 2 def g1470(): def g1469(x_1090, y_1089, z_1088): return x_1090 return g1469 f_1091 = g1470() a_1097 = f_1091(x_1094, y_1093, z_1092) b_1096 = y_1093 c_1095 = z_1092 return f_1091(a_1097, b_1096, c_1095) def g1474(): """ >>> g1474() 3 """ def g1473(): def g1472(a_1101, b_1100, c_1099, d_1098): return (a_1101)+(d_1098) return g1472 f_1102 = g1473() return f_1102(0, 1, 2, 3) def g1478(): """ >>> g1478() 3 """ def g1477(): def g1476(x_1103): return x_1103 return g1476 f_1104 = g1477() def g1475(): a_1107 = 0 b_1106 = 1 c_1105 = 2 return (f_1104(a_1107))+((f_1104(b_1106))+(f_1104(c_1105))) return (f_1104(0))+(g1475()) def g1483(): """ >>> g1483() """ a_1108 = 0 def g1482(): def g1481(): return 0 return g1481 a_1110 = g1482() def g1480(): def g1479(): return 11 return g1479 b_1109 = g1480() a_1110 = 11 def g1486(): """ >>> g1486() """ a_1111 = 0 def g1485(): def g1484(): a_1113 = 0 return g1484 a_1113 = g1485() b_1112 = 11 return a_1113() def g1491(): """ >>> g1491() 0 """ def g1490(): def g1489(): return 0 return g1489 a_1115 = g1490() def g1488(): def g1487(): return 11 return g1487 b_1114 = g1488() return a_1115() def g1494(): """ >>> g1494() 2 """ def g1493(): x_1116 = 1 def g1492(y_1117): return (x_1116)+(y_1117) return g1492 f_1118 = g1493() x_1119 = 0 return f_1118(f_1118(x_1119)) def g1501(): """ >>> g1501() 3050 """ def g1500(): def g1499(): 
def g1498(x_1121): return (x_1121)+(50) return g1498 t_1122 = g1499() def g1497(f_1123): return t_1122(f_1123(1000)) return g1497 def g1496(): def g1495(y_1120): return (y_1120)+(2000) return g1495 return g1500()(g1496()) def g1508(): """ >>> g1508() 60 """ def g1507(): def g1506(): def g1505(): def g1502(a_1124): def g1503(b_1125): def g1504(c_1126): return (a_1124)+((b_1125)+(c_1126)) return g1504 return g1503 return g1502 return g1505()(10) return g1506()(20) return g1507()(30) def g1513(): """ >>> g1513() 5 """ def g1512(): def g1509(b_1127): def g1511(): def g1510(a_1128): return (b_1127)+(a_1128) return g1510 return g1511()(2) return g1509 return g1512()(3) def g1518(): """ >>> g1518() 5 """ def g1517(): def g1516(f_1130): return f_1130(f_1130(5)) return g1516 def g1515(): def g1514(x_1129): return x_1129 return g1514 return g1517()(g1515()) def g1523(): """ >>> g1523() 8000 """ def g1522(): def g1521(): def g1520(x_1131): return (x_1131)+(3000) return g1520 f_1132 = g1521() def g1519(y_1133): return f_1132(f_1132(y_1133)) return g1519 return g1522()(2000) Cython-0.26.1/tests/run/builtinnames.pyx0000664000175000017500000000265112542002467021040 0ustar stefanstefan00000000000000cimport cython def test_file_py(file): assert isinstance(file, (str, unicode)), \ u"not a string, found '%s' instead" % file.__class__.__name__ return file cdef test_file_c(file): assert isinstance(file, (str, unicode)), \ u"not a string, found '%s' instead" % file.__class__.__name__ return u'file' + file def range(arg): return u'range' + arg def len(arg): return u'len' + arg cdef type(arg): return u'type' + arg @cython.test_fail_if_path_exists( '//PyMethodCallNode/NameNode[@name="type" and @entry.is_cfunction=False]', '//SimpleCallNode/NameNode[@name="type" and @entry.is_cfunction=False]', '//SimpleCallNode/NameNode[@name="len" and @entry.is_cfunction=True]', ) @cython.test_assert_path_exists( '//SimpleCallNode/NameNode[@name="type"]', '//SimpleCallNode/NameNode[@name="type" and @entry.is_cfunction=True]', '//PyMethodCallNode/NameNode[@name="len"]', ) def test_c(arg): """ >>> test_c('abc') fileabc lenabc typeabc >>> print(test_file_py('abc')) abc >>> print(range('abc')) rangeabc >>> print(len('abc')) lenabc """ print test_file_c(arg) print len(arg) print type(arg) def test_for_in_range(arg): """ >>> print(str(test_for_in_range('abc')).replace("u'", "'")) ['r', 'a', 'n', 'g', 'e', 'a', 'b', 'c'] """ l = [] for c in range(arg): l.append(c) return l Cython-0.26.1/tests/run/starargs.pyx0000664000175000017500000000577712542002467020210 0ustar stefanstefan00000000000000cdef sorteditems(d): l = list(d.items()) l.sort() return tuple(l) def spam(x, y, z): """ >>> spam(1,2,3) (1, 2, 3) >>> spam(1,2) Traceback (most recent call last): TypeError: spam() takes exactly 3 positional arguments (2 given) >>> spam(1,2,3,4) Traceback (most recent call last): TypeError: spam() takes exactly 3 positional arguments (4 given) >>> spam(1,2,3, a=1) Traceback (most recent call last): TypeError: spam() got an unexpected keyword argument 'a' """ return (x, y, z) def grail(x, y, z, *a): """ >>> grail(1,2,3) (1, 2, 3, ()) >>> grail(1,2,3,4) (1, 2, 3, (4,)) >>> grail(1,2,3,4,5,6,7,8,9) (1, 2, 3, (4, 5, 6, 7, 8, 9)) >>> grail(1,2) Traceback (most recent call last): TypeError: grail() takes at least 3 positional arguments (2 given) >>> grail(1,2,3, a=1) Traceback (most recent call last): TypeError: grail() got an unexpected keyword argument 'a' """ return (x, y, z, a) def swallow(x, y, z, **k): """ >>> swallow(1,2,3) (1, 2, 3, ()) >>> 
swallow(1,2,3,4) Traceback (most recent call last): TypeError: swallow() takes exactly 3 positional arguments (4 given) >>> swallow(1,2,3, a=1, b=2) (1, 2, 3, (('a', 1), ('b', 2))) >>> swallow(1,2,3, x=1) Traceback (most recent call last): TypeError: swallow() got multiple values for keyword argument 'x' """ return (x, y, z, sorteditems(k)) def creosote(x, y, z, *a, **k): """ >>> creosote(1,2,3) (1, 2, 3, (), ()) >>> creosote(1,2,3,4) (1, 2, 3, (4,), ()) >>> creosote(1,2,3, a=1) (1, 2, 3, (), (('a', 1),)) >>> creosote(1,2,3,4, a=1, b=2) (1, 2, 3, (4,), (('a', 1), ('b', 2))) >>> creosote(1,2,3,4, x=1) Traceback (most recent call last): TypeError: creosote() got multiple values for keyword argument 'x' """ return (x, y, z, a, sorteditems(k)) def onlyt(*a): """ >>> onlyt(1) (1,) >>> onlyt(1,2) (1, 2) >>> onlyt(a=1) Traceback (most recent call last): TypeError: onlyt() got an unexpected keyword argument 'a' >>> onlyt(1, a=2) Traceback (most recent call last): TypeError: onlyt() got an unexpected keyword argument 'a' """ return a def onlyk(**k): """ >>> onlyk(a=1) (('a', 1),) >>> onlyk(a=1, b=2) (('a', 1), ('b', 2)) >>> onlyk(1) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (1 given) >>> onlyk(1, 2) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (2 given) >>> onlyk(1, a=1, b=2) Traceback (most recent call last): TypeError: onlyk() takes exactly 0 positional arguments (1 given) """ return sorteditems(k) def tk(*a, **k): """ >>> tk(a=1) (('a', 1),) >>> tk(a=1, b=2) (('a', 1), ('b', 2)) >>> tk(1) (1,) >>> tk(1, 2) (1, 2) >>> tk(1, a=1, b=2) (1, ('a', 1), ('b', 2)) """ return a + sorteditems(k) Cython-0.26.1/tests/run/c_type_methods_T236.pyx0000664000175000017500000000157512542002467022076 0ustar stefanstefan00000000000000# ticket: 236 __doc__ = '' import sys if sys.version_info >= (2,6): __doc__ += ''' >>> float_is_integer(1.0) True >>> float_is_integer(1.1) False ''' if sys.version_info >= (3,1): __doc__ += ''' >>> int_bit_length(1) == (1).bit_length() True >>> int_bit_length(1234) == (1234).bit_length() True ''' def float_is_integer(float f): # requires Python 2.6+ return f.is_integer() def int_bit_length(int i): # requires Python 3.x return i.bit_length() def float__add__(float f): """ >>> float__add__(5.0) 7.0 """ return f.__add__(2) def float_const__add__(float f): """ >>> float_const__add__(5.0) 7.0 """ return 2. 
.__add__(f) def int__add__(int i): """ >>> int__add__(5) 7 """ return i.__add__(2) def int_const__add__(int i): """ >>> int_const__add__(5) 7 """ return 2 .__add__(i) Cython-0.26.1/tests/run/cpdef_optargs_pure.pxd0000664000175000017500000000003013023021033022142 0ustar stefanstefan00000000000000cpdef func(x, y=*, z=*) Cython-0.26.1/tests/run/ptr_warning_T714.pyx0000664000175000017500000000026112542002467021412 0ustar stefanstefan00000000000000# mode: run # tag: werror # ticket: 714 def test_ptr(): """ >>> test_ptr() 123 """ cdef int a cdef int *ptr ptr = &a ptr[0] = 123 return a Cython-0.26.1/tests/run/cpp_template_ref_args.pyx0000664000175000017500000000216012574327400022670 0ustar stefanstefan00000000000000# tag: cpp from libcpp.vector cimport vector cdef extern from "cpp_template_ref_args.h": cdef cppclass Bar[T]: Bar() # bug: Bar[T] created before class fully defined T value Bar[T] & ref() except + const Bar[T] & const_ref() except + const Bar[T] & const_ref_const() except + cdef cppclass Foo[T]: Foo() int bar_value(Bar[int] & bar) def test_template_ref_arg(int x): """ >>> test_template_ref_arg(4) 4 """ # Templated reference parameters in method # of templated classes were not properly coalesced. cdef Foo[size_t] foo cdef Bar[int] bar bar.value = x return foo.bar_value(bar.ref()) def test_template_ref_attr(int x): """ >>> test_template_ref_attr(4) (4, 4) """ cdef Bar[int] bar bar.value = x return bar.ref().value, bar.const_ref().value def test_template_ref_const_attr(int x): """ >>> test_template_ref_const_attr(4) 4 """ cdef vector[int] v v.push_back(x) cdef const vector[int] *configs = &v cdef int value = configs.at(0) return value Cython-0.26.1/tests/run/crashT245.h0000664000175000017500000000005112542002467017424 0ustar stefanstefan00000000000000typedef struct { int x; } MyStruct; Cython-0.26.1/tests/run/cpp_stl_string_ascii_auto_encoding_str.pyx0000664000175000017500000000570512542002467026341 0ustar stefanstefan00000000000000# mode: run # tag: cpp, werror # cython: c_string_encoding=ascii, c_string_type=str cimport cython from libcpp.string cimport string b_asdf = b'asdf' u_asdf = u'asdf' s_asdf = 'asdf' s_s = 's' def test_conversion(py_obj): """ >>> test_conversion(b_asdf) == s_asdf or test_conversion(b_asdf) True >>> test_conversion(u_asdf) == s_asdf or test_conversion(u_asdf) True >>> test_conversion(123) # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: expected ..., int found """ cdef string s = py_obj assert len(py_obj) == s.length(), '%d != %d' % (len(py_obj), s.length()) return s def test_empty(py_obj): """ >>> test_empty('') True >>> test_empty('abc') False >>> test_empty(u_asdf[:0]) True >>> test_empty(u_asdf) False """ cdef string a = py_obj return a.empty() def test_push_back(a): """ >>> test_push_back(b_asdf) == s_asdf + s_s True >>> test_push_back(u_asdf) == s_asdf + s_s True """ cdef string s = a s.push_back(ord('s')) return s def test_clear(a): """ >>> test_clear(u_asdf) == s_s[:0] True >>> test_clear(b_asdf) == s_s[:0] True """ cdef string s = a s.clear() return s def test_assign(char *a): """ >>> test_assign(b_asdf) == 'ggg' True """ cdef string s = string(a) s.assign("ggg") return s.c_str() def test_bytes_cast(a): """ >>> b = test_bytes_cast(b'abc') >>> isinstance(b, bytes) True >>> print(b.decode('ascii')) abc >>> b = test_bytes_cast(b'abc\\xe4\\xfc') >>> isinstance(b, bytes) True >>> len(b) 5 >>> print(b[:3].decode('ascii')) abc >>> print(ord(b[3:4])) 228 >>> print(ord(b[4:5])) 252 """ cdef string s = a assert s.length() == len(a), "%d != %d" 
% (s.length(), len(a)) return s def test_bytearray_cast(a): """ >>> b = test_bytearray_cast(b'abc') >>> isinstance(b, bytearray) True >>> print(b.decode('ascii')) abc >>> b = test_bytearray_cast(b'abc\\xe4\\xfc') >>> isinstance(b, bytearray) True >>> len(b) 5 >>> print(b[:3].decode('ascii')) abc >>> print(ord(b[3:4])) 228 >>> print(ord(b[4:5])) 252 """ cdef string s = a assert s.length() == len(a), "%d != %d" % (s.length(), len(a)) return s def test_unicode_cast(a): """ >>> u = test_unicode_cast(b'abc') >>> type(u) is type(u_asdf) or type(u) True >>> print(u) abc """ cdef string s = a assert s.length() == len(a), "%d != %d" % (s.length(), len(a)) return s def test_str_cast(a): """ >>> s = test_str_cast(b'abc') >>> type(s) is type(s_asdf) or type(s) True >>> print(s) abc """ cdef string s = a assert s.length() == len(a), "%d != %d" % (s.length(), len(a)) return s Cython-0.26.1/tests/run/lambda_T195.pyx0000664000175000017500000000414412542002467020307 0ustar stefanstefan00000000000000# mode: run # tag: lambda # ticket: 195 __doc__ = u""" #>>> py_identity = lambda x:x #>>> py_identity(1) == cy_identity(1) #True """ #cy_identity = lambda x:x def make_identity(): """ >>> idcall = make_identity() >>> idcall(1) 1 >>> idcall(2) 2 """ return lambda x:x def make_const0(x): """ >>> make_const0(1)() 1 """ return lambda :x def make_const1(x): """ >>> make_const1(1)(2) 1 >>> make_const1(1)(2) 1 """ return lambda _:x def make_const_calc0(): """ >>> make_const_calc0()() 11 """ return lambda : 1*2*3+5 def make_const_calc1(): """ >>> make_const_calc1()(2) 11 """ return lambda _: 1*2*3+5 def make_const_calc1_xy(x): """ >>> make_const_calc1_xy(8)(2) 27 """ return lambda y: x*y+(1*2*3+5) def make_lambda_lambda(x): """ >>> make_lambda_lambda(1)(2)(4) 7 """ return lambda y : \ lambda z:x+y+z def make_typed_lambda_lambda(int x): """ >>> make_typed_lambda_lambda(1)(2)(4) 7 >>> partial_lambda = make_typed_lambda_lambda(1)(2) >>> partial_lambda(4) 7 >>> partial_lambda(5) 8 """ return lambda int y : \ lambda int z:x+y+z def pass_lambda(f): """ >>> def f(a, lfunc): return lfunc(a,2) >>> pass_lambda(f) 12 """ return f(1, lambda a, b : a*10+b) def pass_lambda_with_args(f): """ >>> def f(a, lfunc): return lfunc(a,2,3) >>> pass_lambda_with_args(f) 123 """ return f(1, lambda a, *args : (a*10 + args[0])*10 + args[1]) def pass_lambda_with_args_kwargs(f): """ >>> def f(a, lfunc): return lfunc(a,2,3, b=4) >>> pass_lambda_with_args_kwargs(f) 1234 """ return f(1, lambda a, *args, **kwargs : ((a*10 + args[0])*10 + args[1])*10 + kwargs['b']) def pass_lambda_with_args_kwargs_kwonly_args(f): """ >>> def f(a, lfunc): return lfunc(a,2,3, b=4, c=5) >>> pass_lambda_with_args_kwargs_kwonly_args(f) 12345 """ return f(1, lambda a, *args, b, **kwargs : (((a*10 + args[0])*10 + args[1])*10 + b)*10 + kwargs['c']) Cython-0.26.1/tests/run/type_inference.pyx0000664000175000017500000004237713023021033021337 0ustar stefanstefan00000000000000# cython: infer_types = True cimport cython from cython cimport typeof, infer_types from cpython cimport bool ################################################## # type inference tests in 'full' mode cdef class MyType: pass def simple(): """ >>> simple() """ i = 3 assert typeof(i) == "long", typeof(i) x = 1.41 assert typeof(x) == "double", typeof(x) xptr = &x assert typeof(xptr) == "double *", typeof(xptr) xptrptr = &xptr assert typeof(xptrptr) == "double **", typeof(xptrptr) b = b"abc" assert typeof(b) == "bytes object", typeof(b) s = "abc" assert typeof(s) == "str object", typeof(s) u = u"xyz" assert typeof(u) 
== "unicode object", typeof(u) L = [1,2,3] assert typeof(L) == "list object", typeof(L) t = (4,5,6,()) assert typeof(t) == "tuple object", typeof(t) t2 = (4, 5.0, 6) assert typeof(t2) == "(long, double, long)", typeof(t) def builtin_types(): """ >>> builtin_types() """ b = bytes() assert typeof(b) == "bytes object", typeof(b) u = unicode() assert typeof(u) == "unicode object", typeof(u) L = list() assert typeof(L) == "list object", typeof(L) t = tuple() assert typeof(t) == "tuple object", typeof(t) d = dict() assert typeof(d) == "dict object", typeof(d) B = bool() assert typeof(B) == "bool", typeof(B) def slicing(): """ >>> slicing() """ b = b"abc" assert typeof(b) == "bytes object", typeof(b) b1 = b[1:2] assert typeof(b1) == "bytes object", typeof(b1) b2 = b[1:2:2] assert typeof(b2) == "bytes object", typeof(b2) u = u"xyz" assert typeof(u) == "unicode object", typeof(u) u1 = u[1:2] assert typeof(u1) == "unicode object", typeof(u1) u2 = u[1:2:2] assert typeof(u2) == "unicode object", typeof(u2) s = "xyz" assert typeof(s) == "str object", typeof(s) s1 = s[1:2] assert typeof(s1) == "str object", typeof(s1) s2 = s[1:2:2] assert typeof(s2) == "str object", typeof(s2) L = [1,2,3] assert typeof(L) == "list object", typeof(L) L1 = L[1:2] assert typeof(L1) == "list object", typeof(L1) L2 = L[1:2:2] assert typeof(L2) == "list object", typeof(L2) t = (4,5,6,()) assert typeof(t) == "tuple object", typeof(t) t1 = t[1:2] assert typeof(t1) == "tuple object", typeof(t1) t2 = t[1:2:2] assert typeof(t2) == "tuple object", typeof(t2) def indexing(): """ >>> indexing() """ b = b"abc" assert typeof(b) == "bytes object", typeof(b) b1 = b[1] assert typeof(b1) == "Python object", typeof(b1) # Py2: bytes, Py3: int u = u"xyz" assert typeof(u) == "unicode object", typeof(u) u1 = u[1] assert typeof(u1) == "Py_UCS4", typeof(u1) s = "xyz" assert typeof(s) == "str object", typeof(s) s1 = s[1] assert typeof(s1) == "str object", typeof(s1) L = [1,2,3] assert typeof(L) == "list object", typeof(L) L1 = L[1] assert typeof(L1) == "Python object", typeof(L1) t = (4,5,()) assert typeof(t) == "tuple object", typeof(t) t1 = t[1] assert typeof(t1) == "long", typeof(t1) t2 = ('abc', 'def', 'ghi') assert typeof(t2) == "tuple object", typeof(t2) t2_1 = t2[1] assert typeof(t2_1) == "str object", typeof(t2_1) t2_2 = t2[t[0]-3] assert typeof(t2_2) == "str object", typeof(t2_2) t5 = (b'abc', 'def', u'ghi') t5_0 = t5[0] assert typeof(t5_0) == "bytes object", typeof(t5_0) t5_1 = t5[1] assert typeof(t5_1) == "str object", typeof(t5_1) t5_2 = t5[2] assert typeof(t5_2) == "unicode object", typeof(t5_2) t5_3 = t5[t[0]-3] assert typeof(t5_3) == "Python object", typeof(t5_3) def multiple_assignments(): """ >>> multiple_assignments() """ a = 3 a = 4 a = 5 assert typeof(a) == "long" b = a b = 3.1 b = 3.14159 assert typeof(b) == "double" c = a c = b c = [1,2,3] assert typeof(c) == "Python object" def arithmetic(): """ >>> arithmetic() """ a = 1 + 2 assert typeof(a) == "long", typeof(a) b = 1 + 1.5 assert typeof(b) == "double", typeof(b) c = 1 + 2 assert typeof(c) == "Python object", typeof(c) d = 1 * 1.5 ** 2 assert typeof(d) == "double", typeof(d) cdef class some_class: pass def unary_operators(): """ >>> unary_operators() """ cdef int x = 1 assert typeof(~x) == "int", typeof(~x) cdef some_class obj assert typeof(~obj) == "Python object", typeof(~obj) a = int(1) assert typeof(a) == "Python object", typeof(a) b = not int(3) assert typeof(b) == "bint", typeof(b) c = +int(3) assert typeof(c) == "Python object", typeof(c) d = -int(5) assert typeof(d) 
== "Python object", typeof(d) def builtin_type_operations(): """ >>> builtin_type_operations() """ b1 = b'a' * 10 b1 = 10 * b'a' b1 = 10 * b'a' * 10 assert typeof(b1) == "bytes object", typeof(b1) b2 = b'a' + b'b' assert typeof(b2) == "bytes object", typeof(b2) u1 = u'a' * 10 u1 = 10 * u'a' assert typeof(u1) == "unicode object", typeof(u1) u2 = u'a' + u'b' assert typeof(u2) == "unicode object", typeof(u2) u3 = u'a%s' % u'b' u3 = u'a%s' % 10 assert typeof(u3) == "unicode object", typeof(u3) s1 = "abc %s" % "x" s1 = "abc %s" % 10 assert typeof(s1) == "str object", typeof(s1) s2 = "abc %s" + "x" assert typeof(s2) == "str object", typeof(s2) s3 = "abc %s" * 10 s3 = "abc %s" * 10 * 10 s3 = 10 * "abc %s" * 10 assert typeof(s3) == "str object", typeof(s3) L1 = [] + [] assert typeof(L1) == "list object", typeof(L1) L2 = [] * 2 assert typeof(L2) == "list object", typeof(L2) T1 = () + () assert typeof(T1) == "tuple object", typeof(T1) T2 = () * 2 assert typeof(T2) == "tuple object", typeof(T2) def builtin_type_methods(): """ >>> builtin_type_methods() """ l = [] assert typeof(l) == 'list object', typeof(l) append = l.append assert typeof(append) == 'Python object', typeof(append) append(1) assert l == [1], str(l) cdef int cfunc(int x): return x+1 def c_functions(): """ >>> c_functions() """ f = cfunc assert typeof(f) == 'int (*)(int)', typeof(f) assert 2 == f(1) def builtin_functions(): """ >>> _abs, _getattr = builtin_functions() Python object Python object >>> _abs(-1) 1 >>> class o(object): pass >>> o.x = 1 >>> _getattr(o, 'x') 1 """ _abs = abs print(typeof(_abs)) _getattr = getattr print(typeof(_getattr)) return _abs, _getattr def cascade(): """ >>> cascade() """ a = 1.0 b = a + 2 c = b + 3 d = c + 4 assert typeof(d) == "double" e = a + b + c + d assert typeof(e) == "double" def cascaded_assignment(): a = b = c = d = 1.0 assert typeof(a) == "double" assert typeof(b) == "double" assert typeof(c) == "double" assert typeof(d) == "double" e = a + b + c + d assert typeof(e) == "double" def increment(): """ >>> increment() """ a = 5 a += 1 assert typeof(a) == "long" def loop(): """ >>> loop() """ for a in range(10): pass assert typeof(a) == "long" b = 1.0 for b in range(5): pass assert typeof(b) == "double" for c from 0 <= c < 10 by .5: pass assert typeof(c) == "double" for d in range(0, 10L, 2): pass assert typeof(a) == "long" def loop_over_charptr(): """ >>> print( loop_over_charptr() ) char """ cdef char* char_ptr_string = 'abcdefg' for c in char_ptr_string: pass return typeof(c) def loop_over_bytes_literal(): """ >>> print( loop_over_bytes_literal() ) Python object """ for c in b'abcdefg': pass return typeof(c) def loop_over_bytes(): """ >>> print( loop_over_bytes() ) Python object """ cdef bytes bytes_string = b'abcdefg' # bytes in Py2, int in Py3 for c in bytes_string: pass return typeof(c) def loop_over_str(): """ >>> print( loop_over_str() ) str object """ cdef str string = 'abcdefg' # str (bytes) in Py2, str (unicode) in Py3 for c in string: pass return typeof(c) def loop_over_unicode(): """ >>> print( loop_over_unicode() ) Py_UCS4 """ cdef unicode ustring = u'abcdefg' # Py_UCS4 can represent any Unicode character for uchar in ustring: pass return typeof(uchar) def loop_over_unicode_literal(): """ >>> print( loop_over_unicode_literal() ) Py_UCS4 """ # Py_UCS4 can represent any Unicode character for uchar in u'abcdefg': pass return typeof(uchar) def loop_over_int_array(): """ >>> print( loop_over_int_array() ) int """ cdef int[10] int_array for i in int_array: pass return typeof(i) cdef struct 
MyStruct: int a def loop_over_struct_ptr(): """ >>> print( loop_over_struct_ptr() ) MyStruct """ cdef MyStruct[10] a_list cdef MyStruct *a_ptr = a_list for i in a_list[:10]: pass return typeof(i) cdef unicode retu(): return u"12345" cdef bytes retb(): return b"12345" def conditional(x): """ >>> conditional(True) (True, 'Python object') >>> conditional(False) (False, 'Python object') """ if x: a = retu() else: a = retb() return type(a) is unicode, typeof(a) ################################################## # type inference tests that work in 'safe' mode @infer_types(None) def double_inference(): """ >>> values, types = double_inference() >>> values == (1.0, 1.0*2, 1.0*2.0+2.0*2.0, 1.0*2.0) True >>> types ('double', 'double', 'double', 'Python object') """ d_a = 1.0 d_b = d_a * float(2) d_c = d_a * float(some_float_value()) + d_b * float(some_float_value()) o_d = d_a * some_float_value() return (d_a,d_b,d_c,o_d), (typeof(d_a), typeof(d_b), typeof(d_c), typeof(o_d)) cdef object some_float_value(): return 2.0 @infer_types(None) @cython.test_fail_if_path_exists('//DefNode//NameNode[@type.is_pyobject = True]') @cython.test_assert_path_exists('//DefNode//NameNode[@type.is_pyobject]', '//DefNode//NameNode[@type.is_pyobject = False]') def double_loop(): """ >>> double_loop() == 1.0 * 10 True """ cdef int i d = 1.0 for i in range(9): d += 1.0 return d @infer_types(None) def safe_only(): """ >>> safe_only() """ a = 1.0 assert typeof(a) == "double", typeof(c) b = 1; assert typeof(b) == "long", typeof(b) c = MyType() assert typeof(c) == "MyType", typeof(c) for i in range(10): pass assert typeof(i) == "long", typeof(i) d = 1 res = ~d assert typeof(d) == "long", typeof(d) # we special-case inference to type str, see # trac #553 s = "abc" assert typeof(s) == "Python object", typeof(s) cdef str t = "def" assert typeof(t) == "str object", typeof(t) # potentially overflowing arithmetic e = 1 e += 1 assert typeof(e) == "Python object", typeof(e) f = 1 res = f * 10 assert typeof(f) == "Python object", typeof(f) g = 1 res = 10*(~g) assert typeof(g) == "Python object", typeof(g) for j in range(10): res = -j assert typeof(j) == "Python object", typeof(j) @infer_types(None) def safe_c_functions(): """ >>> safe_c_functions() """ f = cfunc assert typeof(f) == 'int (*)(int)', typeof(f) assert 2 == f(1) @infer_types(None) def ptr_types(): """ >>> ptr_types() """ cdef int a a_ptr = &a assert typeof(a_ptr) == "int *", typeof(a_ptr) a_ptr_ptr = &a_ptr assert typeof(a_ptr_ptr) == "int **", typeof(a_ptr_ptr) cdef int[1] b b_ref = b assert typeof(b_ref) == "int *", typeof(b_ref) ptr = &a ptr = b assert typeof(ptr) == "int *", typeof(ptr) def const_types(const double x, double y, double& z): """ >>> const_types(1, 1, 1) """ a = x a = y a = z assert typeof(a) == "double", typeof(a) @infer_types(None) def args_tuple_keywords(*args, **kwargs): """ >>> args_tuple_keywords(1,2,3, a=1, b=2) """ assert typeof(args) == "tuple object", typeof(args) assert typeof(kwargs) == "dict object", typeof(kwargs) @infer_types(None) def args_tuple_keywords_reassign_same(*args, **kwargs): """ >>> args_tuple_keywords_reassign_same(1,2,3, a=1, b=2) """ assert typeof(args) == "tuple object", typeof(args) assert typeof(kwargs) == "dict object", typeof(kwargs) args = () kwargs = {} @infer_types(None) def args_tuple_keywords_reassign_pyobjects(*args, **kwargs): """ >>> args_tuple_keywords_reassign_pyobjects(1,2,3, a=1, b=2) """ assert typeof(args) == "Python object", typeof(args) assert typeof(kwargs) == "Python object", typeof(kwargs) args = [] 
kwargs = "test" # / A -> AA -> AAA # Base0 -> Base - # \ B -> BB # C -> CC cdef class Base0: pass cdef class Base(Base0): pass cdef class A(Base): pass cdef class AA(A): pass cdef class AAA(AA): pass cdef class B(Base): pass cdef class BB(B): pass cdef class C: pass cdef class CC(C): pass @infer_types(None) def common_extension_type_base(): """ >>> common_extension_type_base() """ x = A() x = AA() assert typeof(x) == "A", typeof(x) y = A() y = B() assert typeof(y) == "Base", typeof(y) z = AAA() z = BB() assert typeof(z) == "Base", typeof(z) w = A() w = CC() assert typeof(w) == "Python object", typeof(w) cdef class AcceptsKeywords: def __init__(self, *args, **kwds): pass @infer_types(None) def constructor_call(): """ >>> constructor_call() """ x = AcceptsKeywords(a=1, b=2) assert typeof(x) == "AcceptsKeywords", typeof(x) @infer_types(None) def large_literals(): """ >>> large_literals() """ # It's only safe to infer small integer literals. a = 10 b = 100000000000000000000000000000000 assert typeof(a) == "long", typeof(a) assert typeof(b) == "Python object", typeof(b) c, d = 10, 100000000000000000000000000000000 assert typeof(c) == "long", typeof(c) assert typeof(d) == "Python object", typeof(d) class EmptyContextManager(object): def __enter__(self): return None def __exit__(self, *args): return 0 def with_statement(): """ >>> with_statement() Python object Python object """ x = 1.0 with EmptyContextManager() as x: print(typeof(x)) print(typeof(x)) return x @cython.final cdef class TypedContextManager(object): cpdef double __enter__(self): return 2.0 def __exit__(self, *args): return 0 def with_statement_typed(): """ >>> with_statement_typed() double double 2.0 """ x = 1.0 with TypedContextManager() as x: print(typeof(x)) print(typeof(x)) return x def with_statement_untyped(): """ >>> with_statement_untyped() Python object Python object 2.0 """ x = 1.0 cdef object t = TypedContextManager() with t as x: print(typeof(x)) print(typeof(x)) return x def self_lookup(a): b = a b = b.foo(keyword=None) print typeof(b) # Regression test for trac #638. def bar(foo): qux = foo quux = foo[qux.baz] cdef enum MyEnum: enum_x = 1 enum_y = 2 ctypedef long my_long def test_int_typedef_inference(): """ >>> test_int_typedef_inference() """ cdef long x = 1 cdef my_long y = 2 cdef long long z = 3 assert typeof(x + y) == typeof(y + x) == 'my_long', typeof(x + y) assert typeof(y + z) == typeof(z + y) == 'long long', typeof(y + z) from libc.stdint cimport int32_t, int64_t def int64_long_sum(): cdef long x = 1 cdef int32_t x32 = 2 cdef int64_t x64 = 3 cdef unsigned long ux = 4 assert typeof(x + x32) == typeof(x32 + x) == 'long', typeof(x + x32) assert typeof(x + x64) == typeof(x64 + x) == 'int64_t', typeof(x + x64) # The correct answer here is either unsigned long or int64_t, depending on # whether sizeof(long) == 64 or not. Incorrect signedness is probably # preferable to incorrect width. 
assert typeof(ux + x64) == typeof(x64 + ux) == 'int64_t', typeof(ux + x64) cdef class InferInProperties: """ >>> InferInProperties().x ('double', 'unicode object', 'MyEnum', 'MyEnum') """ cdef MyEnum attr def __cinit__(self): self.attr = enum_x property x: def __get__(self): a = 1.0 b = u'abc' c = self.attr d = enum_y c = d return typeof(a), typeof(b), typeof(c), typeof(d) cdef class WithMethods: cdef int offset def __init__(self, offset): self.offset = offset cpdef int one_arg(self, int x): return x + self.offset cpdef int default_arg(self, int x, int y=0): return x + y + self.offset def test_bound_methods(): """ >>> test_bound_methods() """ o = WithMethods(10) assert typeof(o) == 'WithMethods', typeof(o) one_arg = o.one_arg assert one_arg(2) == 12, one_arg(2) default_arg = o.default_arg assert default_arg(2) == 12, default_arg(2) assert default_arg(2, 3) == 15, default_arg(2, 2) Cython-0.26.1/tests/run/tuple_constants.pyx0000664000175000017500000000603012542002467021566 0ustar stefanstefan00000000000000 cimport cython module_level_tuple = (1,2,3) def return_module_level_tuple(): """ >>> return_module_level_tuple() (1, 2, 3) """ return module_level_tuple @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_empty_tuple(): """ >>> return_empty_tuple() () """ return () @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_constant_tuple1(): """ >>> return_constant_tuple1() (1,) """ return (1,) @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_folded_tuple(): """ >>> return_folded_tuple() (1, 2, 3) """ return (1, 1+1, 1+1+1) @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_nested_tuple(): """ >>> return_nested_tuple() (1, (2, 3), (3, (4, 5))) """ return (1, (2, 3), (3, (4, 5))) @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def constant_tuple1(): """ >>> constant_tuple1() (1,) """ tuple1 = (1,) return tuple1 @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_constant_tuple2(): """ >>> return_constant_tuple2() (1, 2) """ return (1,2) @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_constant_tuple_strings(): """ >>> return_constant_tuple_strings() ('tuple_1', 'bc', 'tuple_2') """ return ('tuple_1', 'bc', 'tuple_2') @cython.test_assert_path_exists("//TupleNode", "//TupleNode[@is_literal = true]") @cython.test_fail_if_path_exists("//TupleNode[@is_literal = false]") def return_constant_tuples_string_types(): """ >>> a,b,c = return_constant_tuples_string_types() >>> a is b False >>> a is c False >>> b is c False """ return ('a', 'bc'), (u'a', u'bc'), (b'a', b'bc') @cython.test_assert_path_exists("//ReturnStatNode//TupleNode", "//ReturnStatNode//TupleNode[@is_literal = false]") @cython.test_fail_if_path_exists("//ReturnStatNode//TupleNode[@is_literal = true]") def return_nonconstant_tuple(): """ >>> return_nonconstant_tuple() ('a', 1, 'd') """ 
a = eval("1") return ('a', a, 'd') Cython-0.26.1/tests/run/cf_none.pyx0000664000175000017500000000451112542002467017752 0ustar stefanstefan00000000000000 cimport cython @cython.test_fail_if_path_exists('//NoneCheckNode') def none_checks(a): """ >>> none_checks(1) 22 >>> none_checks(None) True """ c = None d = {11:22} if a is c: return True else: return d.get(11) @cython.test_assert_path_exists('//NoneCheckNode') def dict_arg(dict a): """ >>> dict_arg({}) >>> dict_arg({1:2}) 2 """ return a.get(1) @cython.test_fail_if_path_exists('//NoneCheckNode') def dict_arg_not_none(dict a not None): """ >>> dict_arg_not_none({}) >>> dict_arg_not_none({1:2}) 2 """ return a.get(1) @cython.test_assert_path_exists('//NoneCheckNode') def reassignment(dict d): """ >>> reassignment({}) (None, 2) >>> reassignment({1:3}) (3, 2) """ a = d.get(1) d = {1:2} b = d.get(1) return a, b @cython.test_fail_if_path_exists('//NoneCheckNode') def conditional(a): """ >>> conditional(True) 2 >>> conditional(False) 3 """ if a: d = {1:2} else: d = {1:3} return d.get(1) @cython.test_assert_path_exists('//NoneCheckNode') def conditional_arg(a, dict d): """ >>> conditional_arg(True, {1:2}) >>> conditional_arg(False, {1:2}) 2 """ if a: d = {} return d.get(1) @cython.test_fail_if_path_exists('//NoneCheckNode') def conditional_not_none(a, dict d not None): """ >>> conditional_not_none(True, {1:2}) >>> conditional_not_none(False, {1:2}) 2 """ if a: d = {} return d.get(1) @cython.test_fail_if_path_exists('//NoneCheckNode') def self_dependency(int x): """ >>> self_dependency(1) (1, 2) >>> self_dependency(2) (None, None) """ cdef dict a, b a = {1:2} b = {2:1} for i in range(x): a,b = b,a return a.get(2), b.get(1) @cython.test_assert_path_exists('//NoneCheckNode') def self_dependency_none(int x): """ >>> self_dependency_none(False) 1 >>> self_dependency_none(True) Traceback (most recent call last): AttributeError: 'NoneType' object has no attribute 'get' """ cdef dict a, b a = None b = {2:1} if x: a,b = b,a return b.get(2) @cython.test_fail_if_path_exists('//NoneCheckNode') def in_place_op(): vals = [0] vals += [1] for x in vals: pass Cython-0.26.1/tests/run/builtin_float.py0000664000175000017500000000077012542002467021011 0ustar stefanstefan00000000000000 import sys def empty_float(): """ >>> float() 0.0 >>> empty_float() 0.0 """ x = float() return x def float_conjugate(): """ >>> float_call_conjugate() 1.5 """ if sys.version_info >= (2,6): x = 1.5 .conjugate() else: x = 1.5 return x def float_call_conjugate(): """ >>> float_call_conjugate() 1.5 """ if sys.version_info >= (2,6): x = float(1.5).conjugate() else: x = 1.5 return x Cython-0.26.1/tests/run/cpdef_extern_func_in_py.py0000664000175000017500000000044312542002467023032 0ustar stefanstefan00000000000000 """ >>> pxd_sqrt(9) 3.0 """ import cython if not cython.compiled: from math import sqrt as pxd_sqrt @cython.test_assert_path_exists('//SimpleCallNode/NameNode[@type.is_pyobject = False]') def call_pxd_sqrt(x): """ >>> call_pxd_sqrt(9) 3.0 """ return pxd_sqrt(x) Cython-0.26.1/tests/run/cfunc_convert.pyx0000664000175000017500000001076712542002467021213 0ustar stefanstefan00000000000000# mode: run # cython: always_allow_keywords=True cimport cython from libc.math cimport sqrt cdef void empty_cfunc(): print "here" # same signature cdef void another_empty_cfunc(): print "there" def call_empty_cfunc(): """ >>> call_empty_cfunc() here there """ cdef object py_func = empty_cfunc py_func() cdef object another_py_func = another_empty_cfunc another_py_func() cdef double square_c(double x): 
return x * x def call_square_c(x): """ >>> call_square_c(2) 4.0 >>> call_square_c(-7) 49.0 """ cdef object py_func = square_c return py_func(x) def return_square_c(): """ >>> square_c = return_square_c() >>> square_c(5) 25.0 >>> square_c(x=4) 16.0 >>> square_c.__doc__ # FIXME: try to make original C function name available 'wrap(x: float) -> float' """ return square_c def return_libc_sqrt(): """ >>> sqrt = return_libc_sqrt() >>> sqrt(9) 3.0 >>> sqrt(x=9) 3.0 >>> sqrt.__doc__ 'wrap(x: float) -> float' """ return sqrt global_csqrt = sqrt def test_global(): """ >>> global_csqrt(9) 3.0 >>> global_csqrt.__doc__ 'wrap(x: float) -> float' >>> test_global() double (double) nogil Python object """ print cython.typeof(sqrt) print cython.typeof(global_csqrt) cdef long long rad(long long x): cdef long long rad = 1 for p in range(2, sqrt(x) + 1): if x % p == 0: rad *= p while x % p == 0: x //= p if x == 1: break return rad cdef bint abc(long long a, long long b, long long c) except -1: if a + b != c: raise ValueError("Not a valid abc candidate: (%s, %s, %s)" % (a, b, c)) return rad(a*b*c) < c def call_abc(a, b, c): """ >>> call_abc(2, 3, 5) False >>> call_abc(1, 63, 64) True >>> call_abc(2, 3**10 * 109, 23**5) True >>> call_abc(a=2, b=3**10 * 109, c=23**5) True >>> call_abc(1, 1, 1) Traceback (most recent call last): ... ValueError: Not a valid abc candidate: (1, 1, 1) """ cdef object py_func = abc return py_func(a, b, c) def return_abc(): """ >>> abc = return_abc() >>> abc(2, 3, 5) False >>> abc.__doc__ "wrap(a: 'long long', b: 'long long', c: 'long long') -> bool" """ return abc ctypedef double foo cdef foo test_typedef_cfunc(foo x): return x def test_typedef(x): """ >>> test_typedef(100) 100.0 """ return (test_typedef_cfunc)(x) cdef union my_union: int a double b cdef struct my_struct: int which my_union y cdef my_struct c_struct_builder(int which, int a, double b): cdef my_struct value value.which = which if which: value.y.a = a else: value.y.b = b return value def return_struct_builder(): """ >>> make = return_struct_builder() >>> d = make(0, 1, 2) >>> d['which'] 0 >>> d['y']['b'] 2.0 >>> d = make(1, 1, 2) >>> d['which'] 1 >>> d['y']['a'] 1 >>> make.__doc__ "wrap(which: 'int', a: 'int', b: float) -> 'my_struct'" """ return c_struct_builder cdef object test_object_params_cfunc(a, b): return a, b def test_object_params(a, b): """ >>> test_object_params(1, 'a') (1, 'a') """ return (test_object_params_cfunc)(a, b) cdef tuple test_builtin_params_cfunc(list a, dict b): return a, b def test_builtin_params(a, b): """ >>> test_builtin_params([], {}) ([], {}) >>> test_builtin_params(1, 2) Traceback (most recent call last): ... TypeError: Argument 'a' has incorrect type (expected list, got int) """ return (test_builtin_params_cfunc)(a, b) def return_builtin_params_cfunc(): """ >>> cfunc = return_builtin_params_cfunc() >>> cfunc([1, 2], {'a': 3}) ([1, 2], {'a': 3}) >>> cfunc.__doc__ 'wrap(a: list, b: dict) -> tuple' """ return test_builtin_params_cfunc cdef class A: def __repr__(self): return self.__class__.__name__ cdef class B(A): pass cdef A test_cdef_class_params_cfunc(A a, B b): return b def test_cdef_class_params(a, b): """ >>> test_cdef_class_params(A(), B()) B >>> test_cdef_class_params(B(), A()) Traceback (most recent call last): ... 
TypeError: Argument 'b' has incorrect type (expected cfunc_convert.B, got cfunc_convert.A) """ return (test_cdef_class_params_cfunc)(a, b) Cython-0.26.1/tests/run/embedsignatures.pyx0000664000175000017500000001675112574327400021537 0ustar stefanstefan00000000000000#cython: embedsignature=True import sys if sys.version_info >= (3, 4): def funcdoc(f): if not f.__text_signature__: return f.__doc__ doc = '%s%s' % (f.__name__, f.__text_signature__) if f.__doc__: if '\n' in f.__doc__: # preceding line endings get stripped doc = '%s\n\n%s' % (doc, f.__doc__) else: doc = '%s\n%s' % (doc, f.__doc__) return doc else: def funcdoc(f): return f.__doc__ # note the r, we use \n below __doc__ = ur""" >>> print (Ext.__doc__) Ext(a, b, c=None) >>> print (Ext.attr0.__doc__) attr0: 'int' attr0 docstring >>> print (Ext.attr1.__doc__) attr1: object attr1 docstring >>> print (Ext.attr2.__doc__) attr2: list >>> print (Ext.attr3.__doc__) attr3: embedsignatures.Ext >>> print (Ext.prop0.__doc__) prop0 docstring >>> print (Ext.prop1.__doc__) None >>> print (Ext.attr4.__doc__) attr4 docstring >>> print (Ext.attr5.__doc__) attr5: 'int' attr5 docstring >>> print (Ext.a.__doc__) Ext.a(self) >>> print (Ext.b.__doc__) Ext.b(self, a, b, c) >>> print (Ext.c.__doc__) Ext.c(self, a, b, c=1) >>> print (Ext.d.__doc__) Ext.d(self, a, b, *, c=88) >>> print (Ext.e.__doc__) Ext.e(self, a, b, c=88, **kwds) >>> print (Ext.f.__doc__) Ext.f(self, a, b, *, c, d=42) >>> print (Ext.g.__doc__) Ext.g(self, a, b, *, c, d=42, e=17, f, **kwds) >>> print (Ext.h.__doc__) Ext.h(self, a, b, *args, c, d=42, e=17, f, **kwds) >>> print (Ext.k.__doc__) Ext.k(self, a, b, c=1, *args, d=42, e=17, f, **kwds) >>> print (Ext.l.__doc__) Ext.l(self, a, b, c=1, *args, d=42, e=17, f, **kwds) Existing string >>> print (Ext.m.__doc__) Ext.m(self, a=u'spam') >>> print (Ext.get_int.__doc__) Ext.get_int(self) -> int >>> print (Ext.get_float.__doc__) Ext.get_float(self) -> float >>> print (Ext.get_str.__doc__) Ext.get_str(self) -> str Existing string >>> print (Ext.clone.__doc__) Ext.clone(self) -> Ext >>> print (funcdoc(foo)) foo() >>> funcdoc(with_doc_1) 'with_doc_1(a, b, c)\nExisting string' >>> funcdoc(with_doc_2) 'with_doc_2(a, b, c)\n\n Existing string\n ' >>> funcdoc(with_doc_3) 'with_doc_3(a, b, c)\nExisting string' >>> funcdoc(with_doc_4) 'with_doc_4(int a, str b, list c) -> str\n\n Existing string\n ' >>> funcdoc(f_sd) "f_sd(str s='spam')" >>> funcdoc(cf_sd) "cf_sd(str s='spam') -> str" >>> funcdoc(types) 'types(Ext a, int b, unsigned short c, float d, e)' >>> print(funcdoc(f_c)) f_c(char c) -> char >>> print(funcdoc(f_uc)) f_uc(unsigned char c) -> unsigned char >>> print(funcdoc(f_sc)) f_sc(signed char c) -> signed char >>> print(funcdoc(f_s)) f_s(short s) -> short >>> print(funcdoc(f_us)) f_us(unsigned short s) -> unsigned short >>> print(funcdoc(f_i)) f_i(int i) -> int >>> print(funcdoc(f_ui)) f_ui(unsigned int i) -> unsigned int >>> print(funcdoc(f_bint)) f_bint(bool i) -> bool >>> print(funcdoc(f_l)) f_l(long l) -> long >>> print(funcdoc(f_ul)) f_ul(unsigned long l) -> unsigned long >>> print(funcdoc(f_L)) f_L(long long L) -> long long >>> print(funcdoc(f_uL)) f_uL(unsigned long long L) -> unsigned long long >>> print(funcdoc(f_f)) f_f(float f) -> float >>> print(funcdoc(f_d)) f_d(double d) -> double >>> print(funcdoc(f_D)) f_D(long double D) -> long double >>> print(funcdoc(f_my_i)) f_my_i(MyInt i) -> MyInt >>> print(funcdoc(f_my_f)) f_my_f(MyFloat f) -> MyFloat >>> print(funcdoc(f_defexpr1)) f_defexpr1(int x=FLAG1, int y=FLAG2) >>> 
print(funcdoc(f_defexpr2)) f_defexpr2(int x=FLAG1 | FLAG2, y=FLAG1 & FLAG2) >>> print(funcdoc(f_defexpr3)) f_defexpr3(int x=Ext.CONST1, f=__builtins__.abs) >>> print(funcdoc(f_defexpr4)) f_defexpr4(int x=(Ext.CONST1 + FLAG1) * Ext.CONST2) >>> print(funcdoc(f_defexpr5)) f_defexpr5(int x=4) >>> print(funcdoc(f_charptr_null)) f_charptr_null(char *s=NULL) -> char * """ cdef class Ext: cdef public int attr0 """attr0 docstring""" cdef public attr1 """attr1 docstring""" cdef public list attr2 cdef public Ext attr3 """NOT attr3 docstring""" cdef int attr4 cdef public int \ attr5 """attr5 docstring""" CONST1, CONST2 = 1, 2 property prop0: """prop0 docstring""" def __get__(self): return self.attr0 property prop1: def __get__(self): return self.attr1 property attr4: """attr4 docstring""" def __get__(self): return self.attr4 def __init__(self, a, b, c=None): pass def a(self): pass def b(self, a, b, c): pass def c(self, a, b, c=1): pass def d(self, a, b, *, c = 88): pass def e(self, a, b, c = 88, **kwds): pass def f(self, a, b, *, c, d = 42): pass def g(self, a, b, *, c, d = 42, e = 17, f, **kwds): pass def h(self, a, b, *args, c, d = 42, e = 17, f, **kwds): pass def k(self, a, b, c=1, *args, d = 42, e = 17, f, **kwds): pass def l(self, a, b, c=1, *args, d = 42, e = 17, f, **kwds): """Existing string""" pass def m(self, a=u'spam'): pass cpdef int get_int(self): return 0 cpdef float get_float(self): return 0.0 cpdef str get_str(self): """Existing string""" return "string" cpdef Ext clone(self): return Ext(1,2) def foo(): pass def types(Ext a, int b, unsigned short c, float d, e): pass def with_doc_1(a, b, c): """Existing string""" pass def with_doc_2(a, b, c): """ Existing string """ pass cpdef with_doc_3(a, b, c): """Existing string""" pass cpdef str with_doc_4(int a, str b, list c): """ Existing string """ return b def f_sd(str s='spam'): return s cpdef str cf_sd(str s='spam'): return s cpdef char f_c(char c): return c cpdef unsigned char f_uc(unsigned char c): return c cpdef signed char f_sc(signed char c): return c cpdef short f_s(short s): return s cpdef unsigned short f_us(unsigned short s): return s cpdef int f_i(int i): return i cpdef unsigned int f_ui(unsigned int i): return i cpdef bint f_bint(bint i): return i cpdef long f_l(long l): return l cpdef unsigned long f_ul(unsigned long l): return l cpdef long long f_L(long long L): return L cpdef unsigned long long f_uL(unsigned long long L): return L cpdef float f_f(float f): return f cpdef double f_d(double d): return d cpdef long double f_D(long double D): return D ctypedef int MyInt cpdef MyInt f_my_i(MyInt i): return i ctypedef float MyFloat cpdef MyFloat f_my_f(MyFloat f): return f cdef enum: FLAG1 FLAG2 cpdef f_defexpr1(int x = FLAG1, int y = FLAG2): pass cpdef f_defexpr2(int x = FLAG1 | FLAG2, y = FLAG1 & FLAG2): pass cpdef f_defexpr3(int x = Ext.CONST1, f = __builtins__.abs): pass cpdef f_defexpr4(int x = (Ext.CONST1 + FLAG1) * Ext.CONST2): pass cpdef f_defexpr5(int x = 2+2): pass cpdef (char*) f_charptr_null(char* s=NULL): return s or b'abc' # no signatures for lambda functions lambda_foo = lambda x: 10 lambda_bar = lambda x: 20 Cython-0.26.1/tests/run/libcpp_algo.pyx0000664000175000017500000000254312542002467020621 0ustar stefanstefan00000000000000# tag: cpp from libcpp cimport bool from libcpp.algorithm cimport make_heap, sort_heap, sort, partial_sort from libcpp.vector cimport vector # XXX should use std::greater, but I don't know how to wrap that. 
cdef inline bool greater(int x, int y): return x > y def heapsort(l, bool reverse=False): """ >>> heapsort([3, 5, 1, 0, 2, 4]) [0, 1, 2, 3, 4, 5] >>> heapsort([3, 5, 1, 0, 2, 4], reverse=True) [5, 4, 3, 2, 1, 0] """ cdef vector[int] v = l if reverse: make_heap(v.begin(), v.end(), greater) sort_heap(v.begin(), v.end(), greater) else: make_heap(v.begin(), v.end()) sort_heap(v.begin(), v.end()) return v def partialsort(l, int k, reverse=False): """ >>> partialsort([4, 2, 3, 1, 5], k=2)[:2] [1, 2] >>> partialsort([4, 2, 3, 1, 5], k=2, reverse=True)[:2] [5, 4] """ cdef vector[int] v = l if reverse: partial_sort(v.begin(), v.begin() + k, v.end(), greater) else: partial_sort(v.begin(), v.begin() + k, v.end()) return v def stdsort(l, reverse=False): """ >>> stdsort([3, 2, 1, 4, 5]) [1, 2, 3, 4, 5] >>> stdsort([3, 2, 1, 4, 5], reverse=True) [5, 4, 3, 2, 1] """ cdef vector[int] v = l if reverse: sort(v.begin(), v.end(), greater) else: sort(v.begin(), v.end()) return v Cython-0.26.1/tests/run/unop.pyx0000664000175000017500000000060412542002467017323 0ustar stefanstefan00000000000000def f(obj1, obj2, obj3): """ >>> f(1, 2, 3) (-3, -4, 1) """ cdef int bool1, bool2 cdef int int1, int2 cdef char *str1 int2 = obj3 str1 = NULL bool2 = 0 bool1 = not bool2 obj1 = not obj2 bool1 = not str1 int1 = +int2 obj1 = +obj2 int1 = -int2 obj1 = -obj2 int1 = ~int2 obj1 = ~obj2 return obj1, int1, bool1 Cython-0.26.1/tests/run/types.h0000664000175000017500000000021612542002467017114 0ustar stefanstefan00000000000000/* This header is present to test effects of misdeclaring types Cython-side. */ typedef long actually_long_t; typedef short actually_short_t; Cython-0.26.1/tests/run/charptr_len.pyx0000664000175000017500000000335413023021023020630 0ustar stefanstefan00000000000000cimport cython cdef char* s = b"abcdefg" cdef const char* cs = b"abcdefg" cdef unsigned char* us = b"abcdefg" cdef const unsigned char* cus = b"abcdefg" cdef bytes pystr = b"abcdefg" @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_char(): """ >>> lentest_char() 7 """ return len(s) @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_const_char(): """ >>> lentest_const_char() 7 """ return len(cs) @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_char_c(): """ >>> lentest_char_c() 7 """ cdef Py_ssize_t l = len(s) return l @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_char_c_short(): """ >>> lentest_char_c_short() 7 """ cdef short l = len(s) return l @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_char_c_float(): """ >>> lentest_char_c_float() 7.0 """ cdef float l = len(s) return l @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_uchar(): """ >>> lentest_uchar() 7 """ return len(us) @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_const_uchar(): """ >>> lentest_const_uchar() 7 """ return len(cus) @cython.test_assert_path_exists( "//PythonCapiCallNode", ) def lentest_uchar_c(): """ >>> lentest_uchar_c() 7 """ cdef Py_ssize_t l = len(us) return l def lentest_py(): """ >>> lentest_py() 7 """ return len(pystr) def lentest_py_c(): """ >>> lentest_py_c() 7 """ cdef Py_ssize_t l = len(pystr) return l Cython-0.26.1/tests/run/funcexceptraisefrom.pyx0000664000175000017500000000171312542002467022420 0ustar stefanstefan00000000000000__doc__ = u""" >>> def bar(): ... try: ... foo() ... except ValueError: ... if IS_PY3: ... print(isinstance(sys.exc_info()[1].__cause__, TypeError)) ... else: ... 
print(True) >>> bar() True >>> print(sys.exc_info()) (None, None, None) >>> def bar2(): ... try: ... foo2() ... except ValueError: ... if IS_PY3: ... cause = sys.exc_info()[1].__cause__ ... print(isinstance(cause, TypeError)) ... print(cause.args==('value',)) ... pass ... else: ... print(True) ... print(True) >>> bar2() True True """ import sys IS_PY3 = sys.version_info[0] >= 3 if not IS_PY3: sys.exc_clear() def foo(): try: raise TypeError except TypeError: raise ValueError from TypeError def foo2(): try: raise TypeError except TypeError: raise ValueError() from TypeError('value') Cython-0.26.1/tests/run/tuplereassign.pyx0000664000175000017500000000066412542002467021235 0ustar stefanstefan00000000000000def test1(t): """ >>> test1( (1,2,3) ) 1 """ t,a,b = t return t def test3(t): """ >>> test3( (1,2,3) ) 3 """ a,b,t = t return t def test(t): """ >>> test( (1,2,3) ) 3 """ t,t,t = t return t def testnonsense(): """ >>> testnonsense() # doctest: +ELLIPSIS Traceback (most recent call last): TypeError: ... """ t,t,t = 1*2 return t Cython-0.26.1/tests/run/matrix_multiplier.pyx0000664000175000017500000000567612542002467022132 0ustar stefanstefan00000000000000 import sys if sys.version_info >= (3, 5): __doc__ = """\ Note: support for providing Python special methods despite missing the C-level slot is currently not supported. >>> a, b = ExtMatMult(1), ExtMatMult(2) >>> print(test_matmul(a, b)) ExtMatMult(1) @ ExtMatMult(2) >>> print(test_matmul(a, 22)) ExtMatMult(1) @ 22 >>> print(test_matmul(11, b)) 11 @ ExtMatMult(2) >>> print(test_imatmul(a, b)) ExtMatMult('ExtMatMult(1) @ ExtMatMult(2)') >>> print(test_imatmul(a, b)) ExtMatMult("ExtMatMult('ExtMatMult(1) @ ExtMatMult(2)') @ ExtMatMult(2)") >>> x = y = 1 >>> x @ y Traceback (most recent call last): TypeError: unsupported operand type(s) for @: 'int' and 'int' >>> x @= y Traceback (most recent call last): TypeError: unsupported operand type(s) for @=: 'int' and 'int' >>> y = MatMult(22) >>> x @= y >>> print(x) 1 @ MatMult(22) >>> x = MatMult(22) >>> print(x @ 1) MatMult(22) @ 1 >>> print(1 @ x) 1 @ MatMult(22) >>> x @= 1 >>> print(x) MatMult('MatMult(22) @ 1') """ class MatMult(object): def __init__(self, myself): self.myself = myself def __matmul__(self, other): return '%r @ %r' % (self, other) def __rmatmul__(self, other): return '%r @ %r' % (other, self) def __imatmul__(self, other): self.myself = '%r @ %r' % (self, other) return self def __repr__(self): return 'MatMult(%r)' % self.myself cdef class ExtMatMult: """ Note: support for providing Python special methods despite missing the C-level slot is currently not supported. 
""" cdef object myself def __init__(self, myself): self.myself = myself def __matmul__(self, other): return '%r @ %r' % (self, other) def __rmatmul__(self, other): return '%r @ %r' % (other, self) def __imatmul__(self, other): self.myself = '%r @ %r' % (self, other) return self def __repr__(self): return 'ExtMatMult(%r)' % self.myself def test_matmul(a, b): """ >>> print(test_matmul(MatMult(1), MatMult(2))) MatMult(1) @ MatMult(2) >>> print(test_matmul(MatMult(1), 22)) MatMult(1) @ 22 >>> print(test_matmul(11, MatMult(2))) 11 @ MatMult(2) >>> print(test_matmul(MatMult('abc'), MatMult('def'))) MatMult('abc') @ MatMult('def') >>> test_matmul(1, 2) Traceback (most recent call last): TypeError: unsupported operand type(s) for @: 'int' and 'int' """ return a @ b def test_imatmul(a, b): """ >>> print(test_imatmul(MatMult(1), MatMult(2))) MatMult('MatMult(1) @ MatMult(2)') >>> print(test_imatmul(MatMult('abc'), MatMult('def'))) MatMult("MatMult('abc') @ MatMult('def')") >>> print(test_imatmul(11, MatMult('def'))) 11 @ MatMult('def') >>> print(test_imatmul(MatMult('abc'), 11)) MatMult("MatMult('abc') @ 11") >>> test_imatmul(1, 2) Traceback (most recent call last): TypeError: unsupported operand type(s) for @=: 'int' and 'int' """ a @= b return a Cython-0.26.1/tests/run/str_encoding_latin1.pyx0000664000175000017500000000133312542002467022270 0ustar stefanstefan00000000000000# -*- coding: latin-1 -*- __doc__ = (u""" >>> a == 'abc' True >>> isinstance(a, str) True >>> isinstance(s, str) True >>> len(s) 6 >>> s == 'aäÄÖöo' True >>> isinstance(add(), str) True >>> len(add()) 9 >>> add() == 'abcaäÄÖöo' True >>> isinstance(add_literal(), str) True >>> len(add_literal()) 9 >>> add_literal() == 'abcaäÄÖöo' True >>> isinstance(typed(), str) True >>> len(typed()) 6 >>> typed() == 'üüääöö' True """ # recoding/escaping is required to properly pass the literals to doctest ).encode('unicode_escape').decode('ASCII').replace(u'\\n', u'\n') a = 'abc' s = 'aäÄÖöo' u = u'aäÄÖöo' cdef str S = 'üüääöö' def add(): return a+s def add_literal(): return 'abc' + 'aäÄÖöo' def typed(): return S Cython-0.26.1/tests/run/dict_iter_unpack.pyx0000664000175000017500000000070112542002467021647 0ustar stefanstefan00000000000000# mode: run # tag: dictiter def iteritems_unpack(dict the_dict): """ >>> d = {(1,2): (3,4), (5,6): (7,8)} >>> iteritems_unpack(d) [(1, 2, 3, 4), (5, 6, 7, 8)] """ return sorted([ (a,b,c,d) for (a,b), (c,d) in the_dict.iteritems() ]) def itervalues_unpack(dict the_dict): """ >>> d = {1: (3,4), 2: (7,8)} >>> itervalues_unpack(d) [(3, 4), (7, 8)] """ return [(a,b) for a,b in the_dict.itervalues() ] Cython-0.26.1/tests/run/annotate_html.pyx0000664000175000017500000000237513023021033021167 0ustar stefanstefan00000000000000""" >>> from codecs import open >>> import os.path as os_path >>> module_path = os_path.join(os_path.dirname(__file__), os_path.basename(__file__).split('.', 1)[0]) >>> assert module_path.endswith('annotate_html') >>> assert os_path.exists(module_path + '.c') or os_path.exists(module_path + '.cpp'), module_path >>> assert os_path.exists(module_path + '.html'), module_path >>> with open(module_path + '.html', 'r', 'utf8') as html_file: ... html = html_file.read() >>> import re >>> assert re.search('
', html)
"""


def mixed_test():
    """docstring
    """
    cdef int int1, int2, int3
    cdef char *ptr1, *ptr2 = "test", *ptr3 = "toast"
    int2 = 10
    int3 = 20
    obj1 = 1
    obj2 = 2
    obj3 = 3
    int1 = int2 + int3
    ptr1 = ptr2 + int3
    ptr1 = int2 + ptr3
    obj1 = obj2 + int3
    return int1, obj1


def add_x_1(int x):
    return x + 1


def add_x_1f(x):
    return x + 1.0


def add_x_large(x):
    return x + 2**30


def add_1_x(x):
    return 1 + x


def add_1f_x(double x):
    return 1.0 + x


def add_large_x(x):
    return 2**30 + x


class PythonClass(object):
    def call(self, x):
        return add_1_x(x)


cdef class ExtensionType(object):
    @classmethod
    def new(cls):
        return cls()
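
# A minimal sketch of where the HTML checked in the module docstring comes from
# (assumption: standard Cython build API; not part of the original test, which
# relies on the test runner to create the file).  Cython's annotation mode
# writes an .html report next to the generated C file:
#
#     from Cython.Build import cythonize
#     cythonize("annotate_html.pyx", annotate=True)   # emits annotate_html.html
#
# or, equivalently, ``cython -a annotate_html.pyx`` on the command line.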
Cython-0.26.1/tests/run/builtin_sorted.pyx0000664000175000017500000000404713143605603021374 0ustar  stefanstefan00000000000000cimport cython


def generator():
    yield 2
    yield 1
    yield 3


def returns_set():
    return {"foo", "bar", "baz"}


def returns_tuple():
    return (1, 2, 3, 0)


@cython.test_fail_if_path_exists("//SimpleCallNode")
def sorted_arg(x):
    """
    >>> a = [3, 2, 1]
    >>> sorted_arg(a)
    [1, 2, 3]
    >>> a
    [3, 2, 1]
    >>> sorted(generator())
    [1, 2, 3]
    >>> sorted(returns_set())
    ['bar', 'baz', 'foo']
    >>> sorted(returns_tuple())
    [0, 1, 2, 3]
    >>> sorted(object())
    Traceback (most recent call last):
    TypeError: 'object' object is not iterable
    """
    return sorted(x)


@cython.test_assert_path_exists("//GeneralCallNode")
def sorted_arg_with_key(x):
    """
    >>> a = [3, 2, 1]
    >>> sorted_arg_with_key(a)
    [3, 2, 1]
    >>> a
    [3, 2, 1]
    >>> sorted_arg_with_key(generator())
    [3, 2, 1]
    >>> sorted_arg_with_key(returns_tuple())
    [3, 2, 1, 0]
    >>> sorted_arg_with_key(object())
    Traceback (most recent call last):
    TypeError: 'object' object is not iterable
    """
    return sorted(x, key=lambda x: -x)


@cython.test_fail_if_path_exists("//YieldExprNode",
                                 "//NoneCheckNode")
@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode")
def sorted_genexp():
    """
    >>> sorted_genexp()
    [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
    """
    return sorted(i*i for i in range(10,0,-1))


@cython.test_fail_if_path_exists("//SimpleCallNode//SimpleCallNode")
@cython.test_assert_path_exists("//SimpleCallNode/NameNode[@name = 'range']")
def sorted_list_of_range():
    """
    >>> sorted_list_of_range()
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    """
    return sorted(list(range(10,0,-1)))


@cython.test_fail_if_path_exists("//SimpleCallNode")
def sorted_list_literal():
    """
    >>> sorted_list_literal()
    [1, 1, 2, 2, 3, 3]
    """
    return sorted([3, 1, 2] * 2)


@cython.test_fail_if_path_exists("//SimpleCallNode")
def sorted_tuple_literal():
    """
    >>> sorted_tuple_literal()
    [1, 1, 2, 2, 3, 3]
    """
    return sorted((1, 3, 2) * 2)
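
# Note on the decorators above: ``cython.test_assert_path_exists`` and
# ``cython.test_fail_if_path_exists`` are compile-time hooks that inspect the
# code tree generated for each function.  Asserting that no ``//SimpleCallNode``
# survives is how these tests check that ``sorted(...)`` was lowered to
# Cython's optimised implementation rather than left as a generic Python call.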
Cython-0.26.1/tests/run/closure_self.pyx0000664000175000017500000000225612542002467021034 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
cdef class Test:
    cdef int x

cdef class SelfInClosure(object):
    cdef Test _t
    cdef int x

    def plain(self):
        """
        >>> o = SelfInClosure()
        >>> o.plain()
        1
        """
        self.x = 1
        return self.x

    def closure_method(self):
        """
        >>> o = SelfInClosure()
        >>> o.closure_method()() == o
        True
        """
        def nested():
            return self
        return nested

    def closure_method_cdef_attr(self, Test t):
        """
        >>> o = SelfInClosure()
        >>> o.closure_method_cdef_attr(Test())()
        (1, 2)
        """
        t.x = 2
        self._t = t
        self.x = 1
        def nested():
            return self.x, t.x
        return nested

    def call_closure_method_cdef_attr_c(self, Test t):
        """
        >>> o = SelfInClosure()
        >>> o.call_closure_method_cdef_attr_c(Test())()
        (1, 2)
        """
        return self.closure_method_cdef_attr_c(t)

    cdef closure_method_cdef_attr_c(self, Test t):
        t.x = 2
        self._t = t
        self.x = 1
        def nested():
            return self.x, t.x
        return nested
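
# What these tests pin down: a closure created inside a method of a cdef class
# has to keep its own reference to ``self`` (and to cdef-typed arguments such
# as ``t``), so that C-level attribute access like ``self.x`` and ``t.x``
# still works after the enclosing method has returned.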
Cython-0.26.1/tests/run/directive_locals_in_pxd.py0000664000175000017500000000106412542002467023027 0ustar  stefanstefan00000000000000import cython

def foo(egg):
    if not cython.compiled:
        egg = float(egg)
    return egg

def foo_defval(egg=1):
    if not cython.compiled:
        egg = float(egg)
    return egg**2

def cpfoo(egg=False):
    if not cython.compiled:
        egg = bool(egg)
        v = int(not egg)
    else:
        v = not egg
    return egg, v

def test_pxd_locals():
    """
    >>> v1, v2, v3 = test_pxd_locals()
    >>> isinstance(v1, float)
    True
    >>> isinstance(v2, float)
    True
    >>> v3
    (True, 0)
    """
    return foo(1), foo_defval(), cpfoo(1)
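
# The static typing for this module is supplied by a companion
# directive_locals_in_pxd.pxd file (not included in this excerpt).  A rough
# sketch of such an augmenting .pxd -- the declarations actually shipped with
# the test may differ:
#
#     cimport cython
#
#     @cython.locals(egg=cython.double)
#     cpdef foo(egg)
#
#     @cython.locals(egg=cython.double)
#     cpdef foo_defval(egg=*)
#
#     @cython.locals(egg=cython.bint, v=cython.int)
#     cpdef cpfoo(egg=*)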
Cython-0.26.1/tests/run/builtin_next.pyx0000664000175000017500000000317512542002467021054 0ustar  stefanstefan00000000000000
import sys
IS_PY3 = sys.version_info[0] >= 3

__doc__ = """
>>> it = iter([1,2,3])
>>> if not IS_PY3:
...     next = type(it).next
>>> next(it)
1
>>> next(it)
2
>>> next(it)
3

>>> next(it)
Traceback (most recent call last):
StopIteration

>>> next(it)
Traceback (most recent call last):
StopIteration

>>> if IS_PY3: next(it, 123)
... else: print(123)
123
"""

if IS_PY3:
    __doc__ += """
>>> next(123)      # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: ...int... object is not an iterator
"""

def test_next_not_iterable(it):
    """
    >>> test_next_not_iterable(123)
    Traceback (most recent call last):
    TypeError: int object is not an iterator
    """
    return next(it)

def test_single_next(it):
    """
    >>> it = iter([1,2,3])
    >>> test_single_next(it)
    1
    >>> test_single_next(it)
    2
    >>> test_single_next(it)
    3
    >>> test_single_next(it)
    Traceback (most recent call last):
    StopIteration
    >>> test_single_next(it)
    Traceback (most recent call last):
    StopIteration
    """
    return next(it)

def test_default_next(it, default):
    """
    >>> it = iter([1,2,3])
    >>> test_default_next(it, 99)
    1
    >>> test_default_next(it, 99)
    2
    >>> test_default_next(it, 99)
    3
    >>> test_default_next(it, 99)
    99
    >>> test_default_next(it, 99)
    99
    """
    return next(it, default)

def test_next_override(it):
    """
    >>> it = iter([1,2,3])
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    """
    def next(it):
        return 1
    return next(it)
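
# The semantics exercised above, as a plain-Python sketch: the two-argument
# form of next() swallows StopIteration and returns the default instead.
#
#     def next_with_default(it, default):
#         try:
#             return next(it)
#         except StopIteration:
#             return default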
Cython-0.26.1/tests/run/closure_tests_4.pyx0000664000175000017500000004353312542002467021473 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# preparse: id
# preparse: def_to_cdef
#
# closure_tests_4.pyx
#
# Battery of tests for closures in Cython. Based on the collection of
# compiler tests from P423/B629 at Indiana University, Spring 1999 and
# Fall 2000. Special thanks to R. Kent Dybvig, Dan Friedman, Kevin
# Millikin, and everyone else who helped to generate the original
# tests. Converted into a collection of Python/Cython tests by Craig
# Citro.
#
# Note: This set of tests is split (somewhat randomly) into several
# files, simply because putting all the tests in a single file causes
# gcc and g++ to buckle under the load.
#


def g1852():
    """
    >>> g1852()
    [3, 42]
    """
    def g1851():
      def g1850(x_1333):
        x_1334 = 3
        return 3
      return g1850
    f_1332 = g1851()
    def g1848():
      def g1847(x_1336):
        y_1337 = 14
        y_1337 = 7
        return y_1337
      return g1847
    g_1335 = g1848()
    def g1849():
      return [g_1335,3]
    g_1335 = g1849()
    def g1846():
      def g1845(x_1340):
        return x_1340
      return g1845
    h_1339 = g1846()
    z_1338 = 42
    def g1844():
      return (g_1335[1])
    return [g1844(),h_1339(z_1338)]


def g1864():
    """
    >>> g1864()
    True
    """
    t_1342 = True
    f_1341 = False
    def g1863():
      return [t_1342,f_1341]
    bools_1345 = g1863()
    def g1862():
      def g1861(x_1343):
        if ((not x_1343)):
          return f_1341
        else:
          return t_1342
      return g1861
    id_1344 = g1862()
    def g1860():
      def g1857(x_1349):
        def g1859():
          return x_1349 == 0
        if (g1859()):
          def g1858():
            return (bools_1345[0])
          return id_1344(g1858())
        else:
          return odd_1346((x_1349)-(1))
      return g1857
    even_1347 = g1860()
    def g1856():
      def g1853(y_1348):
        def g1855():
          return y_1348 == 0
        if (g1855()):
          def g1854():
            return (bools_1345[1])
          return id_1344(g1854())
        else:
          return even_1347((y_1348)-(1))
      return g1853
    odd_1346 = g1856()
    return odd_1346(5)


def g1872():
    """
    >>> g1872()
    35
    """
    a_1350 = 5
    def g1871():
      return [a_1350,6]
    b_1351 = g1871()
    def g1870():
      def g1869(x_1352):
        return (x_1352)*(a_1350)
      return g1869
    f_1353 = g1870()
    def g1867():
      def g1866():
        return (b_1351[0])
      return (f_1353(a_1350))-(g1866())
    if (g1867()):
      def g1868():
        if ((not a_1350)):
          return (2)*(a_1350)
        else:
          return (2)+(a_1350)
      b_1351[0] = g1868()
      f_1353(a_1350)
    else:
      if ((not (not (f_1353(a_1350) < b_1351)))): (f_1353(a_1350))
    def g1865():
      return (b_1351[0])
    return f_1353(g1865())


def g1885():
    """
    >>> g1885()
    9
    """
    def g1884():
      def g1883(x_1368, y_1367):
        if ((not x_1368)):
          return g_1355((x_1368+1), (y_1367+1))
        else:
          return h_1354((x_1368)+(y_1367))
      return g1883
    f_1356 = g1884()
    def g1882():
      def g1875(u_1359, v_1358):
        a_1361 = (u_1359)+(v_1358)
        b_1360 = (u_1359)*(v_1358)
        def g1881():
          def g1876(d_1363):
            def g1880():
              return [a_1361,b_1360]
            p_1365 = g1880()
            def g1879():
              def g1877(m_1366):
                if ((m_1366 < u_1359)):
                  return f_1356(m_1366, d_1363)
                else:
                  def g1878():
                    return (p_1365[0])
                  return h_1354(g1878())
              return g1877
            q_1364 = g1879()
            return q_1364(f_1356(a_1361, b_1360))
          return g1876
        e_1362 = g1881()
        return e_1362(u_1359)
      return g1875
    g_1355 = g1882()
    def g1874():
      def g1873(w_1357):
        return w_1357
      return g1873
    h_1354 = g1874()
    return f_1356(4, 5)


def g1897():
    """
    >>> g1897()
    22
    """
    def g1896():
      def g1890(x_1373):
        def g1895():
          def g1894():
            def g1893():
              def g1891(y_1374):
                def g1892(z_1375):
                  return (y_1374)+(z_1375)
                return g1892
              return g1891
            return g1893()(6)
          return g1894()(7)
        return (x_1373)+(g1895())
      return g1890
    f_1370 = g1896()
    def g1889():
      def g1888():
        def g1887():
          def g1886(w_1372, u_1371):
            return (w_1372)+(u_1371)
          return g1886
        return g1887()(8, 9)
      return (5)+(g1888())
    g_1369 = g1889()
    return g_1369


def g1923():
    """
    >>> g1923()
    True
    """
    y_1377 = []
    z_1376 = 10
    def g1911():
      return [5,y_1377]
    test_ls_1378 = g1911()
    def g1922():
      def g1913(f_1379):
        def g1921():
          def g1918(g_1382):
            def g1920():
              def g1919(x_1383):
                return g_1382(g_1382)(x_1383)
              return g1919
            return f_1379(g1920())
          return g1918
        def g1917():
          def g1914(g_1380):
            def g1916():
              def g1915(x_1381):
                return g_1380(g_1380)(x_1381)
              return g1915
            return f_1379(g1916())
          return g1914
        return g1921()(g1917())
      return g1913
    y_1377 = g1922()
    def g1912():
      return [z_1376,test_ls_1378]
    test_ls_1378 = g1912()
    def g1910():
      def g1906(ls_1385):
        def g1909():
          return (ls_1385 == [])
        if (g1909()):
          return 0
        else:
          def g1908():
            def g1907():
              return (ls_1385[1])
            return length_1384(g1907())
          return (1)+(g1908())
      return g1906
    length_1384 = g1910()
    len_1386 = length_1384(test_ls_1378)
    def g1905():
      def g1904():
        def g1903():
          def g1898(len_1387):
            def g1899(ls_1388):
              def g1902():
                return (ls_1388 == [])
              if (g1902()):
                return 0
              else:
                def g1901():
                  def g1900():
                    return (ls_1388[1])
                  return len_1387(g1900())
                return (1)+(g1901())
            return g1899
          return g1898
        return y_1377(g1903())
      length_1384 = g1904()
      return length_1384(test_ls_1378)
    return (g1905() == len_1386)


def g1927():
    """
    >>> g1927()
    0
    """
    def g1926():
      def g1924():
        def g1925():
          return loop_1389()
        return g1925
      return g1924
    loop_1389 = g1926()
    loop_1389()
    return 0


def g1935():
    """
    >>> g1935()
    668
    """
    def g1934():
      def g1928():
        def g1933():
          def g1931(link_1392):
            def g1932():
              return link_1392()
            return g1932
          return g1931
        loop_1391 = g1933()
        def g1930():
          def g1929():
            return 668
          return g1929
        return loop_1391(g1930())
      return g1928
    f_1390 = g1934()
    return f_1390()()


def g1946():
    """
    >>> g1946()
    14629
    """
    def g1945():
      def g1944():
        return 1
      return g1944
    if (g1945()):
      a_1393 = 2
      def g1943():
        def g1942():
          def g1941():
            def g1938(x_1394):
              def g1940():
                def g1939():
                  a_1393 = 1
                a_1393 = g1939()
              x_1395 = g1940()
              return x_1395
            return g1938
          return g1941()(1)
        if (g1942()):
          def g1937():
            def g1936():
              return None
            return (a_1393 == g1936())
          if (g1937()):
            return True
          else:
            return False
        else:
          return False
      if (g1943()):
        return 778477
      else:
        return 14629


def g1949():
    """
    >>> g1949()
    2
    """
    def g1948():
      def g1947(x_1396):
        return x_1396
      return g1947
    f_1397 = g1948()
    a_1398 = 1
    return ((f_1397(a_1398))+(a_1398))*(a_1398)


def g1952():
    """
    >>> g1952()
    17
    """
    def g1951():
      def g1950(x_1400, y_1399):
        return x_1400
      return g1950
    k_1401 = g1951()
    b_1402 = 17
    return k_1401(k_1401(k_1401, 37), 37)(b_1402, (b_1402)*(b_1402))


def g1956():
    """
    >>> g1956()
    False
    """
    def g1955():
      def g1953():
        n_1403 = 256
        def g1954():
          return ([0]*n_1403)
        v_1404 = g1954()
        v_1404[32] = n_1403
        return v_1404[32]
      return g1953
    f_1405 = g1955()
    return isinstance(f_1405(), list)


def g1959():
    """
    >>> g1959()
    60
    """
    w_1409 = 4
    x_1408 = 8
    y_1407 = 16
    z_1406 = 32
    def g1958():
      def g1957():
        return (w_1409)+((x_1408)+((y_1407)+(z_1406)))
      return g1957
    f_1410 = g1958()
    return f_1410()


def g1965():
    """
    >>> g1965()
    37
    """
    def g1964():
      def g1962(g_1412, u_1411):
        def g1963():
          if (u_1411):
            return g_1412(37)
          else:
            return u_1411
        return g_1412(g1963())
      return g1962
    f_1413 = g1964()
    def g1961():
      def g1960(x_1414):
        return x_1414
      return g1960
    return f_1413(g1961(), 75)


def g1971():
    """
    >>> g1971()
    4687
    """
    def g1970():
      def g1968(h_1416, u_1415):
        def g1969():
          if (u_1415):
            return h_1416((u_1415)+(37))
          else:
            return u_1415
        return h_1416(g1969())
      return g1968
    f_1418 = g1970()
    w_1417 = 62
    def g1967():
      def g1966(x_1419):
        return (w_1417)-(x_1419)
      return g1966
    return f_1418(g1967(), (75)*(w_1417))


def g1983():
    """
    >>> g1983()
    True
    """
    t_1421 = True
    f_1420 = False
    def g1982():
      return [t_1421,f_1420]
    bools_1424 = g1982()
    def g1981():
      def g1980(x_1422):
        if ((not x_1422)):
          return f_1420
        else:
          return t_1421
      return g1980
    id_1423 = g1981()
    def g1979():
      def g1976(x_1428):
        def g1978():
          def g1977():
            return x_1428 == 0
          return id_1423(g1977())
        if (g1978()):
          return (bools_1424[0])
        else:
          return odd_1425((x_1428)-(1))
      return g1976
    even_1426 = g1979()
    def g1975():
      def g1972(y_1427):
        def g1974():
          return y_1427 == 0
        if (g1974()):
          def g1973():
            return (bools_1424[1])
          return id_1423(g1973())
        else:
          return even_1426((y_1427)-(1))
      return g1972
    odd_1425 = g1975()
    return odd_1425(5)


def g1990():
    """
    >>> g1990()
    48
    """
    def g1989():
      def g1984(x_1431, y_1430, z_1429):
        def g1988():
          def g1987(u_1435, v_1434):
            x_1431 = u_1435
            return (x_1431)+(v_1434)
          return g1987
        f_1437 = g1988()
        def g1986():
          def g1985(r_1433, s_1432):
            y_1430 = (z_1429)+(s_1432)
            return y_1430
          return g1985
        g_1436 = g1986()
        return (f_1437(1, 2))*(g_1436(3, 4))
      return g1984
    return g1989()(10, 11, 12)


def g1997():
    """
    >>> g1997()
    176
    """
    def g1996():
      def g1991(x_1440, y_1439, z_1438):
        f_1444 = False
        def g1995():
          def g1994(r_1442, s_1441):
            y_1439 = (z_1438)+(s_1441)
            return y_1439
          return g1994
        g_1443 = g1995()
        def g1993():
          def g1992(u_1446, v_1445):
            v_1445 = u_1446
            return (x_1440)+(v_1445)
          return g1992
        f_1444 = g1993()
        return (f_1444(1, 2))*(g_1443(3, 4))
      return g1991
    return g1996()(10, 11, 12)


def g2002():
    """
    >>> g2002()
    5
    """
    def g2001():
      def g2000(x_1450):
        return (x_1450)+(1)
      return g2000
    f_1448 = g2001()
    def g1999():
      def g1998(y_1449):
        return f_1448(f_1448(y_1449))
      return g1998
    g_1447 = g1999()
    return (f_1448(1))+(g_1447(1))


def g2010():
    """
    >>> g2010()
    1521
    """
    y_1451 = 3
    def g2009():
      def g2007(x_1457):
        def g2008():
          return x_1457 == 0
        if (g2008()):
          return g_1453((x_1457)+(1))
        else:
          return f_1454((x_1457)-(y_1451))
      return g2007
    f_1454 = g2009()
    def g2006():
      def g2005(x_1456):
        return h_1452((x_1456)*(x_1456))
      return g2005
    g_1453 = g2006()
    def g2004():
      def g2003(x_1455):
        return x_1455
      return g2003
    h_1452 = g2004()
    return g_1453(39)


def g2017():
    """
    >>> g2017()
    -1
    """
    def g2014():
      def g2013(x_1461):
        return (x_1461)+(1)
      return g2013
    f_1459 = g2014()
    def g2012():
      def g2011(y_1460):
        return f_1459(f_1459(y_1460))
      return g2011
    g_1458 = g2012()
    def g2016():
      def g2015(x_1462):
        return (x_1462)-(1)
      return g2015
    f_1459 = g2016()
    return (f_1459(1))+(g_1458(1))


def g2032():
    """
    >>> g2032()
    [52, [17, [35, [17, 35]]]]
    """
    def g2031():
      def g2030():
        return (a_1465)+(b_1464)
      return g2030
    f_1466 = g2031()
    a_1465 = 17
    b_1464 = 35
    def g2029():
      def g2028():
        def g2027():
          return a_1465
        return g2027
      def g2026():
        def g2025():
          return b_1464
        return g2025
      return [g2028(),g2026()]
    h_1463 = g2029()
    def g2024():
      def g2023():
        def g2022():
          def g2021():
            def g2020():
              return (h_1463[0])
            return g2020()()
          def g2019():
            def g2018():
              return (h_1463[1])
            return g2018()()
          return [g2021(),g2019()]
        return [b_1464,g2022()]
      return [a_1465,g2023()]
    return [f_1466(),g2024()]


def g2038():
    """
    >>> g2038()
    120
    """
    x_1469 = 5
    def g2037():
      a_1467 = 1
      def g2036():
        return a_1467
      return g2036
    th_1468 = g2037()
    def g2035():
      def g2033(n_1472, th_1471):
        def g2034():
          return n_1472 == 0
        if (g2034()):
          return th_1471()
        else:
          return (n_1472)*(fact_1470((n_1472)-(1), th_1471))
      return g2033
    fact_1470 = g2035()
    return fact_1470(x_1469, th_1468)


def g2046():
    """
    >>> g2046()
    [120, -120]
    """
    def g2045():
      def g2044(n_1473):
        return (n_1473 < 0)
      return g2044
    negative_1474 = g2045()
    def g2043():
      def g2041(n_1478):
        def g2042():
          return n_1478 == 0
        if (g2042()):
          return 1
        else:
          return (n_1478)*(fact_1476((n_1478)-(1)))
      return g2041
    fact_1476 = g2043()
    def g2040():
      def g2039(n_1477):
        if ((not negative_1474(n_1477))):
          return fact_1476(n_1477)
        else:
          return (0)-(fact_1476((0)-(n_1477)))
      return g2039
    call_fact_1475 = g2040()
    return [call_fact_1475(5),call_fact_1475(-5)]


def g2050():
    """
    >>> g2050()
    [0, 1, 2, 3]
    """
    def g2049():
      def g2048(v_1482, i_1481, n_1480):
        if ((not (i_1481 == n_1480))):
          v_1482[i_1481] = i_1481
          return iota_fill_1479(v_1482, (i_1481)+(1), n_1480)
      return g2048
    iota_fill_1479 = g2049()
    n_1483 = 4
    def g2047():
      return ([0]*n_1483)
    v_1484 = g2047()
    iota_fill_1479(v_1484, 0, n_1483)
    return v_1484


def g2061():
    """
    >>> g2061()
    [[33, 55], [77, 99]]
    """
    def g2060():
      def g2059():
        def g2058():
          def g2057():
            def g2051(a_1485):
              def g2052(b_1486):
                def g2053(c_1487):
                  def g2054(d_1488):
                    def g2056():
                      return [a_1485,b_1486]
                    def g2055():
                      return [c_1487,d_1488]
                    return [g2056(),g2055()]
                  return g2054
                return g2053
              return g2052
            return g2051
          return g2057()(33)
        return g2058()(55)
      return g2059()(77)
    return g2060()(99)


def g2075():
    """
    >>> g2075()
    [[[3, [21, [18, []]]], [4, [28, [24, []]]]], [[[0, [0, [0, []]]], [1, [7, [6, []]]]], [[408, 408], []]]]
    """
    a_1489 = 17
    def g2074():
      def g2064(x_1490):
        x1_1492 = (x_1490)+(1)
        x2_1491 = (x_1490)+(2)
        y1_1494 = (x1_1492)*(7)
        y2_1493 = (x2_1491)*(7)
        z1_1496 = (y1_1494)-(x1_1492)
        z2_1495 = (y2_1493)-(x2_1491)
        w1_1498 = (z1_1496)*(a_1489)
        w2_1497 = (z2_1495)*(a_1489)
        def g2073():
          def g2068(b_1500):
            if ((b_1500 == a_1489)):
              def g2072():
                def g2071():
                  return [z1_1496,[]]
                return [y1_1494,g2071()]
              return [x1_1492,g2072()]
            else:
              def g2070():
                def g2069():
                  return [z2_1495,[]]
                return [y2_1493,g2069()]
              return [x2_1491,g2070()]
          return g2068
        g_1502 = g2073()
        def g2067():
          def g2066(c_1499):
            if ((c_1499 == x_1490)):
              return w1_1498
            else:
              return w2_1497
          return g2066
        h_1501 = g2067()
        def g2065():
          if (((x_1490)*(x_1490) == (x_1490)+(x_1490))):
            return True
          else:
            return (x_1490 < 0)
        if (g2065()):
          return [g_1502(17),g_1502(16)]
        else:
          return [h_1501(x_1490),h_1501((x_1490)-(0))]
      return g2064
    f_1503 = g2074()
    def g2063():
      def g2062():
        return [f_1503(3),[]]
      return [f_1503(-1),g2062()]
    return [f_1503(2),g2063()]

Cython-0.26.1/tests/run/py3k_super.pyx
# mode: run
# tag: py3k_super

class A(object):
    def method(self):
        return 1

    @classmethod
    def class_method(cls):
        return 2

    @staticmethod
    def static_method():
        return 3

    def generator_test(self):
        return [1, 2, 3]


class B(A):
    """
    >>> obj = B()
    >>> obj.method()
    1
    >>> B.class_method()
    2
    >>> B.static_method(obj)
    3
    >>> list(obj.generator_test())
    [1, 2, 3]
    """
    def method(self):
        return super().method()

    @classmethod
    def class_method(cls):
        return super().class_method()

    @staticmethod
    def static_method(instance):
        return super().static_method()

    def generator_test(self):
        for i in super().generator_test():
            yield i


def test_class_cell_empty():
    """
    >>> test_class_cell_empty()
    Traceback (most recent call last):
    ...
    SystemError: super(): empty __class__ cell
    """
    class Base(type):
        def __new__(cls, name, bases, attrs):
            attrs['foo'](None)

    class EmptyClassCell(metaclass=Base):
        def foo(self):
            super()


cdef class CClassBase(object):
    def method(self):
        return 'def'

#     cpdef method_cp(self):
#         return 'cpdef'
#     cdef method_c(self):
#         return 'cdef'
#     def call_method_c(self):
#         return self.method_c()

cdef class CClassSub(CClassBase):
    """
    >>> CClassSub().method()
    'def'
    """
#     >>> CClassSub().method_cp()
#     'cpdef'
#     >>> CClassSub().call_method_c()
#     'cdef'

    def method(self):
        return super().method()

#     cpdef method_cp(self):
#         return super().method_cp()
#     cdef method_c(self):
#         return super().method_c()
Cython-0.26.1/tests/run/cpp_smart_ptr.pyx
# distutils: extra_compile_args=-std=c++0x
# mode: run
# tag: cpp, werror

from libcpp.memory cimport unique_ptr, shared_ptr, default_delete
from libcpp cimport nullptr

cdef extern from "cpp_smart_ptr_helper.h":
    cdef cppclass CountAllocDealloc:
        CountAllocDealloc(int*, int*)

    cdef cppclass FreePtr[T]:
        pass

def test_unique_ptr():
    """
    >>> test_unique_ptr()
    """
    cdef int alloc_count = 0, dealloc_count = 0
    cdef unique_ptr[CountAllocDealloc] x_ptr
    x_ptr.reset(new CountAllocDealloc(&alloc_count, &dealloc_count))
    assert alloc_count == 1
    x_ptr.reset()
    assert alloc_count == 1
    assert dealloc_count == 1

    ## Repeat the above test with an explicit default_delete type
    alloc_count = 0
    dealloc_count = 0
    cdef unique_ptr[CountAllocDealloc,default_delete[CountAllocDealloc]] x_ptr2
    x_ptr2.reset(new CountAllocDealloc(&alloc_count, &dealloc_count))
    assert alloc_count == 1
    x_ptr2.reset()
    assert alloc_count == 1
    assert dealloc_count == 1

    alloc_count = 0
    dealloc_count = 0
    cdef unique_ptr[CountAllocDealloc,FreePtr[CountAllocDealloc]] x_ptr3
    x_ptr3.reset(new CountAllocDealloc(&alloc_count, &dealloc_count))
    assert x_ptr3.get() != nullptr;
    x_ptr3.reset()
    assert x_ptr3.get() == nullptr;
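
    # The third pointer above uses FreePtr (declared from
    # "cpp_smart_ptr_helper.h") as an explicit deleter instead of the
    # implicit default_delete.  In C++ the deleter is part of the
    # unique_ptr's type, so unique_ptr[T, FreePtr[T]] is a distinct
    # template instantiation from plain unique_ptr[T].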

def test_shared_ptr():
    """
    >>> test_shared_ptr()
    """
    cdef int alloc_count = 0, dealloc_count = 0
    cdef shared_ptr[CountAllocDealloc] ptr = shared_ptr[CountAllocDealloc](
        new CountAllocDealloc(&alloc_count, &dealloc_count))
    assert alloc_count == 1
    assert dealloc_count == 0

    cdef shared_ptr[CountAllocDealloc] ptr2 = ptr
    assert alloc_count == 1
    assert dealloc_count == 0

    ptr.reset()
    assert alloc_count == 1
    assert dealloc_count == 0

    ptr2.reset()
    assert alloc_count == 1
    assert dealloc_count == 1


cdef cppclass A:
    pass

cdef cppclass B(A):
    pass

cdef cppclass C(B):
    pass

cdef shared_ptr[A] holding_subclass = shared_ptr[A](new C())
Cython-0.26.1/tests/run/anonymousenum.pyx
__doc__ = u"""
>>> p
42
"""

cdef enum:
    spam = 42
    grail = 17

cdef int i
i = spam

p = i
Cython-0.26.1/tests/run/initial_file_path.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import my_test_package as p; assert not p.__file__.rstrip('co').endswith('.py'), p.__file__; p.test()"
PYTHON -c "import my_test_package.a as a; a.test()"
PYTHON -c "import my_test_package.another as p; assert not p.__file__.rstrip('co').endswith('.py'), p.__file__; p.test()"
PYTHON -c "import my_test_package.another.a as a; a.test()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
    ext_modules = cythonize(["my_test_package/**/*.py"]),
)

######## my_test_package/__init__.py ########

# cython: set_initial_path=SOURCEFILE

initial_path = __path__
initial_file = __file__

try:
    from . import a
    import_error = None
except ImportError as e:
    import_error = e
    import traceback
    traceback.print_exc()

def test():
    print "FILE: ", initial_file
    print "PATH: ", initial_path
    assert initial_path[0].endswith('my_test_package'), initial_path
    assert initial_file.endswith('__init__.py'), initial_file
    assert import_error is None, import_error
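
# The "# cython: set_initial_path=SOURCEFILE" directive at the top of this
# file asks Cython to initialise __file__ (and, for packages, __path__) at
# module init time from the original source location, which is what the
# assertions in test() above rely on.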

######## my_test_package/another/__init__.py ########

# cython: set_initial_path=SOURCEFILE

initial_path = __path__
initial_file = __file__

try:
    from . import a
    import_error = None
except ImportError as e:
    import_error = e
    import traceback
    traceback.print_exc()

def test():
    print "FILE: ", initial_file
    print "PATH: ", initial_path
    assert initial_path[0].endswith('another'), initial_path
    assert initial_file.endswith('__init__.py'), initial_file
    assert import_error is None, import_error

######## my_test_package/a.py ########

# cython: set_initial_path=SOURCEFILE

initial_file = __file__

try:
    initial_path = __path__
except NameError:
    got_name_error = True
else:
    got_name_error = False

def test():
    assert initial_file.endswith('a.py'), initial_file
    assert got_name_error, "looks like __path__ was set at module init time: " + initial_path

######## my_test_package/another/a.py ########

# cython: set_initial_path=SOURCEFILE

initial_file = __file__

try:
    initial_path = __path__
except NameError:
    got_name_error = True
else:
    got_name_error = False

def test():
    assert initial_file.endswith('a.py'), initial_file
    assert got_name_error, "looks like __path__ was set at module init time: " + initial_path
Cython-0.26.1/tests/run/pure_cdef_class_property_decorator_T264.py
# mode: run
# ticket: 264
# tag: property, decorator

class Prop(object):
    """
    >>> p = Prop()
    >>> p.prop
    GETTING 'None'
    >>> p.prop = 1
    SETTING '1' (previously: 'None')
    >>> p.prop
    GETTING '1'
    1
    >>> p.prop = 2
    SETTING '2' (previously: '1')
    >>> p.prop
    GETTING '2'
    2
    >>> del p.prop
    DELETING '2'
    >>> p.prop
    GETTING 'None'
    """

    def __init__(self):
        self._value = None

    @property
    def prop(self):
        print("FAIL")
        return 0

    @prop.getter
    def prop(self):
        print("FAIL")

    @property
    def prop(self):
        print("GETTING '%s'" % self._value)
        return self._value

    @prop.setter
    def prop(self, value):
        print("SETTING '%s' (previously: '%s')" % (value, self._value))
        self._value = value

    @prop.deleter
    def prop(self):
        print("DELETING '%s'" % self._value)
        self._value = None
Cython-0.26.1/tests/run/subop.pyx
cimport cython


def bigint(x):
    print(str(x).rstrip('L'))


def mixed_test():
    """
    >>> mixed_test()
    (-1, -1)
    """
    cdef int int1, int2, int3
    obj1 = 1
    obj2 = 2
    obj3 = 3
    int2 = 2
    int3 = 3

    int1 = int2 - int3
    obj1 = obj2 - int3
    return int1, obj1


def pointer_test():
    """
    >>> pointer_test()
    0
    """
    cdef int int1, int2, int3
    cdef char *ptr1, *ptr2, *ptr3
    int2 = 2
    int3 = 3
    ptr2 = "test"
    ptr3 = ptr2

    ptr1 = ptr2 - int3
    int1 = ptr2 - ptr3
    return int1


@cython.test_fail_if_path_exists('//SubNode')
def sub_x_1(x):
    """
    >>> sub_x_1(0)
    -1
    >>> sub_x_1(1)
    0
    >>> sub_x_1(-1)
    -2
    >>> bigint(2**50 - 1)
    1125899906842623
    >>> bigint(sub_x_1(2**50))
    1125899906842623
    >>> sub_x_1(1.5)
    0.5
    >>> sub_x_1(-1.5)
    -2.5
    >>> try: sub_x_1("abc")
    ... except TypeError: pass
    """
    return x - 1


@cython.test_fail_if_path_exists('//SubNode')
def sub_x_1f(x):
    """
    >>> sub_x_1f(0)
    -1.0
    >>> sub_x_1f(1)
    0.0
    >>> sub_x_1f(-1)
    -2.0
    >>> 2**52 - 1.0
    4503599627370495.0
    >>> sub_x_1f(2**52)
    4503599627370495.0
    >>> sub_x_1f(2**60) == 2**60 - 1.0 or sub_x_1f(2**60)
    True
    >>> sub_x_1f(1.5)
    0.5
    >>> sub_x_1f(-1.5)
    -2.5
    >>> try: sub_x_1f("abc")
    ... except TypeError: pass
    """
    return x - 1.0


@cython.test_fail_if_path_exists('//SubNode')
def sub_x_large(x):
    """
    >>> sub_x_large(0)
    -1073741824
    >>> sub_x_large(1)
    -1073741823
    >>> sub_x_large(-1)
    -1073741825
    >>> bigint(2**50 - 2**30)
    1125898833100800
    >>> bigint(sub_x_large(2**50))
    1125898833100800
    >>> sub_x_large(2.0**30)
    0.0
    >>> sub_x_large(2.0**30 + 1)
    1.0
    >>> sub_x_large(2.0**30 - 1)
    -1.0
    >>> 2.0 ** 31 - 2**30
    1073741824.0
    >>> sub_x_large(2.0**31)
    1073741824.0
    >>> try: sub_x_large("abc")
    ... except TypeError: pass
    """
    return x - 2**30


@cython.test_fail_if_path_exists('//SubNode')
def sub_1_x(x):
    """
    >>> sub_1_x(0)
    1
    >>> sub_1_x(-1)
    2
    >>> sub_1_x(1)
    0
    >>> bigint(1 - 2**50)
    -1125899906842623
    >>> bigint(sub_1_x(2**50))
    -1125899906842623
    >>> sub_1_x(1.5)
    -0.5
    >>> sub_1_x(-1.5)
    2.5
    >>> try: sub_1_x("abc")
    ... except TypeError: pass
    """
    return 1 - x


@cython.test_fail_if_path_exists('//SubNode')
def sub_1f_x(x):
    """
    >>> sub_1f_x(0)
    1.0
    >>> sub_1f_x(-1)
    2.0
    >>> sub_1f_x(1)
    0.0
    >>> 1.0 - 2**52
    -4503599627370495.0
    >>> sub_1f_x(2**52)
    -4503599627370495.0
    >>> sub_1f_x(2**60) == 1.0 - 2**60 or sub_1f_x(2**60)
    True
    >>> sub_1f_x(1.5)
    -0.5
    >>> sub_1f_x(-1.5)
    2.5
    >>> try: sub_1f_x("abc")
    ... except TypeError: pass
    """
    return 1.0 - x


@cython.test_fail_if_path_exists('//SubNode')
def sub_large_x(x):
    """
    >>> sub_large_x(0)
    1073741824
    >>> sub_large_x(-1)
    1073741825
    >>> sub_large_x(1)
    1073741823
    >>> sub_large_x(2**30)
    0
    >>> bigint(2**30 - 2**31)
    -1073741824
    >>> bigint(sub_large_x(2**31))
    -1073741824
    >>> sub_large_x(2.0**30)
    0.0
    >>> sub_large_x(2.0**31)
    -1073741824.0
    >>> sub_large_x(2.0**30 + 1)
    -1.0
    >>> sub_large_x(2.0**30 - 1)
    1.0
    >>> try: sub_large_x("abc")
    ... except TypeError: pass
    """
    return 2**30 - x
Cython-0.26.1/tests/run/big_indices.pyx
def test():
    """
    >>> test()
    neg False
    pos True
    neg
    pos
    neg
    pos
    """
    cdef object D
    cdef long neg = -1
    cdef unsigned long pos = -2 # will be a large positive number

    print u"neg", neg > 0
    print u"pos", pos > 0

    D = { neg: u'neg', pos: u'pos' }

    print D[neg]
    print D[pos]

    print D[neg]
    print D[pos]
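
    # Assigning -2 to an unsigned long wraps around modulo 2**N for an
    # N-bit unsigned long, so `pos` ends up as a very large positive value
    # that is distinct from `neg` as a dict key.  A rough pure-Python sketch
    # of the wrap-around (assuming a 64-bit unsigned long, which is platform
    # dependent):
    #
    #     >>> (-2) % (2 ** 64)
    #     18446744073709551614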
Cython-0.26.1/tests/run/file_encoding_T740.py
# encoding: koi8-r
# mode: run
# ticket: 740
"""
>>> wtf
'wtf'
"""

wtf = 'wtf'
Cython-0.26.1/tests/run/string_comparison.pyx
cimport cython

import sys
IS_PY3 = sys.version_info[0] >= 3

bstring1 = b"abcdefg"
bstring2 = b"1234567"

string1 = "abcdefg"
string2 = "1234567"

ustring1 = u"abcdefg"
ustring2 = u"1234567"

# unicode

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_eq(unicode s1, unicode s2):
    """
    >>> unicode_eq(ustring1, ustring1)
    True
    >>> unicode_eq(ustring1+ustring2, ustring1+ustring2)
    True
    >>> unicode_eq(ustring1, ustring2)
    False
    """
    return s1 == s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_neq(unicode s1, unicode s2):
    """
    >>> unicode_neq(ustring1, ustring1)
    False
    >>> unicode_neq(ustring1+ustring2, ustring1+ustring2)
    False
    >>> unicode_neq(ustring1, ustring2)
    True
    """
    return s1 != s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_literal_eq(unicode s):
    """
    >>> unicode_literal_eq(ustring1)
    True
    >>> unicode_literal_eq((ustring1+ustring2)[:len(ustring1)])
    True
    >>> unicode_literal_eq(ustring2)
    False
    """
    return s == u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_literal_neq(unicode s):
    """
    >>> unicode_literal_neq(ustring1)
    False
    >>> unicode_literal_neq((ustring1+ustring2)[:len(ustring1)])
    False
    >>> unicode_literal_neq(ustring2)
    True
    """
    return s != u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
    "//CascadedCmpNode"
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def unicode_cascade(unicode s1, unicode s2):
    """
    >>> unicode_cascade(ustring1, ustring1)
    True
    >>> unicode_cascade(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> unicode_cascade(ustring1, ustring2)
    False
    """
    return s1 == s2 == u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_cascade_untyped_end(unicode s1, unicode s2):
    """
    >>> unicode_cascade_untyped_end(ustring1, ustring1)
    True
    >>> unicode_cascade_untyped_end(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> unicode_cascade_untyped_end(ustring1, ustring2)
    False
    """
    return s1 == s2 == u"abcdefg" == (ustring1) == ustring1

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def unicode_cascade_untyped_end_bool(unicode s1, unicode s2):
    """
    >>> unicode_cascade_untyped_end_bool(ustring1, ustring1)
    True
    >>> unicode_cascade_untyped_end_bool(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> unicode_cascade_untyped_end_bool(ustring1, ustring2)
    False
    """
    if s1 == s2 == u"abcdefg" == (ustring1) == ustring1:
        return True
    else:
        return False


# str

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def str_eq(str s1, str s2):
    """
    >>> str_eq(string1, string1)
    True
    >>> str_eq(string1+string2, string1+string2)
    True
    >>> str_eq(string1, string2)
    False
    """
    return s1 == s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def str_neq(str s1, str s2):
    """
    >>> str_neq(string1, string1)
    False
    >>> str_neq(string1+string2, string1+string2)
    False
    >>> str_neq(string1, string2)
    True
    """
    return s1 != s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def str_literal_eq(str s):
    """
    >>> str_literal_eq(string1)
    True
    >>> str_literal_eq((string1+string2)[:len(string1)])
    True
    >>> str_literal_eq(string2)
    False
    """
    return s == "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def str_literal_neq(str s):
    """
    >>> str_literal_neq(string1)
    False
    >>> str_literal_neq((string1+string2)[:len(string1)])
    False
    >>> str_literal_neq(string2)
    True
    """
    return s != "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def str_cascade(str s1, str s2):
    """
    >>> str_cascade(string1, string1)
    True
    >>> str_cascade(string1, (string1+string2)[:len(string1)])
    True
    >>> str_cascade(string1, string2)
    False
    """
    return s1 == s2 == "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def str_cascade_untyped_end(str s1, str s2):
    """
    >>> str_cascade_untyped_end(string1, string1)
    True
    >>> str_cascade_untyped_end(string1, (string1+string2)[:len(string1)])
    True
    >>> str_cascade_untyped_end(string1, string2)
    False
    """
    return s1 == s2 == "abcdefg" == (string1) == string1

# bytes

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def bytes_eq(bytes s1, bytes s2):
    """
    >>> bytes_eq(bstring1, bstring1)
    True
    >>> bytes_eq(bstring1+bstring2, bstring1+bstring2)
    True
    >>> bytes_eq(bstring1, bstring2)
    False
    """
    return s1 == s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def bytes_neq(bytes s1, bytes s2):
    """
    >>> bytes_neq(bstring1, bstring1)
    False
    >>> bytes_neq(bstring1+bstring2, bstring1+bstring2)
    False
    >>> bytes_neq(bstring1, bstring2)
    True
    """
    return s1 != s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def bytes_literal_eq(bytes s):
    """
    >>> bytes_literal_eq(bstring1)
    True
    >>> bytes_literal_eq((bstring1+bstring2)[:len(bstring1)])
    True
    >>> bytes_literal_eq(bstring2)
    False
    """
    return s == b"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def bytes_literal_neq(bytes s):
    """
    >>> bytes_literal_neq(bstring1)
    False
    >>> bytes_literal_neq((bstring1+bstring2)[:len(bstring1)])
    False
    >>> bytes_literal_neq(bstring2)
    True
    """
    return s != b"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def bytes_cascade(bytes s1, bytes s2):
    """
    >>> bytes_cascade(bstring1, bstring1)
    True
    >>> bytes_cascade(bstring1, (bstring1+bstring2)[:len(bstring1)])
    True
    >>> bytes_cascade(bstring1, bstring2)
    False
    """
    return s1 == s2 == b"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def bytes_cascade_untyped_end(bytes s1, bytes s2):
    """
    >>> bytes_cascade_untyped_end(bstring1, bstring1)
    True
    >>> bytes_cascade_untyped_end(bstring1, (bstring1+bstring2)[:len(bstring1)])
    True
    >>> bytes_cascade_untyped_end(bstring1, bstring2)
    False
    """
    return s1 == s2 == b"abcdefg" == (bstring1) == bstring1


# basestring

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_eq(basestring s1, basestring s2):
    """
    >>> basestring_eq(string1, string1)
    True
    >>> basestring_eq(string1, ustring1)
    True
    >>> basestring_eq(string1+string2, string1+string2)
    True
    >>> basestring_eq(string1+ustring2, ustring1+string2)
    True
    >>> basestring_eq(string1, string2)
    False
    >>> basestring_eq(string1, ustring2)
    False
    >>> basestring_eq(ustring1, string2)
    False
    """
    return s1 == s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_neq(basestring s1, basestring s2):
    """
    >>> basestring_neq(string1, string1)
    False
    >>> basestring_neq(string1+string2, string1+string2)
    False
    >>> basestring_neq(string1+ustring2, ustring1+string2)
    False
    >>> basestring_neq(string1, string2)
    True
    >>> basestring_neq(string1, ustring2)
    True
    >>> basestring_neq(ustring1, string2)
    True
    """
    return s1 != s2

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_str_literal_eq(basestring s):
    """
    >>> basestring_str_literal_eq(string1)
    True
    >>> basestring_str_literal_eq((string1+string2)[:len(string1)])
    True
    >>> basestring_str_literal_eq(string2)
    False
    """
    return s == "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_unicode_literal_eq(basestring s):
    """
    >>> basestring_unicode_literal_eq(string1)
    True
    >>> basestring_unicode_literal_eq((string1+string2)[:len(string1)])
    True
    >>> basestring_unicode_literal_eq(string2)
    False
    """
    return s == u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_str_literal_neq(basestring s):
    """
    >>> basestring_str_literal_neq(string1)
    False
    >>> basestring_str_literal_neq((string1+string2)[:len(string1)])
    False
    >>> basestring_str_literal_neq(string2)
    True
    """
    return s != "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_unicode_literal_neq(basestring s):
    """
    >>> basestring_unicode_literal_neq(string1)
    False
    >>> basestring_unicode_literal_neq((string1+string2)[:len(string1)])
    False
    >>> basestring_unicode_literal_neq(string2)
    True
    """
    return s != u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
    "//CascadedCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def basestring_cascade_str(basestring s1, basestring s2):
    """
    >>> basestring_cascade_str(string1, string1)
    True
    >>> basestring_cascade_str(string1, (string1+string2)[:len(string1)])
    True
    >>> basestring_cascade_str(string1, string2)
    False
    """
    return s1 == s2 == "abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
    "//CascadedCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def basestring_cascade_unicode(basestring s1, basestring s2):
    """
    >>> basestring_cascade_unicode(string1, string1)
    True
    >>> basestring_cascade_unicode(ustring1, string1)
    True
    >>> basestring_cascade_unicode(string1, ustring1)
    True
    >>> basestring_cascade_unicode(string1, (string1+string2)[:len(string1)])
    True
    >>> basestring_cascade_unicode(string1, string2)
    False
    >>> basestring_cascade_unicode(ustring1, string2)
    False
    >>> basestring_cascade_unicode(string1, ustring2)
    False
    """
    return s1 == s2 == u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def basestring_cascade_untyped_end(basestring s1, basestring s2):
    """
    >>> basestring_cascade_untyped_end(string1, string1)
    True
    >>> basestring_cascade_untyped_end(string1, (string1+string2)[:len(string1)])
    True
    >>> basestring_cascade_untyped_end(string1, string2)
    False
    """
    return s1 == s2 == "abcdefg" == (string1) == string1


# untyped/literal comparison

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def untyped_unicode_literal_eq_bool(s):
    """
    >>> untyped_unicode_literal_eq_bool(string1)
    True
    >>> untyped_unicode_literal_eq_bool(ustring1)
    True
    >>> untyped_unicode_literal_eq_bool((string1+string2)[:len(string1)])
    True
    >>> untyped_unicode_literal_eq_bool(string2)
    False
    >>> untyped_unicode_literal_eq_bool(ustring2)
    False
    """
    return True if s == u"abcdefg" else False

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def untyped_str_literal_eq_bool(s):
    """
    >>> untyped_str_literal_eq_bool(string1)
    True
    >>> untyped_str_literal_eq_bool(ustring1)
    True
    >>> untyped_str_literal_eq_bool((string1+string2)[:len(string1)])
    True
    >>> untyped_str_literal_eq_bool(string2)
    False
    >>> untyped_str_literal_eq_bool(ustring2)
    False
    """
    return True if s == "abcdefg" else False

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = True]",
    "//CascadedCmpNode",
    "//CascadedCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def untyped_unicode_cascade(s1, unicode s2):
    """
    >>> untyped_unicode_cascade(ustring1, ustring1)
    True
    >>> untyped_unicode_cascade(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> untyped_unicode_cascade(ustring1, ustring2)
    False
    """
    return s1 == s2 == u"abcdefg"

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = False]",
    "//CascadedCmpNode",
    "//CascadedCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = True]",
)
def untyped_unicode_cascade_bool(s1, unicode s2):
    """
    >>> untyped_unicode_cascade_bool(ustring1, ustring1)
    True
    >>> untyped_unicode_cascade_bool(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> untyped_unicode_cascade_bool(ustring1, ustring2)
    False
    """
    return True if s1 == s2 == u"abcdefg" else False

@cython.test_assert_path_exists(
    "//PrimaryCmpNode",
    "//PrimaryCmpNode[@is_pycmp = True]",
    "//CascadedCmpNode",
#    "//CascadedCmpNode[@is_pycmp = False]",
)
@cython.test_fail_if_path_exists(
    "//CascadedCmpNode[@is_pycmp = True]",
    "//PrimaryCmpNode[@is_pycmp = False]",
)
def untyped_untyped_unicode_cascade_bool(s1, s2):
    """
    >>> untyped_untyped_unicode_cascade_bool(ustring1, ustring1)
    True
    >>> untyped_untyped_unicode_cascade_bool(ustring1, (ustring1+ustring2)[:len(ustring1)])
    True
    >>> untyped_untyped_unicode_cascade_bool(ustring1, ustring2)
    False
    >>> untyped_untyped_unicode_cascade_bool(string1, string2)
    False
    >>> untyped_untyped_unicode_cascade_bool(1, 2)
    False
    >>> untyped_untyped_unicode_cascade_bool(1, 1)
    False
    """
    return True if s1 == s2 == u"abcdefg" else False


# bytes/str comparison

@cython.test_assert_path_exists(
    '//CondExprNode',
    '//CondExprNode//PrimaryCmpNode',
    '//CondExprNode//PrimaryCmpNode[@operator = "=="]',
    '//CondExprNode//PrimaryCmpNode[@operator = "!="]',
)
def literal_compare_bytes_str():
    """
    >>> literal_compare_bytes_str()
    True
    """
    # we must not constant fold the subexpressions as the result is Py2/3 sensitive
    return b'abc' != 'abc' if IS_PY3 else b'abc' == 'abc'
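
# In Python 2, 'abc' is a bytes string, so b'abc' == 'abc' holds; in Python 3,
# bytes and str never compare equal, so b'abc' != 'abc' holds instead.
# Folding either comparison at compile time would bake one of the two answers
# into the extension module, hence the runtime IS_PY3 check above.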
Cython-0.26.1/tests/run/tp_new.pyx
# ticket: 808

cimport cython

cdef class MyType:
    cdef public args, kwargs
    def __cinit__(self, *args, **kwargs):
        self.args, self.kwargs = args, kwargs
        print "CINIT"
    def __init__(self, *args, **kwargs):
        print "INIT"

cdef class MySubType(MyType):
    def __cinit__(self, *args, **kwargs):
        self.args, self.kwargs = args, kwargs
        print "CINIT(SUB)"
    def __init__(self, *args, **kwargs):
        print "INIT"

class MyClass(object):
    def __cinit__(self, *args, **kwargs):
        self.args, self.kwargs = args, kwargs
        print "CINIT"
    def __init__(self, *args, **kwargs):
        print "INIT"

class MyTypeSubClass(MyType):
    def __cinit__(self, *args, **kwargs):
        # not called: Python class!
        print "CINIT(PYSUB)"
    def __init__(self, *args, **kwargs):
        print "INIT"

# See ticket T808, vtab must be set even if there is no __cinit__.

cdef class Base(object):
    pass

cdef class Derived(Base):
    cpdef int f(self):
        return 42

def test_derived_vtab():
    """
    >>> test_derived_vtab()
    42
    """
    cdef Derived d = Derived.__new__(Derived)
    return d.f()
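
    # The cpdef call above dispatches through the extension type's C method
    # table (vtab).  Ticket T808 was about ensuring that tp_new installs the
    # vtab pointer even for types without a __cinit__, so that objects created
    # via __new__ can still call their cpdef/cdef methods safely.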


# only these can be safely optimised:
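#
# When the type whose __new__ is looked up and the type being instantiated
# are both known at compile time to be the same extension type (or a
# builtin), the call can be lowered to a direct tp_new C-API call; the
# test_fail_if_path_exists decorators below assert that no generic attribute
# lookup or Python-level method call is left in the generated code.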

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new():
    """
    >>> isinstance(make_new(), MyType)
    CINIT
    True
    """
    m = MyType.__new__(MyType)
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_typed_target():
    """
    >>> isinstance(make_new_typed_target(), MyType)
    CINIT
    True
    """
    cdef MyType m
    m = MyType.__new__(MyType)
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_with_args():
    """
    >>> isinstance(make_new_with_args(), MyType)
    CINIT
    (1, 2, 3)
    {}
    True
    """
    m = MyType.__new__(MyType, 1, 2 ,3)
    print m.args
    print m.kwargs
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_with_args_kwargs():
    """
    >>> isinstance(make_new_with_args_kwargs(), MyType)
    CINIT
    (1, 2, 3)
    {'a': 4}
    True
    """
    m = MyType.__new__(MyType, 1, 2 ,3, a=4)
    print m.args
    print m.kwargs
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_builtin():
    """
    >>> isinstance(make_new_builtin(), tuple)
    True
    """
    m = dict.__new__(dict)
    m = list.__new__(list)
    m = tuple.__new__(tuple)
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_none(type t=None):
    """
    >>> make_new_none()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ... is not a type object (NoneType)
    """
    m = t.__new__(t)
    return m

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode/AttributeNode',
    '//PyMethodCallNode',
)
def make_new_kwargs(type t=None):
    """
    >>> m = make_new_kwargs(MyType)
    CINIT
    >>> isinstance(m, MyType)
    True
    >>> m.args
    (1, 2, 3)
    >>> m.kwargs
    {'a': 5}
    """
    m = t.__new__(t, 1, 2, 3, a=5)
    return m

# these cannot:

@cython.test_assert_path_exists('//PyMethodCallNode/AttributeNode')
@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def make_new_pyclass():
    """
    >>> isinstance(make_new_pyclass(), MyTypeSubClass)
    CINIT
    True
    """
    m = MyClass.__new__(MyClass)
    m = MyTypeSubClass.__new__(MyTypeSubClass)
    return m

@cython.test_assert_path_exists('//PyMethodCallNode/AttributeNode')
@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def make_new_args(type t1=None, type t2=None):
    """
    >>> isinstance(make_new_args(), MyType)
    CINIT
    True
    >>> isinstance(make_new_args(MyType), MyType)
    CINIT
    True
    >>> isinstance(make_new_args(MyType, MyType), MyType)
    CINIT
    True

    >>> isinstance(make_new_args(MyType, MySubType), MySubType)
    Traceback (most recent call last):
    TypeError: tp_new.MyType.__new__(tp_new.MySubType) is not safe, use tp_new.MySubType.__new__()
    >>> isinstance(make_new_args(MySubType, MyType), MyType)
    Traceback (most recent call last):
    TypeError: tp_new.MySubType.__new__(tp_new.MyType): tp_new.MyType is not a subtype of tp_new.MySubType
    """
    if t1 is None:
        t1 = MyType
    if t2 is None:
        t2 = MyType
    m = t1.__new__(t2)
    return m

@cython.test_assert_path_exists('//PyMethodCallNode/AttributeNode')
@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def make_new_none_typed(tuple t=None):
    """
    >>> make_new_none_typed()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ... is not a type object (NoneType)
    """
    m = t.__new__(t)
    return m

@cython.test_assert_path_exists('//PyMethodCallNode/AttributeNode')
@cython.test_fail_if_path_exists('//PythonCapiCallNode')
def make_new_untyped(t):
    """
    >>> make_new_untyped(None)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ... is not a type object (NoneType)
    """
    m = t.__new__(t)
    return m
Cython-0.26.1/tests/run/classkwonlyargs.pyx
__doc__ = u"""
    >>> spam = Spam()
    >>> b,c,d,e,f,g,h,k = spam.b,spam.c,spam.d,spam.e,spam.f,spam.g,spam.h,spam.k

    >>> b(1,2,3)
    >>> b(1,2,3,4)
    Traceback (most recent call last):
    TypeError: b() takes exactly 4 positional arguments (5 given)

    >>> c(1,2)
    >>> c(1,2,3)
    >>> c(1,2,3,4)
    Traceback (most recent call last):
    TypeError: c() takes at most 4 positional arguments (5 given)

    >>> d(1,2)
    >>> d(1,2, c=1)

    >>> d(1,2,3)
    Traceback (most recent call last):
    TypeError: d() takes exactly 3 positional arguments (4 given)
    >>> d(1,2, d=1)
    Traceback (most recent call last):
    TypeError: d() got an unexpected keyword argument 'd'

    >>> e(1,2)
    >>> e(1,2, c=1)
    >>> e(1,2, d=1)
    >>> e(1,2, c=1, d=2, e=3)
    >>> e(1,2,3)
    >>> e(1,2,3,4)
    Traceback (most recent call last):
    TypeError: e() takes at most 4 positional arguments (5 given)

    >>> f(1,2, c=1)
    >>> f(1,2, c=1, d=2)

    >>> f(1,2,3)
    Traceback (most recent call last):
    TypeError: f() takes exactly 3 positional arguments (4 given)
    >>> f(1,2)
    Traceback (most recent call last):
    TypeError: f() needs keyword-only argument c
    >>> f(1,2, c=1, e=2)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'e'

    >>> g(1,2, c=1, f=2)
    >>> g(1,2, c=1, e=0, f=2, d=11)
    >>> g(1,2, c=1, f=2, e=0, x=25)

    >>> g(1,2,3)
    Traceback (most recent call last):
    TypeError: g() takes exactly 3 positional arguments (4 given)
    >>> g(1,2)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument c
    >>> g(1,2, c=1)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument f

    >>> h(1,2, c=1, f=2)
    >>> h(1,2, c=1, f=2, e=3)
    >>> h(1,2,3,4,5,6, c=1, f=2)
    >>> h(1,2,3,4,5,6, c=1, f=2, e=3, x=25, y=11)

    >>> h(1,2,3)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    >>> h(1,2, d=1)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c

    >>> k(1,2, c=1, f=2)
    >>> k(1,2, c=1, f=2, e=3)
    >>> k(1,2,3,4,5,6, d=1, f=2)
    >>> k(1,2,3,4,5,6, d=1, f=2, e=3, x=25, y=11)

    >>> k(1,2,3)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    >>> k(1,2, d=1)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
"""

class Spam:
    def b(self, a, b, c):
        pass

    def c(self, a, b, c=1):
        pass

    def d(self, a, b, *, c = 88):
        pass

    def e(self, a, b, c = 88, **kwds):
        pass

    def f(self, a, b, *, c, d = 42):
        pass

    def g(self, a, b, *, c, d = 42, e = 17, f, **kwds):
        pass

    def h(self, a, b, *args, c, d = 42, e = 17, f, **kwds):
        pass

    def k(self, a, b, c=1, *args, d = 42, e = 17, f, **kwds):
        pass
Cython-0.26.1/tests/run/typeddefaultargT373.pyx
# ticket: 373

import math

cdef class MyClass:
    """
    >>> x=MyClass()
    4
    """
    def __cinit__(self, int arg=2*2):
        print arg

cdef class MyOtherClass:
    """
    >>> x=MyOtherClass()
    8
    """
    def __cinit__(self, int arg=4*int(math.sqrt(4))):
        print arg
Cython-0.26.1/tests/run/r_starargs.pyx
def swallow(name, airspeed, *args, **kwds):
    """
    >>> swallow("Brian", 42)
    Name: Brian
    Airspeed: 42
    Extra args: ()
    Extra keywords: []
    >>> swallow("Brian", 42, "African")
    Name: Brian
    Airspeed: 42
    Extra args: ('African',)
    Extra keywords: []
    >>> swallow("Brian", airspeed = 42)
    Name: Brian
    Airspeed: 42
    Extra args: ()
    Extra keywords: []
    >>> swallow("Brian", airspeed = 42, species = "African", coconuts = 3)
    Name: Brian
    Airspeed: 42
    Extra args: ()
    Extra keywords: [('coconuts', 3), ('species', 'African')]
    >>> swallow("Brian", 42, "African", coconuts = 3)
    Name: Brian
    Airspeed: 42
    Extra args: ('African',)
    Extra keywords: [('coconuts', 3)]
    """
    print u"Name:", name
    print u"Airspeed:", airspeed
    print u"Extra args:", args
    print u"Extra keywords:", sorted(kwds.items())
Cython-0.26.1/tests/run/module_init_error.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import test_fail_in_init; test_fail_in_init.try_import()"
PYTHON -c "import test_fail_in_init_after_atexit; test_fail_in_init_after_atexit.try_import()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
    ext_modules = cythonize("fail_in_init*.pyx")
)

######## test_fail_in_init.py ########

import sys

def try_import():
    try:
        import fail_in_init
    except ValueError:
        pass
    else:
        raise RuntimeError("expected ValueError from import")

    if (3, 3) <= sys.version_info < (3, 5):
        assert 'fail_in_init' not in sys.modules
    elif 'fail_in_init' in sys.modules:
        try:
            sys.modules['fail_in_init'].fail()
        except AttributeError:
            pass  # this is "ok enough"
        except ValueError:
            pass  # this is what we had expected
        else:
            raise RuntimeError("expected ValueError from call through sys.modules")

######## fail_in_init.pyx ########

def fail():
    raise ValueError("kaputt")

fail()

######## test_fail_in_init_after_atexit.py ########

def try_import():
    try:
        import fail_in_init_after_atexit
    except ValueError:
        pass
    else:
        raise RuntimeError("expected ValueError from import")

######## fail_in_init_after_atexit.pyx ########

X = 5

def callback():
    try:
        print(X)
    except NameError:
        pass  # NameError is acceptable, a crash is not

import atexit
atexit.register(callback)

def fail():
    raise ValueError("holla!")

fail()
Cython-0.26.1/tests/run/watts1.pyx
def test():
    """
    >>> test() == 55 + 66
    True
    """
    cdef int a,b
    cdef object foo = (55,66)
    a,b = foo
    return a + b
Cython-0.26.1/tests/run/unicode_kwargs.pyx
# -*- coding: utf8 -*-

try:
    import platform
    IS_PYPY = platform.python_implementation() == 'PyPy'
except (ImportError, AttributeError):
    IS_PYPY = False

ustring_a = u'a'
ustring_ascii = u'abc'
ustring_nonascii = u'àöé\u0888'


def accept_kwargs(a, b, c=1, **kwargs):
    """
    >>> accept_kwargs(1, 2, 3)
    (1, 2, 3, {})
    >>> accept_kwargs(1, 2, 3, d=5)
    (1, 2, 3, {'d': 5})

    >>> accept_kwargs(1, 2, 3, **{ustring_a: 5})
    Traceback (most recent call last):
    TypeError: accept_kwargs() got multiple values for keyword argument 'a'

    >>> if not IS_PYPY: a, b, c, kwargs = accept_kwargs(1, 2, 3, **{ustring_ascii: 5})
    >>> IS_PYPY and (1,2,3,1) or (a,b,c,len(kwargs))
    (1, 2, 3, 1)
    >>> IS_PYPY and 5 or kwargs[ustring_ascii]
    5

    >>> if not IS_PYPY: a, b, c, kwargs = accept_kwargs(1, 2, 3, **{ustring_nonascii: 5})
    >>> IS_PYPY and (1,2,3,1) or (a,b,c,len(kwargs))
    (1, 2, 3, 1)
    >>> IS_PYPY and 5 or kwargs[ustring_nonascii]
    5

    >>> if not IS_PYPY: a, b, c, kwargs = accept_kwargs(1, 2, 3, **{ustring_nonascii: 5, ustring_ascii: 6})
    >>> IS_PYPY and (1,2,3,2) or (a,b,c,len(kwargs))
    (1, 2, 3, 2)
    >>> IS_PYPY and 5 or kwargs[ustring_nonascii]
    5
    >>> IS_PYPY and 6 or kwargs[ustring_ascii]
    6
    """
    return a, b, c, kwargs

def unexpected_kwarg(a, b, c=1):
    """
    >>> unexpected_kwarg(1, b=2)
    (1, 2, 1)
    >>> unexpected_kwarg(1, 2, **{ustring_ascii: 5})
    Traceback (most recent call last):
    TypeError: unexpected_kwarg() got an unexpected keyword argument 'abc'
    >>> unexpected_kwarg(1, 2, 3, d=5)
    Traceback (most recent call last):
    TypeError: unexpected_kwarg() got an unexpected keyword argument 'd'
    """
    return a, b, c
Cython-0.26.1/tests/run/class_attribute_init_values_T18.pyx
# ticket: 18

__doc__ = u"""
>>> f = PyFoo()
>>> print(f.bar)
5
>>> print(f.baz)
someval

>>> f = MyPyFoo()
>>> print(f.bar)
7
>>> print(f.baz)
anotherval

>>> f = CyFoo()
>>> print(f.bar)
5
>>> print(f.baz)
anotherval

>>> f = MyCyFoo()
>>> print(f.bar)
7
>>> print(f.baz)
anotherval

>>> f = AnotherFoo()
>>> print(f.bar)
8
>>> print(f.baz)
yetanotherval
"""

# this works:

class PyFoo(object):
   bar = 5
   baz = u"someval"

class MyPyFoo(PyFoo):
   bar = 7
   baz = u"anotherval"

# this doesn't:

cdef class CyFoo:
    cdef public int bar = 5
    cdef public object baz = u"someval"

cdef class MyCyFoo(CyFoo):
    cdef public int bar = 7
    cdef public object baz = u"anotherval"

class AnotherFoo(CyFoo):
    bar = 8
    baz = u"yetanotherval"
Cython-0.26.1/tests/run/for_from_pyvar_loop_T601_extern_def.h
typedef unsigned long Ulong;
Cython-0.26.1/tests/run/lepage_1.pyx
__doc__ = u"""
    >>> a = A(1,2,3)
    >>> a[0]
    1.0
    >>> a[1]
    2.0
    >>> a[2]
    3.0
"""

cdef class A:
    cdef double[3] x

    def __init__(self, *args):
        cdef int i, max
        max = len(args)
        if max > 3:
            max = 3
        for i from 0 <= i < max:
            self.x[i] = args[i]
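
        # "for i from 0 <= i < max" is Cython's legacy integer-loop syntax;
        # with a typed loop variable it behaves like "for i in range(max)"
        # and compiles to a plain C loop.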

    def __getitem__(self,i):
        return self.x[i]
Cython-0.26.1/tests/run/cpp_classes.pyx
# mode: run
# tag: cpp, werror

from libcpp.vector cimport vector

cdef extern from "shapes.h" namespace "shapes":

    cdef cppclass Shape:
        float area()

    cdef cppclass Ellipse(Shape):
        Ellipse(int a, int b) except +

    cdef cppclass Circle(Ellipse):
        int radius
        Circle(int r) except +

    cdef cppclass Rectangle(Shape):
        int width
        int height
        Rectangle() except +
        Rectangle(int h, int w) except +
        int method(int x)
        int method(bint b)

    cdef cppclass Square(Rectangle):
        int side
        Square(int s) except +

    cdef cppclass Empty(Shape):
        pass

    int constructor_count, destructor_count

def test_new_del():
    """
    >>> test_new_del()
    2 0
    2 2
    """
    c,d = constructor_count, destructor_count
    cdef Rectangle *rect = new Rectangle(10, 20)
    cdef Circle *circ = new Circle(15)
    print constructor_count-c, destructor_count-d
    del rect, circ
    print constructor_count-c, destructor_count-d

def test_default_constructor():
    """
    >>> test_default_constructor()
    0.0
    """
    shape = new Empty()
    try:
        return shape.area()
    finally:
        del shape

def test_rect_area(w, h):
    """
    >>> test_rect_area(3, 4)
    12.0
    """
    cdef Rectangle *rect = new Rectangle(w, h)
    try:
        return rect.area()
    finally:
        del rect

def test_overload_bint_int():
    """
    >>> test_overload_bint_int()
    202
    201
    """
    cdef Rectangle *rect1 = new Rectangle(10, 20)
    cdef Rectangle *rect2 = new Rectangle(10, 20)

    try:
        print rect1.method( 2)
        print rect2.method( True)
    finally:
        del rect1
        del rect2

def test_square_area(w):
    """
    >>> test_square_area(15)
    (225.0, 225.0)
    """
    cdef Square *sqr = new Square(w)
    cdef Rectangle *rect = sqr
    try:
        return rect.area(), sqr.area()
    finally:
        del sqr

cdef double get_area(Rectangle s):
    return s.area()

def test_value_call(int w):
    """
    >>> test_value_call(5)
    (25.0, 25.0)
    """
    cdef Square *sqr = new Square(w)
    cdef Rectangle *rect = sqr
    try:
        return get_area(sqr[0]), get_area(rect[0])
    finally:
        del sqr

def get_destructor_count():
    return destructor_count

def test_stack_allocation(int w, int h):
    """
    >>> d = test_stack_allocation(10, 12)
    125
    >>> get_destructor_count() - d
    1
    """
    cdef Rectangle rect
    rect.width = w
    rect.height = h
    print rect.method(5)
    return destructor_count

cdef class EmptyHolder:
    cdef Empty empty

cdef class AnotherEmptyHolder(EmptyHolder):
    cdef Empty another_empty

def test_class_member():
    """
    >>> test_class_member()
    """
    start_constructor_count = constructor_count
    start_destructor_count = destructor_count
    e1 = EmptyHolder()
    assert constructor_count - start_constructor_count == 1, \
           constructor_count - start_constructor_count
    e2 = EmptyHolder()
    assert constructor_count - start_constructor_count == 2, \
           constructor_count - start_constructor_count
    del e1, e2
    assert destructor_count - start_destructor_count == 2, \
           destructor_count - start_destructor_count

def test_derived_class_member():
    """
    >>> test_derived_class_member()
    """
    start_constructor_count = constructor_count
    start_destructor_count = destructor_count
    e = AnotherEmptyHolder()
    assert constructor_count - start_constructor_count == 2, \
           constructor_count - start_constructor_count
    del e
    assert destructor_count - start_destructor_count == 2, \
           destructor_count - start_destructor_count

cdef class TemplateClassMember:
    cdef vector[int] x
    cdef vector[vector[Empty]] vec

def test_template_class_member():
    """
    >>> test_template_class_member()
    """
    cdef vector[Empty] inner
    inner.push_back(Empty())
    inner.push_back(Empty())
    o = TemplateClassMember()
    o.vec.push_back(inner)

    start_destructor_count = destructor_count
    del o
    assert destructor_count - start_destructor_count == 2, \
           destructor_count - start_destructor_count


ctypedef vector[int]* vector_int_ptr
cdef vector[vector_int_ptr] create_to_delete() except *:
    cdef vector[vector_int_ptr] v
    v.push_back(new vector[int]())
    return v
cdef int f(int x):
    return x

def test_nested_del():
    cdef vector[vector_int_ptr] v
    v.push_back(new vector[int]())
    del v[0]
    del create_to_delete()[f(f(0))]
Cython-0.26.1/tests/run/classbody_exec.pyx
__doc__ = u"""
    >>> print(D)
    {u'answer': (42, 42)}
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"u'", u"'")

D = {}

def foo(x):
    return x, x

cdef class Spam:
    answer = 42
    D[u'answer'] = foo(answer)
Cython-0.26.1/tests/run/constant_folding.py
# coding=utf8
# mode: run
# tag: constant_folding

import cython


@cython.test_fail_if_path_exists(
    "//UnaryMinusNode",
    "//UnaryPlusNode",
)
def unop_floats():
    """
    >>> unop_floats()
    (False, 2.0, -2.0, False, 2.0, -2.0, -2.0)
    """
    not1   = not 2.0
    plus1  = + 2.0
    minus1 = - 2.0
    not3   = not not not 2.0
    plus3  = +++ 2.0
    minus3 = --- 2.0
    mix    = +-++-- 2.0
    return not1, plus1, minus1, not3, plus3, minus3, mix


@cython.test_fail_if_path_exists(
    "//UnaryMinusNode",
    "//UnaryPlusNode",
    "//CoerceToPyTypeNode",
)
def unop_py_floats_tuple():
    """
    >>> unop_floats()
    (False, 2.0, -2.0, False, 2.0, -2.0, -2.0)
    """
    return (
        not 2.0,
        + 2.0,
        - 2.0,
        not not not 2.0,
        +++ 2.0,
        --- 2.0,
        +-++-- 2.0)


@cython.test_fail_if_path_exists(
    "//UnaryMinusNode",
    "//UnaryPlusNode",
)
def unop_ints():
    """
    >>> unop_ints()
    (False, 2, -2, False, 2, -2, -2)
    """
    not1   = not 2
    plus1  = + 2
    minus1 = - 2
    not3   = not not not 2
    plus3  = +++ 2
    minus3 = --- 2
    mix    = +-++-- 2
    return not1, plus1, minus1, not3, plus3, minus3, mix


@cython.test_fail_if_path_exists(
    "//UnaryMinusNode",
    "//UnaryPlusNode",
    "//NotNode",
)
def unop_bool():
    """
    >>> unop_bool()
    (False, 1, -1, False, 1, -1, -1)
    """
    not1   = not True
    plus1  = + True
    minus1 = - True
    not3   = not not not True
    plus3  = +++ True
    minus3 = --- True
    mix    = +-++-- True
    return not1, plus1, minus1, not3, plus3, minus3, mix


@cython.test_fail_if_path_exists(
    "//AddNode",
    "//SubNode",
)
def binop_bool():
    """
    >>> binop_bool()
    (2, 1, 0, True, True, 1, False, 2, 2, -2, False, True, 1, False)
    """
    plus1  = True + True
    pmix1  = True + 0
    minus1 = True - True
    and1   = True & True
    or1    = True | True
    ormix1 = True | 0
    xor1   = True ^ True
    plus3  = False + True + False + True
    pmix3  = False + True + 0 + True
    minus3 = False - True - False - True
    and3   = False & True & False & True
    or3    = False | True | False | True
    ormix3 = False | 0 | False | True
    xor3   = False ^ True ^ False ^ True
    return plus1, pmix1, minus1, and1, or1, ormix1, xor1, plus3, pmix3, minus3, and3, or3, ormix3, xor3


@cython.test_fail_if_path_exists(
    "//SliceIndexNode",
)
def slicing2():
    """
    >>> slicing2()
    ([1, 2, 3, 4], [3, 4], [1, 2, 3, 4], [3, 4], (1, 2, 3, 4), (3, 4), (1, 2, 3, 4), (3, 4))
    """
    lst0 = [1, 2, 3, 4][:]
    lst1 = [1, 2, 3, 4][2:]
    lst2 = [1, 2, 3, 4][:4]
    lst3 = [1, 2, 3, 4][2:4]

    tpl0 = (1, 2, 3, 4)[:]
    tpl1 = (1, 2, 3, 4)[2:]
    tpl2 = (1, 2, 3, 4)[:4]
    tpl3 = (1, 2, 3, 4)[2:4]

    return lst0, lst1, lst2, lst3, tpl0, tpl1, tpl2, tpl3


@cython.test_fail_if_path_exists(
    "//SliceIndexNode",
)
def str_slicing2():
    """
    >>> a,b,c,d = str_slicing2()
    >>> a == 'abc\\xE9def'[:]
    True
    >>> b == 'abc\\xE9def'[2:]
    True
    >>> c == 'abc\\xE9def'[:4]
    True
    >>> d == 'abc\\xE9def'[2:4]
    True
    """
    str0 = 'abc\xE9def'[:]
    str1 = 'abc\xE9def'[2:]
    str2 = 'abc\xE9def'[:4]
    str3 = 'abc\xE9def'[2:4]

    return str0, str1, str2, str3


@cython.test_fail_if_path_exists(
    "//IfStatNode",
)
def str_in_and_not_in():
    """
    >>> str_in_and_not_in()
    True
    """
    if 'a' in 'abc' and 'b' in 'abc' and 'c' in 'abc' and 'd' not in 'abc': return True
    else: return False


@cython.test_fail_if_path_exists(
    "//WhileStatNode",
)
def while_false():
    """
    >>> while_false()
    """
    while 1 == 0:
        return False


@cython.test_fail_if_path_exists(
    "//WhileStatNode",
    )
def while_false_else():
    """
    >>> while_false_else()
    True
    """
    while 1 == 0:
        return False
    else:
        return True


@cython.test_fail_if_path_exists(
    "//WhileStatNode//PrintStatNode",
    "//WhileStatNode//PrimaryCmpNode",
    "//WhileStatNode/BoolNode",
    "//WhileStatNode/IntNode",
)
@cython.test_assert_path_exists(
    "//WhileStatNode",
)
def while_true():
    """
    >>> while_true()
    True
    """
    while 1 == 1:
        return True
    else:
        print("FAIL")


@cython.test_fail_if_path_exists(
    "//ForInStatNode",
)
def for_in_empty():
    """
    >>> for_in_empty()
    """
    for i in []:
        print("LOOP")


@cython.test_fail_if_path_exists(
    "//ForInStatNode",
)
def for_in_empty_else():
    """
    >>> for_in_empty_else()
    True
    """
    for i in []:
        print("LOOP")
    else:
        return True


@cython.test_fail_if_path_exists(
    "//ComprehensionNode",
    "//ForInStatNode",
)
@cython.test_assert_path_exists(
    "//ListNode",
)
def for_in_empty_listcomp():
    """
    >>> for_in_empty_listcomp()
    []
    """
    return [i for i in []]


@cython.test_fail_if_path_exists(
    "//ComprehensionNode",
    "//ForInStatNode",
)
@cython.test_assert_path_exists(
    "//ListNode",
)
def for_in_empty_nested_listcomp():
    """
    >>> for_in_empty_nested_listcomp()
    []
    """
    return [x for _ in [] for x in [1, 2, 3]]


@cython.test_fail_if_path_exists(
    "//ForInStatNode//ForInStatNode",
)
@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//ComprehensionNode",
)
def for_in_nested_listcomp():
    """
    >>> for_in_nested_listcomp()
    []
    """
    return [x for x in [1, 2, 3] for _ in []]


@cython.test_fail_if_path_exists(
    "//MulNode",
)
def mult_empty_list():
    """
    >>> mult_empty_list()
    []
    """
    return 5 * [] * 100


@cython.test_fail_if_path_exists(
    "//MulNode",
)
def mult_list_int_int():
    """
    >>> mult_list_int_int()
    [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2]
    """
    return [1, 2] * 2 * 3


@cython.test_fail_if_path_exists(
    "//MulNode",
)
def mult_int_list_int():
    """
    >>> mult_int_list_int()
    [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2]
    """
    return 3 * [1, 2] * 2


@cython.test_fail_if_path_exists(
    "//MulNode",
    "//ListNode//IntNode",
)
def neg_mult_list():
    """
    >>> neg_mult_list()
    []
    """
    return -5 * [1, 2] * -100


@cython.test_fail_if_path_exists(
    "//MulNode",
    "//ListNode//IntNode",
)
def zero_mult_list():
    """
    >>> zero_mult_list()
    []
    """
    return 0 * [1, 2] * 0


@cython.test_assert_path_exists(
    "//BoolNode",
)
@cython.test_fail_if_path_exists(
    "//PrimaryCmpNode",
    "//MulNode",
    "//ListNode//IntNode",
)
def in_mult_list():
    """
    >>> in_mult_list()
    False
    """
    return 5 in 100 * [1, 2] * 0


@cython.test_assert_path_exists(
    "//BoolNode",
)
@cython.test_fail_if_path_exists(
    "//PrimaryCmpNode",
    "//MulNode",
    "//ListNode//IntNode",
)
def not_in_mult_list():
    """
    >>> not_in_mult_list()
    True
    """
    return 5 not in 100 * [1, 2] * 0


@cython.test_assert_path_exists(
    "//BoolNode",
)
@cython.test_fail_if_path_exists(
    "//PrimaryCmpNode",
    "//MulNode",
    "//ListNode//IntNode",
)
def combined():
    """
    >>> combined()
    True
    """
    return 5 in 100 * [1, 2] * 0  or  5 not in 100 * [] * 10


@cython.test_assert_path_exists(
    '//IntNode[@value = "2"]',
    '//IntNode[@value = "4"]',
    '//IntNode[@value = "5"]',
    '//IntNode[@value = "7"]',
    '//BoolBinopNode//PrimaryCmpNode',
    '//BoolBinopNode[.//PrimaryCmpNode//IntNode[@value = "4"] and .//PrimaryCmpNode//IntNode[@value = "5"]]',
    '//PrimaryCmpNode[.//IntNode[@value = "2"] and .//IntNode[@value = "4"]]',
    '//PrimaryCmpNode[.//IntNode[@value = "5"] and .//IntNode[@value = "7"]]',
)
@cython.test_fail_if_path_exists(
    '//IntNode[@value = "1"]',
    '//IntNode[@value = "8"]',
    '//PrimaryCmpNode[.//IntNode[@value = "4"] and .//IntNode[@value = "5"]]',
    '//PrimaryCmpNode[.//IntNode[@value = "2"] and .//IntNode[@value = "7"]]',
    '//BoolNode',
)
def cascaded_cmp_with_partial_constants(a, b):
    """
    >>> cascaded_cmp_with_partial_constants(3, 6)
    True
    >>> cascaded_cmp_with_partial_constants(1, 6)
    False
    >>> cascaded_cmp_with_partial_constants(4, 6)
    False
    >>> cascaded_cmp_with_partial_constants(3, 7)
    False
    >>> cascaded_cmp_with_partial_constants(3, 6)
    True
    """
    return 1 < 2 < a < 4 < 5 < b < 7 < 8


@cython.test_assert_path_exists(
    '//IntNode[@value = "2"]',
    '//IntNode[@value = "4"]',
    '//IntNode[@value = "5"]',
    '//IntNode[@value = "7"]',
    '//BoolBinopNode',
    '//SingleAssignmentNode//BoolBinopNode',
    '//SingleAssignmentNode//BoolBinopNode//NameNode[@name = "a"]',
    '//SingleAssignmentNode//BoolBinopNode//NameNode[@name = "b"]',
    '//BoolBinopNode[.//PrimaryCmpNode//IntNode[@value = "4"] and .//PrimaryCmpNode//IntNode[@value = "5"]]',
    '//BoolNode[@value = False]',
)
@cython.test_fail_if_path_exists(
    '//SingleAssignmentNode//NameNode[@name = "c"]',
    '//IntNode[@value = "1"]',
    '//PrimaryCmpNode[.//IntNode[@value = "4"] and .//IntNode[@value = "5"]]',
    '//PrimaryCmpNode[.//IntNode[@value = "2"] and .//IntNode[@value = "7"]]',
    '//BoolNode[@value = True]',
)
def cascaded_cmp_with_partial_constants_and_false_end(a, b, c):
    """
    >>> cascaded_cmp_with_partial_constants_and_false_end(3, 6, 8)
    False
    >>> cascaded_cmp_with_partial_constants_and_false_end(1, 6, 8)
    False
    >>> cascaded_cmp_with_partial_constants_and_false_end(4, 6, 8)
    False
    >>> cascaded_cmp_with_partial_constants_and_false_end(3, 7, 8)
    False
    """
    x = 1 < 2 < a < 4 < 5 < b < 7 < 7 < c
    return x


@cython.test_assert_path_exists(
    '//PrimaryCmpNode',
    '//PrimaryCmpNode//IntNode',
    '//PrimaryCmpNode//IntNode[@value = "0"]',
    '//PrimaryCmpNode//IntNode[@value = "4294967296"]',
)
@cython.test_fail_if_path_exists(
    '//PrimaryCmpNode//IntBinopNode',
    '//PrimaryCmpNode//IntNode[@value = "1"]',
    '//PrimaryCmpNode//IntNode[@value = "32"]',
)
def const_in_binop(v):
    """
    >>> const_in_binop(-1)
    1
    >>> const_in_binop(0)
    0
    >>> const_in_binop(1 << 32)
    1
    >>> const_in_binop(1 << 32 - 1)
    0
    """
    if v < 0 or v >= (1 << 32):
        return 1
    else:
        return 0
Cython-0.26.1/tests/run/extmember.pxd0000664000175000017500000000005012542002467020300 0ustar  stefanstefan00000000000000cdef class Spam:
    cdef public Spam e
Cython-0.26.1/tests/run/compiledef.pyx0000664000175000017500000000051112542002467020446 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> t
    True
    >>> f
    False
    >>> boolexpr
    True
    >>> num6
    6
    >>> intexpr
    10
"""

DEF c_t = True
DEF c_f = False
DEF c_boolexpr = c_t and True and not (c_f or False)

DEF c_num6 = 2*3
DEF c_intexpr = c_num6 + 4
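
# Illustrative note (not in the original file): DEF names are compile-time
# constants; Cython folds the expressions above during compilation, so the
# module-level assignments below simply bind the already-computed values
# (c_num6 == 6, c_intexpr == 10) to Python-visible names.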

t = c_t
f = c_f
boolexpr = c_boolexpr
num6 = c_num6
intexpr = c_intexpr
Cython-0.26.1/tests/run/boolop_py.py0000664000175000017500000000177112542002467020162 0ustar  stefanstefan00000000000000
def non_simple_values(obj1, obj2, obj3, obj4):
    """
    >>> non_simple_values(1, 2, 3, 4)
    (7, 3, 7, 3, 7, 7, 5, 5)
    >>> non_simple_values(0, 0, 3, 4)
    (0, 7, 4, 4, 4, 4, 4, 4)
    >>> non_simple_values(0, 0, 1, -1)
    (0, 0, -1, 0, -1, -1, 0, 0)
    >>> non_simple_values(1, -1, 1, -1)
    (0, 0, 0, 0, 0, 0, 0, 0)
    >>> non_simple_values(1, 2, 1, -1)
    (0, 3, 0, 3, 0, 0, 1, 1)
    >>> non_simple_values(2, 1, 1, -1)
    (0, 3, 1, 3, 0, 0, 1, 1)
    """
    and1 = obj1 + obj2 and obj3 + obj4
    or1 = obj1 + obj2 or obj3 + obj4
    and_or = obj1 + obj2 and obj3 + obj4 or obj1 + obj4
    or_and = obj1 + obj2 or obj3 + obj4 and obj1 + obj4
    and_or_and = obj1 + obj2 and obj3 + obj4 or obj1 + obj4 and obj2 + obj4
    and1_or_and = (and1 or (obj1 + obj4 and obj2 + obj4))
    or_and_or = (obj1 + obj2 or obj3 + obj4) and (obj1 + obj4 or obj2 + obj4)
    or1_and_or = (or1 and (obj1 + obj4 or obj2 + obj4))
    return (and1, or1, and_or, or_and, and_or_and, and1_or_and, or_and_or, or1_and_or)
Cython-0.26.1/tests/run/overflow_check.pxi0000664000175000017500000001532312542002467021326 0ustar  stefanstefan00000000000000cimport cython

cdef object two = 2
cdef int size_in_bits = sizeof(INT) * 8

cdef bint is_signed_ = not ((<INT>-1) > 0)
cdef INT max_value_ = <INT>(two ** (size_in_bits - is_signed_) - 1)
cdef INT min_value_ = ~max_value_
cdef INT half_ = max_value_ // 2

# Python visible.
is_signed = is_signed_
max_value = max_value_
min_value = min_value_
half = half_
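
# Worked example (added commentary, assuming a 32-bit signed INT):
#   size_in_bits = 32, is_signed_ = 1
#   max_value_   = 2**31 - 1   =  2147483647
#   min_value_   = ~max_value_ = -2147483648
#   half_        = max_value_ // 2 = 1073741823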


import operator
from libc.math cimport sqrt

cpdef check(func, op, a, b):
    cdef INT res = 0, op_res = 0
    cdef bint func_overflow = False
    cdef bint assign_overflow = False
    try:
        res = func(a, b)
    except OverflowError:
        func_overflow = True
    try:
        op_res = op(a, b)
    except OverflowError:
        assign_overflow = True
    assert func_overflow == assign_overflow, "Inconsistent overflow: %s(%s, %s)" % (func, a, b)
    if not func_overflow:
        assert res == op_res, "Inconsistent values: %s(%s, %s) == %s != %s" % (func, a, b, res, op_res)

medium_values = (max_value_ / 2, max_value_ / 3, min_value_ / 2, sqrt(max_value_) - 1, sqrt(max_value_) + 1)
def run_test(func, op):
    cdef INT offset, b
    check(func, op, 300, 200)
    check(func, op, max_value_, max_value_)
    check(func, op, max_value_, min_value_)
    if not is_signed_ or not func is test_sub:
        check(func, op, min_value_, min_value_)

    for offset in range(5):
        check(func, op, max_value_ - 1, offset)
        check(func, op, min_value_ + 1, offset)
        if is_signed_:
            check(func, op, max_value_ - 1, 2 - offset)
            check(func, op, min_value_ + 1, 2 - offset)

    for offset in range(9):
        check(func, op, max_value_ / 2, offset)
        check(func, op, min_value_ / 3, offset)
        check(func, op, max_value_ / 4, offset)
        check(func, op, min_value_ / 5, offset)
        if is_signed_:
            check(func, op, max_value_ / 2, 4 - offset)
            check(func, op, min_value_ / 3, 4 - offset)
            check(func, op, max_value_ / -4, 3 - offset)
            check(func, op, min_value_ / -5, 3 - offset)

    for offset in range(-3, 4):
        for a in medium_values:
            for b in medium_values:
                check(func, op, a, b + offset)

@cython.overflowcheck(True)
def test_add(INT a, INT b):
    """
    >>> test_add(1, 2)
    3
    >>> test_add(max_value, max_value)   #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: value too large
    >>> run_test(test_add, operator.add)
    """
    return int(a + b)
    
@cython.overflowcheck(True)
def test_sub(INT a, INT b):
    """
    >>> test_sub(10, 1)
    9
    >>> test_sub(min_value, 1)   #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: value too large
    >>> run_test(test_sub, operator.sub)
    """
    return int(a - b)

@cython.overflowcheck(True)
def test_mul(INT a, INT b):
    """
    >>> test_mul(11, 13)
    143
    >>> test_mul(max_value / 2, max_value / 2)   #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: value too large
    >>> run_test(test_mul, operator.mul)
    """
    return int(a * b)

@cython.overflowcheck(True)
def test_nested_add(INT a, INT b, INT c):
    """
    >>> test_nested_add(1, 2, 3)
    6
    >>> expect_overflow(test_nested_add, half + 1, half + 1, half + 1)
    >>> expect_overflow(test_nested_add, half - 1, half - 1, half - 1)
    """
    return int(a + b + c)

def expect_overflow(func, *args):
    try:
        res = func(*args)
    except OverflowError:
        return
    assert False, "Expected OverflowError, got %s" % res

cpdef format(INT value):
    """
    >>> format(1)
    '1'
    >>> format(half - 1)
    'half - 1'
    >>> format(half)
    'half'
    >>> format(half + 2)
    'half + 2'
    >>> format(half + half - 3)
    'half + half - 3'
    >>> format(max_value)
    'max_value'
    """
    if value == max_value_:
        return "max_value"
    elif value == half_:
        return "half"
    elif max_value_ - value <= max_value_ // 4:
        return "half + half - %s" % (half_ + half_ - value)
    elif max_value_ - value <= half_:
        return "half + %s" % (value - half_)
    elif max_value_ - value <= half_ + max_value_ // 4:
        return "half - %s" % (half_ - value)
    else:
        return "%s" % value

cdef INT called(INT value):
    print("called(%s)" % format(value))
    return value

@cython.overflowcheck(True)
def test_nested(INT a, INT b, INT c, INT d):
    """
    >>> test_nested_func(1, 2, 3)
    called(5)
    6
    >>> expect_overflow(test_nested, half, half, 1, 1)
    >>> expect_overflow(test_nested, half, 1, half, half)
    >>> expect_overflow(test_nested, half, 2, half, 2)

    >>> print(format(test_nested(half, 2, 0, 1)))
    half + half - 0
    >>> print(format(test_nested(1, 0, half, 2)))
    half + half - 0
    >>> print(format(test_nested(half, 1, 1, half)))
    half + half - 0
    """
    return int(a * b + c * d)

@cython.overflowcheck(True)
def test_nested_func(INT a, INT b, INT c):
    """
    >>> test_nested_func(1, 2, 3)
    called(5)
    6
    >>> expect_overflow(test_nested_func, half + 1, half + 1, half + 1)
    >>> expect_overflow(test_nested_func, half - 1, half - 1, half - 1)
    called(half + half - 2)
    >>> print(format(test_nested_func(1, half - 1, half - 1)))
    called(half + half - 2)
    half + half - 1
    """
    return int(a + called(b + c))


@cython.overflowcheck(True)
def test_add_const(INT a):
    """
    >>> test_add_const(1)
    101
    >>> expect_overflow(test_add_const, max_value)
    >>> expect_overflow(test_add_const, max_value - 99)
    >>> test_add_const(max_value - 100) == max_value
    True
    """
    return int(a + 100)

@cython.overflowcheck(True)
def test_sub_const(INT a):
    """
    >>> test_sub_const(101)
    1
    >>> expect_overflow(test_sub_const, min_value)
    >>> expect_overflow(test_sub_const, min_value + 99)
    >>> test_sub_const(min_value + 100) == min_value
    True
    """
    return int(a - 100)

@cython.overflowcheck(True)
def test_mul_const(INT a):
    """
    >>> test_mul_const(2)
    200
    >>> expect_overflow(test_mul_const, max_value)
    >>> expect_overflow(test_mul_const, max_value // 99)
    >>> test_mul_const(max_value // 100) == max_value - max_value % 100
    True
    """
    return int(a * 100)

@cython.overflowcheck(True)
def test_lshift(INT a, int b):
    """
    >>> test_lshift(1, 10)
    1024
    >>> expect_overflow(test_lshift, 1, 100)
    >>> expect_overflow(test_lshift, max_value, 1)
    >>> test_lshift(max_value, 0) == max_value
    True
    
    >>> check(test_lshift, operator.lshift, 10, 15)
    >>> check(test_lshift, operator.lshift, 10, 30)
    >>> check(test_lshift, operator.lshift, 100, 60)
    """
    return int(a << b)
Cython-0.26.1/tests/run/filenames.pyx0000664000175000017500000000012712542002467020305 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> print(spam)
ftang
>>> foo
42
"""

include "filenames.pxi"

foo = 42
Cython-0.26.1/tests/run/default_args_T674.py0000664000175000017500000000052212542002467021335 0ustar  stefanstefan00000000000000# mode: run
# ticket: 674

def test_inner(a):
    """
    >>> a = test_inner(1)
    >>> b = test_inner(2)
    >>> a()
    1
    >>> b()
    2
    """
    def inner(b=a):
        return b
    return inner

def test_lambda(n):
    """
    >>> [f() for f in test_lambda(3)]
    [0, 1, 2]
    """
    return [lambda v=i: v for i in range(n)]
Cython-0.26.1/tests/run/type_inference_new.pyx0000664000175000017500000000554312542002467022221 0ustar  stefanstefan00000000000000cimport cython

from cython cimport typeof, infer_types
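
# Added note: cython.typeof(x) evaluates at compile time to a string naming the
# type inferred for x, so the assertions below exercise the type inferencer
# rather than any runtime behaviour.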


def test_swap():
    """
    >>> test_swap()
    """
    a = 0
    b = 1
    tmp = a
    a = b
    b = tmp
    assert typeof(a) == "long", typeof(a)
    assert typeof(b) == "long", typeof(b)
    assert typeof(tmp) == "long", typeof(tmp)

def test_object_assmt():
    """
    >>> test_object_assmt()
    """
    a = 1
    b = a
    a = "str"
    assert typeof(a) == "Python object", typeof(a)
    assert typeof(b) == "long", typeof(b)


class RAdd(object):
    other = None
    def __radd__(self, other):
        self._other = other
        return self
    def __repr__(self):
        return '%s(%s)' % (type(self).__name__, self._other)


def test_inplace_assignment():
    """
    >>> test_inplace_assignment()
    RAdd([1, 2, 3])
    """
    l = [1, 2, 3]
    # inferred type of l is list, but assignment result is object
    l += RAdd()
    return l


def test_reassignment():
    """
    >>> test_reassignment()
    (1, 2, 3)
    """
    l = [1, 2, 3]
    l = (1, 2, 3)
    return l


def test_long_vs_double(cond):
    """
    >>> test_long_vs_double(0)
    """
    assert typeof(a) == "double", typeof(a)
    assert typeof(b) == "double", typeof(b)
    assert typeof(c) == "double", typeof(c)
    assert typeof(d) == "double", typeof(d)

    if cond:
        a = 1
        b = 2
        c = (a + b) / 2
    else:
        a = 1.0
        b = 2.0
        d = (a + b) / 2

def test_double_vs_pyobject():
    """
    >>> test_double_vs_pyobject()
    """
    assert typeof(a) == "Python object", typeof(a)
    assert typeof(b) == "Python object", typeof(b)
    assert typeof(d) == "double", typeof(d)

    a = []
    b = []

    a = 1.0
    b = 2.0
    d = (a + b) / 2

def test_python_objects(cond):
    """
    >>> test_python_objects(0)
    """
    if cond == 1:
        a = [1, 2, 3]
        o_list = a
    elif cond == 2:
        a = set([1, 2, 3])
        o_set = a
    else:
        a = {1:1, 2:2, 3:3}
        o_dict = a
    assert typeof(a) == "Python object", typeof(a)
    assert typeof(o_list) == "list object", typeof(o_list)
    assert typeof(o_dict) == "dict object", typeof(o_dict)
    assert typeof(o_set) == "set object", typeof(o_set)

# CF loops
def test_cf_loop():
    """
    >>> test_cf_loop()
    """
    cdef int i
    a = 0.0
    for i in range(3):
        a += 1
    assert typeof(a) == "double", typeof(a)

def test_cf_loop_intermediate():
    """
    >>> test_cf_loop()
    """
    cdef int i
    a = 0
    for i in range(3):
        b = a
        a = b + 1
    assert typeof(a) == "long", typeof(a)
    assert typeof(b) == "long", typeof(b)

# Integer overflow
def test_integer_overflow():
    """
    >>> test_integer_overflow()
    """
    a = 1
    b = 2
    c = a + b
    assert typeof(a) == "Python object", typeof(a)
    assert typeof(b) == "Python object", typeof(b)
    assert typeof(c) == "Python object", typeof(c)
Cython-0.26.1/tests/run/ctypedef_delegation.pyx0000664000175000017500000000024212542002467022336 0ustar  stefanstefan00000000000000
ctypedef char* LPSTR

def typedef_delegation():
    """
    >>> typedef_delegation()
    """
    cdef LPSTR c_str = b"ascii"
    assert c_str == b"ascii"
Cython-0.26.1/tests/run/carray_slicing.pyx0000664000175000017500000002224412542002467021337 0ustar  stefanstefan00000000000000
cimport cython

############################################################
# tests for char* slicing

cdef char* cstring = "abcABCqtp"
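
# Added note: slicing a char* with known bounds lets Cython iterate directly
# over the C array (a ForFromStatNode, i.e. a plain C for-loop) instead of
# first building a Python bytes object and iterating over it (ForInStatNode);
# the path assertions on the tests below check exactly this transformation.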

def slice_charptr_end():
    """
    >>> print(str(slice_charptr_end()).replace("b'", "'"))
    ('a', 'abc', 'abcABCqtp')
    """
    return cstring[:1], cstring[:3], cstring[:9]

#### BROKEN: this test assumes that the result of a char* iteration
#### becomes a bytes object, which is not the case when applying
#### carray iteration.  Contradiction.
##
## @cython.test_assert_path_exists("//ForFromStatNode",
##                                 "//ForFromStatNode//SliceIndexNode")
## @cython.test_fail_if_path_exists("//ForInStatNode")
## def slice_charptr_for_loop_py():
##     """
##     >>> slice_charptr_for_loop_py()
##     ['a', 'b', 'c']
##     ['b', 'c', 'A', 'B']
##     ['B', 'C', 'q', 't', 'p']
##     """
##     print str([ c for c in cstring[:3] ]).replace(" b'", " '").replace("[b'", "['")
##     print str([ c for c in cstring[1:5] ]).replace(" b'", " '").replace("[b'", "['")
##     print str([ c for c in cstring[4:9] ]).replace(" b'", " '").replace("[b'", "['")

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c():
    """
    >>> slice_charptr_for_loop_c()
    ['a', 'b', 'c']
    ['a', 'b', 'c']
    ['b', 'c', 'A', 'B']
    ['B', 'C', 'q', 't', 'p']
    """
    cdef char c
    print [ chr(c) for c in cstring[:3] ]
    print [ chr(c) for c in cstring[None:3] ]
    print [ chr(c) for c in cstring[1:5] ]
    print [ chr(c) for c in cstring[4:9] ]

#@cython.test_assert_path_exists("//ForFromStatNode",
#                                "//ForFromStatNode//IndexNode")
#@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c_to_bytes():
    """
    >>> slice_charptr_for_loop_c_to_bytes()
    ['a', 'b', 'c']
    ['a', 'b', 'c']
    ['b', 'c', 'A', 'B']
    ['B', 'C', 'q', 't', 'p']
    """
    cdef bytes b
    print str([ b for b in cstring[:3] ]).replace(" b'", " '").replace("[b'", "['")
    print str([ b for b in cstring[None:3] ]).replace(" b'", " '").replace("[b'", "['")
    print str([ b for b in cstring[1:5] ]).replace(" b'", " '").replace("[b'", "['")
    print str([ b for b in cstring[4:9] ]).replace(" b'", " '").replace("[b'", "['")

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c_step():
    """
    >>> slice_charptr_for_loop_c_step()
    Acba ['A', 'c', 'b', 'a']
    Acba ['A', 'c', 'b', 'a']
    bA ['b', 'A']
    acB ['a', 'c', 'B']
    acB ['a', 'c', 'B']
     []
    ptqC ['p', 't', 'q', 'C']
    pq ['p', 'q']
    """
    cdef object ustring = cstring.decode('ASCII')
    cdef char c
    print ustring[3::-1],     [ chr(c) for c in cstring[3::-1] ]
    print ustring[3:None:-1], [ chr(c) for c in cstring[3:None:-1] ]
    print ustring[1:5:2],     [ chr(c) for c in cstring[1:5:2] ]
    print ustring[:5:2],      [ chr(c) for c in cstring[:5:2] ]
    print ustring[None:5:2],  [ chr(c) for c in cstring[None:5:2] ]
    print ustring[4:9:-1],    [ chr(c) for c in cstring[4:9:-1] ]
    print ustring[8:4:-1],    [ chr(c) for c in cstring[8:4:-1] ]
    print ustring[8:4:-2],    [ chr(c) for c in cstring[8:4:-2] ]

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c_dynamic_bounds():
    """
    >>> slice_charptr_for_loop_c_dynamic_bounds()
    ['a', 'b', 'c']
    ['a', 'b', 'c']
    ['b', 'c', 'A', 'B']
    ['B', 'C', 'q', 't', 'p']
    """
    cdef char c
    print [ chr(c) for c in cstring[0:return3()] ]
    print [ chr(c) for c in cstring[None:return3()] ]
    print [ chr(c) for c in cstring[return1():return5()] ]
    print [ chr(c) for c in cstring[return4():return9()] ]

cdef return1(): return 1
cdef return3(): return 3
cdef return4(): return 4
cdef return5(): return 5
cdef return9(): return 9

#### BROKEN: this test assumes that the result of a char* iteration
#### becomes a bytes object, which is not the case when applying
#### carray iteration.  Contradiction.
##
## @cython.test_assert_path_exists("//ForFromStatNode",
##                                 "//ForFromStatNode//SliceIndexNode")
## @cython.test_fail_if_path_exists("//ForInStatNode")
## def slice_charptr_for_loop_py_enumerate():
##     """
##     >>> slice_charptr_for_loop_py_enumerate()
##     [(0, 'a'), (1, 'b'), (2, 'c')]
##     [(0, 'b'), (1, 'c'), (2, 'A'), (3, 'B')]
##     [(0, 'B'), (1, 'C'), (2, 'q'), (3, 't'), (4, 'p')]
##     """
##     print str([ (i,c) for i,c in enumerate(cstring[:3]) ]).replace(" b'", " '")
##     print str([ (i,c) for i,c in enumerate(cstring[1:5]) ]).replace(" b'", " '")
##     print str([ (i,c) for i,c in enumerate(cstring[4:9]) ]).replace(" b'", " '")

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_charptr_for_loop_c_enumerate():
    """
    >>> slice_charptr_for_loop_c_enumerate()
    [(0, 97), (1, 98), (2, 99)]
    [(0, 97), (1, 98), (2, 99)]
    [(0, 98), (1, 99), (2, 65), (3, 66)]
    [(0, 66), (1, 67), (2, 113), (3, 116), (4, 112)]
    """
    cdef int c,i
    print [ (i,c) for i,c in enumerate(cstring[:3]) ]
    print [ (i,c) for i,c in enumerate(cstring[None:3]) ]
    print [ (i,c) for i,c in enumerate(cstring[1:5]) ]
    print [ (i,c) for i,c in enumerate(cstring[4:9]) ]


############################################################
# tests for int* slicing

cdef int[6] cints
for i in range(6):
    cints[i] = i

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_intarray_for_loop_c():
    """
    >>> slice_intarray_for_loop_c()
    [0, 1, 2]
    [0, 1, 2]
    [1, 2, 3, 4]
    [4, 5]
    """
    cdef int i
    print [ i for i in cints[:3] ]
    print [ i for i in cints[None:3] ]
    print [ i for i in cints[1:5] ]
    print [ i for i in cints[4:6] ]

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def iter_intarray_for_loop_c():
    """
    >>> iter_intarray_for_loop_c()
    [0, 1, 2, 3, 4, 5]
    """
    cdef int i
    print [ i for i in cints ]

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_intptr_for_loop_c():
    """
    >>> slice_intptr_for_loop_c()
    [0, 1, 2]
    [0, 1, 2]
    [1, 2, 3, 4]
    [4, 5]
    """
    cdef int* nums = cints
    cdef int i
    print [ i for i in nums[:3] ]
    print [ i for i in nums[None:3] ]
    print [ i for i in nums[1:5] ]
    print [ i for i in nums[4:6] ]


############################################################
# tests for slicing other arrays

cdef double[6] cdoubles
for i in range(6):
    cdoubles[i] = i + 0.5

cdef double* cdoubles_ptr = cdoubles

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def slice_doublptr_for_loop_c():
    """
    >>> slice_doublptr_for_loop_c()
    [0.5, 1.5, 2.5]
    [0.5, 1.5, 2.5]
    [1.5, 2.5, 3.5, 4.5]
    [4.5, 5.5]
    """
    cdef double d
    print [ d for d in cdoubles_ptr[:3] ]
    print [ d for d in cdoubles_ptr[None:3] ]
    print [ d for d in cdoubles_ptr[1:5] ]
    print [ d for d in cdoubles_ptr[4:6] ]

## @cython.test_assert_path_exists("//ForFromStatNode",
##                                 "//ForFromStatNode//IndexNode")
## @cython.test_fail_if_path_exists("//ForInStatNode")
## def slice_doublptr_for_loop_c_step():
##     """
##     >>> slice_doublptr_for_loop_c_step()
##     """
##     cdef double d
##     print [ d for d in cdoubles_ptr[:3:1] ]
##     print [ d for d in cdoubles_ptr[5:1:-1] ]
##     print [ d for d in cdoubles_ptr[:2:-2] ]
##     print [ d for d in cdoubles_ptr[4:6:2] ]
##     print [ d for d in cdoubles_ptr[4:6:-2] ]

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def iter_doublearray_for_loop_c():
    """
    >>> iter_doublearray_for_loop_c()
    [0.5, 1.5, 2.5, 3.5, 4.5, 5.5]
    """
    cdef double d
    print [ d for d in cdoubles ]


cdef struct MyStruct:
    int i

@cython.test_assert_path_exists("//ForFromStatNode",
                                "//ForFromStatNode//IndexNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def struct_ptr_iter():
    """
    >>> struct_ptr_iter()
    ([0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4])
    """
    cdef MyStruct[5] my_structs
    for i in range(5):
        my_structs[i].i = i
    cdef MyStruct value
    cdef MyStruct *ptr
    return ([ value.i for value in my_structs[:5] ],
            [ ptr.i for ptr in my_structs[:5] ],
            [ inferred.i for inferred in my_structs[:5] ])
Cython-0.26.1/tests/run/complex_numbers_cpp.pyx0000664000175000017500000000057413023021033022375 0ustar  stefanstefan00000000000000# tag: cpp

from libcpp.complex cimport complex as complex_class

def double_complex(complex_class[double] a):
    """
    >>> double_complex(1 + 2j)
    (1+2j)
    >>> double_complex(1.5 + 2.5j)
    (1.5+2.5j)
    """
    return a

def double_int(complex_class[int] a):
    """
    >>> double_int(1 + 2j)
    (1+2j)
    >>> double_int(1.5 + 2.5j)
    (1+2j)
    """
    return a
Cython-0.26.1/tests/run/complex_numbers_c99_T398.h0000664000175000017500000000062612542002467022372 0ustar  stefanstefan00000000000000#if !defined(__cplusplus)
#if (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) \
  || defined(__GNUC__)						 \
  || defined(__INTEL_COMPILER)					 \
  || defined(__IBMC__)						 \

#include <complex.h>
#if !defined(_Complex_I)
#error The "complex.h" header does not define the '_Complex_I' macro.
#error Please report this to Cython developers 
#endif

#endif
#endif
Cython-0.26.1/tests/run/r_spamtype.pyx0000664000175000017500000000107412542002467020527 0ustar  stefanstefan00000000000000__doc__ = """
    >>> s = Spam()
    >>> s.get_tons()
    17
    >>> s.set_tons(42)
    >>> s.get_tons()
    42
"""

import platform
if not hasattr(platform, 'python_implementation') or platform.python_implementation() == 'CPython':
    __doc__ += """
    >>> s = None
    42 tons of spam is history.
"""

cdef class Spam:

    cdef int tons

    def __cinit__(self):
        self.tons = 17

    def __dealloc__(self):
        print self.tons, u"tons of spam is history."

    def get_tons(self):
        return self.tons

    def set_tons(self, x):
        self.tons = x
Cython-0.26.1/tests/run/set_literals.py0000664000175000017500000000427212542002467020651 0ustar  stefanstefan00000000000000# Py2.7+ only

import sys


def test_set_literal():
    """
    >>> type(test_set_literal()) is set
    True
    >>> sorted(test_set_literal())
    ['a', 'b', 1]
    """
    s1 = {1, 'a', 1, 'b', 'a'}
    return s1


def test_set_add():
    """
    >>> type(test_set_add()) is set
    True
    >>> sorted(test_set_add())
    ['a', 1, (1, 2)]
    """
    s1 = {1, (1, 2)}
    s1.add(1)
    s1.add('a')
    s1.add(1)
    s1.add((1, 2))
    return s1


def test_set_comp():
    """
    >>> type(test_set_comp()) is set
    True
    >>> sorted(test_set_comp())
    [0, 1, 2]
    """
    s1 = {i % 3 for i in range(5)}
    return s1


def test_frozenset_set_comp():
    """
    >>> type(test_frozenset_set_comp()) is frozenset
    True
    >>> sorted(test_frozenset_set_comp())
    [0, 1, 2]
    """
    s1 = frozenset({i % 3 for i in range(5)})
    return s1


def test_set_sideeffect_unhashable_failure_literal():
    """
    >>> test_set_sideeffect_unhashable_failure_literal()
    [2, 4, 5]
    """
    L = []

    def sideeffect(x):
        L.append(x)
        return x

    def unhashable_value(x):
        L.append(x)
        return set()
    try:
        s = {1, sideeffect(2), 3, unhashable_value(4), sideeffect(5)}
    except TypeError: pass
    else: assert False, "expected exception not raised"
    return L


def test_set_comp_sideeffect_unhashable_failure():
    """
    >>> test_set_comp_sideeffect_unhashable_failure()
    (None, [2, 4])
    """
    L = []

    def value(x):
        return x

    def sideeffect(x):
        L.append(x)
        return x

    def unhashable_value(x):
        L.append(x)
        return set()
    s = None
    try:
        s = {f(i) for i, f in enumerate([value, sideeffect, value, unhashable_value, sideeffect], 1)}
    except TypeError: pass
    else: assert False, "expected exception not raised"
    return s, L


def sorted(it):
    # Py3 can't compare different types
    chars = []
    nums = []
    tuples = []
    for item in it:
        if type(item) is int:
            nums.append(item)
        elif type(item) is tuple:
            tuples.append(item)
        else:
            chars.append(item)
    nums.sort()
    chars.sort()
    tuples.sort()
    return chars+nums+tuples
Cython-0.26.1/tests/run/lambda_class_T605.pyx0000664000175000017500000000130412542002467021463 0ustar  stefanstefan00000000000000# mode: run
# tag: lambda
# ticket: 605

cdef int cdef_CONST = 123
CONST = 456

cdef class Foo:
    """
    >>> obj = Foo()
    >>> obj.id(123)
    123
    >>> obj.cconst_mul(1)
    123
    >>> obj.const_mul(1)
    456
    >>> obj.foo[0](1)
    1
    """
    id = lambda self, x: x
    cconst_mul = lambda self, x: x * cdef_CONST
    const_mul = lambda self, x: x * CONST
    foo = (lambda x:x,)

class Bar:
    """
    >>> obj = Bar()
    >>> obj.id(123)
    123
    >>> obj.cconst_mul(1)
    123
    >>> obj.const_mul(1)
    456
    >>> obj.foo[0](1)
    1
    """
    id = lambda self, x: x
    cconst_mul = lambda self, x: x * cdef_CONST
    const_mul = lambda self, x: x * CONST
    foo = (lambda x:x,)
Cython-0.26.1/tests/run/longintrepr.pyx0000664000175000017500000000313413023021033020667 0ustar  stefanstefan00000000000000# mode: run

# Test longintrepr declarations by implementing a simple function

from cpython.longintrepr cimport *
cimport cython

cdef extern from *:
    Py_ssize_t* Py_SIZE_PTR "&Py_SIZE"(object)
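    # Added note: declaring "&Py_SIZE" as a function returning Py_ssize_t*
    # gives writable access to the object's ob_size field; lshift() uses it at
    # the end to flip the sign of the resulting Python long.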


@cython.cdivision(True)
def lshift(long a, unsigned long n):
    """
    Return a * 2^n as Python long.

    >>> print(lshift(3, 1))
    6
    >>> print(lshift(-3, 1))
    -6
    >>> print(lshift(1, 30))
    1073741824
    >>> print(lshift(-12345, 115))
    -512791237748899576593671817473776680960
    >>> print(-12345 << 115)
    -512791237748899576593671817473776680960
    >>> [i for i in range(100) if (65535 << i) != lshift(65535, i)]
    []
    >>> print(lshift(0, 12345))
    0
    >>> print(lshift(2**62, 0))   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError...
    """
    if not a:
        return _PyLong_New(0)
    cdef unsigned long apos = a if a > 0 else -a
    if (apos >> 1) >= PyLong_BASE:
        raise OverflowError

    cdef unsigned long index = n // PyLong_SHIFT
    cdef unsigned long shift = n % PyLong_SHIFT

    cdef digit d = apos
    cdef digit low = (d << shift) & PyLong_MASK
    cdef digit high = (d >> (PyLong_SHIFT - shift))

    if high == 0:
        ret = _PyLong_New(index + 1)
        (<py_long>ret).ob_digit[index] = low
    else:
        ret = _PyLong_New(index + 2)
        (<py_long>ret).ob_digit[index] = low
        (<py_long>ret).ob_digit[index + 1] = high

    while index >= 1:
        index -= 1
        (<py_long>ret).ob_digit[index] = 0

    if a < 0:
        Py_SIZE_PTR(ret)[0] *= -1

    return ret
Cython-0.26.1/tests/run/cpp_call_stack_allocated.srctree0000664000175000017500000000211412574327400024143 0ustar  stefanstefan00000000000000# tag: cpp

"""
PYTHON setup.py build_ext --inplace
PYTHON -c "from call_stack_allocated import test; test()"
"""

######## setup.py ########

from distutils.core import setup
from Cython.Build import cythonize
setup(ext_modules=cythonize('*.pyx', language='c++'))

######## call.cpp ########

class wint {
public:
  long long val;
  wint() { val = 0; }
  wint(long long val) { this->val = val; }
  long long &operator()() { return this->val; }
  long long operator()(long long i) { return this->val + i; }
  long long operator()(long long i, long long j) { return this->val + i + j; }
};

######## call.pxd ########

cdef extern from "call.cpp" nogil:
    cppclass wint:
        long long val
        wint()
        wint(long long val)
        long long& operator()()
        long long operator()(long long i)
        long long operator()(long long i, long long j)


######## call_stack_allocated.pyx ########

from call cimport wint
def test():
    cdef wint a = wint(4)
    cdef long long b = 3
    b = a()
    assert b == 4
    b = a(1ll)
    assert b == 5
    b = a(1ll, 1ll)
    assert b == 6

Cython-0.26.1/tests/run/literals.pyx0000664000175000017500000000377413023021033020155 0ustar  stefanstefan00000000000000# mode: run

def foo():
    """
    >>> foo()
    """
    a = 42
    a1 = 0123
    an1 = -0123
    assert a1 == -an1
    a2 = 0xabc
    an2 = -0xabc
    assert a2 == -an2
    a3 = 0xDEF
    an3 = -0xDEF
    assert a3 == -an3
    a4 = 1234567890L
    an4 = -1234567890L
    assert a4 == -an4
    a5 = 0o123
    an5 = -0o123
    assert a5 == -an5
    assert a5 == a1
    a6 = 0b101
    an6 = -0b101
    assert a6 == -an6 == 5

    b = 42.88e17
    b0a = 1.
    b0b = .1
    b0c = 1.1
    b0d = 1.e1
    b0e = .1e1
    b0f = 1.1e1
    b0g = 1.1e-1
    b0h = 1e1

    b1 = 3j
    b2 = 3.1415J

    b3 = c'X'
    c = "spanish inquisition"
    d = "this" "parrot" "is" "resting"
    e = 'single quoted string'
    f = '"this is quoted"'
    g = '''Triple single quoted string.'''
    h = """Triple double quoted string."""
    g1 = '''Two line triple
single quoted string.'''
    h1 = """Two line triple
double quoted string."""
    i = 'This string\
 has an ignored newline.'
    j = 'One-char escapes: \'\"\\\a\b\f\n\r\t\v'
    k = b'Oct and hex escapes: \1 \12 \123 \x45 \xaf \xAF'
    l = r'''This is\
a \three \line
raw string with some backslashes.'''
    m = 'Three backslashed ordinaries: \c\g\+'
    n = '''Triple single quoted string
with ' and " quotes'''
    o = """Triple double quoted string
with ' and " quotes"""
    p = "name_like_string"
    q = "NameLikeString2"
    r = "99_percent_un_namelike"
    s = "Not an \escape"
    t = b'this' b'parrot' b'is' b'resting'
    u = u'this' u'parrot' u'is' u'resting'


def test_float(x):
    """
    >>> test_float(1./3)
    True
    """
    return x == 1./3

def test_complex(x):
    """
    >>> test_complex(1j/3)
    True
    """
    return x == 0.3333333333333333j

def test_large_int(double x):
    """
    >>> test_large_int(0)
    2e+100
    """
    a = x + 10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
    a += 10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
    return a
Cython-0.26.1/tests/run/overflow_check_int.pyx0000664000175000017500000000012512542002467022212 0ustar  stefanstefan00000000000000# cython: overflowcheck.fold = True


ctypedef int INT

include "overflow_check.pxi"
Cython-0.26.1/tests/run/cyclic_gc.pyx0000664000175000017500000001302412542002467020261 0ustar  stefanstefan00000000000000# mode: run
# tag: cyclicgc


cimport cython
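
# Added note: has_cyclic_pyobject_attrs reflects whether an extension type has
# attributes that could take part in a reference cycle.  Types holding only
# bytes/str/unicode (or no Python objects at all) can skip cyclic GC support,
# while e.g. a list attribute forces GC participation, as the classes below
# assert via their path checks.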

@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtTypeNoGC:
    """
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    """


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
@cython.final
cdef class ExtTypeFinalNoGC:
    """
    >>> obj = ExtTypeFinalNoGC()
    >>> obj = ExtTypeFinalNoGC()
    >>> obj = ExtTypeFinalNoGC()
    >>> obj = ExtTypeFinalNoGC()
    >>> obj = ExtTypeFinalNoGC()
    >>> obj = ExtTypeFinalNoGC()
    """
    cdef bytes s


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtSubTypeNoGC(ExtTypeNoGC):
    """
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    """


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtTypePyArgsNoGC:
    """
    >>> obj = ExtTypePyArgsNoGC()
    >>> obj = ExtTypePyArgsNoGC()
    >>> obj = ExtTypePyArgsNoGC()
    >>> obj = ExtTypePyArgsNoGC()
    >>> obj = ExtTypePyArgsNoGC()
    >>> obj = ExtTypePyArgsNoGC()
    """
    cdef bytes b
    cdef str s
    cdef unicode u
# eventually, this should work, too:
#    cdef ExtTypeFinalNoGC x


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtSubTypePyArgsNoGC(ExtTypePyArgsNoGC):
    """
    >>> obj = ExtSubTypePyArgsNoGC()
    >>> obj = ExtSubTypePyArgsNoGC()
    >>> obj = ExtSubTypePyArgsNoGC()
    >>> obj = ExtSubTypePyArgsNoGC()
    >>> obj = ExtSubTypePyArgsNoGC()
    >>> obj = ExtSubTypePyArgsNoGC()
    """


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
cdef class ExtTypePyArgsWithGC:
    """
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj = ExtTypePyArgsWithGC()
    >>> obj.create_cycle()
    """
    cdef bytes b
    cdef str s
    cdef unicode u
    cdef list l

    def create_cycle(self):
        self.l = [self]


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtSubTypePyArgsWithGC(ExtTypePyArgsWithGC):
    """
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj = ExtSubTypePyArgsWithGC()
    >>> obj.create_cycle()
    """


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
cdef class ExtSubTypePlusPyArgsWithGC(ExtSubTypePyArgsWithGC):
    """
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj = ExtSubTypePlusPyArgsWithGC()
    >>> obj.create_cycle()
    """
    cdef bytes b2
    cdef unicode u2


@cython.test_fail_if_path_exists('//CClassDefNode[@scope.has_cyclic_pyobject_attrs = False]')
@cython.test_assert_path_exists('//CClassDefNode',
                                '//CClassDefNode[@scope]',
                                '//CClassDefNode[@scope.has_cyclic_pyobject_attrs = True]')
cdef class ExtSubTypePlusGCPyArgsWithGC(ExtSubTypePlusPyArgsWithGC):
    """
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj = ExtSubTypePlusGCPyArgsWithGC()
    >>> obj.create_cycle()
    """
    cdef tuple t
Cython-0.26.1/tests/run/numpy_bufacc_T155.pyx0000664000175000017500000000045312542002467021535 0ustar  stefanstefan00000000000000# ticket: 155
# tag: numpy

"""
>>> myfunc()
0.5
"""

cimport numpy as np
import numpy as np

def myfunc():
    cdef np.ndarray[float, ndim=2] A = np.ones((1,1), dtype=np.float32)
    cdef int i

    for i from 0 <= i < A.shape[0]:
        A[i, :] /= 2
    return A[0,0]

include "numpy_common.pxi"
Cython-0.26.1/tests/run/fmod.pyx0000664000175000017500000000013612542002467017267 0ustar  stefanstefan00000000000000def fmod(double a, double b):
    """
    >>> fmod(7, 1.25)
    0.75
    """
    return a % b
Cython-0.26.1/tests/run/cdef_class_property_decorator_T264.pyx0000664000175000017500000000235213023021033025140 0ustar  stefanstefan00000000000000# mode: run
# ticket: 264
# tag: property, decorator

my_property = property

cdef class Prop:
    """
    >>> p = Prop()
    >>> p.prop
    GETTING 'None'
    >>> p.prop = 1
    SETTING '1' (previously: 'None')
    >>> p.prop
    GETTING '1'
    1
    >>> p.prop = 2
    SETTING '2' (previously: '1')
    >>> p.prop
    GETTING '2'
    2
    >>> p.my_prop
    GETTING '2' via my_prop
    2
    >>> del p.prop
    DELETING '2'
    >>> p.prop
    GETTING 'None'
    >>> list(p.generator_prop)
    [42]
    """
    cdef _value
    def __init__(self):
        self._value = None

    @property
    def prop(self):
        print("FAIL")
        return 0

    @prop.getter
    def prop(self):
        print("FAIL")

    @property
    def prop(self):
        print("GETTING '%s'" % self._value)
        return self._value

    @prop.setter
    def prop(self, value):
        print("SETTING '%s' (previously: '%s')" % (value, self._value))
        self._value = value

    @prop.deleter
    def prop(self):
        print("DELETING '%s'" % self._value)
        self._value = None

    @my_property
    def my_prop(self):
        print("GETTING '%s' via my_prop" % self._value)
        return self._value

    @property
    def generator_prop(self):
        yield 42
Cython-0.26.1/tests/run/pytype.pyx0000664000175000017500000000060112542002467017671 0ustar  stefanstefan00000000000000from cpython.type cimport PyType_IsSubtype

class mylist(list): pass

def test_issubtype(a, b):
    """
    >>> test_issubtype(mylist, list)
    True
    >>> test_issubtype(mylist, dict)
    False

    >>> o = object()
    >>> test_issubtype(o, list)
    Traceback (most recent call last):
    ...
    TypeError: Cannot convert object to type
    """
    return PyType_IsSubtype(a, b)
Cython-0.26.1/tests/run/extended_unpacking_T235.pyx0000664000175000017500000002011512542002467022715 0ustar  stefanstefan00000000000000# ticket: 235

__doc__ = u"""
    >>> class FakeSeq(object):
    ...     def __init__(self, length):
    ...         self._values = list(range(1,length+1))
    ...     def __getitem__(self, i):
    ...         return self._values[i]

    >>> unpack( FakeSeq(2) )
    (1, 2)
    >>> unpack_recursive( FakeSeq(4) )
    (1, [2, 3], 4)
"""

def unpack(l):
    """
    >>> unpack([1,2])
    (1, 2)
    >>> unpack('12')
    ('1', '2')
    """
    a, b = l
    return a,b

def unpack_list(list l):
    """
    >>> unpack_list([1,2])
    (1, 2)
    """
    a, b = l
    return a,b

def unpack_tuple(tuple t):
    """
    >>> unpack_tuple((1,2))
    (1, 2)
    """
    a, b = t
    return a,b

def unpack_single(l):
    """
    >>> unpack_single([1])
    [1]
    >>> unpack_single('1')
    ['1']
    """
    *a, = l
    return a

def unpack_tuple_single(tuple t):
    """
    >>> unpack_tuple_single((1,))
    [1]
    """
    *a, = t
    return a

def assign():
    """
    >>> assign()
    (1, [2, 3, 4], 5)
    """
    *a, b = 1,2,3,4,5
    assert a+[b] == [1,2,3,4,5], (a,b)
    a, *b = 1,2,3,4,5
    assert [a]+b == [1,2,3,4,5], (a,b)
    [a, *b, c] = 1,2,3,4,5
    return a,b,c

def unpack_into_list(l):
    """
    >>> unpack_into_list('123')
    ('1', ['2'], '3')
    """
    [*a, b] = l
    assert a+[b] == list(l), repr((a+[b],list(l)))
    [a, *b] = l
    assert [a]+b == list(l), repr(([a]+b,list(l)))
    [a, *b, c] = l
    return a,b,c

def unpack_into_tuple(t):
    """
    >>> unpack_into_tuple('123')
    ('1', ['2'], '3')
    """
    (*a, b) = t
    assert a+[b] == list(t), repr((a+[b],list(t)))
    (a, *b) = t
    assert [a]+b == list(t), repr(([a]+b,list(t)))
    (a, *b, c) = t
    return a,b,c

def unpack_in_loop(list_of_sequences):
    """
    >>> unpack_in_loop([(1,2), (1,2,3), (1,2,3,4)])
    1
    ([1], 2)
    ([1, 2], 3)
    ([1, 2, 3], 4)
    2
    (1, [2])
    (1, [2, 3])
    (1, [2, 3, 4])
    3
    (1, [], 2)
    (1, [2], 3)
    (1, [2, 3], 4)
    """
    print 1
    for *a,b in list_of_sequences:
        print((a,b))
    print 2
    for a,*b in list_of_sequences:
        print((a,b))
    print 3
    for a,*b, c in list_of_sequences:
        print((a,b,c))

def unpack_recursive(t):
    """
    >>> unpack_recursive((1,2,3,4))
    (1, [2, 3], 4)
    """
    *(a, *b), c  = t
    return a,b,c

def unpack_typed(t):
    """
    >>> unpack_typed((1,2))
    ([1], 2)
    """
    cdef list a
    *a, b  = t
    return a,b


def unpack_right(l):
    """
    >>> unpack_right('')
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_right('1')
    ('1', [])
    >>> unpack_right([1])
    (1, [])
    >>> unpack_right('12')
    ('1', ['2'])
    >>> unpack_right([1,2])
    (1, [2])
    >>> unpack_right('123')
    ('1', ['2', '3'])
    >>> unpack_right([1,2,3])
    (1, [2, 3])
    """
    a, *b = l
    return a,b

def unpack_right_list(list l):
    """
    >>> unpack_right_list([])
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_right_list([1])
    (1, [])
    >>> unpack_right_list([1,2])
    (1, [2])
    >>> unpack_right_list([1,2,3])
    (1, [2, 3])
    """
    a, *b = l
    return a,b

def unpack_right_tuple(tuple t):
    """
    >>> unpack_right_tuple(())
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_right_tuple((1,))
    (1, [])
    >>> unpack_right_tuple((1,2))
    (1, [2])
    >>> unpack_right_tuple((1,2,3))
    (1, [2, 3])
    """
    a, *b = t
    return a,b


def unpack_left(l):
    """
    >>> unpack_left('')
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_left('1')
    ([], '1')
    >>> unpack_left([1])
    ([], 1)
    >>> unpack_left('12')
    (['1'], '2')
    >>> unpack_left([1,2])
    ([1], 2)
    >>> unpack_left('123')
    (['1', '2'], '3')
    >>> unpack_left([1,2,3])
    ([1, 2], 3)
    """
    *a, b = l
    return a,b

def unpack_left_list(list l):
    """
    >>> unpack_left_list([])
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_left_list([1])
    ([], 1)
    >>> unpack_left_list([1,2])
    ([1], 2)
    >>> unpack_left_list([1,2,3])
    ([1, 2], 3)
    """
    *a, b = l
    return a,b

def unpack_left_tuple(tuple t):
    """
    >>> unpack_left_tuple(())
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_left_tuple((1,))
    ([], 1)
    >>> unpack_left_tuple((1,2))
    ([1], 2)
    >>> unpack_left_tuple((1,2,3))
    ([1, 2], 3)
    """
    *a, b = t
    return a,b


def unpack_middle(l):
    """
    >>> unpack_middle('')
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_middle([])
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_middle(())
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_middle('1')
    Traceback (most recent call last):
    ValueError: need more than 1 value to unpack
    >>> unpack_middle([1])
    Traceback (most recent call last):
    ValueError: need more than 1 value to unpack
    >>> unpack_middle('12')
    ('1', [], '2')
    >>> unpack_middle([1,2])
    (1, [], 2)
    >>> unpack_middle('123')
    ('1', ['2'], '3')
    >>> unpack_middle([1,2,3])
    (1, [2], 3)
    """
    a, *b, c = l
    return a,b,c

def unpack_middle_list(list l):
    """
    >>> unpack_middle_list([])
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_middle_list([1])
    Traceback (most recent call last):
    ValueError: need more than 1 value to unpack
    >>> unpack_middle_list([1,2])
    (1, [], 2)
    >>> unpack_middle_list([1,2,3])
    (1, [2], 3)
    """
    a, *b, c = l
    return a,b,c

def unpack_middle_tuple(tuple t):
    """
    >>> unpack_middle_tuple(())
    Traceback (most recent call last):
    ValueError: need more than 0 values to unpack
    >>> unpack_middle_tuple((1,))
    Traceback (most recent call last):
    ValueError: need more than 1 value to unpack
    >>> unpack_middle_tuple((1,2))
    (1, [], 2)
    >>> unpack_middle_tuple((1,2,3))
    (1, [2], 3)
    >>> a,b,c = unpack_middle(list(range(100)))
    >>> a, len(b), c
    (0, 98, 99)
    >>> a,b,c = unpack_middle_list(list(range(100)))
    >>> a, len(b), c
    (0, 98, 99)
    >>> a,b,c = unpack_middle_tuple(tuple(range(100)))
    >>> a, len(b), c
    (0, 98, 99)
    """
    a, *b, c = t
    return a,b,c

def unpack_many_middle(it):
    """
    >>> unpack_many_middle(list(range(14)))
    (0, 1, 2, 3, 4, [5, 6, 7, 8, 9], 10, 11, 12, 13)
    >>> unpack_many_middle(tuple(range(14)))
    (0, 1, 2, 3, 4, [5, 6, 7, 8, 9], 10, 11, 12, 13)
    >>> unpack_many_middle(iter(range(14)))
    (0, 1, 2, 3, 4, [5, 6, 7, 8, 9], 10, 11, 12, 13)
    """
    a,b,c,d,e,*f,g,h,i,j = it
    return a,b,c,d,e,f,g,h,i,j

def unpack_many_left(it):
    """
    >>> unpack_many_left(list(range(14)))
    (0, 1, 2, 3, 4, 5, 6, 7, 8, [9, 10, 11, 12, 13])
    >>> unpack_many_left(tuple(range(14)))
    (0, 1, 2, 3, 4, 5, 6, 7, 8, [9, 10, 11, 12, 13])
    >>> unpack_many_left(iter(range(14)))
    (0, 1, 2, 3, 4, 5, 6, 7, 8, [9, 10, 11, 12, 13])
    """
    a,b,c,d,e,f,g,h,i,*j = it
    return a,b,c,d,e,f,g,h,i,j

def unpack_many_right(it):
    """
    >>> unpack_many_right(list(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    >>> unpack_many_right(tuple(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    >>> unpack_many_right(iter(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    """
    *a,b,c,d,e,f,g,h,i,j = it
    return a,b,c,d,e,f,g,h,i,j

def unpack_many_right_loop(it):
    """
    >>> unpack_many_right_loop(list(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    >>> unpack_many_right_loop(tuple(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    >>> unpack_many_right_loop(iter(range(14)))
    ([0, 1, 2, 3, 4], 5, 6, 7, 8, 9, 10, 11, 12, 13)
    """
    cdef int i
    for i in range(1):
        *a,b,c,d,e,f,g,h,i,j = it
    return a,b,c,d,e,f,g,h,i,j
Cython-0.26.1/tests/run/fused_cpdef.pyx0000664000175000017500000000434113143605603020612 0ustar  stefanstefan00000000000000cimport cython

cy = __import__("cython")
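
# Added note: 'cy' is a runtime alias for the cython module so that fused
# specialisations can be selected by indexing from Python space as well
# (e.g. pyfunc[cy.int]); calling a fused cpdef function unindexed instead
# dispatches on the argument types.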

cpdef func1(self, cython.integral x):
    print "%s," % (self,),
    if cython.integral is int:
        print 'x is int', x, cython.typeof(x)
    else:
        print 'x is long', x, cython.typeof(x)


class A(object):
    meth = func1

    def __str__(self):
        return "A"

pyfunc = func1

def test_fused_cpdef():
    """
    >>> test_fused_cpdef()
    None, x is int 2 int
    None, x is long 2 long
    None, x is long 2 long
    <BLANKLINE>
    None, x is int 2 int
    None, x is long 2 long
    <BLANKLINE>
    A, x is int 2 int
    A, x is long 2 long
    A, x is long 2 long
    A, x is long 2 long
    """
    func1[int](None, 2)
    func1[long](None, 2)
    func1(None, 2)

    print

    pyfunc[cy.int](None, 2)
    pyfunc(None, 2)

    print

    A.meth[cy.int](A(), 2)
    A.meth(A(), 2)
    A().meth[cy.long](2)
    A().meth(2)


def assert_raise(func, *args):
    try:
        func(*args)
    except TypeError:
        pass
    else:
        assert False, "Function call did not raise TypeError"

def test_badcall():
    """
    >>> test_badcall()
    """
    assert_raise(pyfunc)
    assert_raise(pyfunc, 1, 2, 3)
    assert_raise(pyfunc[cy.int], 10, 11, 12)
    assert_raise(pyfunc, None, object())
    assert_raise(A().meth)
    assert_raise(A.meth)
    assert_raise(A().meth[cy.int])
    assert_raise(A.meth[cy.int])

ctypedef long double long_double

cpdef multiarg(cython.integral x, cython.floating y):
    if cython.integral is int:
        print "x is an int,",
    else:
        print "x is a long,",

    if cython.floating is long_double:
        print "y is a long double:",
    elif float is cython.floating:
        print "y is a float:",
    else:
        print "y is a double:",

    print x, y

def test_multiarg():
    """
    >>> test_multiarg()
    x is an int, y is a float: 1 2.0
    x is an int, y is a float: 1 2.0
    x is a long, y is a double: 4 5.0
    >>> multiarg()
    Traceback (most recent call last):
    TypeError: Expected at least 2 arguments, got 0
    >>> multiarg(1, 2.0, 3)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...2...arg...3...
    """
    multiarg[int, float](1, 2.0)
    multiarg[cy.int, cy.float](1, 2.0)
    multiarg(4, 5.0)
Cython-0.26.1/tests/run/generator_expressions_in_class.py
# mode: run
# tag: generators


class TestClass(object):
    """
    >>> TestClass.x
    [1, 2, 3]
    >>> list(TestClass.gen)
    []
    >>> TestClass.gen_result
    [2, 4, 6]
    >>> TestClass.test
    True
    >>> list(TestClass.gen3)
    [2, 4, 6, 8, 10, 12]
    """

    x = [1, 2, 3]

    gen = (i * 2 for i in x)

    test = all(i * 2 for i in x)

    gen_result = list(gen)

    nested_list = [[1, 2, 3], [4, 5, 6]]

    #gen2 = (i * 2 for i in x for x in nested_list)  # move to error test

    gen3 = (i * 2 for x in nested_list for i in x)
Cython-0.26.1/tests/run/code_object_cache.pyx
# mode: run
# tag: except

# test the code object cache that is being used in exception raising

### low level tests

cimport cython

cdef extern from *:
    # evil hack to access the internal utility function
    ctypedef struct PyCodeObject
    ctypedef struct __Pyx_CodeObjectCacheEntry:
        int code_line
        PyCodeObject* code_object
    int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line)

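# Note on the helper above: the entry array must be sorted by code_line.
# Judging by the doctests below, __pyx_bisect_code_objects() returns the index
# of the first entry whose code_line is >= the requested line (a lower-bound
# bisection), which is how the exception-raising code looks up a cached code
# object for a given C source line.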
def test_lowlevel_bisect2(*indices):
    """
    >>> test_lowlevel_bisect2(1, 2, 3, 4, 5, 6)
    [0, 0, 1, 1, 2, 2]
    """
    cdef __Pyx_CodeObjectCacheEntry* cache = [
        __Pyx_CodeObjectCacheEntry(2, NULL),
        __Pyx_CodeObjectCacheEntry(4, NULL),
        ]
    return [ __pyx_bisect_code_objects(cache, 2, i)
             for i in indices ]

def test_lowlevel_bisect5(*indices):
    """
    >>> test_lowlevel_bisect5(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)
    [0, 1, 2, 2, 2, 3, 3, 3, 4, 5, 5]
    """
    cdef __Pyx_CodeObjectCacheEntry* cache = [
        __Pyx_CodeObjectCacheEntry(1, NULL),
        __Pyx_CodeObjectCacheEntry(2, NULL),
        __Pyx_CodeObjectCacheEntry(5, NULL),
        __Pyx_CodeObjectCacheEntry(8, NULL),
        __Pyx_CodeObjectCacheEntry(9, NULL),
        ]
    return [ __pyx_bisect_code_objects(cache, 5, i)
             for i in indices ]

def test_lowlevel_bisect6(*indices):
    """
    >>> test_lowlevel_bisect6(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)
    [0, 0, 1, 2, 2, 2, 3, 3, 4, 5, 5, 5, 6]
    """
    cdef __Pyx_CodeObjectCacheEntry* cache = [
        __Pyx_CodeObjectCacheEntry(2, NULL),
        __Pyx_CodeObjectCacheEntry(3, NULL),
        __Pyx_CodeObjectCacheEntry(6, NULL),
        __Pyx_CodeObjectCacheEntry(8, NULL),
        __Pyx_CodeObjectCacheEntry(9, NULL),
        __Pyx_CodeObjectCacheEntry(12, NULL),
        ]
    return [ __pyx_bisect_code_objects(cache, 6, i)
             for i in indices ]

### Python level tests

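# The tests below raise the same exception twice and walk the two tracebacks in
# parallel, asserting that corresponding frames reference the *same* code
# object, i.e. that the second raise reused the cached code object instead of
# creating a new one.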
import sys

def tb():
    return sys.exc_info()[-1]

def raise_keyerror():
    raise KeyError

def check_code_object_identity_recursively(tb1, tb2):
    if tb1 is None or tb2 is None:
        return
    code1, code2 = tb1.tb_frame.f_code, tb2.tb_frame.f_code
    if code1 is not code2:
        print('%s != %s' % (code1, code2))
    check_code_object_identity_recursively(tb1.tb_next, tb2.tb_next)

def assert_simple_code_object_reuse():
    """
    >>> try: assert_simple_code_object_reuse()
    ... except KeyError: t1 = tb()
    >>> try: assert_simple_code_object_reuse()
    ... except KeyError: t2 = tb()
    >>> check_code_object_identity_recursively(t1.tb_next, t2.tb_next)
    """
    raise KeyError

def assert_multi_step_code_object_reuse(recursions=0):
    """
    >>> for depth in range(5):
    ...     try: assert_multi_step_code_object_reuse(depth)
    ...     except KeyError: t1 = tb()
    ...     try: assert_multi_step_code_object_reuse(depth)
    ...     except KeyError: t2 = tb()
    ...     check_code_object_identity_recursively(t1.tb_next, t2.tb_next)
    """
    if recursions:
        assert_multi_step_code_object_reuse(recursions-1)
    else:
        raise_keyerror()

def assert_simple_code_object_reuse_fused(cython.floating dummy):
    """
    DISABLED: searching for code objects based on C lineno breaks for specializations

    >> try: assert_simple_code_object_reuse_fused["float"](1.0)
    ... except KeyError: t1 = tb()
    >> try: assert_simple_code_object_reuse_fused["double"](1.0)
    ... except KeyError: t2 = tb()
    >> check_code_object_identity_recursively(t1.tb_next, t2.tb_next)
    """
    raise KeyError

def assert_multi_step_code_object_reuse_fused(recursions=0, cython.floating dummy = 2.0):
    """
    DISABLED: searching for code objects based on C lineno breaks for specializations

    >> for depth in range(5):
    ...     try: assert_multi_step_code_object_reuse_fused(depth, 1.0)
    ...     except KeyError: t1 = tb()
    ...     try: assert_multi_step_code_object_reuse_fused(depth, 1.0)
    ...     except KeyError: t2 = tb()
    ...     check_code_object_identity_recursively(t1.tb_next, t2.tb_next)
    """
    if recursions:
        assert_multi_step_code_object_reuse(recursions-1)
    else:
        raise_keyerror()
Cython-0.26.1/tests/run/crashT245.pyx
# ticket: 245

cimport crashT245_pxd

def f():
    """
    >>> f()
    {'x': 1}
    """
    cdef crashT245_pxd.MyStruct s
    s.x = 1
    print s
Cython-0.26.1/tests/run/datetime_members.pyx
from cpython.datetime cimport import_datetime
from cpython.datetime cimport time_new, date_new, datetime_new, timedelta_new
from cpython.datetime cimport time_tzinfo, datetime_tzinfo
from cpython.datetime cimport time_hour, time_minute, time_second, time_microsecond
from cpython.datetime cimport date_day, date_month, date_year
from cpython.datetime cimport datetime_day, datetime_month, datetime_year
from cpython.datetime cimport datetime_hour, datetime_minute, datetime_second, \
                              datetime_microsecond
from cpython.datetime cimport timedelta_days, timedelta_seconds, timedelta_microseconds

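# import_datetime() initialises CPython's datetime C-API (PyDateTime_IMPORT);
# it must be called before any of the accessors cimported above are used.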
import_datetime()

def test_date(int year, int month, int day):
    '''
    >>> test_date(2012,12,31)
    (True, True, True)
    '''
    o = date_new(year, month, day)
    return o.year == date_year(o), \
           o.month == date_month(o), \
           o.day == date_day(o)

def test_datetime(int year, int month, int day, 
                  int hour, int minute, int second, int microsecond):
    '''
    >>> test_datetime(2012, 12, 31, 12, 30, 59, 12345)
    (True, True, True, True, True, True, True)
    '''
    o = datetime_new(year, month, day, hour, minute, second, microsecond, None)
    return o.year == datetime_year(o), \
           o.month == datetime_month(o), \
           o.day == datetime_day(o), \
           o.hour == datetime_hour(o), \
           o.minute == datetime_minute(o), \
           o.second == datetime_second(o), \
           o.microsecond == datetime_microsecond(o)

def test_time(int hour, int minute, int second, int microsecond):
    '''
    >>> test_time(12, 30, 59, 12345)
    (True, True, True, True)
    '''
    o = time_new(hour, minute, second, microsecond, None)
    return o.hour == time_hour(o), \
           o.minute == time_minute(o), \
           o.second == time_second(o), \
           o.microsecond == time_microsecond(o)

def test_timedelta(int days, int seconds, int microseconds):
    '''
    >>> test_timedelta(30, 1440, 123456)
    (True, True, True)
    '''
    o = timedelta_new(days, seconds, microseconds)
    return o.days == timedelta_days(o), \
           o.seconds == timedelta_seconds(o), \
           o.microseconds == timedelta_microseconds(o)

Cython-0.26.1/tests/run/ellipsis_T488.pyx
# ticket: 488

"""
>>> test()
"""
def test():
    x = ...
    assert x is Ellipsis

    d = {}
    d[...] = 1
    assert d[...] == 1
    del d[...]
    assert ... not in d

    d[..., ...] = 1
    assert d[..., ...] == 1
    assert d[..., Ellipsis] == 1
    assert (Ellipsis, Ellipsis) in d
    del d[..., ...]
    assert (Ellipsis, Ellipsis) not in d

Cython-0.26.1/tests/run/fused_def.pyx
# mode: run

"""
Test Python def functions without extern types
"""

cy = __import__("cython")
cimport cython

cdef class Base(object):
    def __repr__(self):
        return type(self).__name__


cdef class ExtClassA(Base):
    pass

cdef class ExtClassB(Base):
    pass

cdef enum MyEnum:
    entry0
    entry1
    entry2
    entry3
    entry4

ctypedef fused fused_t:
    str
    int
    long
    complex
    ExtClassA
    ExtClassB
    MyEnum


ctypedef ExtClassA xxxlast
ctypedef ExtClassB aaafirst


ctypedef fused fused_with_object:
    aaafirst
    object
    xxxlast
    int
    long


f = 5.6
i = 9


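# A fused def function can either be specialised explicitly by indexing it with
# types (e.g. opt_func[str, float, int]) or left to dispatch at runtime based
# on the argument types; the doctests in opt_func() exercise both paths.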
def opt_func(fused_t obj, cython.floating myf = 1.2, cython.integral myi = 7):
    """
    Test runtime dispatch, indexing of various kinds and optional arguments

    >>> opt_func("spam", f, i)
    str object double long
    spam 5.60 9 5.60 9
    >>> opt_func[str, float, int]("spam", f, i)
    str object float int
    spam 5.60 9 5.60 9
    >>> opt_func[str, cy.double, cy.long]("spam", f, i)
    str object double long
    spam 5.60 9 5.60 9
    >>> opt_func[str, float, cy.int]("spam", f, i)
    str object float int
    spam 5.60 9 5.60 9


    >>> opt_func(ExtClassA(), f, i)
    ExtClassA double long
    ExtClassA 5.60 9 5.60 9
    >>> opt_func[ExtClassA, float, int](ExtClassA(), f, i)
    ExtClassA float int
    ExtClassA 5.60 9 5.60 9
    >>> opt_func[ExtClassA, cy.double, cy.long](ExtClassA(), f, i)
    ExtClassA double long
    ExtClassA 5.60 9 5.60 9

    >>> opt_func(ExtClassB(), f, i)
    ExtClassB double long
    ExtClassB 5.60 9 5.60 9
    >>> opt_func[ExtClassB, cy.double, cy.long](ExtClassB(), f, i)
    ExtClassB double long
    ExtClassB 5.60 9 5.60 9

    >>> opt_func(10, f)
    long double long
    10 5.60 7 5.60 9
    >>> opt_func[int, float, int](10, f)
    int float int
    10 5.60 7 5.60 9

    >>> opt_func(10 + 2j, myf = 2.6)
    double complex double long
    (10+2j) 2.60 7 5.60 9
    >>> opt_func[cy.py_complex, float, int](10 + 2j, myf = 2.6)
    double complex float int
    (10+2j) 2.60 7 5.60 9
    >>> opt_func[cy.doublecomplex, cy.float, cy.int](10 + 2j, myf = 2.6)
    double complex float int
    (10+2j) 2.60 7 5.60 9

    >>> opt_func(object(), f)
    Traceback (most recent call last):
    TypeError: Function call with ambiguous argument types
    >>> opt_func()
    Traceback (most recent call last):
    TypeError: Expected at least 1 argument, got 0
    >>> opt_func("abc", f, i, 5)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...at most 3...
    >>> opt_func[ExtClassA, cy.float, cy.long](object(), f)
    Traceback (most recent call last):
    TypeError: Argument 'obj' has incorrect type (expected fused_def.ExtClassA, got object)
    """
    print cython.typeof(obj), cython.typeof(myf), cython.typeof(myi)
    print obj, "%.2f" % myf, myi, "%.2f" % f, i


def test_opt_func():
    """
    >>> test_opt_func()
    str object double long
    ham 5.60 4 5.60 9
    """
    opt_func("ham", f, entry4)


def func_with_object(fused_with_object obj, cython.integral myi = 7):
    """
    >>> func_with_object(1)
    long long
    1 7
    >>> func_with_object(1, 3)
    long long
    1 3
    >>> func_with_object['int', 'int'](1, 3)
    int int
    1 3
    >>> func_with_object(1j, 3)
    Python object long
    1j 3
    >>> func_with_object('abc', 3)
    Python object long
    abc 3
    >>> func_with_object(ExtClassA(), 3)
    xxxlast long
    ExtClassA 3
    >>> func_with_object(ExtClassB(), 3)
    aaafirst long
    ExtClassB 3
    >>> func_with_object['object', 'long'](ExtClassA(), 3)
    Python object long
    ExtClassA 3
    >>> func_with_object['object', 'long'](ExtClassB(), 3)
    Python object long
    ExtClassB 3
    """
    print cython.typeof(obj), cython.typeof(myi)
    print obj, myi



def args_kwargs(fused_t obj, cython.floating myf = 1.2, *args, **kwargs):
    """
    >>> args_kwargs("foo")
    str object double
    foo 1.20 5.60 () {}

    >>> args_kwargs("eggs", f, 1, 2, [], d={})
    str object double
    eggs 5.60 5.60 (1, 2, []) {'d': {}}

    >>> args_kwargs[str, float]("eggs", f, 1, 2, [], d={})
    str object float
    eggs 5.60 5.60 (1, 2, []) {'d': {}}

    """
    print cython.typeof(obj), cython.typeof(myf)
    print obj, "%.2f" % myf, "%.2f" % f, args, kwargs


class BaseClass(object):
    """
    Test fused class/static/normal methods and super() without args
    """

    @staticmethod
    def mystaticmethod(cython.integral arg1):
        print cython.typeof(arg1), arg1

    @classmethod
    def myclassmethod(cls, cython.integral arg1):
        print cls, cython.typeof(arg1), arg1

    def normalmethod(self, cython.integral arg1):
        print self, cython.typeof(arg1), arg1

    def __repr__(self):
        return "<%s.%s object>" % (__name__, type(self).__name__)

class SubClass(BaseClass):

    @staticmethod
    def mystaticmethod(self, cython.integral arg1):
        print cython.typeof(arg1), arg1
        super().mystaticmethod(arg1 + 1)

    @classmethod
    def myclassmethod(cls, cython.integral arg1):
        print cls, cython.typeof(arg1), arg1
        super().myclassmethod(arg1 + 1)

    def normalmethod(self, cython.integral arg1):
        print self, cython.typeof(arg1), arg1
        super().normalmethod(arg1 + 1)

class SubSubClass(SubClass):
    pass

def test_fused_def_super():
    """
    >>> test_fused_def_super()
    long 10
    long 11
    long 11
    long 12
    short 12
    long 13
    short 13
    long 14
    <class 'fused_def.SubClass'> long 14
    <class 'fused_def.SubClass'> long 15
    <class 'fused_def.SubClass'> long 15
    <class 'fused_def.SubClass'> long 16
    <class 'fused_def.SubClass'> short 16
    <class 'fused_def.SubClass'> long 17
    <class 'fused_def.SubClass'> short 17
    <class 'fused_def.SubClass'> long 18
    <fused_def.SubClass object> long 18
    <fused_def.SubClass object> long 19
    <fused_def.SubClass object> long 19
    <fused_def.SubClass object> long 20
    <fused_def.SubClass object> short 20
    <fused_def.SubClass object> long 21
    <fused_def.SubClass object> short 21
    <fused_def.SubClass object> long 22
    """
    obj = SubClass()
    cls = SubClass

    obj.mystaticmethod(obj, 10)
    cls.mystaticmethod(obj, 11)
    obj.mystaticmethod[cy.short](obj, 12)
    cls.mystaticmethod[cy.short](obj, 13)

    obj.myclassmethod(14)
    cls.myclassmethod(15)
    obj.myclassmethod[cy.short](16)
    cls.myclassmethod[cy.short](17)

    obj.normalmethod(18)
    cls.normalmethod(obj, 19)
    obj.normalmethod[cy.short](20)
    cls.normalmethod[cy.short](obj, 21)

def test_fused_def_classmethod():
    """
    >>> test_fused_def_classmethod()
    <class 'fused_def.SubSubClass'> long 10
    <class 'fused_def.SubSubClass'> long 11
    <class 'fused_def.SubSubClass'> long 11
    <class 'fused_def.SubSubClass'> long 12
    <class 'fused_def.SubSubClass'> short 12
    <class 'fused_def.SubSubClass'> long 13
    <class 'fused_def.SubSubClass'> short 13
    <class 'fused_def.SubSubClass'> long 14
    """
    SubSubClass().myclassmethod(10)
    SubSubClass.myclassmethod(11)

    SubSubClass().myclassmethod[cy.short](12)
    SubSubClass.myclassmethod[cy.short](13)

cdef class CBaseClass(object):
    """
    Test fused def and cpdef methods in cdef classes.

    >>> import cython as cy
    >>> obj = CBaseClass()
    >>> cls = CBaseClass

    >>> obj.mystaticmethod(10)
    long 10
    >>> obj.mystaticmethod[cy.short](10)
    short 10
    >>> cls.mystaticmethod(10)
    long 10
    >>> cls.mystaticmethod[cy.short](10)
    short 10

    >>> obj.myclassmethod(10)
    CBaseClass long 10
    >>> obj.myclassmethod[cy.short](10)
    CBaseClass short 10
    >>> cls.myclassmethod(10)
    CBaseClass long 10
    >>> cls.myclassmethod[cy.short](10)
    CBaseClass short 10

    >>> obj.normalmethod(10, 11, 12)
    <fused_def.CBaseClass object> long 10 11 12
    >>> obj.normalmethod[cy.short](10, 11, 12)
    <fused_def.CBaseClass object> short 10 11 12
    >>> cls.normalmethod(obj, 10, 11, 12)
    <fused_def.CBaseClass object> long 10 11 12
    >>> cls.normalmethod[cy.short](obj, 10, 11, 12)
    <fused_def.CBaseClass object> short 10 11 12

    >>> obj.cpdefmethod(10)
    <fused_def.CBaseClass object> long 10
    >>> obj.cpdefmethod[cy.short](10)
    <fused_def.CBaseClass object> short 10
    >>> cls.cpdefmethod(obj, 10)
    <fused_def.CBaseClass object> long 10
    >>> cls.cpdefmethod[cy.short](obj, 10)
    <fused_def.CBaseClass object> short 10
    """

    @staticmethod
    def mystaticmethod(cython.integral arg1):
        print cython.typeof(arg1), arg1

    @classmethod
    def myclassmethod(cls, cython.integral arg1):
        print cls.__name__, cython.typeof(arg1), arg1

    def normalmethod(self, cython.integral arg1, arg2, arg3):
        print self, cython.typeof(arg1), arg1, arg2, arg3

    cpdef cpdefmethod(self, cython.integral arg1):
        print self, cython.typeof(arg1), arg1

    def __repr__(self):
        return "<%s.%s object>" % (__name__, type(self).__name__)

def getcode(func):
    return getattr(func, '__code__', None) or func.func_code

def test_code_object(cython.floating dummy = 2.0):
    """
    A test for default arguments is in cyfunction_defaults

    >>> getcode(test_code_object) is getcode(test_code_object[float])
    True
    """

def create_dec(value):
    def dec(f):
        if not hasattr(f, 'order'):
            f.order = []
        f.order.append(value)
        return f
    return dec

@create_dec(1)
@create_dec(2)
@create_dec(3)
def test_decorators(cython.floating arg):
    """
    >>> test_decorators.order
    [3, 2, 1]
    """
Cython-0.26.1/tests/run/control_flow_loop.pyx
# mode: run
# tag: forin, control-flow, werror

def for_in_break(LL, p=bool):
    """
    >>> for_in_break([[1,2,3], [4,5,6]])
    True
    >>> for_in_break([[1,2,3], [4,5,0]])
    False
    >>> for_in_break([[1,2,3], [0,4,5]])
    False
    >>> for_in_break([[1,2,3], [0,4,5], [6,7,8]])
    False

    >>> def collect(x):
    ...     v.append(x)
    ...     return x

    >>> v = []
    >>> for_in_break([[1,2,3], [4,5,6]], p=collect)
    True
    >>> v
    [1, 2, 3, 4, 5, 6]

    >>> v = []
    >>> for_in_break([[1,2,3], [4,5,0]], p=collect)
    False
    >>> v
    [1, 2, 3, 4, 5, 0]

    >>> v = []
    >>> for_in_break([[1,2,3], [0,4,5]], p=collect)
    False
    >>> v
    [1, 2, 3, 0]

    >>> v = []
    >>> for_in_break([[1,2,3], [0,4,5], [6,7,8]], p=collect)
    False
    >>> v
    [1, 2, 3, 0]
    """
    result = 'NOK'
    # implements the builtin all()
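    # The inner for/else relies on loop-else semantics: "else" runs only when
    # the inner loop finished without "break", and its "continue" then skips
    # the outer "break".  The outer "else" therefore runs only if no falsy
    # element was found anywhere.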
    for L in LL:
        for x in L:
            if not p(x):
                result = False
                break
        else:
            continue
        break
    else:
        result = True
    return result
Cython-0.26.1/tests/run/cunion.pyx
cdef union Spam:
    int i
    char c
    float *p[42]

cdef Spam spam, ham

cdef void eggs_i(Spam s):
    cdef int j
    j = s.i
    s.i = j

cdef void eggs_c(Spam s):
    cdef char c
    c = s.c
    s.c = c

cdef void eggs_p(Spam s):
    cdef float *p
    p = s.p[0]
    s.p[0] = p

spam = ham


def test_i():
    """
    >>> test_i()
    """
    spam.i = 1
    eggs_i(spam)


def test_c():
    """
    >>> test_c()
    """
    spam.c = c'a'
    eggs_c(spam)


def test_p():
    """
    >>> test_p()
    """
    cdef float f
    spam.p[0] = &f
    eggs_p(spam)


cdef union AllCharptr:
    char* s1
    char* s2
    char* s3


def test_charptr_to_py():
    """
    >>> result = test_charptr_to_py()
    >>> len(result)
    3
    >>> result['s1'] == b'abc'
    True
    >>> result['s2'] == b'abc'
    True
    >>> result['s3'] == b'abc'
    True
    """
    cdef AllCharptr u
    u.s1 = b"abc"
    return u


cdef union SafeMix:
    char c
    unsigned char uc
    signed char sc
    short w
    int i
    long l
    size_t z
    float f
    double d


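# As the doctests below show, converting a union to Python yields a dict with
# one entry per member, while converting from Python accepts a dict that sets
# at most one member and raises ValueError when more than one is given.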
def test_safe_type_mix_from_to_py(v):
    """
    >>> test_safe_type_mix_from_to_py({'l': 32, 'c': 32})
    Traceback (most recent call last):
    ValueError: More than one union attribute passed: 'c' and 'l'

    >>> result = test_safe_type_mix_from_to_py({'c': 32})
    >>> sorted(result)
    ['c', 'd', 'f', 'i', 'l', 'sc', 'uc', 'w', 'z']
    >>> result['c']
    32
    >>> result['z'] != 0
    True

    >>> result = test_safe_type_mix_from_to_py({'uc': 32})
    >>> len(result)
    9
    >>> result['uc']
    32

    >>> result = test_safe_type_mix_from_to_py({'l': 100})
    >>> result['l']
    100

    >>> result = test_safe_type_mix_from_to_py({'z': 0})
    >>> result['z']
    0
    >>> result['i']
    0
    >>> result['l']
    0

    >>> result = test_safe_type_mix_from_to_py({'d': 2**52 - 1})
    >>> result['d']
    4503599627370495.0
    >>> result['z'] != 0
    True
    """
    cdef SafeMix u = v
    return u
Cython-0.26.1/tests/run/arithmetic_analyse_types_helper.h
/* A set of mutually incompatible return types. */

struct short_return { char *msg; };
struct int_return { char *msg; };
struct longlong_return { char *msg; };

/* A set of overloaded methods. */

short_return f(short arg) {
    short_return val;
    arg++;
    val.msg = (char*)"short called";
    return val;
}

int_return f(int arg) {
    int_return val;
    arg++;
    val.msg = (char*)"int called";
    return val;
}

longlong_return f(long long arg) {
    longlong_return val;
    arg++;
    val.msg = (char*)"long long called";
    return val;
}
 
 
Cython-0.26.1/tests/run/closure_class_T596.pyx
# mode: run
# tag: closures
# ticket: 596

def simple(a, b):
    """
    >>> kls = simple(1, 2)
    >>> kls().result()
    3
    """
    class Foo:
        def result(self):
            return a + b
    return Foo

def nested_classes(a, b):
    """
    >>> kls = nested_classes(1, 2)
    >>> kls().result(-3)
    0
    """
    class Foo:
        class Bar:
            def result(self, c):
                return a + b + c
    return Foo.Bar

def staff(a, b):
    """
    >>> kls = staff(1, 2)
    >>> kls.static()
    (1, 2)
    >>> kls.klass()
    ('Foo', 1, 2)
    >>> obj = kls()
    >>> obj.member()
    (1, 2)
    """
    class Foo:
        def member(self):
            return a, b
        @staticmethod
        def static():
            return a, b
        @classmethod
        def klass(cls):
            return cls.__name__, a, b
    return Foo

def nested2(a):
    """
    >>> obj = nested2(1)
    >>> f = obj.run(2)
    >>> f()
    3
    """
    class Foo:
        def run(self, b):
            def calc():
                return a + b
            return calc
    return Foo()
Cython-0.26.1/tests/run/wundram1.pyx
"""
>>> x == 5 or repr(x)
True
"""


cdef unsigned int ui
ui = 5
x = ui
Cython-0.26.1/tests/run/ext_attribute_cache.pyx
# mode: run
# tag: tpflags, type_version_tag

cimport cython


cdef extern from *:
    unsigned long PY_VERSION_HEX
    unsigned long Py_TPFLAGS_HAVE_VERSION_TAG
    ctypedef struct PyTypeObject:
        unsigned long tp_flags


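# test_flag() reports whether a type sets Py_TPFLAGS_HAVE_VERSION_TAG in its
# tp_flags, i.e. whether CPython may cache attribute lookups on it via the type
# version tag; the cython.type_version_tag directive used below toggles this.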
def test_flag(t):
    return ((<PyTypeObject*>t).tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG) != 0


cdef class ImplicitAttrCache(object):
    """
    >>> flag = test_flag(ImplicitAttrCache)
    >>> print(flag)
    True
    """
    cdef public int x
    cdef object y


@cython.type_version_tag(True)
cdef class ExplicitAttrCache(object):
    """
    >>> flag = test_flag(ImplicitAttrCache)
    >>> print(flag)
    True
    """
    cdef public int x
    cdef object y


@cython.type_version_tag(False)
cdef class NoAttrCache(object):
    """
    >>> test_flag(NoAttrCache)
    False
    """
    cdef public int x
    cdef object y

Cython-0.26.1/tests/run/varargcall.pyx
cdef grail(const char *blarg, ...):
    pass


def swallow():
    """
    >>> swallow()
    """
    grail("spam")
    grail("spam", 42)
    grail("spam", b"abc")
    grail("spam", "abc")
Cython-0.26.1/tests/run/and.pyx
a,b = 'a *','b *' # use non-interned strings

def and2_assign(a,b):
    """
    >>> a,b = 'a *','b *' # use non-interned strings
    >>> and2_assign(2,3) == (2 and 3)
    True
    >>> and2_assign('a', 'b') == ('a' and 'b')
    True
    >>> and2_assign(a, b) == (a and b)
    True
    """
    c = a and b
    return c

def and2(a,b):
    """
    >>> and2(2,3) == (2 and 3)
    True
    >>> and2(0,2) == (0 and 2)
    True
    >>> and2('a', 'b') == ('a' and 'b')
    True
    >>> and2(a, b) == (a and b)
    True
    >>> and2('', 'b') == ('' and 'b')
    True
    >>> and2([], [1]) == ([] and [1])
    True
    >>> and2([], [a]) == ([] and [a])
    True
    """
    return a and b

def and3(a,b,c):
    """
    >>> and3(0,1,2) == (0 and 1 and 2)
    True
    >>> and3([],(),[1]) == ([] and () and [1])
    True
    """
    d = a and b and c
    return d

def and2_no_result(a,b):
    """
    >>> and2_no_result(2,3)
    >>> and2_no_result(0,2)
    >>> and2_no_result('a','b')
    >>> and2_no_result(a,b)
    >>> a and b
    'b *'
    """
    a and b

def and2_literal():
    """
    >>> and2_literal()
    5
    """
    return True and 5

def c_int_results(int x):
    """
    >>> c_int_results(7)
    (0, 0)
    >>> c_int_results(5)
    (1, 1)
    """
    cdef int expr1, expr2, r1, r2

    expr1 = x == 5
    expr2 = 1
    r1 = expr1 and expr2

    r2 = (x==5) and 1
    return r1, r2
Cython-0.26.1/tests/run/ptrdiff_t.pyx
from cython cimport typeof

def test(ptrdiff_t i):
    """
    >>> int(test(0))
    0
    >>> int(test(1))
    1
    >>> int(test(2))
    2
    >>> int(test(-1))
    -1
    >>> int(test(-2))
    -2
    >>> int(test((1<<31)-1))
    2147483647
    """
    return i

cdef class A:
    """
    >>> try: test(1<<200)
    ... except (OverflowError, TypeError): print("ERROR")
    ERROR

    >>> a = A(1,2)
    >>> a.a == 1
    True
    >>> a.b == 2
    True
    >>> print(a.foo(5))
    5
    >>> try: a.foo(1<<200)
    ... except (OverflowError, TypeError): print("ERROR")
    ERROR
    """
    cdef public ptrdiff_t a
    cdef readonly ptrdiff_t b

    def __init__(self, ptrdiff_t a, object b):
        self.a = a
        self.b = b

    cpdef ptrdiff_t foo(self, ptrdiff_t x):
        cdef object o = x
        return o

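# Subtracting two pointers yields a ptrdiff_t, and adding a ptrdiff_t back to a
# pointer yields the original pointer type, which is what test_types() asserts.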
def test_types():
    """
    >>> test_types()
    """
    cdef int a = 1, b = 2
    assert typeof(&a - &b) == "ptrdiff_t", typeof(&a - &b)
    assert typeof((&a - &b) + 1) == "ptrdiff_t", typeof((&a - &b) + 1)
    assert typeof(&a + (&b - &a)) == "int *", typeof(&a + (&b - &a))
Cython-0.26.1/tests/run/cdef_classmethod.pyx
cimport cython

cdef class cclass:

    @classmethod
    def test0(cls):
        """
        >>> cclass.test0()
        'type object'
        """
        return cython.typeof(cls)

    @classmethod
    def test0_args(*args):
        """
        >>> cclass.test0_args(1,2,3)
        ('Python object', (1, 2, 3))
        """
        return cython.typeof(args[0]), args[1:]

    @classmethod
    def test1(cls, arg):
        """
        >>> cclass.test1(1)
        ('type object', 1)
        """
        return cython.typeof(cls), arg

    @classmethod
    def test2(cls, arg1, arg2):
        """
        >>> cclass.test2(1,2)
        ('type object', 1, 2)
        """
        return cython.typeof(cls), arg1, arg2

    @classmethod
    def test1_args(cls, *args):
        """
        >>> cclass.test1_args(1,2,3)
        ('type object', (1, 2, 3))
        """
        return cython.typeof(cls), args

    @classmethod
    def test2_args(cls, arg, *args):
        """
        >>> cclass.test2_args(1,2,3)
        ('type object', 1, (2, 3))
        """
        return cython.typeof(cls), arg, args

    @classmethod
    def test0_args_kwargs(*args, **kwargs):
        """
        >>> cclass.test0_args_kwargs(1,2,3)
        ('Python object', (1, 2, 3), {})
        """
        return cython.typeof(args[0]), args[1:], kwargs

    @classmethod
    def test1_args_kwargs(cls, *args, **kwargs):
        """
        >>> cclass.test1_args_kwargs(1,2,3)
        ('type object', (1, 2, 3), {})
        """
        return cython.typeof(cls), args, kwargs

    @classmethod
    def test2_args_kwargs(cls, arg, *args, **kwargs):
        """
        >>> cclass.test2_args_kwargs(1,2,3)
        ('type object', 1, (2, 3), {})
        """
        return cython.typeof(cls), arg, args, kwargs
Cython-0.26.1/tests/run/forfrom.pyx
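# Exercises the legacy Pyrex-style integer loop "for i from 0 <= i < 10", which
# is equivalent to "for i in range(10)" with a C loop variable.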
def for_else():
    """
    >>> for_else()
    30
    >>> print( int_comp() )
    00*01*02
    """
    cdef int i, j=0, k=2
    for i from 0 <= i < 10:
        j += k
    else:
        k = j+10
    return k

def int_comp():
    cdef int i
    return u'*'.join(tuple([ u"%02d" % i
                             for i from 0 <= i < 3 ]))
Cython-0.26.1/tests/run/cpp_exceptions_nogil.pyx
# mode: run
# tag: cpp, werror

cdef int raise_TypeError() except *:
    raise TypeError("custom")

cdef extern from "cpp_exceptions_nogil_helper.h" nogil:
    cdef void foo "foo"(int i) except +
    cdef void bar "foo"(int i) except +ValueError
    cdef void spam"foo"(int i) except +raise_TypeError

cdef int foo_nogil(int i) nogil except *:
    foo(i)

def test_foo_nogil():
    """
    >>> test_foo_nogil()
    """
    foo_nogil(0)
    with nogil:
        foo_nogil(0)

def test_foo():
    """
    >>> test_foo()
    """
    #
    foo(0)
    foo(0)
    with nogil:
        foo(0)
        foo(0)
    #
    try:
        with nogil:
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
        with nogil:
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
        with nogil:
            foo(1)
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
            foo(1)
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
        try:
            with nogil:
                foo(1)
        except:
            with nogil:
                foo(1)
        finally:
            with nogil:
                foo(0)
            pass
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
        try:
            with nogil:
                foo(1)
        except:
            with nogil:
                foo(1)
        finally:
            with nogil:
                foo(1)
            pass
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #

def test_bar():
    """
    >>> test_bar()
    """
    #
    bar(0)
    bar(0)
    with nogil:
        bar(0)
        bar(0)
    #
    try:
        with nogil:
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
        with nogil:
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
        with nogil:
            bar(1)
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
            bar(1)
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
        try:
            with nogil:
                bar(1)
        except ValueError:
            with nogil:
                bar(1)
        finally:
            with nogil:
                bar(0)
            pass
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
        try:
            with nogil:
                bar(1)
        except ValueError:
            with nogil:
                bar(1)
        finally:
            with nogil:
                bar(1)
            pass
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #

def test_spam():
    """
    >>> test_spam()
    """
    #
    spam(0)
    spam(0)
    with nogil:
        spam(0)
        spam(0)
    #
    try:
        with nogil:
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
        with nogil:
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
        with nogil:
            spam(1)
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
            spam(1)
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
        try:
            with nogil:
                spam(1)
        except TypeError:
            with nogil:
                spam(1)
        finally:
            with nogil:
                spam(0)
            pass
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
        try:
            with nogil:
                spam(1)
        except TypeError:
            with nogil:
                spam(1)
        finally:
            with nogil:
                spam(1)
            pass
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
Cython-0.26.1/tests/run/always_allow_keywords_T295.pyx
# ticket: 295

cimport cython


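# Without always_allow_keywords(True), Cython may pick faster calling
# conventions (such as METH_O) for simple signatures, and those reject keyword
# arguments -- hence the "takes no keyword arguments" TypeErrors below.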
def func1(arg):
    """
    >>> func1(None)
    >>> func1(*[None])
    >>> func1(arg=None)
    Traceback (most recent call last):
    ...
    TypeError: func1() takes no keyword arguments
    """
    pass

@cython.always_allow_keywords(False)
def func2(arg):
    """
    >>> func2(None)
    >>> func2(*[None])
    >>> func2(arg=None)
    Traceback (most recent call last):
    ...
    TypeError: func2() takes no keyword arguments
    """
    pass

@cython.always_allow_keywords(True)
def func3(arg):
    """
    >>> func3(None)
    >>> func3(*[None])
    >>> func3(arg=None)
    """
    pass

cdef class A:
    """
    >>> A().meth1(None)
    >>> A().meth1(*[None])
    >>> A().meth1(arg=None)
    Traceback (most recent call last):
    ...
    TypeError: meth1() takes no keyword arguments
    >>> A().meth2(None)
    >>> A().meth2(*[None])
    >>> A().meth2(arg=None)
    Traceback (most recent call last):
    ...
    TypeError: meth2() takes no keyword arguments
    >>> A().meth3(None)
    >>> A().meth3(*[None])
    >>> A().meth3(arg=None)
    """

    def meth1(self, arg):
        pass

    @cython.always_allow_keywords(False)
    def meth2(self, arg):
        pass

    @cython.always_allow_keywords(True)
    def meth3(self, arg):
        pass
Cython-0.26.1/tests/run/internal_cdef_class.pyx
cimport cython


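# cython.internal keeps the extension type out of the module namespace: code in
# this module can still use it, but Python-level lookups of the name fail, as
# the NameError expected in test() below shows.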
@cython.internal
cdef class InternalType:
    """
    NOTE: this doesn't fail because it is never tested !
    >>> i = InternalType
    """

cdef class PublicType:
    """
    >>> p = PublicType
    """

def test():
    """
    >>> p,i = test()

    >>> p = PublicType

    >>> i = InternalType         # doctest: +ELLIPSIS
    Traceback (most recent call last):
    NameError: ...name 'InternalType' is not defined
    """
    p = PublicType
    i = InternalType
    return p,i
Cython-0.26.1/tests/run/carrays.pyx
def test1():
    """
    >>> test1()
    2
    """
    cdef int[2][2] x
    x[0][0] = 1
    x[0][1] = 2
    x[1][0] = 3
    x[1][1] = 4
    return f(x)[1]

cdef int* f(int x[2][2]):
    return x[0]


def test2():
    """
    >>> test2()
    0
    """
    cdef int[5] a1
    cdef int a2[2+3]
    return sizeof(a1) - sizeof(a2)

cdef enum:
    MY_SIZE_A = 2
    MY_SIZE_B = 3

def test3():
    """
    >>> test3()
    (2, 3)
    """
    cdef int a[MY_SIZE_A]
    cdef int b[MY_SIZE_B]
    return sizeof(a)/sizeof(int), sizeof(b)/sizeof(int)


from libc cimport limits

def test_cimported_attribute():
    """
    >>> test_cimported_attribute()
    True
    """
    cdef char a[limits.CHAR_MAX]
    return sizeof(a) >= 127
Cython-0.26.1/tests/run/r_pythonapi.pyx
__doc__ = u"""
    >>> x = spam()
    >>> print(repr(x))
    u'Ftang\\x00Ftang!'
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '")

cdef extern from "string.h":
    void memcpy(char *d, char *s, int n)

from cpython cimport PyUnicode_DecodeUTF8

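# Decoding with an explicit byte length (sizeof(buf)) instead of relying on NUL
# termination preserves the embedded '\0' byte, as the expected output in
# __doc__ shows.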
def spam():
    cdef char[12] buf
    memcpy(buf, "Ftang\0Ftang!", sizeof(buf))
    return PyUnicode_DecodeUTF8(buf, sizeof(buf), NULL)
Cython-0.26.1/tests/run/import_error_T734.py
# mode: run
# ticket: 734

def test_import_error():
    """
    >>> test_import_error()   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ImportError: cannot import name ...xxx...
    """
    from sys import xxx
Cython-0.26.1/tests/run/exttype_freelist.pyx
# mode: run
# tag: freelist, cyclicgc

cimport cython

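# The cython.freelist(N) directive keeps up to N deallocated instances of the
# type on a free list so later instantiations can reuse them instead of going
# through the allocator; the doctests below repeatedly create and drop
# instances (and Python subclasses) to exercise that path.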
@cython.freelist(4)
cdef class ExtTypeNoGC:
    """
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()
    >>> obj = ExtTypeNoGC()

    >>> class PyClass(ExtTypeNoGC): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeNoGC): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """


cdef class ExtSubTypeNoGC(ExtTypeNoGC):
    """
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()
    >>> obj = ExtSubTypeNoGC()

    >>> class PyClass(ExtSubTypeNoGC): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtSubTypeNoGC): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef bytes x


@cython.freelist(4)
cdef class ExtTypeWithGC:
    """
    >>> obj = ExtTypeWithGC()
    >>> obj = ExtTypeWithGC()
    >>> obj = ExtTypeWithGC()
    >>> obj = ExtTypeWithGC()
    >>> obj = ExtTypeWithGC()
    >>> obj = ExtTypeWithGC()

    >>> class PyClass(ExtTypeWithGC): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeWithGC): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef attribute

    def __init__(self):
        self.attribute = object()


def tpnew_ExtTypeWithGC():
    """
    >>> obj = tpnew_ExtTypeWithGC()
    >>> obj = tpnew_ExtTypeWithGC()
    >>> obj = tpnew_ExtTypeWithGC()
    >>> obj = tpnew_ExtTypeWithGC()
    >>> obj = tpnew_ExtTypeWithGC()
    >>> obj = tpnew_ExtTypeWithGC()
    """
    return ExtTypeWithGC.__new__(ExtTypeWithGC)


cdef class ExtSubType(ExtTypeWithGC):
    """
    >>> obj = ExtSubType()
    >>> obj = ExtSubType()
    >>> obj = ExtSubType()
    >>> obj = ExtSubType()
    >>> obj = ExtSubType()
    >>> obj = ExtSubType()

    >>> class PyClass(ExtSubType): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtSubType): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """


cdef class LargerExtSubType(ExtSubType):
    """
    >>> obj = LargerExtSubType()
    >>> obj = LargerExtSubType()
    >>> obj = LargerExtSubType()
    >>> obj = LargerExtSubType()
    >>> obj = LargerExtSubType()
    >>> obj = LargerExtSubType()

    >>> class PyClass(LargerExtSubType): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(LargerExtSubType): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef attribute2

    def __cinit__(self):
        self.attribute2 = object()


@cython.freelist(4)
cdef class ExtTypeWithCAttr:
    """
    >>> obj = ExtTypeWithCAttr()
    >>> obj = ExtTypeWithCAttr()
    >>> obj = ExtTypeWithCAttr()
    >>> obj = ExtTypeWithCAttr()
    >>> obj = ExtTypeWithCAttr()
    >>> obj = ExtTypeWithCAttr()

    >>> class PyClass(ExtTypeWithCAttr): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeWithCAttr): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef int cattr

    def __cinit__(self):
        assert self.cattr == 0
        self.cattr = 1


cdef class ExtSubTypeWithCAttr(ExtTypeWithCAttr):
    """
    >>> obj = ExtSubTypeWithCAttr()
    >>> obj = ExtSubTypeWithCAttr()
    >>> obj = ExtSubTypeWithCAttr()
    >>> obj = ExtSubTypeWithCAttr()
    >>> obj = ExtSubTypeWithCAttr()
    >>> obj = ExtSubTypeWithCAttr()

    >>> class PyClass(ExtSubTypeWithCAttr): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()

    >>> class PyClass(ExtSubTypeWithCAttr): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    """


cdef class ExtTypeWithCAttrNoFreelist:
    """
    For comparison with normal CPython instantiation.

    >>> obj = ExtTypeWithCAttrNoFreelist()
    >>> obj = ExtTypeWithCAttrNoFreelist()
    >>> obj = ExtTypeWithCAttrNoFreelist()
    >>> obj = ExtTypeWithCAttrNoFreelist()
    >>> obj = ExtTypeWithCAttrNoFreelist()
    >>> obj = ExtTypeWithCAttrNoFreelist()

    >>> class PyClass(ExtTypeWithCAttrNoFreelist): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeWithCAttrNoFreelist): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef int cattr

    def __cinit__(self):
        assert self.cattr == 0
        self.cattr = 1


@cython.freelist(4)
cdef class ExtTypeWithCMethods:
    """
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)

    >>> class PyClass(ExtTypeWithCMethods): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = PyClass()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeWithCMethods): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = PyClass()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef int cattr

    def __cinit__(self):
        assert self.cattr == 0
        self.cattr = 1

    cdef int get_cattr(self):
        return self.cattr

    cdef set_cattr(self, int value):
        self.cattr = value


def test_cmethods(ExtTypeWithCMethods obj not None):
    x = obj.get_cattr()
    obj.set_cattr(2)
    return x, obj.get_cattr()


cdef class ExtSubTypeWithCMethods(ExtTypeWithCMethods):
    """
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)

    >>> class PyClass(ExtSubTypeWithCMethods): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtSubTypeWithCMethods): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """


cdef class ExtSubTypeWithMoreCMethods(ExtSubTypeWithCMethods):
    """
    >>> obj = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj)
    (2, 3, 3)
    >>> obj = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)
    >>> obj = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj)
    (2, 3, 3)
    >>> obj2 = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj2)
    (2, 3, 3)
    >>> obj2 = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj2)
    (1, 2)
    >>> obj = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj)
    (2, 3, 3)
    >>> obj2 = ExtTypeWithCMethods()
    >>> test_cmethods(obj2)
    (1, 2)
    >>> obj = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj)
    (2, 3, 3)
    >>> obj2 = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj2)
    (1, 2)
    >>> obj = ExtSubTypeWithMoreCMethods()
    >>> test_more_cmethods(obj)
    (2, 3, 3)
    >>> obj2 = ExtSubTypeWithCMethods()
    >>> test_cmethods(obj2)
    (1, 2)
    >>> obj = ExtTypeWithCMethods()
    >>> test_cmethods(obj)
    (1, 2)

    >>> class PyClass(ExtSubTypeWithMoreCMethods): a = 1
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtSubTypeWithMoreCMethods): __slots__ = ()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    def __cinit__(self):
        assert self.cattr == 1
        self.cattr = 2

    cdef int get_cattr2(self):
        return self.cattr

    cdef set_cattr2(self, int value):
        self.cattr = value


def test_more_cmethods(ExtSubTypeWithMoreCMethods obj not None):
    x = obj.get_cattr()
    assert obj.get_cattr2() == x
    obj.set_cattr2(2)
    assert obj.get_cattr2() == 2
    obj.set_cattr(3)
    return x, obj.get_cattr(), obj.get_cattr2()


@cython.freelist(4)
cdef class ExtTypeWithRefCycle:
    """
    >>> obj = first = ExtTypeWithRefCycle()
    >>> obj.attribute is None
    True
    >>> obj = ExtTypeWithRefCycle(obj)
    >>> obj.attribute is first
    True
    >>> obj = ExtTypeWithRefCycle(obj)
    >>> obj = ExtTypeWithRefCycle(obj)
    >>> obj = ExtTypeWithRefCycle(obj)
    >>> obj = ExtTypeWithRefCycle(obj)
    >>> obj.attribute is not None
    True
    >>> first.attribute = obj
    >>> del obj, first

    >>> class PyClass(ExtTypeWithRefCycle): a = 1
    >>> obj = PyClass()
    >>> obj.attribute = obj
    >>> obj.attribute = PyClass(obj)
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj

    >>> class PyClass(ExtTypeWithRefCycle): __slots__ = ()
    >>> obj = PyClass()
    >>> obj.attribute = obj
    >>> obj.attribute = PyClass(obj)
    >>> obj = PyClass()
    >>> obj = PyClass()
    >>> del PyClass, obj
    """
    cdef public attribute

    def __init__(self, obj=None):
        self.attribute = obj
Cython-0.26.1/tests/run/pep448_test_extcall.pyx
# mode: run
# tag: pep448

from __future__ import print_function

import sys

IS_PY3 = sys.version_info[0] >= 3

if IS_PY3:
    __doc__ = """
>>> def f(*, w): pass
>>> try: errors_call_no_args(f)
... except TypeError: pass
... else: print("FAILED!")

>>> def f(*, a, b, c, d, e): pass
>>> try: errors_call_no_args(f)
... except TypeError: pass
... else: print("FAILED!")

>>> def f(*, kw, b): pass
>>> try: errors_call_3args_2kwargs(f)
... except TypeError: pass
... else: print("FAILED!")

>>> def f(a, b=2, *, kw): pass
>>> try: errors_call_3args_1kwarg(f)
... except TypeError: pass
... else: print("FAILED!")

>>> def f(*, kw): pass
>>> try: errors_call_1arg_1kwarg(f)
... except TypeError: pass
... else: print("FAILED!")
"""

# test for method/function calls. adapted from CPython's "test_extcall.py".

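# sortdict() renders a dict with its keys sorted so that the doctest output
# does not depend on dictionary ordering.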
def sortdict(d):
    return '{%s}' % ', '.join(['%r: %r' % item for item in sorted(d.items())])

# We're going to use these types for extra testing

try:
    from collections import UserList, UserDict
except ImportError:
    from UserList import UserList
    from UserDict import UserDict


# We're defining four helper functions

def e(a,b):
    print(a, b)

def f(*a, **k):
    print(a, sortdict(k))

def g(x, *y, **z):
    print(x, y, sortdict(z))

def h(j=1, a=2, h=3):
    print(j, a, h)


# Argument list examples

def call_f_positional():
    """
    >>> call_f_positional()
    () {}
    (1,) {}
    (1, 2) {}
    (1, 2, 3) {}
    (1, 2, 3, 4, 5) {}
    (1, 2, 3, 4, 5) {}
    (1, 2, 3, 4, 5) {}
    (1, 2, 3, 4, 5) {}
    (1, 2, 3, 4, 5, 6, 7) {}
    (1, 2, 3, 4, 5, 6, 7) {}
    (1, 2, 3, 4, 5, 6, 7) {}
    (1, 2) {}
    """
    f()
    f(1)
    f(1, 2)
    f(1, 2, 3)
    f(1, 2, 3, *(4, 5))
    f(1, 2, 3, *[4, 5])
    f(*[1, 2, 3], 4, 5)
    f(1, 2, 3, *UserList([4, 5]))
    f(1, 2, 3, *[4, 5], *[6, 7])
    f(1, *[2, 3], 4, *[5, 6], 7)
    f(*UserList([1, 2]), *UserList([3, 4]), 5, *UserList([6, 7]))
    f(1, *[] or () and {}, *() and [], *{} or [] and (), *{} and [] or (), 2)


# Here we add keyword arguments

def call_f_kwargs():
    """
    >>> call_f_kwargs()
    (1, 2, 3) {'a': 4, 'b': 5}
    (1, 2, 3, 4, 5) {'a': 6, 'b': 7}
    (1, 2, 3, 6, 7) {'a': 8, 'b': 9, 'x': 4, 'y': 5}
    (1, 2, 3, 4, 5) {'a': 6, 'b': 7, 'c': 8}
    (1, 2, 3, 4, 5) {'a': 8, 'b': 9, 'x': 6, 'y': 7}
    (1, 2, 3) {'a': 4, 'b': 5}
    (1, 2, 3, 4, 5) {'a': 6, 'b': 7}
    (1, 2, 3, 6, 7) {'a': 8, 'b': 9, 'x': 4, 'y': 5}
    (1, 2, 3, 4, 5) {'a': 8, 'b': 9, 'x': 6, 'y': 7}
    (1, 2) {'a': 3}
    """

    f(1, 2, 3, **{'a':4, 'b':5})
    f(1, 2, 3, *[4, 5], **{'a':6, 'b':7})
    f(1, 2, 3, x=4, y=5, *(6, 7), **{'a':8, 'b': 9})
    f(1, 2, 3, *[4, 5], **{'c': 8}, **{'a':6, 'b':7})
    f(1, 2, 3, *(4, 5), x=6, y=7, **{'a':8, 'b': 9})

    f(1, 2, 3, **UserDict(a=4, b=5))
    f(1, 2, 3, *(4, 5), **UserDict(a=6, b=7))
    f(1, 2, 3, x=4, y=5, *(6, 7), **UserDict(a=8, b=9))
    f(1, 2, 3, *(4, 5), x=6, y=7, **UserDict(a=8, b=9))

    f(1, *[] or () and {}, *() and [], *{} or [] and (), *{} and [] or (), 2,
      **{} and {} or {}, **{} or {} and {}, **{} and {}, a=3)


# Examples with invalid arguments (TypeErrors). We're also testing the function
# names in the exception messages.
#
# Verify clearing of SF bug #733667

def errors_f1():
    """
    >>> errors_f1()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
        ...
    TypeError: ...got multiple values for keyword argument 'a'
    """
    f(1, 2, **{'a': -1, 'b': 5}, **{'a': 4, 'c': 6})


def errors_f2():
    """
    >>> errors_f2()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
        ...
    TypeError: ...multiple values for keyword argument 'a'
    """
    f(1, 2, **{'a': -1, 'b': 5}, a=4, c=6)


def errors_e1():
    """
    >>> try: errors_e1()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    e(c=4)


def errors_e2():
    """
    >>> try: errors_e2()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    e(a=1, b=2, c=4)


def errors_g1():
    """
    >>> errors_g1()
    Traceback (most recent call last):
      ...
    TypeError: g() takes at least 1 positional argument (0 given)

    # TypeError: g() missing 1 required positional argument: 'x'
    """
    g()


def errors_g2():
    """
    >>> errors_g2()
    Traceback (most recent call last):
      ...
    TypeError: g() takes at least 1 positional argument (0 given)

    # TypeError: g() missing 1 required positional argument: 'x'
    """
    g(*())


def errors_g3():
    """
    >>> errors_g3()
    Traceback (most recent call last):
      ...
    TypeError: g() takes at least 1 positional argument (0 given)

    # TypeError: g() missing 1 required positional argument: 'x'
    """
    g(*(), **{})


def call_g_positional():
    """
    >>> call_g_positional()
    1 () {}
    1 (2,) {}
    1 (2, 3) {}
    1 (2, 3, 4, 5) {}
    """
    g(1)
    g(1, 2)
    g(1, 2, 3)
    g(1, 2, 3, *(4, 5))



def call_nonseq_positional1():
    """
    >>> call_nonseq_positional1()
    Traceback (most recent call last):
      ...
    TypeError: 'Nothing' object is not iterable

    # TypeError: g() argument after * must be a sequence, not Nothing
    """
    class Nothing(object): pass
    g(*Nothing())


def call_nonseq_positional2():
    """
    >>> call_nonseq_positional2()
    Traceback (most recent call last):
      ...
    TypeError: 'Nothing' object is not iterable

    # TypeError: g() argument after * must be a sequence, not Nothing
    """
    class Nothing(object):
        def __len__(self): return 5
    g(*Nothing())


def call_seqlike_positional1():
    """
    >>> call_seqlike_positional1()
    0 (1, 2) {}
    """
    class Nothing(object):
        def __len__(self): return 5
        def __getitem__(self, i):
            if i<3: return i
            else: raise IndexError(i)

    g(*Nothing())


def call_seqlike_positional2():
    """
    >>> call_seqlike_positional2()
    0 (1, 2, 3) {}
    """
    class Nothing:
        def __init__(self): self.c = 0
        def __iter__(self): return self
        def __next__(self):
            if self.c == 4:
                raise StopIteration
            c = self.c
            self.c += 1
            return c
        next = __next__

    g(*Nothing())


# Make sure that the function doesn't stomp the dictionary

def call_kwargs_unmodified1():
    """
    >>> call_kwargs_unmodified1()
    1 () {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    True
    """
    d = {'a': 1, 'b': 2, 'c': 3}
    d2 = d.copy()
    g(1, d=4, **d)
    return d == d2


# What about willful misconduct?

def call_kwargs_unmodified2():
    """
    >>> call_kwargs_unmodified2()
    {}
    """
    def saboteur(**kw):
        kw['x'] = 'm'
        return kw

    d = {}
    kw = saboteur(a=1, **d)
    return d


def errors_args_kwargs_overlap():
    """
    >>> errors_args_kwargs_overlap()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError: ...got multiple values for... argument 'x'
    """
    g(1, 2, 3, **{'x': 4, 'y': 5})


def errors_non_string_kwarg():
    """
    >>> errors_non_string_kwarg()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    """
    f(**{1:2})


def errors_unexpected_kwarg():
    """
    >>> errors_unexpected_kwarg()
    Traceback (most recent call last):
      ...
    TypeError: h() got an unexpected keyword argument 'e'
    """
    h(**{'e': 2})


def errors_call_nonseq():
    """
    >>> try: errors_call_nonseq()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    h(*h)


def errors_call_builtin_nonseq():
    """
    >>> try: errors_call_builtin_nonseq()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    dir(*h)


def errors_call_none_nonseq():
    """
    >>> try: errors_call_none_nonseq()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    None(*h)


def errors_call_nonmapping_kwargs():
    """
    >>> try: errors_call_nonmapping_kwargs()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    h(**h)


def errors_call_builtin_nonmapping_kwargs():
    """
    >>> try: errors_call_builtin_nonmapping_kwargs()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    dir(**h)


def errors_call_none_nonmapping_kwargs():
    """
    >>> try: errors_call_none_nonmapping_kwargs()
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    None(**h)


'''  # compile time error in Cython
def errors_call_builtin_duplicate_kwarg():
    """
    >>> errors_call_builtin_duplicate_kwarg()  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError: ...got multiple values for keyword argument 'b'
    """
    dir(b=1, **{'b': 1})
'''


# Another helper function

def f2(*a, **b):
    return a, b


def call_many_kwargs():
    """
    >>> call_many_kwargs()
    (3, 512, True)
    """
    d = {}
    for i in range(512):
        key = 'k%d' % i
        d[key] = i
    a, b = f2(1, *(2,3), **d)
    return len(a), len(b), b == d


def call_method(Foo):
    """
    >>> class Foo(object):
    ...     def method(self, arg1, arg2):
    ...         print(arg1+arg2)

    >>> call_method(Foo)
    3
    3
    5
    5
    """
    x = Foo()
    Foo.method(*(x, 1, 2))
    Foo.method(x, *(1, 2))
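    # On Py3, Foo.method is a plain function, so any object may be passed as
    # 'self'; Py2 unbound methods reject a non-Foo first argument, so we fake
    # the expected output there instead.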
    if sys.version_info[0] >= 3:
        Foo.method(*(1, 2, 3))
        Foo.method(1, *[2, 3])
    else:
        print(5)
        print(5)


# A PyCFunction that takes only positional parameters should allow an
# empty keyword dictionary to pass without a complaint, but raise a
# TypeError if the dictionary is not empty

def call_builtin_empty_dict():
    """
    >>> call_builtin_empty_dict()
    """
    silence = id(1, *{})
    silence = id(1, **{})


def call_builtin_nonempty_dict():
    """
    >>> call_builtin_nonempty_dict()
    Traceback (most recent call last):
      ...
    TypeError: id() takes no keyword arguments
    """
    return id(1, **{'foo': 1})


''' Cython: currently just passes empty kwargs into f() while CPython keeps the content

# A corner case of keyword dictionary items being deleted during
# the function call setup. See .

def call_kwargs_modified_while_building():
    """
    >>> call_kwargs_modified_while_building()
    1 2
    """
    class Name(str):
        def __eq__(self, other):
            try:
                 del x[self]
            except KeyError:
                 pass
            return str.__eq__(self, other)
        def __hash__(self):
            return str.__hash__(self)

    x = {Name("a"):1, Name("b"):2}
    def f(a, b):
        print(a,b)
    f(**x)
'''


# Too many arguments:

def errors_call_one_arg(f):
    """
    >>> def f(): pass
    >>> try: errors_call_one_arg(f)
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    f(1)

def errors_call_2args(f):
    """
    >>> def f(a): pass
    >>> try: errors_call_2args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    f(1, 2)

def errors_call_3args(f):
    """
    >>> def f(a, b=1): pass
    >>> try: errors_call_3args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    f(1, 2, 3)


def errors_call_1arg_1kwarg(f):
    # Py3 only
    f(1, kw=3)


def errors_call_3args_2kwargs(f):
    # Py3 only
    f(1, 2, 3, b=3, kw=3)


def errors_call_3args_1kwarg(f):
    # Py3 only
    f(2, 3, 4, kw=4)


# Too few and missing arguments:

def errors_call_no_args(f):
    """
    >>> def f(a): pass
    >>> try: errors_call_no_args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")

    >>> def f(a, b): pass
    >>> try: errors_call_no_args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")

    >>> def f(a, b, c): pass
    >>> try: errors_call_no_args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")

    >>> def f(a, b, c, d, e): pass
    >>> try: errors_call_no_args(f)
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    f()


def errors_call_one_missing_kwarg(f):
    """
    >>> def f(a, b=4, c=5, d=5): pass
    >>> try: errors_call_one_missing_kwarg(f)
    ... except TypeError: pass
    ... else: print("FAILED!")
    """
    f(c=12, b=9)
Cython-0.26.1/tests/run/cyfunction_METH_O_GH1728.pyx0000664000175000017500000000050013143605603022510 0ustar  stefanstefan00000000000000# cython: binding=True
# mode: run
# tag: cyfunction

cdef class TestMethodOneArg:
    def meth(self, arg):
        pass

def call_meth(x):
    """
    >>> call_meth(TestMethodOneArg())
    Traceback (most recent call last):
    ...
    TypeError: meth() takes exactly one argument (0 given)
    """
    return x.meth()
Cython-0.26.1/tests/run/cdef_opt.pyx0000664000175000017500000000104512542002467020125 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> a = A()
    >>> a.foo()
    (True, 'yo')
    >>> a.foo(False)
    (False, 'yo')
    >>> a.foo(10, 'yes')
    (True, 'yes')

"""

cdef class A:
    cpdef foo(self, bint a=True, b="yo"):
        return a, b

def call0():
    """
    >>> call0()
    (True, 'yo')
    """
    cdef A a = A()
    return a.foo()

def call1():
    """
    >>> call1()
    (False, 'yo')
    """
    cdef A a = A()
    return a.foo(False)

def call2():
    """
    >>> call2()
    (False, 'go')
    """
    cdef A a = A()
    return a.foo(False, "go")
Cython-0.26.1/tests/run/cross_closure_type_inference.pyx0000664000175000017500000000351112542002467024306 0ustar  stefanstefan00000000000000# mode: run
# tag: typeinference

cimport cython


def test_outer_inner_double():
    """
    >>> print(test_outer_inner_double())
    double
    """
    x = 1.0
    def inner():
        nonlocal x
        x = 2.0
    inner()
    assert x == 2.0, str(x)
    return cython.typeof(x)


def test_outer_inner_double_int():
    """
    >>> print(test_outer_inner_double_int())
    ('double', 'double')
    """
    x = 1.0
    y = 2
    def inner():
        nonlocal x, y
        x = 1
        y = 2.0
    inner()
    return cython.typeof(x), cython.typeof(y)


def test_outer_inner_pyarg():
    """
    >>> print(test_outer_inner_pyarg())
    2
    long
    """
    x = 1
    def inner(y):
        return x + y
    print inner(1)
    return cython.typeof(x)


def test_outer_inner_carg():
    """
    >>> print(test_outer_inner_carg())
    2.0
    long
    """
    x = 1
    def inner(double y):
        return x + y
    print inner(1)
    return cython.typeof(x)


def test_outer_inner_incompatible():
    """
    >>> print(test_outer_inner_incompatible())
    Python object
    """
    x = 1.0
    def inner():
        nonlocal x
        x = 'test'
    inner()
    return cython.typeof(x)


def test_outer_inner_ptr():
    """
    >>> print(test_outer_inner_ptr())
    double *
    """
    x = 1.0
    xptr_outer = &x
    def inner():
        nonlocal x
        x = 1
        xptr_inner = &x
        assert cython.typeof(xptr_inner) == cython.typeof(xptr_outer), (
            '%s != %s' % (cython.typeof(xptr_inner), cython.typeof(xptr_outer)))
    inner()
    return cython.typeof(xptr_outer)


def test_outer_inner2_double():
    """
    >>> print(test_outer_inner2_double())
    double
    """
    x = 1.0
    def inner1():
        nonlocal x
        x = 2
    def inner2():
        nonlocal x
        x = 3.0
    inner1()
    inner2()
    return cython.typeof(x)
Cython-0.26.1/tests/run/pure_mode_cmethod_inheritance_T583.pxd0000664000175000017500000000071112542002467025072 0ustar  stefanstefan00000000000000cdef class Base:
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class Derived(Base):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class DerivedDerived(Derived):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class Derived2(Base):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)
Cython-0.26.1/tests/run/libc_math.pyx0000664000175000017500000000154513023021033020252 0ustar  stefanstefan00000000000000# mode: run

from libc.math cimport (M_E, M_LOG2E, M_LOG10E, M_LN2, M_LN10, M_PI, M_PI_2,
        M_PI_4, M_1_PI, M_2_PI, M_2_SQRTPI, M_SQRT2, M_SQRT1_2)
from libc.math cimport (acos, asin, atan, atan2, cos, sin, tan, cosh, sinh,
        tanh, acosh, asinh, atanh, exp, log, log10, pow, sqrt)
cimport libc.math as libc_math


def test_pi():
    """
    >>> import math
    >>> test_pi() == math.pi
    True
    """
    return M_PI


def test_renamed_constants(math):
    """
    >>> import math
    >>> test_renamed_constants(math)
    """
    assert libc_math.M_E == libc_math.e == math.e
    assert libc_math.M_PI == libc_math.pi == math.pi


def test_sin(x):
    """
    >>> test_sin(0)
    0.0
    >>> from math import sin
    >>> [sin(k) == test_sin(k) for k in range(10)]
    [True, True, True, True, True, True, True, True, True, True]
    """
    return sin(x)
Cython-0.26.1/tests/run/starred_target_T664.pyx0000664000175000017500000000060012542002467022073 0ustar  stefanstefan00000000000000# ticket: 664

def assign():
    """
    >>> assign()
    (1, [2, 3, 4, 5])
    """
    a, *b = 1, 2, 3, 4, 5
    return a, b

def assign3():
    """
    >>> assign3()
    (1, [2, 3, 4, 5], 6)
    """
    a, *b, c = 1, 2, 3, 4, 5, 6
    return a, b, c

def assign4():
    """
    >>> assign4()
    (1, [2, 3, 4], 5, 6)
    """
    a, *b, c, d = 1, 2, 3, 4, 5, 6
    return a, b, c, d
Cython-0.26.1/tests/run/pyarray.pyx0000664000175000017500000001021712542002467020032 0ustar  stefanstefan00000000000000# tag: array

import array  # Python builtin module  
from cpython cimport array  # array.pxd / arrayarray.h

a = array.array('f', [1.0, 2.0, 3.0])

def test_len(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> len(a)
    3
    >>> int(test_len(a))
    3
    >>> assert len(a) == test_len(a)
    """
    cdef array.array ca = a  # for C-fast array usage
    return len(ca)

def test_copy(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_copy(a)
    array('f', [1.0, 2.0, 3.0])
    """
    cdef array.array ca = a
    cdef array.array b
    b = array.copy(ca)
    assert a == b
    a[2] = 3.5
    assert b[2] != a[2]
    return b


def test_fast_access(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_fast_access(a)
    """
    
    cdef array.array ca = a
    
    cdef float value
    with nogil:
        value = ca.data.as_floats[1]
    assert value == 2.0, value

    #assert ca._c[:5] == b'\x00\x00\x80?\x00', repr(ca._c[:5])

    with nogil:
        ca.data.as_floats[1] += 2.0
    assert ca.data.as_floats[1] == 4.0


def test_fast_buffer_access(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_fast_buffer_access(a)
    """
    
    cdef array.array[float] ca = a
    
    cdef float value
    with nogil:
        value = ca[1]
    assert value == 2.0, value

    with nogil:
        ca[1] += 2.0
    assert ca[1] == 4.0


def test_new_zero(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_new_zero(a)
    array('f', [0.0, 0.0, 0.0])
    """
    cdef array.array cb = array.clone(a, len(a), True)
    assert len(cb) == len(a)
    return cb


def test_set_zero(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_set_zero(a)
    array('f', [0.0, 0.0, 0.0])
    """
    cdef array.array cb = array.copy(a)
    array.zero(cb)
    assert a[1] != 0.0, a
    assert cb[1] == 0.0, cb
    return cb

def test_resize(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_resize(a)
    """
    cdef array.array cb = array.copy(a)
    array.resize(cb, 10)
    for i in range(10):
        cb.data.as_floats[i] = i
    assert len(cb) == 10
    assert cb[9] == cb[-1] == cb.data.as_floats[9] == 9

def test_resize_smart(a):
    """
    >>> a = array.array('d', [1, 2, 3])
    >>> test_resize_smart(a)
    2
    """
    cdef array.array cb = array.copy(a)
    array.resize_smart(cb, 2)
    return len(cb)

def test_buffer():
    """
    >>> test_buffer()
    """
    cdef object a = array.array('i', [1, 2, 3])
    cdef object[int] ca = a
    assert ca[0] == 1
    assert ca[2] == 3

def test_buffer_typed():
    """
    >>> test_buffer_typed()
    """
    cdef array.array a = array.array('i', [1, 2, 3])
    cdef object[int] ca = a
    assert ca[0] == 1
    assert ca[2] == 3

def test_view():
    """
    >>> test_view()
    """
    cdef object a = array.array('i', [1, 2, 3])
    cdef int[:] ca = a
    assert ca[0] == 1
    assert ca[2] == 3

def test_view_typed():
    """
    >>> test_view_typed()
    """
    cdef array.array a = array.array('i', [1, 2, 3])
    cdef int[:] ca = a
    assert ca[0] == 1
    assert ca[2] == 3

def test_extend():
    """
    >>> test_extend()
    """
    cdef array.array ca = array.array('i', [1, 2, 3])
    cdef array.array cb = array.array('i', [4, 5])
    cdef array.array cf = array.array('f', [1.0, 2.0, 3.0])
    array.extend(ca, cb)
    assert list(ca) == [1, 2, 3, 4, 5], list(ca)
    try:
        array.extend(ca, cf)
    except TypeError:
        pass
    else:
        assert False, 'extending incompatible array types did not raise'

def test_likes(a):
    """
    >>> a = array.array('f', [1.0, 2.0, 3.0])
    >>> test_likes(a)
    array('f', [0.0, 0.0, 0.0])
    """
    cdef array.array z = array.clone(a, len(a), True)
    cdef array.array e = array.clone(a, len(a), False)
    assert len(e) == len(a)
    return z

def test_extend_buffer():
    """
    >>> test_extend_buffer()
    array('l', [15, 37, 389, 5077])
    """
    cdef array.array ca = array.array('l', [15, 37])
    cdef long[2] s
    s[0] = 389
    s[1] = 5077
    array.extend_buffer(ca, <char*> &s, 2)

    assert ca.data.as_ulongs[3] == 5077
    assert len(ca) == 4
    return ca
Cython-0.26.1/tests/run/locals.pyx0000664000175000017500000000373413023021033017607 0ustar  stefanstefan00000000000000# mode: run
# tag: builtins, locals, dir

def get_locals(x, *args, **kwds):
    """
    >>> sorted( get_locals(1,2,3, k=5).items() )
    [('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
    """
    cdef int z = 5
    y = "hi"
    return locals()

def get_vars(x, *args, **kwds):
    """
    >>> sorted( get_vars(1,2,3, k=5).items() )
    [('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
    """
    cdef int z = 5
    y = "hi"
    return vars()

def get_dir(x, *args, **kwds):
    """
    >>> sorted( get_dir(1,2,3, k=5) )
    ['args', 'kwds', 'x', 'y', 'z']
    """
    cdef int z = 5
    y = "hi"
    return dir()

def in_locals(x, *args, **kwds):
    """
    >>> in_locals('z')
    True
    >>> in_locals('args')
    True
    >>> in_locals('X')
    False
    """
    cdef int z = 5
    y = "hi"
    return x in locals()

def in_dir(x, *args, **kwds):
    """
    >>> in_dir('z')
    True
    >>> in_dir('args')
    True
    >>> in_dir('X')
    False
    """
    cdef int z = 5
    y = "hi"
    return x in dir()

def in_vars(x, *args, **kwds):
    """
    >>> in_vars('z')
    True
    >>> in_vars('args')
    True
    >>> in_vars('X')
    False
    """
    cdef int z = 5
    y = "hi"
    return x in vars()

def sorted(it):
    l = list(it)
    l.sort()
    return l

def locals_ctype():
    """
    >>> locals_ctype()
    False
    """
    cdef int *p = NULL
    return 'p' in locals()

def locals_ctype_inferred():
    """
    >>> locals_ctype_inferred()
    False
    """
    cdef int *p = NULL
    b = p
    return 'b' in locals()


def pass_on_locals(f):
    """
    >>> def print_locals(l, **kwargs):
    ...     print(sorted(l))

    >>> pass_on_locals(print_locals)
    ['f']
    ['f']
    ['f']
    """
    f(locals())
    f(l=locals())
    f(l=locals(), a=1)


def buffers_in_locals(object[char, ndim=1] a):
    """
    >>> sorted(buffers_in_locals(b'abcdefg'))
    ['a', 'b']
    """
    cdef object[unsigned char, ndim=1] b = a

    return locals()
Cython-0.26.1/tests/run/overflow_check_uint.pyx0000664000175000017500000000013712542002467022402 0ustar  stefanstefan00000000000000# cython: overflowcheck.fold = False


ctypedef unsigned int INT

include "overflow_check.pxi"
Cython-0.26.1/tests/run/bytearray_ascii_auto_encoding.pyx0000664000175000017500000000044312542002467024413 0ustar  stefanstefan00000000000000#cython: c_string_type = bytearray
#cython: c_string_encoding = ascii

"End of first directives"

include "unicode_ascii_auto_encoding.pyx"

auto_string_type = bytearray

def check_auto_string_type():
    """
    >>> check_auto_string_type()
    """
    assert auto_string_type is bytearray
Cython-0.26.1/tests/run/builtincomplex.pyx0000664000175000017500000000236412542002467021405 0ustar  stefanstefan00000000000000
from cpython.complex cimport complex

def complex_attributes():
    """
    >>> complex_attributes()
    (1.0, 2.0)
    """
    cdef complex c = 1+2j
    return (c.real, c.imag)

def complex_attributes_assign():
    """
    >>> complex_attributes_assign()
    (10.0, 20.0)
    """
    cdef complex c = 1+2j
    c.cval.real, c.cval.imag = 10, 20
    return (c.real, c.imag)

def complex_cstruct_assign():
    """
    >>> complex_cstruct_assign()
    (10.0, 20.0)
    """
    cdef complex c = 1+2j
    cval = &c.cval
    cval.real, cval.imag = 10, 20
    return (c.real, c.imag)

def complex_coercion():
    """
    >>> complex_coercion()
    (1.0, 2.0, 1.0, 2.0)
    """
    cdef complex py_c = 1+2j
    cdef double complex c_c = py_c
    cdef object py = c_c
    return (c_c.real, c_c.imag, py.real, py.imag)

def complex_arg(complex c):
    """
    >>> complex_arg(1+2j)
    (1.0, 2.0)
    """
    return (c.real, c.imag)

def complex_conjugate_nonsimple_float():
    """
    >>> complex_conjugate_nonsimple_float()
    1.0
    """
    x = float(1.0).conjugate()
    return x

cdef double float_result():
    return 1.0

def complex_conjugate_nonsimple():
    """
    >>> complex_conjugate_nonsimple()
    1.0
    """
    x = float_result().conjugate()
    return x
Cython-0.26.1/tests/run/dict.pyx0000664000175000017500000000505312574327400017272 0ustar  stefanstefan00000000000000import sys

IS_PY35 = sys.version_info >= (3, 5)


def empty():
    """
    >>> empty()
    {}
    """
    d = {}
    return d

def keyvalue(key, value):
    """
    >>> keyvalue(1, 2)
    {1: 2}
    """
    d = {key:value}
    return d

def keyvalues(key1, value1, key2, value2):
    """
    >>> sorted(keyvalues(1, 2, 3, 4).items())
    [(1, 2), (3, 4)]
    """
    d = {key1:value1, key2:value2}
    return d

def keyvalues2(key1, value1, key2, value2):
    """
    >>> sorted(keyvalues2(1, 2, 3, 4).items())
    [(1, 2), (3, 4)]
    """
    d = {key1:value1, key2:value2,}
    return d

def constant():
    """
    >>> len(constant())
    2
    >>> print(constant()['parrot'])
    resting
    >>> print(constant()['answer'])
    42
    """
    d = {u"parrot":u"resting", u"answer":42}
    return d

def dict_call():
    """
    >>> print(dict_call()['parrot'])
    resting
    >>> print(dict_call()['answer'])
    42
    """
    d = dict(parrot=u"resting", answer=42)
    return d

def dict_call_dict():
    """
    >>> print(dict_call_dict()['parrot'])
    resting
    >>> print(dict_call_dict()['answer'])
    42
    """
    d = dict(dict(parrot=u"resting", answer=42))
    return d

def dict_call_kwargs():
    """
    >>> print(dict_call_kwargs()['parrot1'])
    resting
    >>> print(dict_call_kwargs()['parrot2'])
    resting
    >>> print(dict_call_kwargs()['answer1'])
    42
    >>> print(dict_call_kwargs()['answer2'])
    42
    """
    kwargs = dict(parrot1=u"resting", answer1=42)
    d = dict(parrot2=u"resting", answer2=42, **kwargs)
    return d


def items_of_dict_call():
    """
    >>> items_of_dict_call()
    [('answer1', 42), ('answer2', 42), ('parrot1', 'resting'), ('parrot2', 'resting')]
    """
    kwargs = dict(parrot1="resting", answer1=42)
    items = dict(kwargs.items(), parrot2="resting", answer2=42, **kwargs).items()
    return sorted(items)


def item_creation_sideeffect(L, sideeffect, unhashable):
    """
    >>> def sideeffect(x):
    ...     L.append(x)
    ...     return x
    >>> def unhashable(x):
    ...     L.append(x)
    ...     return [x]

    >>> L = []
    >>> item_creation_sideeffect(L, sideeffect, unhashable)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...unhashable...
    >>> L
    [2, 4]

    >>> L = []
    >>> {1:2, sideeffect(2): 3, 3: 4, unhashable(4): 5, sideeffect(5): 6}  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...unhashable...
    >>> L if IS_PY35 else (L + [5])
    [2, 4, 5]
    """
    return {1:2, sideeffect(2): 3, 3: 4, unhashable(4): 5, sideeffect(5): 6}
Cython-0.26.1/tests/run/strliterals.pyx0000664000175000017500000006361412542002467020724 0ustar  stefanstefan00000000000000__doc__ = ur"""
    >>> s1
    'abc\x11'
    >>> s1 == 'abc\x11'
    True
    >>> len(s1)
    4

    >>> s2
    'abc\\x11'
    >>> s2 == r'abc\x11'
    True
    >>> len(s2)
    7

    >>> s3
    'abc\\x11'
    >>> s3 == R'abc\x11'
    True
    >>> len(s3)
    7

    >>> s4
    b'abc\x11'
    >>> s4 == b'abc\x11'
    True
    >>> len(s4)
    4

    >>> s5
    b'abc\x11'
    >>> s5 == B'abc\x11'
    True
    >>> len(s5)
    4

    >>> s6
    b'abc\\x11'
    >>> s6 == br'abc\x11'
    True
    >>> len(s6)
    7

    >>> s7
    b'abc\\x11'
    >>> s7 == Br'abc\x11'
    True
    >>> len(s7)
    7

    >>> s8
    b'abc\\x11'
    >>> s8 == bR'abc\x11'
    True
    >>> len(s8)
    7

    >>> s9
    b'abc\\x11'
    >>> s9 == BR'abc\x11'
    True
    >>> len(s9)
    7

    >>> u1
    u'abc\x11'
    >>> u1 == u'abc\x11'
    True
    >>> len(u1)
    4

    >>> u2
    u'abc\x11'
    >>> u2 == U'abc\x11'
    True
    >>> len(u2)
    4

    >>> u3
    u'abc\\x11'
    >>> u3 == ur'abc\x11'
    True
    >>> len(u3)
    7

    >>> u4
    u'abc\\x11'
    >>> u4 == Ur'abc\x11'
    True
    >>> len(u4)
    7

    >>> u5
    u'abc\\x11'
    >>> u5 == uR'abc\x11'
    True
    >>> len(u5)
    7

    >>> u6
    u'abc\\x11'
    >>> u6 == UR'abc\x11'
    True
    >>> len(u6)
    7

    >>> sresc
    '\\12\\\'\\"\\\\'
    >>> sresc == r'\12\'\"\\'
    True
    >>> len(sresc)
    9

    >>> bresc
    b'\\12\\\'\\"\\\\'
    >>> bresc == br'\12\'\"\\'
    True
    >>> len(bresc)
    9

    >>> uresc
    u'\\12\\\'\\"\\\\'
    >>> uresc == ur'\12\'\"\\'
    True
    >>> len(uresc)
    9

    >>> bytes_uescape
    b'\\u1234\\U12345678\\u\\u1\\u12\\uX'
    >>> bytes_uescape == b'\\u1234\\U12345678\\u\\u1\\u12\\uX'
    True
    >>> len(bytes_uescape)
    28

    >>> (sys.version_info[0] >= 3 and sys.maxunicode == 1114111 and len(str_uescape) == 4 or
    ...  sys.version_info[0] >= 3 and sys.maxunicode == 65535   and len(str_uescape) == 5 or
    ...  sys.version_info[0] <  3 and len(str_uescape) == 28 or
    ...  len(str_uescape))
    True
    >>> (sys.version_info[0] >= 3 and str_uescape[0] == 'c' or
    ...  sys.version_info[0] <  3 and str_uescape[0] == '\\' or
    ...  str_uescape[0])
    True
    >>> print(str_uescape[-1])
    B
    >>> (sys.version_info[0] >= 3 and ord(str_uescape[-2]) == 0x2603 or
    ...  sys.version_info[0] <  3 and str_uescape[-12:-1]  == b'\\N{SNOWMAN}' or
    ...  sys.version_info[0] >= 3 and ord(str_uescape[-2]) or str_uescape[-12:-1])
    True

    >>> same_cname
    [b'abc\xf0_2', b'abc\xf0', b'abc\xf1', b'abc\xf2', b'abc\xf3', b'abc_2', b'abc_3']

    >>> newlines
    'Aaa\n'

    >>> len(long_escapes)
    3033
    >>> len(even_lots_of_slashes)
    3000
    >>> len(odd_lots_of_slashes)
    3001
    >>> len(lots_of_tabs_and_newlines)
    4321
"""

import sys
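# Normalise the expected reprs in __doc__ to the running Python version:
# Py3 str reprs carry no u'' prefix, Py2 bytes reprs carry no b'' prefix.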
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '").replace(u" U'", u" '").replace(u" ur'", u" r'").replace(u" uR'", u" R'").replace(u" Ur'", u" r'").replace(u" UR'", u" R'")
else:
    __doc__ = __doc__.replace(u" b'", u" '").replace(u" B'", u" '").replace(u" br'", u" r'").replace(u" bR'", u" R'").replace(u" Br'", u" r'").replace(u" BR'", u" R'").replace(u"[b'", u"['")

s1 = "abc\x11"
s2 = r"abc\x11"
s3 = R"abc\x11"
s4 = b"abc\x11"
s5 = B"abc\x11"
s6 = br"abc\x11"
s7 = Br"abc\x11"
s8 = bR"abc\x11"
s9 = BR"abc\x11"

# and in reversed order: r+b
s6_2 = rb"abc\x11"
s7_2 = rB"abc\x11"
s8_2 = Rb"abc\x11"
s9_2 = RB"abc\x11"

assert s6 == s6_2
assert s7 == s7_2
assert s8 == s8_2
assert s9 == s9_2

u1 = u"abc\x11"
u2 = U"abc\x11"
u3 = ur"abc\x11"
u4 = Ur"abc\x11"
u5 = uR"abc\x11"
u6 = UR"abc\x11"

sresc =  r'\12\'\"\\'
bresc = br'\12\'\"\\'
uresc = ur'\12\'\"\\'

bytes_uescape = b'\u1234\U12345678\u\u1\u12\uX'
str_uescape = '\u0063\U00012345\N{SNOWMAN}\x42'

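# Regression check (as the name suggests): literals whose generated C constant
# names would collide must still end up as distinct constants.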
same_cname = [b'abc\xf0_2', b'abc\xf0', b'abc\xf1', b'abc\xf2', b'abc\xf3', b'abc_2', b'abc_3']

newlines = "Aaa\n"

# T640, long literals with escapes
long_escapes = b"""\x31\x39\x37\x36\xe5\xb9\xb4\x39\xe6\x9c\x88\x39\xe6\x97\xa5\xef\xbc\x8c\xe5\x9c\xa8\xe6\xaf\x9b\xe6\xb3\xbd\xe4\xb8\x9c\xe9\x80\x9d\xe4\xb8\x96\xe4\xb9\x8b\xe5\x90\x8e\xef\xbc\x8c\xe4\xb8\xad\xe5\x9b\xbd\xe5\xbc\x80\xe5\xa7\x8b\xe7\x94\xb1\xe8\x87\xaa\xe7\x94\xb1\xe5\x8c\x96\xe7\x9f\xa5\xe8\xaf\x86\xe5\x88\x86\xe5\xad\x90\xe3\x80\x81\xe9\xa2\x86\xe5\xaf\xbc\xe9\x98\xb6\xe5\xb1\x82\xe7\x9a\x84\xe5\x85\xb7\xe6\x9c\x89\xe6\x94\xb9\xe9\x9d\xa9\xe6\x80\x9d\xe6\x83\xb3\xe7\x9a\x84\xe4\xba\xba\xe5\xa3\xab\xe5\x92\x8c\xe5\xb9\xbf\xe5\xa4\xa7\xe6\xb0\x91\xe9\x97\xb4\xe5\x85\xb1\xe5\x90\x8c\xe8\xbf\x9b\xe8\xa1\x8c\xe7\x9a\x84\xe2\x80\x9c\xe6\x80\x9d\xe6\x83\xb3\xe8\xa7\xa3\xe6\x94\xbe\xe2\x80\x9d\xe8\xbf\x90\xe5\x8a\xa8\xe3\x80\x82\x31\x39\x37\x38\xe5\xb9\xb4\xef\xbc\x8c\xe4\xb8\xad\xe5\x9b\xbd\xe5\x85\xb1\xe4\xba\xa7\xe5\x85\x9a\xe5\x8d\x81\xe4\xb8\x80\xe5\xb1\x8a\xe4\xb8\x89\xe4\xb8\xad\xe5\x85\xa8\xe4\xbc\x9a\xe6\x8f\x90\xe5\x87\xba\xe6\x94\xb9\xe9\x9d\xa9\xe5\xbc\x80\xe6\x94\xbe\xe7\x9a\x84\xe5\x9f\xba\xe6\x9c\xac\xe5\x9b\xbd\xe7\xad\x96\xef\xbc\x8c\xe4\xb8\xad\xe5\x9b\xbd\xe4\xba\xba\xe6\xb0\x91\xe5\xaf\xb9\xe5\x85\xb6\xe6\x9c\x89\xe8\x8e\xab\xe5\xa4\xa7\xe7\x9a\x84\xe6\x86\xa7\xe6\x86\xac\xef\xbc\x8c\xe5\xb8\x8c\xe6\x9c\x9b\xe6\x91\x86\xe8\x84\xb1\xe5\x8d\x81\xe5\xb9\xb4\xe6\x96\x87\xe9\x9d\xa9\xe7\x9a\x84\xe6\xb7\xb7\xe4\xb9\xb1\xe5\x8f\x8a\xe8\xbf\x87\xe5\x8e\xbb\xe7\x9a\x84\xe8\xb4\xab\xe7\xa9\xb7\xe3\x80\x82\x31\x39\x38\x35\xe5\xb9\xb4\xef\xbc\x8c\xe6\x94\xbf\xe5\xba\x9c\xe6\x89\xa9\xe5\xa4\xa7\xe4\xba\x86\xe4\xbc\x81\xe4\xb8\x9a\xe7\x9a\x84\xe8\x87\xaa\xe4\xb8\xbb\xe6\x9d\x83\xef\xbc\x8c\xe9\x81\xa3\xe8\xbf\x94\xe7\xa7\x81\xe8\x90\xa5\xe4\xbc\x81\xe4\xb8\x9a\xe4\xb8\xad\xe7\x9a\x84\xe5\x85\xac\xe6\x96\xb9\xe4\xbb\xa3\xe8\xa1\xa8\xef\xbc\x8c\xe5\xbc\x95\xe5\x85\xa5\xe5\xb8\x82\xe5\x9c\xba\xe7\xbb\x8f\xe6\xb5\x8e\xe4\xb8\xad\xe7\x9a\x84\xe8\xae\xb8\xe5\xa4\x9a\xe8\xa7\x82\xe5\xbf\xb5\xef\xbc\x8c\xe5\x8f\x91\xe5\xb1\x95\xe4\xb8\xba\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe5\xb8\x82\xe5\x9c\xba\xe7\xbb\x8f\xe6\xb5\x8e\xef\xbc\x9b\xe4\xbd\x86\xe5\x90\x8c\xe6\x97\xb6\xe4\xba\xa6\xe5\x9c\xa8\xe5\x8e\x9f\xe6\x9c\x89\xe8\xae\xa1\xe5\x88\x92\xe7\xbb\x8f\xe6\xb5\x8e\xe7\x90\x86\xe8\xae\xba\xe9\x81\xad\xe5\x88\xb0\xe6\x8a\x9b\xe5\xbc\x83\xe7\x9a\x84\xe6\x83\x85\xe5\x86\xb5\xe4\xb8\x8b\xe5\xbc\x95\xe5\x8f\x91\xe4\xba\x86\xe5\x9b\xbd\xe5\x86\x85\xe6\xb0\x91\xe4\xbc\x97\xe7\x9a\x84\xe6\x80\x9d\xe6\x83\xb3\xe6\xb7\xb7\xe4\xb9\xb1\xe3\x80\x82\xe5\x8f\x8a\xe5\x90\x8e\xe5\x90\x84\xe5\x9c\xb0\xe5\x9b\xbd\xe8\x90\xa5\xe4\xbc\x81\xe4\xb8\x9a\xe5\x85\xb3\xe9\x97\xad\xef\xbc\x8c\xe5\x85\xa8\xe5\x9b\xbd\xe7\xba\xa6\xe6\x9c\x89\xe6\x95\xb0\xe7\x99\xbe\xe4\xb8\x87\xe5\xb7\xa5\xe4\xba\xba\xe5\xa4\xb1\xe4\xb8\x9a\xef\xbc\x8c\xe5\x9c\xa8\xe5\xbd\x93\xe6\x97\xb6\xe4\xb8\xad\xe5\x9b\xbd\xe6\x94\xbf\xe5\xba\x9c\xe5\x8f\x97\xe5\x88\xb0\xe4\xba\x86\xe6\x9e\x81\xe5\xa4\xa7\xe5\x86\xb2\xe5\x87\xbb\xe3\x80\x82\xe5\x90\x8c\xe6\x97\xb6\xe4\xba\xa6\xe5\xbc\x95\xe5\x8f\x91\xe8\xb4\xaa\xe6\xb1\xa1\xe8\x85\x90\xe8\xb4\xa5\xe7\x89\xa9\xe4\xbb\xb7\xe5\x8d\x87\xe6\xb6\xa8\xe7\xad\x89\xe9\x97\xae\xe9\xa2\x98\xef\xbc\x8c\xe5\x9c\xa8\xe6\xb0\x91\xe9\x97\xb4\xe9\x80\xa0\xe6\x88\x90\xe4\xb8\x80\xe5\xae\x9a\xe7\x9a\x84\xe4\xb8\x8d\xe6\xbb\xa1\xe3\x80\x82\x5b\x31\x5d\x5b\x32\x5d\x32\x30\xe4\xb8\x96\xe7\xba\xaa\x38\x30\xe5\xb9\xb4\xe4\xbb\xa3\xef\xbc\x8c\xe4\xb8\x96\xe7\x95\x8c\xe6\xad\xa3\xe5\xa4\x84\xe4\xba\x8e\xe5\x86\xb7\xe6\x88\x98\xe7\x9a\x84\xe6\x9c\x80\xe5\x90\x8e\xe9\x98\xb6\xe6\xae\xb5\xe3\x80\x82\x31\x39
\x38\x35\xe5\xb9\xb4\xef\xbc\x8c\xe8\x8b\x8f\xe5\x85\xb1\xe4\xb8\xad\xe5\xa4\xae\xe6\x80\xbb\xe4\xb9\xa6\xe8\xae\xb0\xe6\x88\x88\xe5\xb0\x94\xe5\xb7\xb4\xe4\xb9\x94\xe5\xa4\xab\xe4\xb8\x8a\xe5\x8f\xb0\xef\xbc\x8c\xe6\x8e\xa8\xe8\xa1\x8c\xe4\xbb\xa5\xe4\xba\xba\xe9\x81\x93\xe4\xb8\xbb\xe4\xb9\x89\xe4\xb8\xba\xe6\xa0\xb8\xe5\xbf\x83\xe7\x9a\x84\xe2\x80\x9c\xe6\x96\xb0\xe6\x80\x9d\xe7\xbb\xb4\xe2\x80\x9d\xe8\xbf\x90\xe5\x8a\xa8\xef\xbc\x8c\xe5\x9c\xa8\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe9\x98\xb5\xe8\x90\xa5\xe5\x86\x85\xe4\xba\xa7\xe7\x94\x9f\xe5\xb9\xbf\xe6\xb3\x9b\xe5\xbd\xb1\xe5\x93\x8d\xe3\x80\x82\xe8\xa2\xab\xe5\x8c\x97\xe4\xba\xac\xe6\x94\xbf\xe5\xba\x9c\xe7\xa7\xb0\xe2\x80\x9c\xe8\xb5\x84\xe4\xba\xa7\xe9\x98\xb6\xe7\xba\xa7\xe8\x87\xaa\xe7\x94\xb1\xe5\x8c\x96\xe2\x80\x9d\xe6\x80\x9d\xe6\x83\xb3\xe7\x9a\x84\xe8\xa5\xbf\xe6\x96\xb9\xe6\xb0\x91\xe4\xb8\xbb\xe6\x80\x9d\xe6\xbd\xae\xe4\xb9\x9f\xe5\x9c\xa8\xe4\xb8\xad\xe5\x9b\xbd\xe5\xbe\x97\xe5\x88\xb0\xe5\xb9\xbf\xe6\xb3\x9b\xe4\xbc\xa0\xe6\x92\xad\xe3\x80\x82\xe5\xbe\x88\xe5\xa4\x9a\xe4\xba\xba\xe8\xae\xa4\xe4\xb8\xba\xe9\x9a\x8f\xe7\x9d\x80\xe6\x94\xb9\xe9\x9d\xa9\xe5\xbc\x80\xe6\x94\xbe\xe4\xbb\xa5\xe5\x8f\x8a\xe5\xb8\x82\xe5\x9c\xba\xe7\xbb\x8f\xe6\xb5\x8e\xe7\x90\x86\xe5\xbf\xb5\xe7\x9a\x84\xe5\xbc\x95\xe5\x85\xa5\xef\xbc\x8c\xe5\xae\xa3\xe5\x91\x8a\xe4\xb8\xad\xe5\x9b\xbd\xe5\x85\xb1\xe4\xba\xa7\xe5\x85\x9a\xe8\x83\x8c\xe5\xbc\x83\xe4\xba\x86\xe9\xa9\xac\xe5\x88\x97\xe4\xb8\xbb\xe4\xb9\x89\xe7\x9a\x84\xe5\x9f\xba\xe6\x9c\xac\xe4\xbf\xa1\xe6\x9d\xa1\xef\xbc\x8c\xe4\xb8\xad\xe5\x9b\xbd\xe5\xb7\xb2\xe4\xb8\x8d\xe5\x86\x8d\xe6\x98\xaf\xe4\xb8\x80\xe4\xb8\xaa\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe5\x9b\xbd\xe5\xae\xb6\xef\xbc\x8c\xe8\x80\x8c\xe6\x98\xaf\xe5\x85\xb7\xe6\x9c\x89\xe6\x9f\x90\xe7\xa7\x8d\xe8\xb5\x84\xe6\x9c\xac\xe4\xb8\xbb\xe4\xb9\x89\xe6\x80\xa7\xe8\xb4\xa8\xe7\x9a\x84\xe7\xa4\xbe\xe4\xbc\x9a\xe3\x80\x82\x31\x39\x38\x38\xe5\xb9\xb4\xef\xbc\x8c\xe5\x85\xac\xe5\xbc\x80\xe5\x91\xbc\xe5\x94\xa4\xe2\x80\x9c\xe8\x94\x9a\xe8\x93\x9d\xe8\x89\xb2\xe2\x80\x9d\xe8\xa5\xbf\xe6\x96\xb9\xe6\x96\x87\xe6\x98\x8e\xe7\x9a\x84\xe6\x94\xbf\xe8\xae\xba\xe7\x94\xb5\xe8\xa7\x86\xe7\x89\x87\xe3\x80\x8a\xe6\xb2\xb3\xe6\xae\x87\xe3\x80\x8b\xe5\x9c\xa8\xe4\xb8\xad\xe5\xa4\xae\xe7\x94\xb5\xe8\xa7\x86\xe5\x8f\xb0\xe5\x85\xac\xe5\xbc\x80\xe6\x92\xad\xe5\x87\xba\xef\xbc\x8c\xe5\x9c\xa8\xe5\x85\xa8\xe5\x9b\xbd\xe8\x8c\x83\xe5\x9b\xb4\xe5\x86\x85\xe5\xbc\x95\xe8\xb5\xb7\xe8\xbd\xb0\xe5\x8a\xa8\xef\xbc\x8c\xe6\x88\x90\xe4\xb8\xba\xe5\x85\xad\xe5\x9b\x9b\xe8\xbf\x90\xe5\x8a\xa8\xe7\x9a\x84\xe6\x80\x9d\xe6\x83\xb3\xe5\x89\x8d\xe5\xaf\xbc\xe3\x80\x82\xe9\x9a\x8f\xe7\x9d\x80\xe4\xb8\xad\xe5\x9b\xbd\xe7\x9a\x84\xe5\xbc\x80\xe6\x94\xbe\xef\xbc\x8c\xe4\xb8\xad\xe5\x9b\xbd\xe4\xba\xba\xe5\xbc\x80\xe5\xa7\x8b\xe6\x9b\xb4\xe5\xa4\x9a\xe5\x9c\xb0\xe6\x8e\xa5\xe8\xa7\xa6\xe8\xa5\xbf\xe6\x96\xb9\xe6\xb0\x91\xe4\xb8\xbb\xe4\xba\xba\xe6\x9d\x83\xe6\x80\x9d\xe6\x83\xb3\xef\xbc\x8c\xe5\xbe\x88\xe5\xa4\x9a\xe7\x9f\xa5\xe8\xaf\x86\xe5\x88\x86\xe5\xad\x90\xe5\xbc\x80\xe5\xa7\x8b\xe5\x85\xac\xe5\xbc\x80\xe6\x8f\x90\xe5\x80\xa1\xe4\xba\xba\xe6\x9d\x83\xe4\xb8\x8e\xe6\xb0\x91\xe4\xb8\xbb\xef\xbc\x8c\xe8\xae\xb8\xe5\xa4\x9a\xe5\xad\xa6\xe7\x94\x9f\xe6\x9b\xb4\xe6\x98\xaf\xe9\x80\x9a\xe8\xbf\x87\xe5\x90\x84\xe7\xa7\x8d\xe5\xbd\xa2\xe5\xbc\x8f\xe8\xa1\xa8\xe8\xbe\xbe\xe8\xbf\x99\xe6\x96\xb9\xe9\x9d\xa2\xe7\x9a\x84\xe8\xaf\x89\xe6\xb1\x82\xe3\x80\x82\xe4\xbb\x8e\xe4\xb8\x96\xe7\x95\x8c\xe8\x8c\x83\xe5\x9b\xb4\xe5\x86\x85\xe7\x9c\x8b\xef\xbc\x8c\xe5\x85\xad\xe
5\x9b\x9b\xe8\xbf\x90\xe5\x8a\xa8\xe5\xb9\xb6\xe9\x9d\x9e\xe6\x98\xaf\xe4\xb8\x80\xe4\xb8\xaa\xe5\xad\xa4\xe7\xab\x8b\xe7\x9a\x84\xe4\xba\x8b\xe4\xbb\xb6\xef\xbc\x8c\xe8\x80\x8c\xe6\x98\xaf\xe5\xbd\x93\xe6\x97\xb6\xe6\x95\xb4\xe4\xb8\xaa\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe9\x98\xb5\xe8\x90\xa5\xe5\x86\x85\xe6\xb0\x91\xe4\xb8\xbb\xe8\xbf\x90\xe5\x8a\xa8\xe7\x9a\x84\xe4\xb8\x80\xe4\xb8\xaa\xe9\x87\x8d\xe8\xa6\x81\xe7\x8e\xaf\xe8\x8a\x82\xe3\x80\x82\xe5\x9c\xa8\xe5\x85\xad\xe5\x9b\x9b\xe4\xba\x8b\xe4\xbb\xb6\xe5\x8f\x91\xe7\x94\x9f\xe7\x9a\x84\xe5\x90\x8c\xe4\xb8\x80\xe5\xa4\xa9\xef\xbc\x8c\xe6\xb3\xa2\xe5\x85\xb0\xe5\x9b\xa2\xe7\xbb\x93\xe5\xb7\xa5\xe4\xbc\x9a\xe5\x9c\xa8\xe5\xa4\xa7\xe9\x80\x89\xe4\xb8\xad\xe8\x8e\xb7\xe8\x83\x9c\xef\xbc\x8c\xe6\x8e\xa8\xe7\xbf\xbb\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe5\x88\xb6\xe5\xba\xa6\xe3\x80\x82\xe9\x9a\x8f\xe5\x90\x8e\xe4\xb8\x8d\xe5\x88\xb0\xe4\xb8\x80\xe5\xb9\xb4\xef\xbc\x8c\xe4\xb8\x9c\xe6\xac\xa7\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe5\x9b\xbd\xe5\xae\xb6\xe4\xb9\x9f\xe5\x85\x88\xe5\x90\x8e\xe5\x8f\x91\xe7\x94\x9f\xe5\x92\x8c\xe5\xb9\xb3\xe6\xbc\x94\xe5\x8f\x98\xef\xbc\x8c\xe4\xb8\xa4\xe5\xb9\xb4\xe5\x90\x8e\xe8\x8b\x8f\xe8\x81\x94\xe4\xba\xa6\xe5\xae\xa3\xe5\x91\x8a\xe8\xa7\xa3\xe4\xbd\x93\xef\xbc\x8c\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe9\x98\xb5\xe8\x90\xa5\xe8\xa7\xa3\xe4\xbd\x93\xe3\x80\x82\xe8\xbf\x99\xe4\xba\x9b\xe5\xae\x9e\xe8\xa1\x8c\xe7\xa4\xbe\xe4\xbc\x9a\xe4\xb8\xbb\xe4\xb9\x89\xe5\x88\xb6\xe5\xba\xa6\xe7\x9a\x84\xe5\x9b\xbd\xe5\xae\xb6\xe5\x9c\xa8\xe4\xb8\x8d\xe5\x88\xb0\x35\xe5\xb9\xb4\xe7\x9a\x84\xe6\x97\xb6\xe9\x97\xb4\xe4\xb8\xad\xe5\x8f\x91\xe7\x94\x9f\xe4\xba\x86\xe6\x94\xbf\xe6\x9d\x83\xe8\xbd\xae\xe6\x9b\xbf\xef\xbc\x8c\xe5\xb9\xb6\xe6\x94\xb9\xe5\x8f\x98\xe4\xba\x86\xe5\x8e\x9f\xe6\x9c\x89\xe7\x9a\x84\xe6\x94\xbf\xe6\xb2\xbb\xe4\xbd\x93\xe5\x88\xb6\xe3\x80\x82\x31\x39\x38\x37\xe5\xb9\xb4\x31\xe6\x9c\x88\xef\xbc\x8c\xe5\x8e\x9f\xe6\x9c\xac\xe8\xa2\xab\xe9\x82\x93\xe5\xb0\x8f\xe5\xb9\xb3\xe9\x80\x89\xe5\xae\x9a\xe4\xb8\xba\xe6\x8e\xa5\xe7\x8f\xad\xe4\xba\xba\xe7\x9a\x84\xe8\x83\xa1\xe8\x80\x80\xe9\x82\xa6\xe8\xa2\xab\xe8\xbf\xab\xe4\xb8\x8b\xe5\x8f\xb0\xef\xbc\x8c\xe4\xbb\x96\xe8\xa2\xab\xe6\x8c\x87\xe8\xbf\x9d\xe5\x8f\x8d\xe4\xb8\xad\xe5\x9b\xbd\xe5\x85\xb1\xe4\xba\xa7\xe5\x85\x9a\xe7\x9a\x84\xe6\xb0\x91\xe4\xb8\xbb\xe9\x9b\x86\xe4\xb8\xad\xe5\x88\xb6\xef\xbc\x8c\xe7\xba\xb5\xe5\xae\xb9\xe8\xb5\x84\xe4\xba\xa7\xe9\x98\xb6\xe7\xba\xa7\xe8\x87\xaa\xe7\x94\xb1\xe5\x8c\x96\xef\xbc\x8c\xe6\xb2\xa1\xe6\x9c\x89\xe5\xaf\xb9\xe6\xb8\xb8\xe8\xa1\x8c\xe9\x87\x87\xe5\x8f\x96\xe6\x9c\x89\xe6\x95\x88\xe6\x8e\xaa\xe6\x96\xbd\xef\xbc\x8c\xe8\xa6\x81\xe5\xaf\xb9\x31\x39\x38\x36\xe5\xb9\xb4\xe5\xad\xa6\xe7\x94\x9f\xe8\xbf\x90\xe5\x8a\xa8\xe7\x9a\x84\xe5\xa4\xb1\xe6\x8e\xa7\xe8\xb4\x9f\xe8\xb4\xa3\xe3\x80\x82\xe5\x8f\x8a\xe5\x90\x8e\xe4\xb8\xad\xe5\x85\xb1\xe5\x85\x9a\xe5\x86\x85\xe5\x8f\x8d\xe6\x94\xb9\xe9\x9d\xa9\xe7\x9a\x84\xe2\x80\x9c\xe4\xbf\x9d\xe5\xae\x88\xe2\x80\x9d\xe5\x8a\xbf\xe5\x8a\x9b\xe6\x8e\x80\xe8\xb5\xb7\xe4\xb8\x80\xe8\x82\xa1\xe5\x8f\x8d\xe5\x8f\xb3\xe6\xb5\xaa\xe6\xbd\xae\xe3\x80\x82\x0a\x0a\xe6\xb3\x95\xe8\xbd\xae\xe5\x8a\x9f\xe5\x8f\x88\xe7\xa7\xb0\xe6\xb3\x95\xe8\xbd\xae\xe5\xa4\xa7\xe6\xb3\x95\xef\xbc\x8c\xe6\xb3\x95\xe8\xbd\xae\xe5\x8a\x9f\xe5\x8a\x9f\xe6\xb3\x95\xe6\x98\xaf\xe7\x94\xb1\xe4\xba\x94\xe5\xa5\x97\xe5\x8a\xa8\xe4\xbd\x9c\xe7\xbb\x84\xe6\x88\x90\xef\xbc\x8c\xe4\xbd\x86\xe4\xb8\x8d\xe5\x90\x8c\xe4\xba\x8e\xe4\xb8\x80\xe8\x88\xac\xe6\xb0\x94\xe5\x8a\x9f\xe7\x
9a\x84\xe6\x98\xaf\xe7\x9d\x80\xe9\x87\x8d\xe5\xbf\x83\xe6\x80\xa7\xe7\x9a\x84\xe4\xbf\xae\xe7\x82\xbc\xef\xbc\x8c\xe5\x8d\xb3\xe2\x80\x9c\xe7\x9c\x9f\xe3\x80\x81\xe5\x96\x84\xe3\x80\x81\xe5\xbf\x8d\xe2\x80\x9d\xe7\x9a\x84\xe5\x8e\x9f\xe5\x88\x99\xe3\x80\x82\xe4\xb8\x80\xe4\xba\x9b\xe4\xba\xba\xe8\xae\xa4\xe4\xb8\xba\xe6\xb3\x95\xe8\xbd\xae\xe5\x8a\x9f\xe5\x80\x9f\xe7\x94\xa8\xe4\xba\x86\xe5\xbe\x88\xe5\xa4\x9a\xe4\xbd\x9b\xe6\x95\x99\xe8\xa7\x82\xe5\xbf\xb5\xef\xbc\x8c\xe5\xa6\x82\xe6\xb3\x95\xe8\xbd\xae\xe3\x80\x81\xe4\xb8\x9a\xe7\xad\x89\xef\xbc\x8c\xe5\x9b\xa0\xe8\x80\x8c\xe8\xa7\x86\xe4\xb9\x8b\xe4\xb8\xba\xe4\xb8\x80\xe7\xa7\x8d\xe5\xae\x97\xe6\x95\x99\xe3\x80\x82\xe4\xbd\x86\xe6\xb3\x95\xe8\xbd\xae\xe5\x8a\x9f\xe5\xad\xa6\xe5\x91\x98\xe8\xae\xa4\xe4\xb8\xba\xe2\x80\x9c\xe6\xb3\x95\xe8\xbd\xae\xe2\x80\x9d\xe5\x92\x8c\xe2\x80\x9c\xe4\xb8\x9a\xe2\x80\x9d\xe9\x83\xbd\xe4\xb8\x8d\xe6\x98\xaf\xe4\xbd\x9b\xe6\x95\x99\xe4\xb8\x93\xe7\x94\xa8\xe7\x9a\x84\xef\xbc\x8c\xe5\x85\xb6\xe4\xb8\xad\xe4\xb8\x9a\xe7\x9a\x84\xe6\xa6\x82\xe5\xbf\xb5\xe5\x9c\xa8\xe5\xa9\x86\xe7\xbd\x97\xe9\x97\xa8\xe6\x95\x99\xe6\x88\x96\xe6\x9b\xb4\xe6\x97\xa9\xe7\x9a\x84\xe4\xbf\xae\xe7\x82\xbc\xe6\x96\xb9\xe6\xb3\x95\xe5\x92\x8c\xe5\xae\x97\xe6\x95\x99\xe5\xb0\xb1\xe5\xb7\xb2\xe7\xbb\x8f\xe5\xad\x98\xe5\x9c\xa8\xe4\xba\x86\xe3\x80\x82\x0a\x0a\xe5\x8f\xb0\xe7\x8b\xac\xe5\x95\x8a\xe5\x8f\xb0\xe7\x8b\xac\xe4\xb8\x80\xe5\x8f\xb0\xe7\x8b\xac\xe7\xab\x8b\xe6\x9c\x8d\xe5\x8a\xa1\xe5\x99\xa8\x0a\x0a"""

even_lots_of_slashes = r"\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

odd_lots_of_slashes = r"0\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

lots_of_tabs_and_newlines = r'0\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t
\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n'
Cython-0.26.1/tests/run/dynamic_args.pyx0000664000175000017500000000074712542002467021012 0ustar  stefanstefan00000000000000# mode: run
# ticket: 674

cdef class Foo:
    cdef str name

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return '<%s>' % self.name

def test_exttype_args(a, b, c):
    """
    >>> f1 = test_exttype_args([1, 2, 3], 123, Foo('Foo'))
    >>> f2 = test_exttype_args([0], 0, Foo('Bar'))
    >>> f1()
    ([1, 2, 3], 123, <Foo>)
    >>> f2()
    ([0], 0, <Bar>)
    """
    def inner(a=a, int b=b, Foo c=c):
        return a, b, c
    return inner
Cython-0.26.1/tests/run/complex_numbers_c89_T398.h0000664000175000017500000000003212542002467022360 0ustar  stefanstefan00000000000000#define CYTHON_CCOMPLEX 0
Cython-0.26.1/tests/run/tuple_unpack_string.pyx0000664000175000017500000000340212542002467022421 0ustar  stefanstefan00000000000000# mode: run
# tag: string, unicode, sequence unpacking, starexpr

def unpack_single_str():
    """
    >>> print(unpack_single_str())
    a
    """
    a, = 'a'
    return a

def unpack_str():
    """
    >>> a,b = unpack_str()
    >>> print(a)
    a
    >>> print(b)
    b
    """
    a,b = 'ab'
    return a,b

def star_unpack_str():
    """
    >>> a,b,c = star_unpack_str()
    >>> print(a)
    a
    >>> type(b) is list
    True
    >>> print(''.join(b))
    bbb
    >>> print(c)
    c
    """
    a,*b,c = 'abbbc'
    return a,b,c

def unpack_single_unicode():
    """
    >>> print(unpack_single_unicode())
    a
    """
    a, = u'a'
    return a

def unpack_unicode():
    """
    >>> a,b = unpack_unicode()
    >>> print(a)
    a
    >>> print(b)
    b
    """
    a,b = u'ab'
    return a,b

def star_unpack_unicode():
    """
    >>> a,b,c = star_unpack_unicode()
    >>> print(a)
    a
    >>> type(b) is list
    True
    >>> print(''.join(b))
    bbb
    >>> print(c)
    c
    """
    a,*b,c = u'abbbc'
    return a,b,c

# the following is not supported due to Py2/Py3 bytes differences

## def unpack_single_bytes():
##     """
##     >>> print(unpack_single_bytes().decode('ASCII'))
##     a
##     """
##     a, = b'a'
##     return a

## def unpack_bytes():
##     """
##     >>> a,b = unpack_bytes()
##     >>> print(a.decode('ASCII'))
##     a
##     >>> print(b.decode('ASCII'))
##     b
##     """
##     a,b = b'ab'
##     return a,b

## def star_unpack_bytes():
##     """
##     >>> a,b,c = star_unpack_bytes()
##     >>> print(a.decode('ASCII'))
##     a
##     >>> type(b) is list
##     True
##     >>> print(''.join([ch.decode('ASCII') for ch in b]))
##     bbb
##     >>> print(c.decode('ASCII'))
##     c
##     """
##     a,*b,c = b'abbbc'
##     return a,b,c
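
# A minimal illustration (not one of the original tests) of the difference that
# keeps the bytes-unpacking tests above disabled: indexing or iterating a bytes
# object yields integers on Py3 but 1-char byte strings on Py2, so the
# unpacking targets would receive different types depending on the version.
if bytes is str:
    assert list(b'ab') == ['a', 'b']   # Py2: bytes is an alias of str
else:
    assert list(b'ab') == [97, 98]     # Py3: iteration yields small ints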
Cython-0.26.1/tests/run/cython3.pyx0000664000175000017500000002564312542002467017743 0ustar  stefanstefan00000000000000# cython: language_level=3, binding=True
# mode: run
# tag: generators, python3, exceptions

cimport cython

__doc__ = """
>>> items = sorted(locals_function(1).items())
>>> for item in items:
...     print('%s = %r' % item)
a = 1
b = 2
x = u'abc'

>>> except_as_deletes
True
>>> no_match_does_not_touch_target
True
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(" u'", " '")

def locals_function(a, b=2):
    x = 'abc'
    return locals()


### true division

def truediv(x):
    """
    >>> truediv(4)
    2.0
    >>> truediv(3)
    1.5
    """
    return x / 2


def truediv_int(int x):
    """
    >>> truediv_int(4)
    2.0
    >>> truediv_int(3)
    1.5
    """
    return x / 2


@cython.cdivision(True)
def cdiv_int(int x):
    """
    >>> cdiv_int(4)
    2
    >>> cdiv_int(3)
    1
    """
    return x / 2


### module level except-as tests

exc = [None]
e = None
try:
    raise KeyError
except AttributeError as e:
    exc[0] = e
except KeyError       as e:
    exc[0] = e
except IndexError     as e:
    exc[0] = e
except:
    exc[0] = 'SOMETHING ELSE'

try:
    e
except NameError:
    except_as_deletes = True
else:
    except_as_deletes = False


e = 123
try:
    raise TypeError
except NameError as e:
    pass
except TypeError:
    pass
no_match_does_not_touch_target = (e == 123)


### more except-as tests

def except_as_no_raise_does_not_touch_target(a):
    """
    >>> except_as_no_raise_does_not_touch_target(TypeError)
    (1, 1)
    """
    d = a  # mark used

    b = 1
    try:
        i = 1
    except a as b:
        i = 2
    return i, b


def except_as_raise_deletes_target(x, a):
    """
    >>> except_as_raise_deletes_target(None, TypeError)
    1
    1
    >>> except_as_raise_deletes_target(TypeError('test'), TypeError)
    Traceback (most recent call last):
    UnboundLocalError: local variable 'b' referenced before assignment
    >>> except_as_raise_deletes_target(ValueError('test'), TypeError)
    Traceback (most recent call last):
    ValueError: test
    >>> except_as_raise_deletes_target(None, TypeError)
    1
    1
    """
    b = 1
    try:
        i = 1
        if x:
            raise x
    except a as b:
        i = 2
        assert isinstance(b, a)
    print(b)  # raises UnboundLocalError if except clause was executed
    return i


def except_as_raise_deletes_target_even_after_del(x, a):
    """
    >>> except_as_raise_deletes_target_even_after_del(None, TypeError)
    1
    1
    >>> except_as_raise_deletes_target_even_after_del(TypeError('test'), TypeError)
    2
    >>> except_as_raise_deletes_target_even_after_del(ValueError('test'), TypeError)
    Traceback (most recent call last):
    ValueError: test
    >>> except_as_raise_deletes_target_even_after_del(None, TypeError)
    1
    1
    """
    b = 1
    try:
        i = 1
        if x:
            raise x
    except a as b:
        i = 2
        assert isinstance(b, a)
        del b  # let's see if Cython can still 'del' it after this line!
    try:
        print(b)  # raises UnboundLocalError if except clause was executed
    except UnboundLocalError:
        pass
    else:
        if x:
            print("UnboundLocalError not raised!")
    return i


def except_as_raise_deletes_target_on_error(x, a):
    """
    >>> except_as_raise_deletes_target_on_error(None, TypeError)
    1
    1
    >>> except_as_raise_deletes_target_on_error(TypeError('test'), TypeError)
    Traceback (most recent call last):
    UnboundLocalError: local variable 'b' referenced before assignment
    >>> except_as_raise_deletes_target_on_error(ValueError('test'), TypeError)
    Traceback (most recent call last):
    ValueError: test
    >>> except_as_raise_deletes_target_on_error(None, TypeError)
    1
    1
    """
    b = 1
    try:
        try:
            i = 1
            if x:
                raise x
        except a as b:
            i = 2
            raise IndexError("TEST")
    except IndexError as e:
        assert 'TEST' in str(e), str(e)
    print(b)  # raises UnboundLocalError if except clause was executed
    return i


def except_as_raise_with_empty_except(x, a):
    """
    >>> except_as_raise_with_empty_except(None, TypeError)
    1
    >>> except_as_raise_with_empty_except(TypeError('test'), TypeError)
    >>> except_as_raise_with_empty_except(ValueError('test'), TypeError)
    Traceback (most recent call last):
    ValueError: test
    >>> except_as_raise_with_empty_except(None, TypeError)
    1
    """
    try:
        if x:
            raise x
        b = 1
    except a as b:  # previously raised UnboundLocalError
        pass
    try:
        print(b)  # raises UnboundLocalError if except clause was executed
    except UnboundLocalError:
        if not x:
            print("unexpected UnboundLocalError raised!")
    else:
        if x:
            print("expected UnboundLocalError not raised!")


def except_as_deletes_target_in_gen(x, a):
    """
    >>> list(except_as_deletes_target_in_gen(None, TypeError))
    [(1, 1), (2, 1), (5, 1)]
    >>> list(except_as_deletes_target_in_gen(TypeError('test'), TypeError))
    [(1, 1), 3, 6]
    >>> list(except_as_deletes_target_in_gen(ValueError('test'), TypeError))
    [(1, 1), (4, 1), (5, 1)]
    """
    b = 1
    try:
        i = 1
        yield (1, b)
        if x:
            raise x
        yield (2, b)
    except a as b:
        i = 2
        assert isinstance(b, a)
        yield 3
    except:
        yield (4, b)
    try:
        yield (5, b)
    except UnboundLocalError:
        yield 6


### Py3 feature tests

def print_function(*args):
    """
    >>> print_function(1,2,3)
    1 2 3
    """
    print(*args) # this isn't valid Py2 syntax


def exec3_function(cmd):
    """
    >>> exec3_function('a = 1+1')['a']
    2
    """
    g = {}
    l = {}
    exec(cmd, g, l)
    return l

def exec2_function(cmd):
    """
    >>> exec2_function('a = 1+1')['a']
    2
    """
    g = {}
    exec(cmd, g)
    return g

EXEC_GLOBAL = [5]

def exec1_function(cmd):
    """
    >>> exec1_function('EXEC_GLOBAL.append(1)')
    [1]
    """
    old = len(EXEC_GLOBAL)
    exec(cmd)
    return EXEC_GLOBAL[old:]

ustring = "abcdefg"

def unicode_literals():
    """
    >>> print( unicode_literals() )
    True
    abcdefg
    """
    print(isinstance(ustring, unicode) or type(ustring))
    return ustring

def str_type_is_unicode():
    """
    >>> str_type, s = str_type_is_unicode()
    >>> isinstance(s, type(ustring)) or (s, str_type)
    True
    >>> isinstance(s, str_type) or (s, str_type)
    True
    >>> isinstance(ustring, str_type) or str_type
    True
    """
    cdef str s = 'abc'
    return str, s

def loop_over_unicode_literal():
    """
    >>> print( loop_over_unicode_literal() )
    Py_UCS4
    """
    # Py_UCS4 can represent any Unicode character
    for uchar in 'abcdefg':
        assert uchar in 'abcdefg'
    return cython.typeof(uchar)

def list_comp():
    """
    >>> list_comp()
    [0, 4, 8]
    """
    x = 'abc'
    result = [x*2 for x in range(5) if x % 2 == 0]
    assert x == 'abc' # don't leak in Py3 code
    return result

def list_comp_with_lambda():
    """
    >>> list_comp_with_lambda()
    [0, 4, 8]
    """
    x = 'abc'
    result = [x*2 for x in range(5) if (lambda x:x % 2)(x) == 0]
    assert x == 'abc' # don't leak in Py3 code
    return result

module_level_lc = [ module_level_loopvar*2 for module_level_loopvar in range(4) ]
def list_comp_module_level():
    """
    >>> module_level_lc
    [0, 2, 4, 6]
    >>> module_level_loopvar         # doctest: +ELLIPSIS
    Traceback (most recent call last):
    NameError: ...name 'module_level_loopvar' is not defined
    """

module_level_list_genexp = list(module_level_genexp_loopvar*2 for module_level_genexp_loopvar in range(4))
def genexpr_module_level():
    """
    >>> module_level_list_genexp
    [0, 2, 4, 6]
    >>> module_level_genexp_loopvar         # doctest: +ELLIPSIS
    Traceback (most recent call last):
    NameError: ...name 'module_level_genexp_loopvar' is not defined
    """

def list_comp_unknown_type(l):
    """
    >>> list_comp_unknown_type(range(5))
    [0, 4, 8]
    """
    return [x*2 for x in l if x % 2 == 0]

def listcomp_as_condition(sequence):
    """
    >>> listcomp_as_condition(['a', 'b', '+'])
    True
    >>> listcomp_as_condition('ab+')
    True
    >>> listcomp_as_condition('abc')
    False
    """
    if [1 for c in sequence if c in '+-*/<=>!%&|([^~,']:
        return True
    return False

def set_comp():
    """
    >>> sorted(set_comp())
    [0, 4, 8]
    """
    x = 'abc'
    result = {x*2 for x in range(5) if x % 2 == 0}
    assert x == 'abc' # don't leak
    return result

def dict_comp():
    """
    >>> sorted(dict_comp().items())
    [(0, 0), (2, 4), (4, 8)]
    """
    x = 'abc'
    result = {x:x*2 for x in range(5) if x % 2 == 0}
    assert x == 'abc' # don't leak
    return result

# in Python 3, d.keys/values/items() are the iteration methods
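# The path assertions below inspect the tree produced by the compiler: Cython
# is expected to replace the for-loop over d.keys()/values()/items() with a
# direct while-loop (DictIterationNextNode, built on PyDict_Next for real
# dicts) rather than building an intermediate list or iterator, hence the
# requirement that no ForInStatNode remains.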
@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
@cython.test_fail_if_path_exists(
    "//ForInStatNode")
def dict_iter(dict d):
    """
    >>> d = {'a' : 1, 'b' : 2, 'c' : 3}
    >>> keys, values, items = dict_iter(d)
    >>> sorted(keys)
    ['a', 'b', 'c']
    >>> sorted(values)
    [1, 2, 3]
    >>> sorted(items)
    [('a', 1), ('b', 2), ('c', 3)]

    >>> dict_iter({})
    ([], [], [])
    """
    keys = [ key for key in d.keys() ]
    values = [ value for value in d.values() ]
    items = [ item for item in d.items() ]
    return keys, values, items

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
@cython.test_fail_if_path_exists(
    "//ForInStatNode")
def dict_iter_new_dict():
    """
    >>> dict_keys, keys, values, items = dict_iter_new_dict()
    >>> sorted(dict_keys)
    [11, 22, 33]
    >>> sorted(keys)
    [11, 22, 33]
    >>> sorted(values)
    [1, 2, 3]
    >>> sorted(items)
    [(11, 1), (22, 2), (33, 3)]
    """
    dict_keys = [ key for key in {11 : 1, 22 : 2, 33 : 3} ]
    keys = [ key for key in {11 : 1, 22 : 2, 33 : 3}.keys() ]
    values = [ value for value in {11 : 1, 22 : 2, 33 : 3}.values() ]
    items = [ item for item in {11 : 1, 22 : 2, 33 : 3}.items() ]
    return dict_keys, keys, values, items

def int_literals():
    """
    >>> int_literals()
    long
    long
    unsigned long
    unsigned long
    """
    print(cython.typeof(1L))
    print(cython.typeof(10000000000000L))
    print(cython.typeof(1UL))
    print(cython.typeof(10000000000000UL))

def annotation_syntax(a: "test new test", b : "other" = 2, *args: "ARGS", **kwargs: "KWARGS") -> "ret":
    """
    >>> annotation_syntax(1)
    3
    >>> annotation_syntax(1,3)
    4

    >>> len(annotation_syntax.__annotations__)
    5
    >>> print(annotation_syntax.__annotations__['a'])
    test new test
    >>> print(annotation_syntax.__annotations__['b'])
    other
    >>> print(annotation_syntax.__annotations__['args'])
    ARGS
    >>> print(annotation_syntax.__annotations__['kwargs'])
    KWARGS
    >>> print(annotation_syntax.__annotations__['return'])
    ret
    """
    return a+b
Cython-0.26.1/tests/run/attr.pyx
__doc__ = u"""
    >>> class Test(object):
    ...     def __init__(self, i):
    ...         self.i = i
    >>> b = Test(1)
    >>> b.spam = Test(2)
    >>> b.spam.eggs = Test(3)
    >>> b.spam.eggs.spam = Test(4)
    >>> b.spam.eggs.spam.eggs = Test(5)

    >>> a = f(b)
    >>> a.i
    2
    >>> b.i
    1
    >>> a.spam.i
    1
    >>> b.spam.i
    2
    >>> a.spam.eggs.i
    Traceback (most recent call last):
    AttributeError: 'Test' object has no attribute 'eggs'
    >>> b.spam.eggs.i
    3
    >>> a.spam.spam.i
    2
    >>> b.spam.spam.i
    1
    >>> a.spam.eggs.spam.i
    Traceback (most recent call last):
    AttributeError: 'Test' object has no attribute 'eggs'
    >>> b.spam.eggs.spam.i
    4

    >>> a = g(b)
    >>> a.i
    3
    >>> b.i
    1
    >>> a.spam.i
    4
    >>> b.spam.i
    2
    >>> a.spam.eggs.i
    1
    >>> b.spam.eggs.i
    3
    >>> a.spam.spam.i
    Traceback (most recent call last):
    AttributeError: 'Test' object has no attribute 'spam'
    >>> b.spam.spam.i
    1
    >>> a.spam.eggs.spam.i
    2
    >>> b.spam.eggs.spam.i
    4
"""

def f(b):
    a = b.spam
    a.spam = b
    return a

def g(b):
    a = b.spam.eggs
    a.spam.eggs = b
    return a
Cython-0.26.1/tests/run/fused_cpp.pyx
# tag: cpp

cimport cython
from libcpp.vector cimport vector

def test_cpp_specialization(cython.floating element):
    """
    >>> import cython
    >>> test_cpp_specialization[cython.float](10.0)
    vector[float] * float 10.0
    >>> test_cpp_specialization[cython.double](10.0)
    vector[double] * double 10.0
    """
    cdef vector[cython.floating] *v = new vector[cython.floating]()
    v.push_back(element)
    print cython.typeof(v), cython.typeof(element), v.at(0)
Cython-0.26.1/tests/run/modop.pyx
import sys


def modobj(obj2, obj3):
    """
    >>> modobj(9,2)
    1
    >>> modobj('%d', 5)
    '5'
    """
    obj1 = obj2 % obj3
    return obj1
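
# Note on the tests below: Python's '%' takes the sign of the divisor (so
# (-1) % 10 == 9), unlike C's '%', which follows the dividend.  The mod_obj_*
# doctests therefore compare Cython's optimised modulo with a literal int
# divisor against the plain Python result, including negative operands and
# values that do not fit into a C long.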


def mod_obj_10(int2):
    """
    >>> 0 % 10
    0
    >>> mod_obj_10(0)
    0
    >>> 1 % 10
    1
    >>> mod_obj_10(1)
    1
    >>> (-1) % 10
    9
    >>> mod_obj_10(-1)
    9
    >>> 9 % 10
    9
    >>> mod_obj_10(9)
    9
    >>> 10 % 10
    0
    >>> mod_obj_10(10)
    0
    >>> (-10) % 10
    0
    >>> mod_obj_10(-10)
    0
    >>> (-12) % 10
    8
    >>> mod_obj_10(-12)
    8
    >>> 10002 % 10
    2
    >>> mod_obj_10(10002)
    2
    >>> int((2**25) % 10)
    2
    >>> int(mod_obj_10(2**25))
    2
    >>> int((-2**25) % 10)
    8
    >>> int(mod_obj_10(-2**25))
    8
    >>> int((-2**31-1) % 10)
    1
    >>> int(mod_obj_10(int(-2**31-1)))
    1
    >>> int((2**50) % 10)
    4
    >>> int(mod_obj_10(2**50))
    4
    >>> int((-2**50) % 10)
    6
    >>> int(mod_obj_10(-2**50))
    6
    >>> int((-2**63-1) % 10)
    1
    >>> int(mod_obj_10(-2**63-1))
    1
    >>> int((2**200) % 10)
    6
    >>> int(mod_obj_10(2**200))
    6
    >>> int((-2**200) % 10)
    4
    >>> int(mod_obj_10(-2**200))
    4
    """
    int1 = int2 % 10
    return int1


def mod_obj_17(int2):
    """
    >>> 0 % 17
    0
    >>> mod_obj_17(0)
    0
    >>> 1 % 17
    1
    >>> mod_obj_17(1)
    1
    >>> (-1) % 17
    16
    >>> mod_obj_17(-1)
    16
    >>> 9 % 17
    9
    >>> mod_obj_17(16)
    16
    >>> 17 % 17
    0
    >>> mod_obj_17(17)
    0
    >>> (-17) % 17
    0
    >>> mod_obj_17(-17)
    0
    >>> (-18) % 17
    16
    >>> mod_obj_17(-18)
    16
    >>> 10002 % 17
    6
    >>> mod_obj_17(10002)
    6
    >>> int((2**25) % 17)
    2
    >>> int(mod_obj_17(2**25))
    2
    >>> int((-2**25) % 17)
    15
    >>> int(mod_obj_17(-2**25))
    15
    >>> int((-2**31-1) % 17)
    7
    >>> int(mod_obj_17(int(-2**31-1)))
    7
    >>> int((2**50) % 17)
    4
    >>> int(mod_obj_17(2**50))
    4
    >>> int((-2**50) % 17)
    13
    >>> int(mod_obj_17(-2**50))
    13
    >>> int((-2**63-1) % 17)
    7
    >>> int(mod_obj_17(-2**63-1))
    7
    >>> int((2**200) % 17)
    1
    >>> int(mod_obj_17(2**200))
    1
    >>> int((-2**200) % 17)
    16
    >>> int(mod_obj_17(-2**200))
    16
    """
    int1 = int2 % 17
    return int1


def mod_obj_m2(int2):
    """
    >>> 0 % -2
    0
    >>> mod_obj_m2(0)
    0
    >>> 1 % -2
    -1
    >>> mod_obj_m2(1)
    -1
    >>> 9 % -2
    -1
    >>> mod_obj_m2(9)
    -1
    """
    int1 = int2 % -2
    return int1


def mod_obj_m2f(obj2):
    """
    >>> 0 % -2.0 == 0.0    # -0.0 in Py2.7+
    True
    >>> mod_obj_m2f(0)
    -0.0
    >>> 1 % -2.0
    -1.0
    >>> mod_obj_m2f(1)
    -1.0
    >>> 9 % -2.0
    -1.0
    >>> mod_obj_m2f(9)
    -1.0
    """
    result = obj2 % -2.0
    return result


def modint(int int2, int int3):
    """
    >>> modint(9,2)
    1
    """
    cdef int int1
    int1 = int2 % int3
    return int1


def modptr():
    """
    >>> print(modptr() if sys.version_info[0] < 3 else 'spameggs')
    spameggs
    """
    cdef char *str2, *str3
    str2 = "spam%s"
    str3 = "eggs"
    obj1 = str2 % str3  # '%' operator doesn't work on byte strings in Py3
    return obj1
Cython-0.26.1/tests/run/argdefault.pyx
GLB0 = (1, 2)
def f0(arg=GLB0):
    """
    >>> f0()
    (1, 2)
    """
    return arg
def g0(arg=(1, 2)):
    """
    >>> g0()
    (1, 2)
    """
    return arg


GLB1 = [1, 2]
def f1(arg=GLB1):
    """
    >>> f1()
    [1, 2]
    """
    return arg
def g1(arg=[1, 2]):
    """
    >>> g1()
    [1, 2]
    """
    return arg


cdef GLB2 = {1: 2}
def f2(arg=GLB2):
    """
    >>> f2()
    {1: 2}
    """
    return arg
def g2(arg={1: 2}):
    """
    >>> g2()
    {1: 2}
    """
    return arg


class Foo(object):
    pass
cdef GLB3 = Foo()
def f3(arg=GLB3):
    """
    >>> f3() #doctest: +ELLIPSIS
    <argdefault.Foo object at ...>
    """
    return arg
def g3(arg=Foo()):
    """
    >>> g3() #doctest: +ELLIPSIS
    <argdefault.Foo object at ...>
    """
    return arg


cdef class Bar:
    pass
cdef Bar GLB4 = Bar()
def f4(arg=GLB4):
    """
    >>> f4() #doctest: +ELLIPSIS
    <argdefault.Bar object at ...>
    """
    return arg
def g4(arg=Bar()):
    """
    >>> g4() #doctest: +ELLIPSIS
    <argdefault.Bar object at ...>
    """
    return arg


cdef class Bla:
    pass
cdef Bla GLB5 = Bla()
def f5(Bla arg=GLB5):
    """
    >>> f5() #doctest: +ELLIPSIS
    <argdefault.Bla object at ...>
    """
    return arg
def g5(Bla arg=Bla()):
    """
    >>> g5() #doctest: +ELLIPSIS
    <argdefault.Bla object at ...>
    """
    return arg


cdef int GLB6 = 7
def f6(int arg=GLB6):
    """
    >>> f6()
    7
    """
    return arg
def g6(int arg=7):
    """
    >>> g6()
    7
    """
    return arg
Cython-0.26.1/tests/run/concatcstrings.pyx
__doc__ = u"""
    >>> spam == u'C string 1' + u'C string 2'
    True
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '")

spam = u"C string 1" + u"C string 2"
Cython-0.26.1/tests/run/cimport_from_sys_path.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import a"

######## setup.py ########

from Cython.Build import cythonize
from distutils.core import setup

# Add ./site-packages to sys.path
from os.path import realpath
import sys
sys.path.append(realpath('site-packages'))

setup(
  ext_modules = cythonize("*.pyx"),
)

######## site-packages/b/__init__.py ########

######## site-packages/b/other.pxd ########

cdef extern from "foo.c":
    int foo(int)

######## site-packages/b/foo.c ########

static int foo(int a)
{
    return a * a;
}

######## a.pyx ########

from b.other cimport foo
print foo(10)

cimport b.other
print b.other.foo(10)
Cython-0.26.1/tests/run/struct_conversion_extern_header.h
struct my_date_t {
    int year;
    int month;
    int day;
};
Cython-0.26.1/tests/run/ticket_124.pyx
def spam(dict d):
    """
    >>> spam(dict(test=2))
    False
    """
    for elm in d:
        return False
    return True
Cython-0.26.1/tests/run/iterdict.pyx
cimport cython

dict_size = 4
d = dict(zip(range(10,dict_size+10), range(dict_size)))
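
# The "optimistic_*" variants below take an untyped argument: Cython is still
# expected to generate the fast dict-iteration while-loop, but with a runtime
# fallback that simply calls iteritems()/iterkeys()/itervalues() when the
# argument turns out not to be a real dict (see the mydict helper classes in
# the doctests).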


def dict_iteritems(dict d):
    """
    >>> it = dict_iteritems(d)
    >>> type(it) is list
    False
    >>> sorted(it)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    """
    return d.iteritems()


def dict_iterkeys(dict d):
    """
    >>> it = dict_iterkeys(d)
    >>> type(it) is list
    False
    >>> sorted(it)
    [10, 11, 12, 13]
    """
    return d.iterkeys()


def dict_itervalues(dict d):
    """
    >>> it = dict_itervalues(d)
    >>> type(it) is list
    False
    >>> sorted(it)
    [0, 1, 2, 3]
    """
    return d.itervalues()


@cython.test_fail_if_path_exists(
    "//WhileStatNode")
def items(dict d):
    """
    >>> items(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    """
    l = []
    for k,v in d.items():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iteritems(dict d):
    """
    >>> iteritems(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> iteritems({})
    []
    """
    l = []
    for k,v in d.iteritems():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_iteritems(d):
    """
    >>> optimistic_iteritems(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems({})
    []
    >>> class mydict(object):
    ...     def __init__(self, t): self.t = t
    ...     def iteritems(self): return self.t(d.items())
    >>> optimistic_iteritems(mydict(list))
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems(mydict(tuple))
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems(mydict(iter))
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    """
    l = []
    for k,v in d.iteritems():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iteritems_dict():
    """
    >>> iteritems_dict()
    [(11, 1), (12, 2), (13, 3)]
    """
    l = []
    for k,v in {11 : 1, 12 : 2, 13 : 3}.iteritems():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iteritems_int(dict d):
    """
    >>> iteritems_int(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> iteritems_int({})
    []
    >>> iteritems_int({'a': 1})
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> iteritems_int({1: 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> iteritems_int({'a': 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int k,v
    l = []
    for k,v in d.iteritems():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_iteritems_int(d):
    """
    >>> optimistic_iteritems_int(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems_int({})
    []
    >>> class mydict(object):
    ...     def __init__(self, t): self.t = t
    ...     def iteritems(self): return self.t(d.items())
    >>> optimistic_iteritems_int(mydict(list))
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems_int(mydict(tuple))
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> optimistic_iteritems_int(mydict(iter))
    [(10, 0), (11, 1), (12, 2), (13, 3)]

    >>> optimistic_iteritems_int({'a': 1})
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> optimistic_iteritems_int({1: 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> optimistic_iteritems_int({'a': 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int k,v
    l = []
    for k,v in d.iteritems():
        l.append((k,v))
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iteritems_tuple(dict d):
    """
    >>> iteritems_tuple(d)
    [(10, 0), (11, 1), (12, 2), (13, 3)]
    >>> iteritems_tuple({})
    []
    """
    l = []
    for t in d.iteritems():
        l.append(t)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iteritems_listcomp(dict d):
    cdef list l = [(k,v) for k,v in d.iteritems()]
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterkeys(dict d):
    """
    >>> iterkeys(d)
    [10, 11, 12, 13]
    >>> iterkeys({})
    []
    """
    l = []
    for k in d.iterkeys():
        l.append(k)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_iterkeys(d):
    """
    >>> optimistic_iterkeys(d)
    [10, 11, 12, 13]
    >>> optimistic_iterkeys({})
    []
    >>> class mydict(object):
    ...     def __init__(self, t): self.t = t
    ...     def iterkeys(self): return self.t(d)
    >>> optimistic_iterkeys(mydict(lambda x:x))
    [10, 11, 12, 13]
    >>> optimistic_iterkeys(mydict(lambda x:x.keys()))
    [10, 11, 12, 13]
    >>> optimistic_iterkeys(mydict(list))
    [10, 11, 12, 13]
    >>> optimistic_iterkeys(mydict(tuple))
    [10, 11, 12, 13]
    >>> optimistic_iterkeys(mydict(iter))
    [10, 11, 12, 13]
    """
    l = []
    for k in d.iterkeys():
        l.append(k)
    l.sort()
    return l

@cython.test_fail_if_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_iterkeys_argerror(d):
    """
    >>> try: optimistic_iterkeys_argerror(d)
    ... except (TypeError, AttributeError): pass
    """
    for k in d.iterkeys(1):
        print k

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterkeys_int(dict d):
    """
    >>> iterkeys_int(d)
    [10, 11, 12, 13]
    >>> iterkeys_int({})
    []
    >>> iterkeys_int({'a': 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int k
    l = []
    for k in d.iterkeys():
        l.append(k)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterdict(dict d):
    """
    >>> iterdict(d)
    [10, 11, 12, 13]
    >>> iterdict({})
    []
    """
    l = []
    for k in d:
        l.append(k)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterdict_int(dict d):
    """
    >>> iterdict_int(d)
    [10, 11, 12, 13]
    >>> iterdict_int({})
    []
    >>> iterdict_int({'a': 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int k
    l = []
    for k in d:
        l.append(k)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterdict_reassign(dict d):
    """
    >>> iterdict_reassign(d)
    [10, 11, 12, 13]
    >>> iterdict_reassign({})
    []
    """
    cdef dict d_new = {}
    l = []
    for k in d:
        d = d_new
        l.append(k)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterdict_listcomp(dict d):
    """
    >>> iterdict_listcomp(d)
    [10, 11, 12, 13]
    >>> iterdict_listcomp({})
    []
    """
    cdef list l = [k for k in d]
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def itervalues(dict d):
    """
    >>> itervalues(d)
    [0, 1, 2, 3]
    >>> itervalues({})
    []
    """
    l = []
    for v in d.itervalues():
        l.append(v)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_itervalues(d):
    """
    >>> optimistic_itervalues(d)
    [0, 1, 2, 3]
    >>> optimistic_itervalues({})
    []
    >>> class mydict(object):
    ...     def __init__(self, t): self.t = t
    ...     def itervalues(self): return self.t(d.values())
    >>> optimistic_itervalues(mydict(lambda x:x))
    [0, 1, 2, 3]
    >>> optimistic_itervalues(mydict(list))
    [0, 1, 2, 3]
    >>> optimistic_itervalues(mydict(tuple))
    [0, 1, 2, 3]
    >>> optimistic_itervalues(mydict(iter))
    [0, 1, 2, 3]
    """
    l = []
    for v in d.itervalues():
        l.append(v)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def itervalues_int(dict d):
    """
    >>> itervalues_int(d)
    [0, 1, 2, 3]
    >>> itervalues_int({})
    []
    >>> itervalues_int({'a': 'b'})
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int v
    l = []
    for v in d.itervalues():
        l.append(v)
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def itervalues_listcomp(dict d):
    """
    >>> itervalues_listcomp(d)
    [0, 1, 2, 3]
    >>> itervalues_listcomp({})
    []
    """
    cdef list l = [v for v in d.itervalues()]
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def itervalues_kwargs(**d):
    """
    >>> itervalues_kwargs(a=1, b=2, c=3, d=4)
    [1, 2, 3, 4]
    >>> itervalues_kwargs()
    []
    """
    cdef list l = [v for v in d.itervalues()]
    l.sort()
    return l

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def iterdict_change_size(dict d):
    """
    >>> count, i = 0, -1
    >>> d = {1:2, 10:20}
    >>> for i in d:
    ...     d[i+1] = 5
    ...     count += 1
    ...     if count > 5:
    ...         break # safety
    Traceback (most recent call last):
    RuntimeError: dictionary changed size during iteration

    >>> iterdict_change_size({1:2, 10:20})
    Traceback (most recent call last):
    RuntimeError: dictionary changed size during iteration
    >>> print( iterdict_change_size({}) )
    DONE
    """
    cdef int count = 0
    i = -1
    for i in d:
        d[i+1] = 5
        count += 1
        if count > 5:
            break # safety
    return "DONE"

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def optimistic_iterdict_change_size(d):
    """
    >>> count, i = 0, -1
    >>> d = {1:2, 10:20}
    >>> for i in d:
    ...     d[i+1] = 5
    ...     count += 1
    ...     if count > 5:
    ...         break # safety
    Traceback (most recent call last):
    RuntimeError: dictionary changed size during iteration

    >>> optimistic_iterdict_change_size({1:2, 10:20})
    Traceback (most recent call last):
    RuntimeError: dictionary changed size during iteration
    >>> print( optimistic_iterdict_change_size({}) )
    DONE
    >>> class mydict(object):
    ...     _d = {1:2, 10:20}
    ...     def iterkeys(self): return self._d
    ...     def __setitem__(self, key, value): self._d[key] = value
    >>> optimistic_iterdict_change_size(mydict())
    Traceback (most recent call last):
    RuntimeError: dictionary changed size during iteration
    """
    cdef int count = 0
    i = -1
    for i in d.iterkeys():
        d[i+1] = 5
        count += 1
        if count > 5:
            break # safety
    return "DONE"


@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode//DictIterationNextNode")
def values_of_expression(**kwargs):
    """
    >>> sorted(values_of_expression(a=3, b=4))
    [3, 4]
    """
    # this can be optimised even in Py2
    return [ arg for arg in dict(kwargs.items()).values() ]


def items_of_expression(*args, **kwargs):
    """
    >>> sorted(items_of_expression(a=3, b=4))
    [('a', 3), ('b', 4)]

    >>> sorted(items_of_expression([('a', 3)], b=4))
    [('a', 3), ('b', 4)]
    """
    return [item for item in dict(*args, **kwargs).items()]


def iteritems_of_expression(*args, **kwargs):
    """
    >>> sorted(iteritems_of_expression(a=3, b=4))
    [('a', 3), ('b', 4)]

    >>> sorted(iteritems_of_expression([('a', 3)], b=4))
    [('a', 3), ('b', 4)]
    """
    return [item for item in dict(*args, **kwargs).iteritems()]


def for_in_items_of_expression(*args, **kwargs):
    """
    >>> sorted(for_in_items_of_expression(a=3, b=4))
    [('a', 3), ('b', 4)]

    >>> sorted(for_in_items_of_expression([('a', 3)], b=4))
    [('a', 3), ('b', 4)]
    """
    result = []
    for k, v in dict(*args, **kwargs).items():
        result.append((k, v))
    return result


def for_in_iteritems_of_expression(*args, **kwargs):
    """
    >>> sorted(for_in_iteritems_of_expression(a=3, b=4))
    [('a', 3), ('b', 4)]

    >>> sorted(for_in_iteritems_of_expression([('a', 3)], b=4))
    [('a', 3), ('b', 4)]
    """
    result = []
    for k, v in dict(*args, **kwargs).iteritems():
        result.append((k, v))
    return result
Cython-0.26.1/tests/run/numpy_ValueError_T172.pyx
# ticket: 172
# tag: numpy

__doc__ = u"""
    >>> 1
    1
"""
cimport numpy
class ValueError(object): pass
Cython-0.26.1/tests/run/if_else_expr_cpp.pyx
# mode: run
# tag: condexpr, cpp

cdef extern from "if_else_expr_cpp_helper.h":
    cdef cppclass Holder:
        int value
        Holder()
        Holder(int value)

    cdef Holder v1
    cdef Holder v2
    cdef Holder& get_v1()
    cdef Holder& get_v2()

cdef reset():
    v1.value = 1
    v2.value = 2

def test_one_ref(bint b):
    """
    >>> test_one_ref(False)
    1
    >>> test_one_ref(True)
    100
    """
    reset()
    return (Holder(100) if b else get_v1()).value

def test_both_ref(bint b):
    """
    >>> test_both_ref(False)
    (1, 100)
    >>> test_both_ref(True)
    (100, 2)
    """
    reset()
    try:
        (get_v1() if b else get_v2()).value = 100
        return v1.value, v2.value
    finally:
        reset()
Cython-0.26.1/tests/run/type_inference_T768_cpp.pyx
# mode: run
# tag: cpp
# ticket: 768
from cython cimport typeof

cdef extern from "shapes.h" namespace "shapes":
    cdef cppclass Shape:
        float area()

    cdef cppclass Circle(Shape):
        int radius
        Circle(int)

def type_inference_del_cpp():
    """
    >>> type_inference_del_cpp()
    'Circle *'
    """
    x = new Circle(10)
    del x
    return typeof(x)
Cython-0.26.1/tests/run/numpy_test.pyx
# tag: numpy
# cannot be named "numpy" in order to not clash with the numpy module!

cimport numpy as np
cimport cython

import re
import sys

from libc.stdlib cimport malloc

def little_endian():
    cdef int endian_detector = 1
    return (<char*>&endian_detector)[0] != 0

__test__ = {}
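# The decorators below copy each function's docstring into the module-level
# __test__ dict, which doctest collects; testcase_have_buffer_interface only
# registers the test when the NumPy and Python versions support the new
# buffer interface.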

def testcase(f):
    __test__[f.__name__] = f.__doc__
    return f

def testcase_have_buffer_interface(f):
    major, minor, *rest = np.__version__.split('.')
    if (int(major), int(minor)) >= (1, 5) and sys.version_info[:2] >= (2, 6):
        __test__[f.__name__] = f.__doc__
    return f

if little_endian():
    my_endian = '<'
    other_endian = '>'
else:
    my_endian = '>'
    other_endian = '<'

try:
    import numpy as np
    __doc__ = u"""

    >>> assert_dtype_sizes()

    >>> basic()
    [[0 1 2 3 4]
     [5 6 7 8 9]]
    2 0 9 5

    >>> three_dim()
    [[[  0.   1.   2.   3.]
      [  4.   5.   6.   7.]]
    <_BLANKLINE_>
     [[  8.   9.  10.  11.]
      [ 12.  13.  14.  15.]]
    <_BLANKLINE_>
     [[ 16.  17.  18.  19.]
      [ 20.  21.  22.  23.]]]
    6.0 0.0 13.0 8.0

    >>> obj_array()
    [a 1 {}]
    a 1 {}

    Test various forms of slicing, picking etc.
    >>> a = np.arange(10, dtype='l').reshape(2, 5)
    >>> print_long_2d(a)
    0 1 2 3 4
    5 6 7 8 9
    >>> print_long_2d(a[::-1, ::-1])
    9 8 7 6 5
    4 3 2 1 0
    >>> print_long_2d(a[1:2, 1:3])
    6 7
    >>> print_long_2d(a[::2, ::2])
    0 2 4
    >>> print_long_2d(a[::4, :])
    0 1 2 3 4
    >>> print_long_2d(a[:, 1:5:2])
    1 3
    6 8
    >>> print_long_2d(a[:, 5:1:-2])
    4 2
    9 7
    >>> print_long_2d(a[:, [3, 1]])
    3 1
    8 6
    >>> print_long_2d(a.T)
    0 5
    1 6
    2 7
    3 8
    4 9

    Write to slices
    >>> b = a.copy()
    >>> put_range_long_1d(b[:, 3])
    >>> print (b)
    [[0 1 2 0 4]
     [5 6 7 1 9]]
    >>> put_range_long_1d(b[::-1, 3])
    >>> print (b)
    [[0 1 2 1 4]
     [5 6 7 0 9]]
    >>> a = np.zeros(9, dtype='l')
    >>> put_range_long_1d(a[1::3])
    >>> print (a)
    [0 0 0 0 1 0 0 2 0]

    Write to picked subarrays. This should NOT change the original
    array as picking creates a new mutable copy.
    >>> a = np.zeros(10, dtype='l').reshape(2, 5)
    >>> put_range_long_1d(a[[0, 0, 1, 1, 0], [0, 1, 2, 4, 3]])
    >>> print (a)
    [[0 0 0 0 0]
     [0 0 0 0 0]]

    Test contiguous access modes:
    >>> c_arr = np.array(np.arange(12, dtype='i').reshape(3,4), order='C')
    >>> f_arr = np.array(np.arange(12, dtype='i').reshape(3,4), order='F')
    >>> test_c_contig(c_arr)
    0 1 2 3
    4 5 6 7
    8 9 10 11
    >>> test_f_contig(f_arr)
    0 1 2 3
    4 5 6 7
    8 9 10 11
    >>> test_c_contig(f_arr) #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    ValueError: ndarray is not C...contiguous
    >>> test_f_contig(c_arr) #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    ValueError: ndarray is not Fortran contiguous
    >>> test_c_contig(c_arr[::2,::2]) #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    ValueError: ndarray is not C...contiguous

    >>> test_dtype('b', inc1_byte)
    >>> test_dtype('B', inc1_ubyte)
    >>> test_dtype('h', inc1_short)
    >>> test_dtype('H', inc1_ushort)
    >>> test_dtype('i', inc1_int)
    >>> test_dtype('I', inc1_uint)
    >>> test_dtype('l', inc1_long)
    >>> test_dtype('L', inc1_ulong)

    >>> test_dtype('f', inc1_float)
    >>> test_dtype('d', inc1_double)
    >>> test_dtype('g', inc1_longdouble)
    >>> test_dtype('O', inc1_object)
    >>> test_dtype('F', inc1_cfloat) # numpy format codes differ from buffer ones here
    >>> test_dtype('D', inc1_cdouble)
    >>> test_dtype('G', inc1_clongdouble)
    >>> test_dtype('F', inc1_cfloat_struct)
    >>> test_dtype('D', inc1_cdouble_struct)
    >>> test_dtype('G', inc1_clongdouble_struct)

    >>> test_dtype(np.int, inc1_int_t)
    >>> test_dtype(np.longlong, inc1_longlong_t)
    >>> test_dtype(np.float, inc1_float_t)
    >>> test_dtype(np.double, inc1_double_t)
    >>> test_dtype(np.intp, inc1_intp_t)
    >>> test_dtype(np.uintp, inc1_uintp_t)

    >>> test_dtype(np.longdouble, inc1_longdouble_t)

    >>> test_dtype(np.int32, inc1_int32_t)
    >>> test_dtype(np.float64, inc1_float64_t)

    Endian tests:
    >>> test_dtype('%si' % my_endian, inc1_int)
    >>> test_dtype('%si' % other_endian, inc1_int)  #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    ValueError: ...



    >>> test_recordarray()

    >>> print(test_nested_dtypes(np.zeros((3,), dtype=np.dtype([\
            ('a', np.dtype('i,i')),\
            ('b', np.dtype('i,i'))\
        ]))))                              # doctest: +NORMALIZE_WHITESPACE
    array([((0, 0), (0, 0)), ((1, 2), (1, 4)), ((1, 2), (1, 4))], 
          dtype=[('a', [('f0', '!i4'), ('f1', '!i4')]), ('b', [('f0', '!i4'), ('f1', '!i4')])])

    >>> print(test_nested_dtypes(np.zeros((3,), dtype=np.dtype([\
            ('a', np.dtype('i,f')),\
            ('b', np.dtype('i,i'))\
        ]))))
    Traceback (most recent call last):
        ...
    ValueError: Buffer dtype mismatch, expected 'int' but got 'float' in 'DoubleInt.y'

    >>> print(test_packed_align(np.zeros((1,), dtype=np.dtype('b,i', align=False))))
    [(22, 23)]


    The output changed in Python 3:
    >> print(test_unpacked_align(np.zeros((1,), dtype=np.dtype('b,i', align=True))))
    array([(22, 23)],
          dtype=[('f0', '|i1'), ('', '|V3'), ('f1', '!i4')])

    ->

    array([(22, 23)],
          dtype={'names':['f0','f1'], 'formats':['i1','!i4'], 'offsets':[0,4], 'itemsize':8, 'aligned':True})


    >>> print(test_unpacked_align(np.zeros((1,), dtype=np.dtype('b,i', align=True))))
    [(22, 23)]

    >>> print(test_packed_align(np.zeros((1,), dtype=np.dtype('b,i', align=True)))) #doctest: +ELLIPSIS
    Traceback (most recent call last):
        ...
    ValueError: ...

    >>> print(test_unpacked_align(np.zeros((1,), dtype=np.dtype('b,i', align=False)))) #doctest: +ELLIPSIS
    Traceback (most recent call last):
        ...
    ValueError: ...


    >>> test_good_cast()
    True
    >>> test_bad_cast()
    Traceback (most recent call last):
        ...
    ValueError: Item size of buffer (1 byte) does not match size of 'int' (4 bytes)

    >>> test_complextypes()
    1,1
    1,1
    8,16

    >>> test_point_record()         # doctest: +NORMALIZE_WHITESPACE
    array([(0., 0.), (1., -1.), (2., -2.)], 
          dtype=[('x', '!f8'), ('y', '!f8')])

"""

    if np.__version__ >= '1.6' and False:
        __doc__ += u"""
        Tests are DISABLED as the buffer format parser does not align members
        of aligned structs in padded structs in relation to the possibly
        unaligned initial offset.

        The following expose bugs in Numpy (versions prior to 2011-04-02):
        >>> print(test_partially_packed_align(np.zeros((1,), dtype=np.dtype([('a', 'b'), ('b', 'i'), ('sub', np.dtype('b,i')), ('c', 'i')], align=True))))
        array([(22, 23, (24, 25), 26)],
              dtype=[('a', '|i1'), ('', '|V3'), ('b', '!i4'), ('sub', [('f0', '|i1'), ('f1', '!i4')]), ('', '|V3'), ('c', '!i4')])

        >>> print(test_partially_packed_align_2(np.zeros((1,), dtype=np.dtype([('a', 'b'), ('b', 'i'), ('c', 'b'), ('sub', np.dtype('b,i', align=True))]))))
        array([(22, 23, 24, (27, 28))],
              dtype=[('a', '|i1'), ('b', '!i4'), ('c', '|i1'), ('sub', [('f0', '|i1'), ('', '|V3'), ('f1', '!i4')])])

        >>> print(test_partially_packed_align(np.zeros((1,), dtype=np.dtype([('a', 'b'), ('b', 'i'), ('sub', np.dtype('b,i')), ('c', 'i')], align=False)))) #doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        ValueError: ...

        >>> print(test_partially_packed_align_2(np.zeros((1,), dtype=np.dtype([('a', 'b'), ('b', 'i'), ('c', 'b'), ('sub', np.dtype('b,i', align=False))])))) #doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        ValueError: ...
        """

except:
    __doc__ = u""

__test__[__name__] = __doc__

def assert_dtype_sizes():
    assert sizeof(np.int8_t) == 1
    assert sizeof(np.int16_t) == 2
    assert sizeof(np.int32_t) == 4
    assert sizeof(np.int64_t) == 8
    assert sizeof(np.uint8_t) == 1
    assert sizeof(np.uint16_t) == 2
    assert sizeof(np.uint32_t) == 4
    assert sizeof(np.uint64_t) == 8
    assert sizeof(np.float32_t) == 4
    assert sizeof(np.float64_t) == 8
    assert sizeof(np.complex64_t) == 8
    assert sizeof(np.complex128_t) == 16

def ndarray_str(arr):
    u"""
    Since doctest in Py2.3 doesn't support <BLANKLINE>, manually replace
    blank lines with <_BLANKLINE_>
    """
    return unicode(arr).replace(u'\n\n', u'\n<_BLANKLINE_>\n')

def basic():
    cdef object[int, ndim=2] buf = np.arange(10, dtype='i').reshape((2, 5))
    print buf
    print buf[0, 2], buf[0, 0], buf[1, 4], buf[1, 0]

def three_dim():
    cdef object[double, ndim=3] buf = np.arange(24, dtype='d').reshape((3,2,4))
    print ndarray_str(buf)
    print buf[0, 1, 2], buf[0, 0, 0], buf[1, 1, 1], buf[1, 0, 0]

def obj_array():
    cdef object[object, ndim=1] buf = np.array(["a", 1, {}])
    print str(buf).replace('"', '').replace("'", '')
    print buf[0], buf[1], buf[2]


def print_long_2d(np.ndarray[long, ndim=2] arr):
    cdef int i, j
    for i in range(arr.shape[0]):
        print u" ".join([unicode(arr[i, j]) for j in range(arr.shape[1])])

def put_range_long_1d(np.ndarray[long] arr):
    u"""Writes 0,1,2,... to array and returns array"""
    cdef int value = 0, i
    for i in range(arr.shape[0]):
        arr[i] = value
        value += 1

def test_c_contig(np.ndarray[int, ndim=2, mode='c'] arr):
    cdef int i, j
    for i in range(arr.shape[0]):
        print u" ".join([unicode(arr[i, j]) for j in range(arr.shape[1])])

def test_f_contig(np.ndarray[int, ndim=2, mode='fortran'] arr):
    cdef int i, j
    for i in range(arr.shape[0]):
        print u" ".join([unicode(arr[i, j]) for j in range(arr.shape[1])])

# Exhaustive dtype tests -- increments element [1] by 1 (or 1+1j) for all dtypes
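# For example, test_dtype('i', inc1_int) further down builds
# np.array([0, 10], dtype='i'), calls inc1_int(a) and then checks that a[1]
# has become 11 (or 11+11j for the complex dtypes).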
def inc1_byte(np.ndarray[char] arr):                    arr[1] += 1
def inc1_ubyte(np.ndarray[unsigned char] arr):          arr[1] += 1
def inc1_short(np.ndarray[short] arr):                  arr[1] += 1
def inc1_ushort(np.ndarray[unsigned short] arr):        arr[1] += 1
def inc1_int(np.ndarray[int] arr):                      arr[1] += 1
def inc1_uint(np.ndarray[unsigned int] arr):            arr[1] += 1
def inc1_long(np.ndarray[long] arr):                    arr[1] += 1
def inc1_ulong(np.ndarray[unsigned long] arr):          arr[1] += 1
def inc1_longlong(np.ndarray[long long] arr):           arr[1] += 1
def inc1_ulonglong(np.ndarray[unsigned long long] arr): arr[1] += 1

def inc1_float(np.ndarray[float] arr):                  arr[1] += 1
def inc1_double(np.ndarray[double] arr):                arr[1] += 1
def inc1_longdouble(np.ndarray[long double] arr):       arr[1] += 1

def inc1_cfloat(np.ndarray[float complex] arr):            arr[1] = arr[1] + 1 + 1j
def inc1_cdouble(np.ndarray[double complex] arr):          arr[1] = (arr[1] + 1) + 1j
def inc1_clongdouble(np.ndarray[long double complex] arr): arr[1] = arr[1] + (1 + 1j)

def inc1_cfloat_struct(np.ndarray[np.cfloat_t] arr):
    arr[1].real += 1
    arr[1].imag += 1

def inc1_cdouble_struct(np.ndarray[np.cdouble_t] arr):
    arr[1].real += 1
    arr[1].imag += 1

def inc1_clongdouble_struct(np.ndarray[np.clongdouble_t] arr):
    cdef long double x
    x = arr[1].real + 1
    arr[1].real = x
    arr[1].imag = arr[1].imag + 1

def inc1_object(np.ndarray[object] arr):
    o = arr[1]
    o += 1
    arr[1] = o # unfortunately, += segfaults for objects


def inc1_int_t(np.ndarray[np.int_t] arr):               arr[1] += 1
def inc1_long_t(np.ndarray[np.long_t] arr):             arr[1] += 1
def inc1_longlong_t(np.ndarray[np.longlong_t] arr):     arr[1] += 1
def inc1_float_t(np.ndarray[np.float_t] arr):           arr[1] += 1
def inc1_double_t(np.ndarray[np.double_t] arr):         arr[1] += 1
def inc1_longdouble_t(np.ndarray[np.longdouble_t] arr): arr[1] += 1
def inc1_intp_t(np.ndarray[np.intp_t] arr):             arr[1] += 1
def inc1_uintp_t(np.ndarray[np.uintp_t] arr):           arr[1] += 1

# The tests below only work on platforms that has the given types
def inc1_int32_t(np.ndarray[np.int32_t] arr):           arr[1] += 1
def inc1_float64_t(np.ndarray[np.float64_t] arr):       arr[1] += 1


def test_dtype(dtype, inc1):
    if dtype in ("g", np.longdouble,
                 "G", np.clongdouble):
        if sizeof(double) == sizeof(long double): # MSVC
            return
    if dtype in ('F', 'D', 'G'):
        a = np.array([0, 10+10j], dtype=dtype)
        inc1(a)
        if a[1] != (11 + 11j): print u"failed!", a[1]
    else:
        a = np.array([0, 10], dtype=dtype)
        inc1(a)
        if a[1] != 11: print u"failed!"

cdef struct DoubleInt:
    int x, y

def test_recordarray():
    cdef object[DoubleInt] arr
    arr = np.array([(5,5), (4, 6)], dtype=np.dtype('i,i'))
    cdef DoubleInt rec
    rec = arr[0]
    if rec.x != 5: print u"failed"
    if rec.y != 5: print u"failed"
    rec.y += 5
    arr[1] = rec
    arr[0].x -= 2
    arr[0].y += 3
    if arr[0].x != 3: print u"failed"
    if arr[0].y != 8: print u"failed"
    if arr[1].x != 5: print u"failed"
    if arr[1].y != 10: print u"failed"

cdef struct NestedStruct:
    DoubleInt a
    DoubleInt b

cdef struct BadDoubleInt:
    float x
    int y

cdef struct BadNestedStruct:
    DoubleInt a
    BadDoubleInt b

def test_nested_dtypes(obj):
    cdef object[NestedStruct] arr = obj
    arr[1].a.x = 1
    arr[1].a.y = 2
    arr[1].b.x = arr[0].a.y + 1
    arr[1].b.y = 4
    arr[2] = arr[1]
    return repr(arr).replace('<', '!').replace('>', '!')

def test_bad_nested_dtypes():
    cdef object[BadNestedStruct] arr

def test_good_cast():
    # Check that a signed int can round-trip through casted unsigned int access
    cdef np.ndarray[unsigned int, cast=True] arr = np.array([-100], dtype='i')
    cdef unsigned int data = arr[0]
    return -100 == data

def test_bad_cast():
    # This should raise an exception
    cdef np.ndarray[int, cast=True] arr = np.array([1], dtype='b')

cdef packed struct PackedStruct:
    char a
    int b

cdef struct UnpackedStruct:
    char a
    int b

cdef struct PartiallyPackedStruct:
    char a
    int b
    PackedStruct sub
    int c

cdef packed struct PartiallyPackedStruct2:
    char a
    int b
    char c
    UnpackedStruct sub

def test_packed_align(np.ndarray[PackedStruct] arr):
    arr[0].a = 22
    arr[0].b = 23
    return list(arr)

def test_unpacked_align(np.ndarray[UnpackedStruct] arr):
    arr[0].a = 22
    arr[0].b = 23
    # return repr(arr).replace('<', '!').replace('>', '!')
    return list(arr)

def test_partially_packed_align(np.ndarray[PartiallyPackedStruct] arr):
    arr[0].a = 22
    arr[0].b = 23
    arr[0].sub.a = 24
    arr[0].sub.b = 25
    arr[0].c = 26
    return repr(arr).replace('<', '!').replace('>', '!')

def test_partially_packed_align_2(np.ndarray[PartiallyPackedStruct2] arr):
    arr[0].a = 22
    arr[0].b = 23
    arr[0].c = 24
    arr[0].sub.a = 27
    arr[0].sub.b = 28
    return repr(arr).replace('<', '!').replace('>', '!')

def test_complextypes():
    cdef np.complex64_t x64 = 1, y64 = 1j
    cdef np.complex128_t x128 = 1, y128 = 1j
    x64 = x64 + y64
    print "%.0f,%.0f" % (x64.real, x64.imag)
    x128 = x128 + y128
    print "%.0f,%.0f" % (x128.real, x128.imag)
    print "%d,%d" % (sizeof(x64), sizeof(x128))


cdef struct Point:
    np.float64_t x, y

def test_point_record():
    cdef np.ndarray[Point] test
    Point_dtype = np.dtype([('x', np.float64), ('y', np.float64)])
    test = np.zeros(3, Point_dtype)
    cdef int i
    for i in range(3):
        test[i].x = i
        test[i].y = -i
    print re.sub(
        r'\.0+\b', '.', repr(test).replace('<', '!').replace('>', '!')
                                  .replace('( ', '(').replace(',  ', ', '))

# Test fused np.ndarray dtypes and runtime dispatch
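# Runtime dispatch: calling one of the fused functions below with an untyped
# ndarray selects the specialization from the array's dtype, while indexing
# the function (e.g. test_fused_ndarray_floating_dtype[cython.double]) picks
# a specialization explicitly, as exercised in the doctests.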
@testcase
def test_fused_ndarray_floating_dtype(np.ndarray[cython.floating, ndim=1] a):
    """
    >>> import cython
    >>> sorted(test_fused_ndarray_floating_dtype.__signatures__)
    ['double', 'float']


    >>> test_fused_ndarray_floating_dtype[cython.double](np.arange(10, dtype=np.float64))
    ndarray[double,ndim=1] ndarray[double,ndim=1] 5.0 6.0
    >>> test_fused_ndarray_floating_dtype(np.arange(10, dtype=np.float64))
    ndarray[double,ndim=1] ndarray[double,ndim=1] 5.0 6.0

    >>> test_fused_ndarray_floating_dtype[cython.float](np.arange(10, dtype=np.float32))
    ndarray[float,ndim=1] ndarray[float,ndim=1] 5.0 6.0
    >>> test_fused_ndarray_floating_dtype(np.arange(10, dtype=np.float32))
    ndarray[float,ndim=1] ndarray[float,ndim=1] 5.0 6.0
    """
    cdef np.ndarray[cython.floating, ndim=1] b = a
    print cython.typeof(a), cython.typeof(b), a[5], b[6]


double_array = np.linspace(0, 1, 100)
int32_array = np.arange(100, dtype=np.int32)

cdef fused fused_external:
    np.int32_t
    np.int64_t
    np.float32_t
    np.float64_t

@testcase
def test_fused_external(np.ndarray[fused_external, ndim=1] a):
    """
    >>> import cython
    >>> sorted(test_fused_external.__signatures__)
    ['float32_t', 'float64_t', 'int32_t', 'int64_t']

    >>> test_fused_external["float64_t"](double_array)
    float64

    >>> test_fused_external["int32_t"](int32_array)
    int32

    >>> test_fused_external(np.arange(100, dtype=np.int64))
    int64
    """
    print a.dtype

cdef fused fused_buffers:
    np.ndarray[np.int32_t, ndim=1]
    np.int64_t[::1]

@testcase
def test_fused_buffers(fused_buffers arg):
    """
    >>> sorted(test_fused_buffers.__signatures__)
    ['int64_t[::1]', 'ndarray[int32_t,ndim=1]']
    """

cpdef _fused_cpdef_buffers(np.ndarray[fused_external] a):
    print a.dtype

@testcase
def test_fused_cpdef_buffers():
    """
    >>> test_fused_cpdef_buffers()
    int32
    int32
    """
    _fused_cpdef_buffers[np.int32_t](int32_array)

    cdef np.ndarray[np.int32_t] typed_array = int32_array
    _fused_cpdef_buffers(typed_array)

@testcase
def test_fused_ndarray_integral_dtype(np.ndarray[cython.integral, ndim=1] a):
    """
    >>> import cython
    >>> sorted(test_fused_ndarray_integral_dtype.__signatures__)
    ['int', 'long', 'short']

    >>> test_fused_ndarray_integral_dtype[cython.int](np.arange(10, dtype=np.dtype('i')))
    5 6
    >>> test_fused_ndarray_integral_dtype(np.arange(10, dtype=np.dtype('i')))
    5 6

    >>> test_fused_ndarray_integral_dtype[cython.long](np.arange(10, dtype='l'))
    5 6
    >>> test_fused_ndarray_integral_dtype(np.arange(10, dtype='l'))
    5 6
    """
    cdef np.ndarray[cython.integral, ndim=1] b = a
    # Don't print the types, the platform specific sizes can make the dispatcher
    # select different integer types with equal sizeof()
    print a[5], b[6]

cdef fused fused_dtype:
    float complex
    double complex
    object

@testcase
def test_fused_ndarray_other_dtypes(np.ndarray[fused_dtype, ndim=1] a):
    """
    >>> import cython
    >>> sorted(test_fused_ndarray_other_dtypes.__signatures__)
    ['double complex', 'float complex', 'object']
    >>> test_fused_ndarray_other_dtypes(np.arange(10, dtype=np.complex64))
    ndarray[float complex,ndim=1] ndarray[float complex,ndim=1] (5+0j) (6+0j)
    >>> test_fused_ndarray_other_dtypes(np.arange(10, dtype=np.complex128))
    ndarray[double complex,ndim=1] ndarray[double complex,ndim=1] (5+0j) (6+0j)
    >>> test_fused_ndarray_other_dtypes(np.arange(10, dtype=np.object))
    ndarray[Python object,ndim=1] ndarray[Python object,ndim=1] 5 6
    """
    cdef np.ndarray[fused_dtype, ndim=1] b = a
    print cython.typeof(a), cython.typeof(b), a[5], b[6]


# Test fusing the array types together and runtime dispatch
cdef struct Foo:
    int a
    float b

cdef fused fused_FooArray:
    np.ndarray[Foo, ndim=1]

cdef fused fused_ndarray:
    np.ndarray[float, ndim=1]
    np.ndarray[double, ndim=1]
    np.ndarray[Foo, ndim=1]

def get_Foo_array():
    cdef Foo[:] result = <Foo[:10]> malloc(sizeof(Foo) * 10)
    result[5].b = 9.0
    return np.asarray(result)

@testcase_have_buffer_interface
def test_fused_ndarray(fused_ndarray a):
    """
    >>> import cython
    >>> sorted(test_fused_ndarray.__signatures__)
    ['ndarray[Foo,ndim=1]', 'ndarray[double,ndim=1]', 'ndarray[float,ndim=1]']

    >>> test_fused_ndarray(get_Foo_array())
    ndarray[Foo,ndim=1] ndarray[Foo,ndim=1]
    9.0
    >>> test_fused_ndarray(np.arange(10, dtype=np.float64))
    ndarray[double,ndim=1] ndarray[double,ndim=1]
    5.0
    >>> test_fused_ndarray(np.arange(10, dtype=np.float32))
    ndarray[float,ndim=1] ndarray[float,ndim=1]
    5.0
    """
    cdef fused_ndarray b = a
    print cython.typeof(a), cython.typeof(b)

    if fused_ndarray in fused_FooArray:
        print b[5].b
    else:
        print b[5]

cpdef test_fused_cpdef_ndarray(fused_ndarray a):
    """
    >>> import cython
    >>> sorted(test_fused_cpdef_ndarray.__signatures__)
    ['ndarray[Foo,ndim=1]', 'ndarray[double,ndim=1]', 'ndarray[float,ndim=1]']

    >>> test_fused_cpdef_ndarray(get_Foo_array())
    ndarray[Foo,ndim=1] ndarray[Foo,ndim=1]
    9.0
    >>> test_fused_cpdef_ndarray(np.arange(10, dtype=np.float64))
    ndarray[double,ndim=1] ndarray[double,ndim=1]
    5.0
    >>> test_fused_cpdef_ndarray(np.arange(10, dtype=np.float32))
    ndarray[float,ndim=1] ndarray[float,ndim=1]
    5.0
    """
    cdef fused_ndarray b = a
    print cython.typeof(a), cython.typeof(b)

    if fused_ndarray in fused_FooArray:
        print b[5].b
    else:
        print b[5]

testcase_have_buffer_interface(test_fused_cpdef_ndarray)

@testcase_have_buffer_interface
def test_fused_cpdef_ndarray_cdef_call():
    """
    >>> test_fused_cpdef_ndarray_cdef_call()
    ndarray[Foo,ndim=1] ndarray[Foo,ndim=1]
    9.0
    """
    cdef np.ndarray[Foo, ndim=1] foo_array = get_Foo_array()
    test_fused_cpdef_ndarray(foo_array)

cdef fused int_type:
    np.int32_t
    np.int64_t

float64_array = np.arange(10, dtype=np.float64)
float32_array = np.arange(10, dtype=np.float32)
int32_array = np.arange(10, dtype=np.int32)
int64_array = np.arange(10, dtype=np.int64)

@testcase
def test_dispatch_non_clashing_declarations_repeating_types(np.ndarray[cython.floating] a1,
                                                            np.ndarray[int_type] a2,
                                                            np.ndarray[cython.floating] a3,
                                                            np.ndarray[int_type] a4):
    """
    >>> test_dispatch_non_clashing_declarations_repeating_types(float64_array, int32_array, float64_array, int32_array)
    1.0 2 3.0 4
    >>> test_dispatch_non_clashing_declarations_repeating_types(float64_array, int64_array, float64_array, int64_array)
    1.0 2 3.0 4
    >>> test_dispatch_non_clashing_declarations_repeating_types(float64_array, int32_array, float64_array, int64_array)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: Buffer dtype mismatch, expected 'int32_t'...
    >>> test_dispatch_non_clashing_declarations_repeating_types(float64_array, int64_array, float64_array, int32_array)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: Buffer dtype mismatch, expected 'int64_t'...
    """
    print a1[1], a2[2], a3[3], a4[4]

ctypedef np.int32_t typedeffed_type

cdef fused typedeffed_fused_type:
    typedeffed_type
    int
    long

@testcase
def test_dispatch_typedef(np.ndarray[typedeffed_fused_type] a):
    """
    >>> test_dispatch_typedef(int32_array)
    5
    """
    print a[5]


cdef extern from "types.h":
    ctypedef char actually_long_t

cdef fused confusing_fused_typedef:
    actually_long_t
    int
    unsigned long
    double complex
    unsigned char
    signed char

def test_dispatch_external_typedef(np.ndarray[confusing_fused_typedef] a):
    """
    >>> test_dispatch_external_typedef(np.arange(-5, 5, dtype=np.long))
    -2
    """
    print a[3]

# test fused memoryview slices
cdef fused memslice_fused_dtype:
    float
    double
    int
    long
    float complex
    double complex
    object

@testcase
def test_fused_memslice_other_dtypes(memslice_fused_dtype[:] a):
    """
    >>> import cython
    >>> sorted(test_fused_memslice_other_dtypes.__signatures__)
    ['double', 'double complex', 'float', 'float complex', 'int', 'long', 'object']
    >>> test_fused_memslice_other_dtypes(np.arange(10, dtype=np.complex64))
    float complex[:] float complex[:] (5+0j) (6+0j)
    >>> test_fused_memslice_other_dtypes(np.arange(10, dtype=np.complex128))
    double complex[:] double complex[:] (5+0j) (6+0j)
    >>> test_fused_memslice_other_dtypes(np.arange(10, dtype=np.float32))
    float[:] float[:] 5.0 6.0
    >>> test_fused_memslice_other_dtypes(np.arange(10, dtype=np.dtype('i')))
    int[:] int[:] 5 6
    >>> test_fused_memslice_other_dtypes(np.arange(10, dtype=np.object))
    object[:] object[:] 5 6
    """
    cdef memslice_fused_dtype[:] b = a
    print cython.typeof(a), cython.typeof(b), a[5], b[6]

cdef fused memslice_fused:
    float[:]
    double[:]
    int[:]
    long[:]
    float complex[:]
    double complex[:]
    object[:]

@testcase
def test_fused_memslice(memslice_fused a):
    """
    >>> import cython
    >>> sorted(test_fused_memslice.__signatures__)
    ['double complex[:]', 'double[:]', 'float complex[:]', 'float[:]', 'int[:]', 'long[:]', 'object[:]']
    >>> test_fused_memslice(np.arange(10, dtype=np.complex64))
    float complex[:] float complex[:] (5+0j) (6+0j)
    >>> test_fused_memslice(np.arange(10, dtype=np.complex128))
    double complex[:] double complex[:] (5+0j) (6+0j)
    >>> test_fused_memslice(np.arange(10, dtype=np.float32))
    float[:] float[:] 5.0 6.0
    >>> test_fused_memslice(np.arange(10, dtype=np.dtype('i')))
    int[:] int[:] 5 6
    >>> test_fused_memslice(np.arange(10, dtype=np.object))
    object[:] object[:] 5 6
    """
    cdef memslice_fused b = a
    print cython.typeof(a), cython.typeof(b), a[5], b[6]

@testcase
def test_dispatch_memoryview_object():
    """
    >>> test_dispatch_memoryview_object()
    int[:] int[:] 5 6
    """
    cdef int[:] m = np.arange(10, dtype=np.dtype('i'))
    cdef int[:] m2 = m
    cdef int[:] m3 = <object> m
    test_fused_memslice(m3)

cdef fused ndim_t:
    double[:]
    double[:, :]
    double[:, :, :]

@testcase
def test_dispatch_ndim(ndim_t array):
    """
    >>> test_dispatch_ndim(np.empty(5, dtype=np.double))
    double[:] 1
    >>> test_dispatch_ndim(np.empty((5, 5), dtype=np.double))
    double[:, :] 2
    >>> test_dispatch_ndim(np.empty((5, 5, 5), dtype=np.double))
    double[:, :, :] 3

    Test indexing using Cython.Shadow
    >>> import cython
    >>> test_dispatch_ndim[cython.double[:]](np.empty(5, dtype=np.double))
    double[:] 1
    >>> test_dispatch_ndim[cython.double[:, :]](np.empty((5, 5), dtype=np.double))
    double[:, :] 2
    """
    print cython.typeof(array), np.asarray(array).ndim

include "numpy_common.pxi"
Cython-0.26.1/tests/run/common_utility_types.srctree0000664000175000017500000000117512542002467023474 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import runner"

######## setup.py ########

from Cython.Build.Dependencies import cythonize

from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## a.pyx ########

# cython: binding=True

def funcA():
    return

######## b.pyx ########

# cython: binding=True

def funcB():
    return

######## runner.py ########

print("importing...")
import a, b
print(type(a.funcA))

assert type(a.funcA).__name__ == 'cython_function_or_method'
assert type(a.funcA) is type(b.funcB)

assert a.funcA.func_globals is a.__dict__
assert b.funcB.func_globals is b.__dict__
Cython-0.26.1/tests/run/r_extcomplex2.pyx0000664000175000017500000000065512542002467021143 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> c = eggs()
    >>> c
    (17+42j)
    >>> spam(c)
    Real: 17.0
    Imag: 42.0
"""

cdef extern from "complexobject.h":

    struct Py_complex:
        double real
        double imag

    ctypedef class __builtin__.complex [object PyComplexObject]:
        cdef Py_complex cval

def spam(complex c):
    print u"Real:", c.cval.real
    print u"Imag:", c.cval.imag

def eggs():
    return complex(17, 42)
Cython-0.26.1/tests/run/addressof.pyx0000664000175000017500000000020112542002467020305 0ustar  stefanstefan00000000000000def f(int a):
    """
    >>> f(5)
    5
    """
    cdef int i,j
    cdef int *p
    i = a
    p = &i
    j = p[0]
    return j
Cython-0.26.1/tests/run/slice2b.pyx0000664000175000017500000000040612542002467017665 0ustar  stefanstefan00000000000000cdef extern from *:
    ctypedef class __builtin__.list [ object PyListObject ]:
        pass

def slice_of_typed_value():

    """
    >>> slice_of_typed_value()
    [1, 2, 3]
    """
    cdef object a = []
    cdef list L = [1, 2, 3]
    a[:] = L
    return a
Cython-0.26.1/tests/run/numpy_cimport.pyx0000664000175000017500000000016412542002467021250 0ustar  stefanstefan00000000000000# tag: numpy

"""
>>> import sys
>>> 'numpy' in sys.modules
True
"""
cimport numpy as np
include "numpy_common.pxi"
Cython-0.26.1/tests/run/py35_pep492_interop.pyx0000664000175000017500000000401413023021033021765 0ustar  stefanstefan00000000000000# cython: language_level=3, binding=True
# mode: run
# tag: pep492, asyncfor, await


def run_async(coro):
    #assert coro.__class__ is types.GeneratorType
    assert coro.__class__.__name__ in ('coroutine', 'GeneratorWrapper'), coro.__class__.__name__

    buffer = []
    result = None
    while True:
        try:
            buffer.append(coro.send(None))
        except StopIteration as ex:
            result = ex.args[0] if ex.args else None
            break
    return buffer, result


def run_async__await__(coro):
    assert coro.__class__.__name__ in ('coroutine', 'GeneratorWrapper'), coro.__class__.__name__
    aw = coro.__await__()
    buffer = []
    result = None
    i = 0
    while True:
        try:
            if i % 2:
                buffer.append(next(aw))
            else:
                buffer.append(aw.send(None))
            i += 1
        except StopIteration as ex:
            result = ex.args[0] if ex.args else None
            break
    return buffer, result


async def await_pyobject(awaitable):
    """
    >>> async def simple():
    ...     return 10

    >>> buffer, result = run_async(await_pyobject(simple()))
    >>> result
    10

    >>> async def awaiting(awaitable):
    ...     return await awaitable

    >>> buffer, result = run_async(await_pyobject(awaiting(simple())))
    >>> result
    10
    """
    return await awaitable


def await_cyobject():
    """
    >>> async def run_await(awaitable):
    ...     return await awaitable

    >>> simple, awaiting = await_cyobject()

    >>> buffer, result = run_async(run_await(simple()))
    >>> result
    10

    >>> buffer, result = run_async(run_await(awaiting(simple())))
    >>> result
    10

    >>> buffer, result = run_async(run_await(awaiting(awaiting(simple()))))
    >>> result
    10

    >>> buffer, result = run_async(run_await(awaiting(run_await(awaiting(simple())))))
    >>> result
    10
    """

    async def simple():
        return 10

    async def awaiting(awaitable):
        return await awaitable

    return simple, awaiting
Cython-0.26.1/tests/run/decorators_py_T593.py0000664000175000017500000000264712542002467021564 0ustar  stefanstefan00000000000000# mode: run
# ticket: 593
# tag: property, decorator

"""
>>> am_i_buggy
False
"""

def testme(func):
    try:
        am_i_buggy
        return True
    except NameError:
        return False

@testme
def am_i_buggy():
    pass

def called_deco(a,b,c):
    a.append( (1,b,c) )
    def count(f):
        a.append( (2,b,c) )
        return f
    return count

L = []

@called_deco(L, 5, c=6)
@called_deco(L, c=3, b=4)
@called_deco(L, 1, 2)
def wrapped_func(x):
    """
    >>> L
    [(1, 5, 6), (1, 4, 3), (1, 1, 2), (2, 1, 2), (2, 4, 3), (2, 5, 6)]
    >>> wrapped_func(99)
    99
    >>> L
    [(1, 5, 6), (1, 4, 3), (1, 1, 2), (2, 1, 2), (2, 4, 3), (2, 5, 6)]
    """
    return x


def class_in_closure(x):
    """
    >>> C1, c0 = class_in_closure(5)
    >>> C1().smeth1()
    (5, ())
    >>> C1.smeth1(1,2)
    (5, (1, 2))
    >>> C1.smeth1()
    (5, ())
    >>> c0.smeth0()
    1
    >>> c0.__class__.smeth0()
    1
    """
    class ClosureClass1(object):
        @staticmethod
        def smeth1(*args):
            return x, args

    class ClosureClass0(object):
        @staticmethod
        def smeth0():
            return 1

    return ClosureClass1, ClosureClass0()

def class_not_in_closure():
    """
    >>> c = class_not_in_closure()
    >>> c.smeth0()
    1
    >>> c.__class__.smeth0()
    1
    """
    class ClosureClass0(object):
        @staticmethod
        def smeth0():
            return 1

    return ClosureClass0()
Cython-0.26.1/tests/run/if.pyx0000664000175000017500000000151212542002467016737 0ustar  stefanstefan00000000000000def f(a, b):
    """
    >>> f(0,0)
    0
    >>> f(1,2)
    2
    >>> f(1,-1)
    1
    """
    x = 0
    if a:
        x = 1
    if a+b:
        x = 2
    return x

def g(a, b):
    """
    >>> g(1,2)
    1
    >>> g(0,2)
    2
    >>> g(0,0)
    0
    """
    x = 0
    if a:
        x = 1
    elif b:
        x = 2
    return x

def h(a, b):
    """
    >>> h(1,2)
    1
    >>> h(0,2)
    2
    >>> h(0,0)
    3
    """
    x = 0
    if a:
        x = 1
    elif b:
        x = 2
    else:
        x = 3
    return x

try:
    import __builtin__  as builtins
except ImportError:
    import builtins

def i(a, b):
    """
    >>> i(1,2)
    1
    >>> i(2,2)
    2
    >>> i(2,1)
    0
    """
    x = 0
    if builtins.str(a).upper() == u"1":
        x = 1
    if builtins.str(a+b).lower() not in (u"1", u"3"):
        x = 2
    return x
Cython-0.26.1/tests/run/absolute_import.srctree0000664000175000017500000000263412542002467022406 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import pkg.my_test_module"
PYTHON -c "import pkg.b; pkg.b.test_reimport()"
PYTHON -c "import pkg.c; pkg.c.test_reimport()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
    ext_modules = cythonize("**/*.pyx"),
    )

######## pkg/__init__.py ########

######## pkg/a_module.pyx ########

######## pkg/my_test_module.pyx ########

import sys
from . import a_module
assert a_module in sys.modules.values(), list(sys.modules)
assert sys.modules['pkg.a_module'] is a_module, list(sys.modules)

######## pkg/b.pyx ########

from __future__ import absolute_import

import sys
try:
    import my_test_module
except ImportError:
    pass
else:
    assert "expected ImportError on absolute import"

import pkg.my_test_module

assert pkg.my_test_module in sys.modules.values(), list(sys.modules)
assert sys.modules['pkg.my_test_module'] is pkg.my_test_module, list(sys.modules)

def test_reimport():
    import pkg.my_test_module as mod
    assert pkg.my_test_module is mod

######## pkg/c.pyx ########

from __future__ import absolute_import

import sys
from pkg import my_test_module

assert my_test_module in sys.modules.values(), list(sys.modules)
assert sys.modules['pkg.my_test_module'] is my_test_module, list(sys.modules)

def test_reimport():
    from pkg import my_test_module as mod
    assert my_test_module is mod
Cython-0.26.1/tests/run/complex_numbers_c89_T398_long_double.pyx0000664000175000017500000000016213143605603025325 0ustar  stefanstefan00000000000000# ticket: 398

cdef extern from "complex_numbers_c89_T398.h": pass
include "complex_numbers_T305_long_double.pyx"
Cython-0.26.1/tests/run/coercearraytoptr.pyx0000664000175000017500000000067112542002467021736 0ustar  stefanstefan00000000000000cdef char* cstring = "abcdefg"

cdef void spam(char *target):
    cdef char* s = cstring
    while s[0]:
        target[0] = s[0]
        s += 1
        target += 1
    target[0] = c'\0'

cdef struct Grail:
    char silly[42]

def eggs():
    """
    >>> print(str(eggs()).replace("b'", "'"))
    ('abcdefg', 'abcdefg')
    """
    cdef char[42] silly
    cdef Grail grail
    spam(silly)
    spam(grail.silly)
    return silly, grail.silly
Cython-0.26.1/tests/run/cascadedassignment.pyx0000664000175000017500000000257612542002467022174 0ustar  stefanstefan00000000000000import cython

@cython.test_fail_if_path_exists(
    '//CascadedAssignmentNode//CoerceFromPyTypeNode',
    '//CascadedAssignmentNode//CoerceToPyTypeNode',
)
@cython.test_assert_path_exists('//CascadedAssignmentNode')
def test_cascaded_assignment_simple():
    """
    >>> test_cascaded_assignment_simple()
    5
    """
    a = b = c = 5
    return a

@cython.test_fail_if_path_exists(
    '//CascadedAssignmentNode//CoerceFromPyTypeNode',
    '//CascadedAssignmentNode//CoerceToPyTypeNode',
)
@cython.test_assert_path_exists('//CascadedAssignmentNode')
def test_cascaded_assignment_typed():
    """
    >>> test_cascaded_assignment_typed()
    int Python object double
    (5, 5, 5.0)
    """
    cdef int a
    cdef object b
    cdef double c

    a = b = c = 5

    print cython.typeof(a), cython.typeof(b), cython.typeof(c)
    return a, b, c

def test_cascaded_assignment_builtin_expr():
    """
    This test is useful because the rhs expr node previously got replaced,
    which resulted in CloneNode generating None in the C source.
    (A short expansion sketch of cascaded assignment follows this function.)

    >>> test_cascaded_assignment_builtin_expr()
    (10.0, 10.0, 10.0)
    """
    a = b = c = float(10)
    return a, b, c
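
# Illustrative sketch only (not one of the test cases): the semantics the
# tests above rely on.  In `a = b = c = expr()` the right-hand side is
# evaluated exactly once (see test_cascaded_assignment_evaluate_expr below)
# and its value is then assigned to each target from left to right, roughly
# like this hypothetical expansion.
def cascaded_assignment_expansion_sketch():
    tmp = float(10)   # RHS evaluated once
    a = tmp           # targets assigned left to right
    b = tmp
    c = tmp
    return a, b, c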

def expr():
    print "expr called"
    return 10

def test_cascaded_assignment_evaluate_expr():
    """
    >>> test_cascaded_assignment_evaluate_expr()
    expr called
    (10.0, 10.0, 10.0)
    """
    a = b = c = float(expr())
    return a, b, c
Cython-0.26.1/tests/run/numpy_parallel.pyx0000664000175000017500000000116012542002467021364 0ustar  stefanstefan00000000000000# tag: numpy
# tag: openmp

cimport cython
from cython.parallel import prange
cimport numpy as np
include "numpy_common.pxi"


@cython.boundscheck(False)
def test_parallel_numpy_arrays():
    """
    >>> test_parallel_numpy_arrays()
    -5
    -4
    -3
    -2
    -1
    0
    1
    2
    3
    4
    """
    cdef Py_ssize_t i
    cdef np.ndarray[np.int_t] x

    try:
        import numpy
    except ImportError:
        for i in range(-5, 5):
            print i
        return

    x = numpy.zeros(10, dtype=numpy.int)

    for i in prange(x.shape[0], nogil=True):
        x[i] = i - 5

    for i in x:
        print i

Cython-0.26.1/tests/run/cpp_operators.pyx0000664000175000017500000001322013023021033021201 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

from cython cimport typeof

cimport cython.operator
from cython.operator cimport typeid, dereference as deref

from libc.string cimport const_char
from libcpp cimport bool

cdef out(s, result_type=None):
    print '%s [%s]' % (s.decode('ascii'), result_type)

cdef extern from "cpp_operators_helper.h":
    cdef cppclass TestOps:

        const_char* operator+()
        const_char* operator-()
        const_char* operator*()
        const_char* operator~()
        const_char* operator!()

        const_char* operator++()
        const_char* operator--()
        const_char* operator++(int)
        const_char* operator--(int)

        const_char* operator+(int)
        const_char* operator-(int)
        const_char* operator*(int)
        const_char* operator/(int)
        const_char* operator%(int)

        const_char* operator|(int)
        const_char* operator&(int)
        const_char* operator^(int)
        const_char* operator,(int)

        const_char* operator<<(int)
        const_char* operator>>(int)

        const_char* operator==(int)
        const_char* operator!=(int)
        const_char* operator>=(int)
        const_char* operator<=(int)
        const_char* operator>(int)
        const_char* operator<(int)

        const_char* operator[](int)
        const_char* operator()(int)

    cdef cppclass TruthClass:
        TruthClass()
        TruthClass(bool)
        bool operator bool()
        bool value

cdef cppclass TruthSubClass(TruthClass):
    pass

def test_unops():
    """
    >>> test_unops()
    unary + [const_char *]
    unary - [const_char *]
    unary ~ [const_char *]
    unary * [const_char *]
    unary ! [const_char *]
    """
    cdef TestOps* t = new TestOps()
    out(+t[0], typeof(+t[0]))
    out(-t[0], typeof(-t[0]))
    out(~t[0], typeof(~t[0]))
    x = deref(t[0])
    out(x, typeof(x))
    out(not t[0], typeof(not t[0]))
    del t

def test_incdec():
    """
    >>> test_incdec()
    unary ++ [const_char *]
    unary -- [const_char *]
    post ++ [const_char *]
    post -- [const_char *]
    """
    cdef TestOps* t = new TestOps()
    a = cython.operator.preincrement(t[0])
    out(a, typeof(a))
    b = cython.operator.predecrement(t[0])
    out(b, typeof(b))
    c = cython.operator.postincrement(t[0])
    out(c, typeof(c))
    d = cython.operator.postdecrement(t[0])
    out(d, typeof(d))
    del t

def test_binop():
    """
    >>> test_binop()
    binary + [const_char *]
    binary - [const_char *]
    binary * [const_char *]
    binary / [const_char *]
    binary % [const_char *]
    binary & [const_char *]
    binary | [const_char *]
    binary ^ [const_char *]
    binary << [const_char *]
    binary >> [const_char *]
    binary COMMA [const_char *]
    """
    cdef TestOps* t = new TestOps()
    out(t[0] + 1, typeof(t[0] + 1))
    out(t[0] - 1, typeof(t[0] - 1))
    out(t[0] * 1, typeof(t[0] * 1))
    out(t[0] / 1, typeof(t[0] / 1))
    out(t[0] % 1, typeof(t[0] % 1))

    out(t[0] & 1, typeof(t[0] & 1))
    out(t[0] | 1, typeof(t[0] | 1))
    out(t[0] ^ 1, typeof(t[0] ^ 1))

    out(t[0] << 1, typeof(t[0] << 1))
    out(t[0] >> 1, typeof(t[0] >> 1))

    x = cython.operator.comma(t[0], 1)
    out(x, typeof(x))
    del t

def test_cmp():
    """
    >>> test_cmp()
    binary == [const_char *]
    binary != [const_char *]
    binary >= [const_char *]
    binary > [const_char *]
    binary <= [const_char *]
    binary < [const_char *]
    """
    cdef TestOps* t = new TestOps()
    out(t[0] == 1, typeof(t[0] == 1))
    out(t[0] != 1, typeof(t[0] != 1))
    out(t[0] >= 1, typeof(t[0] >= 1))
    out(t[0] > 1, typeof(t[0] > 1))
    out(t[0] <= 1, typeof(t[0] <= 1))
    out(t[0] < 1, typeof(t[0] < 1))
    del t

def test_index_call():
    """
    >>> test_index_call()
    binary [] [const_char *]
    binary () [const_char *]
    """
    cdef TestOps* t = new TestOps()
    out(t[0][100], typeof(t[0][100]))
    out(t[0](100), typeof(t[0](100)))
    del t

def test_bool_op():
    """
    >>> test_bool_op()
    """
    cdef TruthClass yes = TruthClass(True)
    cdef TruthClass no = TruthClass(False)
    if yes:
        pass
    else:
        assert False
    if no:
        assert False

def test_bool_cond():
    """
    >>> test_bool_cond()
    """
    assert (TruthClass(False) or TruthClass(False)).value == False
    assert (TruthClass(False) or TruthClass(True)).value == True
    assert (TruthClass(True) or TruthClass(False)).value == True
    assert (TruthClass(True) or TruthClass(True)).value == True

    assert (TruthClass(False) and TruthClass(False)).value == False
    assert (TruthClass(False) and TruthClass(True)).value == False
    assert (TruthClass(True) and TruthClass(False)).value == False
    assert (TruthClass(True) and TruthClass(True)).value == True


ctypedef int* int_ptr

def test_typeid_op():
    """
    >>> test_typeid_op()
    """
    cdef TruthClass* test_1 = new TruthClass()
    cdef TruthSubClass* test_2 = new TruthSubClass()
    cdef TruthClass* test_3 = <TruthClass*> test_2
    cdef TruthClass* test_4 = <TruthClass*> 0

    assert typeid(TruthClass).name()
    assert typeid(test_1).name()
    assert typeid(TruthClass) == typeid(deref(test_1))

    assert typeid(TruthSubClass).name()
    assert typeid(test_2).name()
    assert typeid(TruthSubClass) == typeid(deref(test_2))
    assert typeid(TruthSubClass) == typeid(deref(test_3))
    assert typeid(TruthClass) != typeid(deref(test_3))

    assert typeid(TruthClass).name()
    assert typeid(test_3).name()
    assert typeid(TruthSubClass).name()
    assert typeid(deref(test_2)).name()
    assert typeid(int_ptr).name()

    try:
        typeid(deref(test_4))
        assert False
    except TypeError:
        assert True

    del test_1, test_2
Cython-0.26.1/tests/run/bytes_char_coercion.pyx0000664000175000017500000001120212542002467022342 0ustar  stefanstefan00000000000000
cimport cython

def coerce_char_default(char c):
    """
    Default char -> int coercion

    >>> coerce_char_default(ord('A')) == ord('A')
    True
    """
    return c


def coerce_uchar_default(unsigned char c):
    """
    Default char -> int coercion

    >>> coerce_uchar_default(ord('A')) == ord('A')
    True
    """
    return c


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_char_bytes_cast(char c):
    """
    Explicit char -> bytes coercion

    >>> coerce_char_bytes_cast(ord('A')) == 'A'.encode('ASCII')
    True
    """
    return c


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_uchar_bytes_cast(unsigned char c):
    """
    Explicit uchar -> bytes coercion

    >>> coerce_uchar_bytes_cast(ord('A')) == 'A'.encode('ASCII')
    True
    >>> b = coerce_uchar_bytes_cast(ord('\\xff'))
    >>> b == '\\xff' or b == '\\xff'.encode('ISO-8859-1') # Py2 or Py3
    True
    """
    return c


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_int_bytes_cast(int c):
    """
    Explicit int -> bytes coercion

    >>> coerce_int_bytes_cast(ord('A')) == 'A'.encode('ASCII')
    True
    >>> coerce_int_bytes_cast(ord('A') + 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    >>> coerce_int_bytes_cast(ord('A') - 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    """
    return c


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_uint_bytes_cast(unsigned int c):
    """
    Explicit uint -> bytes coercion

    >>> coerce_uint_bytes_cast(ord('A')) == 'A'.encode('ASCII')
    True
    >>> b = coerce_uint_bytes_cast(ord('\\xff'))
    >>> b == '\\xff' or b == '\\xff'.encode('ISO-8859-1') # Py2 or Py3
    True

    >>> coerce_uint_bytes_cast(ord('A') + 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    """
    return c


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_char_bytes_assign(char c):
    """
    Implicit char -> bytes coercion in assignments

    >>> coerce_char_bytes_assign(ord('A')) == 'A'.encode('ASCII')
    True
    """
    cdef bytes s = c
    return s


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_uchar_bytes_assign(unsigned char c):
    """
    Implicit uchar -> bytes coercion in assignments

    >>> coerce_uchar_bytes_assign(ord('A')) == 'A'.encode('ASCII')
    True
    >>> b = coerce_uchar_bytes_assign(ord('\\xff'))
    >>> b == '\\xff' or b == '\\xff'.encode('ISO-8859-1') # Py2 or Py3
    True
    """
    cdef bytes s = c
    return s


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_int_bytes_assign(int c):
    """
    Implicit int -> bytes coercion in assignments

    >>> coerce_int_bytes_assign(ord('A')) == 'A'.encode('ASCII')
    True

    >>> coerce_int_bytes_assign(ord('A') + 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    >>> coerce_int_bytes_assign(ord('A') - 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    """
    cdef bytes s = c
    return s


@cython.test_assert_path_exists("//CoerceIntToBytesNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def coerce_uint_bytes_assign(unsigned int c):
    """
    Implicit uint -> bytes coercion in assignments

    >>> coerce_uint_bytes_assign(ord('A')) == 'A'.encode('ASCII')
    True
    >>> b = coerce_uint_bytes_assign(ord('\\xff'))
    >>> b == '\\xff' or b == '\\xff'.encode('ISO-8859-1') # Py2 or Py3
    True

    >>> coerce_uint_bytes_assign(ord('A') + 0x100)
    Traceback (most recent call last):
    OverflowError: value too large to pack into a byte
    """
    cdef bytes s = c
    return s


def inplace_ops_use_arithmetic():
    """
    >>> print(inplace_ops_use_arithmetic().decode('ascii'))
    bc
    """
    cdef char* s = 'abc'
    cdef object x = 1
    s += 1
    s += 2*x
    s -= 1
    s -= x
    return s


@cython.test_fail_if_path_exists('//CoerceFromPyTypeNode')
def indexing_to_char(bytes s):
    """
    >>> ord('b')
    98
    >>> indexing_to_char('abc'.encode('ascii'))
    98
    """
    cdef unsigned char c = s[1]
    return c
Cython-0.26.1/tests/run/intern_T431.pyx0000664000175000017500000000062412542002467020356 0ustar  stefanstefan00000000000000# ticket: 431

__doc__ = u"""
>>> s == s_interned
True
>>> s == s_interned_dynamic
True
>>> s == 'abc' == s_interned == s_interned_dynamic
True
"""

import sys
if sys.version_info[0] < 3:
    __doc__ += u"""
>>> intern(s) is s_interned
True
>>> intern('abc') is s_interned
True
>>> intern('abc') is s_interned_dynamic
True
"""

s = 'abc'

s_interned = intern(s)

s_interned_dynamic = intern('a'+'b'+'c')
Cython-0.26.1/tests/run/cpp_stl_string_ascii_auto_encoding.pyx0000664000175000017500000000650213151203171025435 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror
# cython: c_string_encoding=ascii, c_string_type=unicode

from libcpp.string cimport string
from libcpp.vector cimport vector

b_asdf = b'asdf'
s_asdf = 'asdf'
u_asdf = u'asdf'
u_s = u's'


def test_conversion(py_obj):
    """
    >>> test_conversion(b_asdf) == u_asdf or test_conversion(b_asdf)
    True
    >>> test_conversion(u_asdf) == u_asdf or test_conversion(u_asdf)
    True
    >>> test_conversion(123)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: expected ..., int found
    """
    cdef string s = py_obj
    assert len(py_obj) == s.length(), '%d != %d' % (len(py_obj), s.length())
    return s


def test_empty(py_obj):
    """
    >>> test_empty('')
    True
    >>> test_empty('abc')
    False
    >>> test_empty(u_asdf[:0])
    True
    >>> test_empty(u_asdf)
    False
    """
    cdef string a = py_obj
    return a.empty()


def test_push_back(a):
    """
    >>> test_push_back(b_asdf) == u_asdf + u_s
    True
    >>> test_push_back(u_asdf) == u_asdf + u_s
    True
    """
    cdef string s = a
    s.push_back(ord('s'))
    return s


def test_clear(a):
    """
    >>> test_clear(u_asdf) == u_s[:0]
    True
    >>> test_clear(b_asdf) == u_s[:0]
    True
    """
    cdef string s = a
    s.clear()
    return s


def test_assign(char *a):
    """
    >>> test_assign(b_asdf) == 'ggg'
    True
    """
    cdef string s = string(a)
    s.assign("ggg")
    return s.c_str()


def test_bytes_cast(a):
    """
    >>> b = test_bytes_cast(b'abc')
    >>> isinstance(b, bytes)
    True
    >>> print(b.decode('ascii'))
    abc
    >>> b = test_bytes_cast(b'abc\\xe4\\xfc')
    >>> isinstance(b, bytes)
    True
    >>> len(b)
    5
    >>> print(b[:3].decode('ascii'))
    abc
    >>> print(ord(b[3:4]))
    228
    >>> print(ord(b[4:5]))
    252
    """
    cdef string s = a
    assert s.length() == len(a), "%d != %d" % (s.length(), len(a))
    return s


def test_bytearray_cast(a):
    """
    >>> b = test_bytearray_cast(b'abc')
    >>> isinstance(b, bytearray)
    True
    >>> print(b.decode('ascii'))
    abc
    >>> b = test_bytearray_cast(b'abc\\xe4\\xfc')
    >>> isinstance(b, bytearray)
    True
    >>> len(b)
    5
    >>> print(b[:3].decode('ascii'))
    abc
    >>> print(ord(b[3:4]))
    228
    >>> print(ord(b[4:5]))
    252
    """
    cdef string s = a
    assert s.length() == len(a), "%d != %d" % (s.length(), len(a))
    return s


def test_unicode_cast(a):
    """
    >>> u = test_unicode_cast(b'abc')
    >>> type(u) is type(u_asdf) or type(u)
    True
    >>> print(u)
    abc
    """
    cdef string s = a
    assert s.length() == len(a), "%d != %d" % (s.length(), len(a))
    return s


def test_str_cast(a):
    """
    >>> s = test_str_cast(b'abc')
    >>> type(s) is type(s_asdf) or type(s)
    True
    >>> print(s)
    abc
    """
    cdef string s = a
    assert s.length() == len(a), "%d != %d" % (s.length(), len(a))
    return s


def test_vector_of_strings(*strings):
    """
    >>> results = test_vector_of_strings(b_asdf, u_asdf)
    >>> results == [u_asdf, u_asdf] or results
    True
    >>> type(results[0]) is type(u_asdf) or type(results[0])
    True
    >>> type(results[1]) is type(u_asdf) or type(results[1])
    True
    """
    cdef vector[string] v = strings
    return v
Cython-0.26.1/tests/run/builtin_len.pyx0000664000175000017500000000547412542002467020660 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-

cimport cython

unicode_str = u'ab jd  üöä ôñ ÄÖ'
bytes_str   = b'ab jd  sdflk as sa  sadas asdas fsdf '

_frozenset = frozenset
_set = set

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_unicode(unicode s):
    """
    >>> len(unicode_str)
    16
    >>> len_unicode(unicode_str)
    16
    >>> len_unicode(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_bytes(bytes s):
    """
    >>> len(bytes_str)
    37
    >>> len_bytes(bytes_str)
    37
    >>> len_bytes(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

#@cython.test_assert_path_exists(
#    "//CoerceToPyTypeNode",
#    "//PythonCapiCallNode")
def len_str(str s):
    """
    >>> len('abcdefg')
    7
    >>> len_str('abcdefg')
    7
    >>> len_unicode(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_list(list s):
    """
    >>> l = [1,2,3,4]
    >>> len(l)
    4
    >>> len_list(l)
    4
    >>> len_list(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_tuple(tuple s):
    """
    >>> t = (1,2,3,4)
    >>> len(t)
    4
    >>> len_tuple(t)
    4
    >>> len_tuple(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_dict(dict s):
    """
    >>> d = dict(a=1, b=2, c=3, d=4)
    >>> len(d)
    4
    >>> len_dict(d)
    4
    >>> len_dict(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_set(set s):
    """
    >>> s = _set((1,2,3,4))
    >>> len(s)
    4
    >>> len_set(s)
    4
    >>> len_set(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)

@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def len_frozenset(frozenset s):
    """
    >>> s = _frozenset((1,2,3,4))
    >>> len(s)
    4
    >>> len_frozenset(s)
    4
    >>> len_set(None)
    Traceback (most recent call last):
    TypeError: object of type 'NoneType' has no len()
    """
    return len(s)
Cython-0.26.1/tests/run/unicodemethods.pyx0000664000175000017500000004273112574327400021365 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-

cimport cython

import sys

PY_VERSION = sys.version_info

text = u'ab jd  sdflk as sa  sadas asdas fsdf '
sep = u'  '
format1 = u'abc%sdef'
format2 = u'abc%sdef%sghi'
unicode_sa = u'sa'

multiline_text = u'''\
ab jd
sdflk as sa
sadas asdas fsdf '''

def print_all(l):
    for s in l:
        print(s)


# unicode.split(s, [sep, [maxsplit]])

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def split(unicode s):
    """
    >>> print_all( text.split() )
    ab
    jd
    sdflk
    as
    sa
    sadas
    asdas
    fsdf
    >>> print_all( split(text) )
    ab
    jd
    sdflk
    as
    sa
    sadas
    asdas
    fsdf
    """
    return s.split()

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def split_sep(unicode s, sep):
    """
    >>> print_all( text.split(sep) )
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    >>> print_all( split_sep(text, sep) )
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    """
    return s.split(sep)

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceFromPyTypeNode",
    "//PythonCapiCallNode")
def split_sep_max(unicode s, sep, max):
    """
    >>> print_all( text.split(sep, 1) )
    ab jd
    sdflk as sa  sadas asdas fsdf 
    >>> print_all( split_sep_max(text, sep, 1) )
    ab jd
    sdflk as sa  sadas asdas fsdf 
    """
    return s.split(sep, max)

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def split_sep_max_int(unicode s, sep):
    """
    >>> print_all( text.split(sep, 1) )
    ab jd
    sdflk as sa  sadas asdas fsdf 
    >>> print_all( split_sep_max_int(text, sep) )
    ab jd
    sdflk as sa  sadas asdas fsdf 
    """
    return s.split(sep, 1)


# unicode.splitlines(s, [keepends])

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def splitlines(unicode s):
    """
    >>> len(multiline_text.splitlines())
    3
    >>> print_all( multiline_text.splitlines() )
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    >>> len(splitlines(multiline_text))
    3
    >>> print_all( splitlines(multiline_text) )
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    """
    return s.splitlines()

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def splitlines_keep(unicode s, keep):
    """
    >>> len(multiline_text.splitlines(True))
    3
    >>> print_all( multiline_text.splitlines(True) )
    ab jd
    <BLANKLINE>
    sdflk as sa
    <BLANKLINE>
    sadas asdas fsdf 
    >>> len(splitlines_keep(multiline_text, True))
    3
    >>> print_all( splitlines_keep(multiline_text, True) )
    ab jd
    <BLANKLINE>
    sdflk as sa
    <BLANKLINE>
    sadas asdas fsdf 
    """
    return s.splitlines(keep)

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def splitlines_keep_bint(unicode s):
    """
    >>> len(multiline_text.splitlines(True))
    3
    >>> print_all( multiline_text.splitlines(True) )
    ab jd
    <BLANKLINE>
    sdflk as sa
    <BLANKLINE>
    sadas asdas fsdf 
    >>> print_all( multiline_text.splitlines(False) )
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    >>> len(splitlines_keep_bint(multiline_text))
    7
    >>> print_all( splitlines_keep_bint(multiline_text) )
    ab jd
    <BLANKLINE>
    sdflk as sa
    <BLANKLINE>
    sadas asdas fsdf 
    --
    ab jd
    sdflk as sa
    sadas asdas fsdf 
    """
    return s.splitlines(True) + ['--'] + s.splitlines(False)


# unicode.join(s, iterable)

pipe_sep = u'|'

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = true]")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
)
def join(unicode sep, l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( pipe_sep.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( join(pipe_sep, l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    return sep.join(l)


@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode", "//NoneCheckNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = true]")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
)
def join_sep(l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( '|'.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( join_sep(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    result = u'|'.join(l)
    assert cython.typeof(result) == 'unicode object', cython.typeof(result)
    return result


@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode", "//NoneCheckNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = true]"
)
@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
    "//InlinedGeneratorExpressionNode"
)
def join_sep_genexpr(l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( '<<%s>>' % '|'.join(s + ' ' for s in l) )
    <<ab |jd |sdflk |as |sa |sadas |asdas |fsdf >>
    >>> print( '<<%s>>' % join_sep_genexpr(l) )
    <<ab |jd |sdflk |as |sa |sadas |asdas |fsdf >>
    """
    result = u'|'.join(s + u' ' for s in l)
    assert cython.typeof(result) == 'unicode object', cython.typeof(result)
    return result


@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode", "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode",
)
@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
    "//InlinedGeneratorExpressionNode"
)
def join_sep_genexpr_dictiter(dict d):
    """
    >>> l = text.split()
    >>> d = dict(zip(range(len(l)), l))
    >>> print('|'.join( sorted(' '.join('%s:%s' % (k, v) for k, v in d.items()).split()) ))
    0:ab|1:jd|2:sdflk|3:as|4:sa|5:sadas|6:asdas|7:fsdf
    >>> print('|'.join( sorted(join_sep_genexpr_dictiter(d).split())) )
    0:ab|1:jd|2:sdflk|3:as|4:sa|5:sadas|6:asdas|7:fsdf
    """
    result = u' '.join('%s:%s' % (k, v) for k, v in d.iteritems())
    assert cython.typeof(result) == 'unicode object', cython.typeof(result)
    return result


@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
)
def join_unbound(unicode sep, l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( pipe_sep.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( join_unbound(pipe_sep, l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    join = unicode.join
    return join(sep, l)


# unicode.startswith(s, prefix, [start, [end]])

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode",
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def startswith(unicode s, sub):
    """
    >>> text.startswith('ab ')
    True
    >>> startswith(text, 'ab ')
    'MATCH'
    >>> text.startswith('ab X')
    False
    >>> startswith(text, 'ab X')
    'NO MATCH'

    >>> PY_VERSION < (2,5) or text.startswith(('ab', 'ab '))
    True
    >>> startswith(text, ('ab', 'ab '))
    'MATCH'
    >>> PY_VERSION < (2,5) or not text.startswith((' ab', 'ab X'))
    True
    >>> startswith(text, (' ab', 'ab X'))
    'NO MATCH'
    """
    if s.startswith(sub):
        return 'MATCH'
    else:
        return 'NO MATCH'

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceFromPyTypeNode",
    "//PythonCapiCallNode")
def startswith_start_end(unicode s, sub, start, end):
    """
    >>> text.startswith('b ', 1, 5)
    True
    >>> startswith_start_end(text, 'b ', 1, 5)
    'MATCH'
    >>> text.startswith('ab ', -1000, 5000)
    True
    >>> startswith_start_end(text, 'ab ', -1000, 5000)
    'MATCH'
    >>> text.startswith('b X', 1, 5)
    False
    >>> startswith_start_end(text, 'b X', 1, 5)
    'NO MATCH'
    """
    if s.startswith(sub, start, end):
        return 'MATCH'
    else:
        return 'NO MATCH'


# unicode.endswith(s, prefix, [start, [end]])

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode",
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def endswith(unicode s, sub):
    """
    >>> text.endswith('fsdf ')
    True
    >>> endswith(text, 'fsdf ')
    'MATCH'
    >>> text.endswith('fsdf X')
    False
    >>> endswith(text, 'fsdf X')
    'NO MATCH'

    >>> PY_VERSION < (2,5) or text.endswith(('fsdf', 'fsdf '))
    True
    >>> endswith(text, ('fsdf', 'fsdf '))
    'MATCH'
    >>> PY_VERSION < (2,5) or not text.endswith(('fsdf', 'fsdf X'))
    True
    >>> endswith(text, ('fsdf', 'fsdf X'))
    'NO MATCH'
    """
    if s.endswith(sub):
        return 'MATCH'
    else:
        return 'NO MATCH'

@cython.test_fail_if_path_exists(
    "//CoerceToPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceFromPyTypeNode",
    "//PythonCapiCallNode")
def endswith_start_end(unicode s, sub, start, end):
    """
    >>> text.endswith('fsdf', 10, len(text)-1)
    True
    >>> endswith_start_end(text, 'fsdf', 10, len(text)-1)
    'MATCH'
    >>> text.endswith('fsdf ', 10, len(text)-1)
    False
    >>> endswith_start_end(text, 'fsdf ', 10, len(text)-1)
    'NO MATCH'

    >>> text.endswith('fsdf ', -1000, 5000)
    True
    >>> endswith_start_end(text, 'fsdf ', -1000, 5000)
    'MATCH'

    >>> PY_VERSION < (2,5) or text.endswith(('fsd', 'fsdf'), 10, len(text)-1)
    True
    >>> endswith_start_end(text, ('fsd', 'fsdf'), 10, len(text)-1)
    'MATCH'
    >>> PY_VERSION < (2,5) or not text.endswith(('fsdf ', 'fsdf X'), 10, len(text)-1)
    True
    >>> endswith_start_end(text, ('fsdf ', 'fsdf X'), 10, len(text)-1)
    'NO MATCH'
    """
    if s.endswith(sub, start, end):
        return 'MATCH'
    else:
        return 'NO MATCH'


# unicode.__contains__(s, sub)

@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode", "//AttributeNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode", "//PrimaryCmpNode")
def in_test(unicode s, substring):
    """
    >>> in_test(text, 'sa')
    True
    >>> in_test(text, 'XYZ')
    False
    >>> in_test(None, 'sa')
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not iterable
    """
    return substring in s


# unicode.__concat__(s, suffix)

def concat_any(unicode s, suffix):
    """
    >>> concat(text, 'sa') == text + 'sa'  or  concat(text, 'sa')
    True
    >>> concat(None, 'sa')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    >>> concat(text, None)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    >>> class RAdd(object):
    ...     def __radd__(self, other):
    ...         return 123
    >>> concat(None, 'sa')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    """
    assert cython.typeof(s + suffix) == 'Python object', cython.typeof(s + suffix)
    return s + suffix


def concat(unicode s, str suffix):
    """
    >>> concat(text, 'sa') == text + 'sa'  or  concat(text, 'sa')
    True
    >>> concat(None, 'sa')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    >>> concat(text, None)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    >>> class RAdd(object):
    ...     def __radd__(self, other):
    ...         return 123
    >>> concat(None, 'sa')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    """
    assert cython.typeof(s + object()) == 'Python object', cython.typeof(s + object())
    assert cython.typeof(s + suffix) == 'unicode object', cython.typeof(s + suffix)
    return s + suffix


def concat_literal_str(str suffix):
    """
    >>> concat_literal_str('sa') == 'abcsa'  or  concat_literal_str('sa')
    True
    >>> concat_literal_str(None)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...NoneType...
    """
    assert cython.typeof(u'abc' + object()) == 'Python object', cython.typeof(u'abc' + object())
    assert cython.typeof(u'abc' + suffix) == 'unicode object', cython.typeof(u'abc' + suffix)
    return u'abc' + suffix


def concat_literal_unicode(unicode suffix):
    """
    >>> concat_literal_unicode(unicode_sa) == 'abcsa'  or  concat_literal_unicode(unicode_sa)
    True
    >>> concat_literal_unicode(None)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...NoneType...
    """
    assert cython.typeof(u'abc' + suffix) == 'unicode object', cython.typeof(u'abc' + suffix)
    return u'abc' + suffix


# unicode.__mod__(format, values)

def mod_format(unicode s, values):
    """
    >>> mod_format(format1, 'sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format(format2, ('XYZ', 'ABC')) == 'abcXYZdefABCghi'  or  mod_format(format2, ('XYZ', 'ABC'))
    True
    >>> mod_format(None, 'sa')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: unsupported operand type(s) for %: 'NoneType' and 'str'
    >>> class RMod(object):
    ...     def __rmod__(self, other):
    ...         return 123
    >>> mod_format(None, RMod())
    123
    """
    assert cython.typeof(s % values) == 'Python object', cython.typeof(s % values)
    return s % values


def mod_format_literal(values):
    """
    >>> mod_format_literal('sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format_literal(('sa',)) == 'abcsadef'  or  mod_format(format1, ('sa',))
    True
    >>> mod_format_literal(['sa']) == "abc['sa']def"  or  mod_format(format1, ['sa'])
    True
    """
    assert cython.typeof(u'abc%sdef' % values) == 'unicode object', cython.typeof(u'abc%sdef' % values)
    return u'abc%sdef' % values


def mod_format_tuple(*values):
    """
    >>> mod_format_tuple('sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format_tuple()
    Traceback (most recent call last):
    TypeError: not enough arguments for format string
    """
    assert cython.typeof(u'abc%sdef' % values) == 'unicode object', cython.typeof(u'abc%sdef' % values)
    return u'abc%sdef' % values


# unicode.find(s, sub, [start, [end]])

@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def find(unicode s, substring):
    """
    >>> text.find('sa')
    16
    >>> find(text, 'sa')
    16
    """
    cdef Py_ssize_t pos = s.find(substring)
    return pos

@cython.test_fail_if_path_exists(
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def find_start_end(unicode s, substring, start, end):
    """
    >>> text.find('sa', 17, 25)
    20
    >>> find_start_end(text, 'sa', 17, 25)
    20
    """
    cdef Py_ssize_t pos = s.find(substring, start, end)
    return pos


# unicode.rfind(s, sub, [start, [end]])

@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def rfind(unicode s, substring):
    """
    >>> text.rfind('sa')
    20
    >>> rfind(text, 'sa')
    20
    """
    cdef Py_ssize_t pos = s.rfind(substring)
    return pos

@cython.test_fail_if_path_exists(
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def rfind_start_end(unicode s, substring, start, end):
    """
    >>> text.rfind('sa', 14, 19)
    16
    >>> rfind_start_end(text, 'sa', 14, 19)
    16
    """
    cdef Py_ssize_t pos = s.rfind(substring, start, end)
    return pos


# unicode.count(s, sub, [start, [end]])

@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def count(unicode s, substring):
    """
    >>> text.count('sa')
    2
    >>> count(text, 'sa')
    2
    """
    cdef Py_ssize_t pos = s.count(substring)
    return pos

@cython.test_fail_if_path_exists(
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//PythonCapiCallNode")
def count_start_end(unicode s, substring, start, end):
    """
    >>> text.count('sa', 14, 21)
    1
    >>> text.count('sa', 14, 22)
    2
    >>> count_start_end(text, 'sa', 14, 21)
    1
    >>> count_start_end(text, 'sa', 14, 22)
    2
    """
    cdef Py_ssize_t pos = s.count(substring, start, end)
    return pos


# unicode.replace(s, sub, repl, [maxcount])

@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode",
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def replace(unicode s, substring, repl):
    """
    >>> print( text.replace('sa', 'SA') )
    ab jd  sdflk as SA  SAdas asdas fsdf 
    >>> print( replace(text, 'sa', 'SA') )
    ab jd  sdflk as SA  SAdas asdas fsdf 
    """
    return s.replace(substring, repl)

@cython.test_fail_if_path_exists(
    "//CastNode", "//TypecastNode")
@cython.test_assert_path_exists(
    "//CoerceFromPyTypeNode",
    "//PythonCapiCallNode")
def replace_maxcount(unicode s, substring, repl, maxcount):
    """
    >>> print( text.replace('sa', 'SA', 1) )
    ab jd  sdflk as SA  sadas asdas fsdf 
    >>> print( replace_maxcount(text, 'sa', 'SA', 1) )
    ab jd  sdflk as SA  sadas asdas fsdf 
    """
    return s.replace(substring, repl, maxcount)
Cython-0.26.1/tests/run/cpdef_extern_func_in_py.pxd0000664000175000017500000000010512542002467023170 0ustar  stefanstefan00000000000000
cdef extern from "math.h":
    cpdef double pxd_sqrt "sqrt"(double)
Cython-0.26.1/tests/run/r_extstarargs.pyx0000664000175000017500000000162112542002467021232 0ustar  stefanstefan00000000000000__doc__ = u"""
  >>> s = Swallow("Brian", 42)
  Name: Brian
  Airspeed: 42
  Extra args: ()
  Extra keywords: []

  >>> s = Swallow("Brian", 42, "African")
  Name: Brian
  Airspeed: 42
  Extra args: ('African',)
  Extra keywords: []

  >>> s = Swallow("Brian", airspeed = 42)
  Name: Brian
  Airspeed: 42
  Extra args: ()
  Extra keywords: []

  >>> s = Swallow("Brian", airspeed = 42, species = "African", coconuts = 3)
  Name: Brian
  Airspeed: 42
  Extra args: ()
  Extra keywords: [('coconuts', 3), ('species', 'African')]

  >>> s = Swallow("Brian", 42, "African", coconuts = 3)
  Name: Brian
  Airspeed: 42
  Extra args: ('African',)
  Extra keywords: [('coconuts', 3)]
"""

cdef class Swallow:

    def __init__(self, name, airspeed, *args, **kwds):
        print u"Name:", name
        print u"Airspeed:", airspeed
        print u"Extra args:", args
        print u"Extra keywords:", sorted(kwds.items())
Cython-0.26.1/tests/run/complex_int_T446.pyx0000664000175000017500000000234612542002467021411 0ustar  stefanstefan00000000000000# ticket: 446

import cython

cdef extern from "complex_int_T446_fix.h":
    pass

def test_arith(int complex a, int complex b):
    """
    >>> test_arith(4, 2)
    ((-4+0j), (6+0j), (2+0j), (8+0j))
    >>> test_arith(6+9j, 3j)
    ((-6-9j), (6+12j), (6+6j), (-27+18j))
    >>> test_arith(29+11j, 5+7j)
    ((-29-11j), (34+18j), (24+4j), (68+258j))
    """
    return -a, a+b, a-b, a*b
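
# Worked check of the last doctest product above (illustrative only):
#   (29+11j) * (5+7j) = (29*5 - 11*7) + (29*7 + 11*5)j
#                     = (145 - 77)    + (203 + 55)j
#                     = 68 + 258j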

@cython.cdivision(False)
def test_div_by_zero(long complex z):
    """
    >>> test_div_by_zero(4j)
    -25j
    >>> test_div_by_zero(0)
    Traceback (most recent call last):
    ...
    ZeroDivisionError: float division
    """
    return 100/z

def test_coercion(int a, long b, int complex c):
    """
    >>> test_coercion(1, -2, 3-3j)
    (1+0j)
    (-2+0j)
    (3-3j)
    (5-6j)
    """
    cdef double complex z
    z = a; print z
    z = b; print z
    z = c; print z
    return z + a + b + c


def test_conjugate(long complex z):
    """
    >>> test_conjugate(2+3j)
    (2-3j)
    """
    return z.conjugate()

def test_conjugate2(short complex z):
    """
    >>> test_conjugate2(2+3j)
    (2-3j)
    """
    return z.conjugate()

def test_conjugate3(long long complex z):
    """
    >>> test_conjugate3(2+3j)
    (2-3j)
    """
    return z.conjugate()
Cython-0.26.1/tests/run/bytesmethods.pyx0000664000175000017500000001565413143605603021066 0ustar  stefanstefan00000000000000cimport cython

b_a = b'a'
b_b = b'b'


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_startswith(bytes s, sub, start=None, stop=None):
    """
    >>> bytes_startswith(b_a, b_a)
    True
    >>> bytes_startswith(b_a+b_b, b_a)
    True
    >>> bytes_startswith(b_a, b_b)
    False
    >>> bytes_startswith(b_a+b_b, b_b)
    False
    >>> bytes_startswith(b_a, (b_a, b_b))
    True
    >>> bytes_startswith(b_a, b_a, 1)
    False
    >>> bytes_startswith(b_a, b_a, 0, 0)
    False
    """

    if start is None:
      return s.startswith(sub)
    elif stop is None:
      return s.startswith(sub, start)
    else:
      return s.startswith(sub, start, stop)


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_endswith(bytes s, sub, start=None, stop=None):
    """
    >>> bytes_endswith(b_a, b_a)
    True
    >>> bytes_endswith(b_b+b_a, b_a)
    True
    >>> bytes_endswith(b_a, b_b)
    False
    >>> bytes_endswith(b_b+b_a, b_b)
    False
    >>> bytes_endswith(b_a, (b_a, b_b))
    True
    >>> bytes_endswith(b_a, b_a, 1)
    False
    >>> bytes_endswith(b_a, b_a, 0, 0)
    False
    """

    if start is None:
      return s.endswith(sub)
    elif stop is None:
      return s.endswith(sub, start)
    else:
      return s.endswith(sub, start, stop)


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_decode(bytes s, start=None, stop=None):
    """
    >>> s = b_a+b_b+b_a+b_a+b_b
    >>> print(bytes_decode(s))
    abaab

    >>> print(bytes_decode(s, 2))
    aab
    >>> print(bytes_decode(s, -3))
    aab

    >>> print(bytes_decode(s, None, 4))
    abaa
    >>> print(bytes_decode(s, None, 400))
    abaab
    >>> print(bytes_decode(s, None, -2))
    aba
    >>> print(bytes_decode(s, None, -4))
    a
    >>> print(bytes_decode(s, None, -5))
    <BLANKLINE>
    >>> print(bytes_decode(s, None, -200))
    <BLANKLINE>

    >>> print(bytes_decode(s, 2, 5))
    aab
    >>> print(bytes_decode(s, 2, 500))
    aab
    >>> print(bytes_decode(s, 2, -1))
    aa
    >>> print(bytes_decode(s, 2, -3))
    <BLANKLINE>
    >>> print(bytes_decode(s, 2, -300))
    <BLANKLINE>
    >>> print(bytes_decode(s, -3, -1))
    aa
    >>> print(bytes_decode(s, -300, 300))
    abaab
    >>> print(bytes_decode(s, -300, -4))
    a
    >>> print(bytes_decode(s, -300, -5))
    <BLANKLINE>
    >>> print(bytes_decode(s, -300, -6))
    <BLANKLINE>
    >>> print(bytes_decode(s, -300, -500))
    <BLANKLINE>

    >>> s[:'test']                       # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytes_decode(s, 'test'))   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytes_decode(s, None, 'test'))    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytes_decode(s, 'test', 'test'))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...

    >>> print(bytes_decode(None))
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'decode'
    >>> print(bytes_decode(None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytes_decode(None, None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytes_decode(None, 0, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    if start is None:
        if stop is None:
            return s.decode('utf8')
        else:
            return s[:stop].decode('utf8')
    elif stop is None:
        return s[start:].decode('utf8')
    else:
        return s[start:stop].decode('utf8')


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_decode_utf16(bytes s):
    """
    >>> s = 'abc'.encode('UTF-16')
    >>> print(bytes_decode_utf16(s))
    abc
    """
    return s.decode('utf16')


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_decode_utf16_le(bytes s):
    """
    >>> s = 'abc'.encode('UTF-16LE')
    >>> assert s != 'abc'.encode('UTF-16BE')
    >>> print(bytes_decode_utf16_le(s))
    abc
    """
    return s.decode('utf_16_le')


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_decode_utf16_be(bytes s):
    """
    >>> s = 'abc'.encode('UTF-16BE')
    >>> assert s != 'abc'.encode('UTF-16LE')
    >>> print(bytes_decode_utf16_be(s))
    abc
    """
    return s.decode('utf_16_be')


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytes_decode_unbound_method(bytes s, start=None, stop=None):
    """
    >>> s = b_a+b_b+b_a+b_a+b_b
    >>> print(bytes_decode_unbound_method(s))
    abaab
    >>> print(bytes_decode_unbound_method(s, 1))
    baab
    >>> print(bytes_decode_unbound_method(s, None, 3))
    aba
    >>> print(bytes_decode_unbound_method(s, 1, 4))
    baa

    >>> print(bytes_decode_unbound_method(None))
    Traceback (most recent call last):
    TypeError: descriptor 'decode' requires a 'bytes' object but received a 'NoneType'
    >>> print(bytes_decode_unbound_method(None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytes_decode_unbound_method(None, None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytes_decode_unbound_method(None, 0, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    if start is None:
        if stop is None:
            return bytes.decode(s, 'utf8')
        else:
            return bytes.decode(s[:stop], 'utf8')
    elif stop is None:
        return bytes.decode(s[start:], 'utf8')
    else:
        return bytes.decode(s[start:stop], 'utf8')


@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//NoneCheckNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def bytes_join(bytes s, *args):
    """
    >>> print(bytes_join(b_a, b_b, b_b, b_b).decode('utf8'))
    babab
    """
    result = s.join(args)
    assert cython.typeof(result) == 'Python object', cython.typeof(result)
    return result


@cython.test_fail_if_path_exists(
    "//SimpleCallNode//NoneCheckNode",
)
@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def literal_join(*args):
    """
    >>> print(literal_join(b_b, b_b, b_b, b_b).decode('utf8'))
    b|b|b|b
    """
    result = b'|'.join(args)
    assert cython.typeof(result) == 'Python object', cython.typeof(result)
    return result
Cython-0.26.1/tests/run/staticmethod.pyx0000664000175000017500000000523713023021033021022 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> class1.plus1(1)
2
>>> class2.plus1(1)
2
>>> class3.plus1(1)
2
>>> class4.plus1(1)
2
>>> class4().plus1(1)
2
>>> class4.bplus1(1)
2
>>> class4().bplus1(1)
2
"""

cimport cython

def f_plus(a):
    return a + 1

class class1:
    plus1 = f_plus

class class2(object):
    plus1 = f_plus

cdef class class3:
    plus1 = f_plus

class class4:
    @staticmethod
    def plus1(a):
        return a + 1

    @staticmethod
    @cython.binding(True)
    def bplus1(a):
        return a + 1


def nested_class():
    """
    >>> cls = nested_class()
    >>> cls.plus1(1)
    2
    >>> obj = cls()
    >>> obj.plus1(1)
    2
    """
    class class5(object):
        def __new__(cls): # implicit staticmethod
            return object.__new__(cls)

        @staticmethod
        def plus1(a):
            return a + 1
    return class5


cdef class BaseClass(object):
    """
    Test cdef static methods with super() and Python subclasses

    >>> obj = BaseClass()
    >>> obj.mystaticmethod(obj, 1)
    1
    >>> BaseClass.mystaticmethod(obj, 1)
    1
    >>> obj.mystaticmethod2(1, 2, 3)
    1 2 3
    >>> BaseClass.mystaticmethod2(1, 2, 3)
    1 2 3
    """

    @staticmethod
    def mystaticmethod(self, arg1):
        print arg1

    @staticmethod
    @cython.binding(True)
    def mystaticmethod2(a, b, c):
        print a, b, c


cdef class SubClass(BaseClass):
    """
    >>> obj = SubClass()
    >>> obj.mystaticmethod(obj, 1)
    1
    2
    >>> SubClass.mystaticmethod(obj, 1)
    1
    2
    """

    @staticmethod
    def mystaticmethod(self, arg1):
        print arg1
        super().mystaticmethod(self, arg1 + 1)


class SubSubClass(SubClass):
    """
    >>> obj = SubSubClass()
    >>> obj.mystaticmethod(obj, 1)
    1
    2
    3
    >>> SubSubClass.mystaticmethod(obj, 1)
    1
    2
    3
    """

    @staticmethod
    def mystaticmethod(self, arg1):
        print arg1
        super().mystaticmethod(self, arg1 + 1)


cdef class ArgsKwargs(object):
    @staticmethod
    def with_first_arg(arg1, *args, **kwargs):
        """
        >>> ArgsKwargs().with_first_arg(1, 2, 3, a=4, b=5)
        (1, 'pos', 2, 3, ('a', 4), ('b', 5))
        """
        return (arg1, 'pos') + args + tuple(sorted(kwargs.items()))

    @staticmethod
    def only_args_kwargs(*args, **kwargs):
        """
        >>> ArgsKwargs().only_args_kwargs()
        ()
        >>> ArgsKwargs().only_args_kwargs(1, 2, a=3)
        (1, 2, ('a', 3))
        """
        return args + tuple(sorted(kwargs.items()))


class StaticmethodSubclass(staticmethod):
    """
    >>> s = StaticmethodSubclass(None)
    >>> s.is_subtype()
    True
    """
    def is_subtype(self):
        return isinstance(self, staticmethod)
Cython-0.26.1/tests/run/r_mang1.pyx0000664000175000017500000000050512542002467017666 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> import re
    >>> t
    (u'2',)
    >>> t == re.search('(\\d+)', '-2.80 98\\n').groups()
    True
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"(u'", u"('")

# this is a string constant test, not a test for 're'

import re
t = re.search(u'(\d+)', u'-2.80 98\n').groups()
Cython-0.26.1/tests/run/kostyrka.pyx0000664000175000017500000000020112542002467020202 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> t = TEST()
    >>> 1 in t
    True
"""

cdef class TEST:
    def __contains__(self, x):
        return 42
Cython-0.26.1/tests/run/richcmp_str_equals.py0000664000175000017500000000073112542002467022042 0ustar  stefanstefan00000000000000# mode: run

class plop(object):
    def __init__(self):
        pass

class testobj(object):
    def __init__(self):
        pass

    def __eq__(self, other):
        return plop()

def test_equals(x):
    """
    >>> x = testobj()
    >>> result = test_equals(x)
    >>> isinstance(result, plop)
    True
    >>> test_equals('hihi')
    False
    >>> test_equals('coucou')
    True
    """
    eq = x == 'coucou'  # not every str equals returns a bool ...
    return eq
Cython-0.26.1/tests/run/cpp_vector_in_generator.pyx0000664000175000017500000000033412542002467023242 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp

from libcpp.vector cimport vector

def stack_vector_in_generator(vector[int] vint):
    """
    >>> tuple( stack_vector_in_generator([1,2]) )
    (1, 2)
    """
    for i in vint:
        yield i
Cython-0.26.1/tests/run/print_refcount.pyx0000664000175000017500000000156112542002467021406 0ustar  stefanstefan00000000000000# mode: run
import sys

def test_print_refcount():
    """
    >>> test_print_refcount()
    """
    old_stdout = sys.stdout
    class StdoutGuard:
        def __getattr__(self, attr):
            sys.stdout = old_stdout
            raise RuntimeError
    sys.stdout = StdoutGuard()
    try:
        print "Hello", "world!"
    except RuntimeError:
        pass
    finally:
        sys.stdout = old_stdout
    class TriggerSIGSEGV(object):
        pass

def test_printone_refcount():
    """
    >>> test_printone_refcount()
    """
    old_stdout = sys.stdout
    class StdoutGuard:
        def __getattr__(self, attr):
            sys.stdout = old_stdout
            raise RuntimeError
    sys.stdout = StdoutGuard()
    try:
        print "Oops!"
    except RuntimeError:
        pass
    finally:
        sys.stdout = old_stdout
    class TriggerSIGSEGV(object):
        pass
Cython-0.26.1/tests/run/unbound_special_methods.pyx0000664000175000017500000000332612542002467023243 0ustar  stefanstefan00000000000000# mode: run
# tag: special_method

cimport cython

text = u'ab jd  sdflk as sa  sadas asdas fsdf '


@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode")
@cython.test_assert_path_exists(
    "//CoerceToPyTypeNode",
    "//AttributeNode",
    "//AttributeNode[@entry.cname = 'PyUnicode_Contains']")
def unicode_contains(unicode s, substring):
    """
    >>> unicode_contains(text, 'fl')
    True
    >>> unicode_contains(text, 'XYZ')
    False
    >>> unicode_contains(None, 'XYZ')
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute '__contains__'
    """
    return s.__contains__(substring)


@cython.test_fail_if_path_exists(
    "//CoerceFromPyTypeNode")
@cython.test_assert_path_exists(
#    "//CoerceToPyTypeNode",
    "//NameNode[@entry.cname = 'PyUnicode_Contains']")
def unicode_contains_unbound(unicode s, substring):
    """
    >>> unicode_contains_unbound(text, 'fl')
    True
    >>> unicode_contains_unbound(text, 'XYZ')
    False
    >>> unicode_contains_unbound(None, 'XYZ')   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: descriptor '__contains__' requires a '...' object but received a 'NoneType'
    """
    return unicode.__contains__(s, substring)


cdef class UnicodeSubclass(unicode):
    """
    >>> u = UnicodeSubclass(text)
    >>> 'fl' in u
    False
    >>> 'XYZ' in u
    True
    >>> u.method('fl')
    False
    >>> u.method('XYZ')
    True
    >>> u.operator('fl')
    False
    >>> u.operator('XYZ')
    True
    """
    def __contains__(self, substring):
        return substring not in (self + u'x')

    def method(self, other):
        return self.__contains__(other)

    def operator(self, other):
        return other in self
Cython-0.26.1/tests/run/pyintop.pyx0000664000175000017500000001046413023021033020032 0ustar  stefanstefan00000000000000# mode: run


def bigint(x):
    # avoid 'L' postfix in Py2.x
    print(str(x).rstrip('L'))

def bigints(x):
    # avoid 'L' postfix in Py2.x
    print(str(x).replace('L', ''))


def or_obj(obj2, obj3):
    """
    >>> or_obj(2, 3)
    3
    """
    obj1 = obj2 | obj3
    return obj1


def or_int(obj2):
    """
    >>> or_int(1)
    17
    >>> or_int(16)
    16
    """
    obj1 = obj2 | 0x10
    return obj1


def xor_obj(obj2, obj3):
    """
    >>> xor_obj(2, 3)
    1
    """
    obj1 = obj2 ^ obj3
    return obj1


def xor_int(obj2):
    """
    >>> xor_int(2)
    18
    >>> xor_int(16)
    0
    """
    obj1 = obj2 ^ 0x10
    return obj1


def and_obj(obj2, obj3):
    """
    >>> and_obj(2, 3)
    2
    """
    obj1 = obj2 & obj3
    return obj1


def and_int(obj2):
    """
    >>> and_int(1)
    0
    >>> and_int(18)
    16
    """
    obj1 = obj2 & 0x10
    return obj1


def lshift_obj(obj2, obj3):
    """
    >>> lshift_obj(2, 3)
    16
    """
    obj1 = obj2 << obj3
    return obj1


def rshift_obj(obj2, obj3):
    """
    >>> rshift_obj(2, 3)
    0
    """
    obj1 = obj2 >> obj3
    return obj1


def rshift_int(obj2):
    """
    >>> rshift_int(2)
    0

    >>> rshift_int(27)
    3
    >>> (-27) >> 3
    -4
    >>> rshift_int(-27)
    -4

    >>> rshift_int(32)
    4
    >>> (-32) >> 3
    -4
    >>> rshift_int(-32)
    -4

    >>> (2**28) >> 3
    33554432
    >>> rshift_int(2**28)
    33554432
    >>> (-2**28) >> 3
    -33554432
    >>> rshift_int(-2**28)
    -33554432

    >>> (2**30) >> 3
    134217728
    >>> rshift_int(2**30)
    134217728
    >>> rshift_int(-2**30)
    -134217728

    >>> bigint((2**60) >> 3)
    144115188075855872
    >>> bigint(rshift_int(2**60))
    144115188075855872
    >>> bigint(rshift_int(-2**60))
    -144115188075855872
    """
    obj1 = obj2 >> 3
    return obj1


def lshift_int(obj):
    """
    >>> lshift_int(0)
    (0, 0, 0, 0)
    >>> bigints(lshift_int(1))
    (8, 2147483648, 9223372036854775808, 10633823966279326983230456482242756608)
    >>> bigints(lshift_int(-1))
    (-8, -2147483648, -9223372036854775808, -10633823966279326983230456482242756608)
    >>> bigints(lshift_int(2))
    (16, 4294967296, 18446744073709551616, 21267647932558653966460912964485513216)

    >>> bigints(lshift_int(27))
    (216, 57982058496, 249031044995078946816, 287113247089541828547222325020554428416)
    >>> (-27) << 3
    -216
    >>> bigints(lshift_int(-27))
    (-216, -57982058496, -249031044995078946816, -287113247089541828547222325020554428416)

    >>> bigints(lshift_int(32))
    (256, 68719476736, 295147905179352825856, 340282366920938463463374607431768211456)
    >>> (-32) << 3
    -256
    >>> bigints(lshift_int(-32))
    (-256, -68719476736, -295147905179352825856, -340282366920938463463374607431768211456)

    >>> bigint((2**28) << 3)
    2147483648
    >>> bigints(lshift_int(2**28))
    (2147483648, 576460752303423488, 2475880078570760549798248448, 2854495385411919762116571938898990272765493248)
    >>> bigint((-2**28) << 3)
    -2147483648
    >>> bigints(lshift_int(-2**28))
    (-2147483648, -576460752303423488, -2475880078570760549798248448, -2854495385411919762116571938898990272765493248)

    >>> bigint((2**30) << 3)
    8589934592
    >>> bigints(lshift_int(2**30))
    (8589934592, 2305843009213693952, 9903520314283042199192993792, 11417981541647679048466287755595961091061972992)
    >>> bigints(lshift_int(-2**30))
    (-8589934592, -2305843009213693952, -9903520314283042199192993792, -11417981541647679048466287755595961091061972992)

    >>> bigint((2**60) << 3)
    9223372036854775808
    >>> bigints(lshift_int(2**60))
    (9223372036854775808, 2475880078570760549798248448, 10633823966279326983230456482242756608, 12259964326927110866866776217202473468949912977468817408)
    >>> bigints(lshift_int(-2**60))
    (-9223372036854775808, -2475880078570760549798248448, -10633823966279326983230456482242756608, -12259964326927110866866776217202473468949912977468817408)
    """
    r1 = obj << 3
    r2 = obj << 31
    r3 = obj << 63
    r4 = obj << 123
    return r1, r2, r3, r4


def mixed_obj(obj2, obj3):
    """
    >>> mixed_obj(2, 3)
    16
    """
    obj1 = obj2 << obj3 | obj2 >> obj3
    return obj1


def mixed_int(obj2):
    """
    >>> mixed_int(2)
    18
    >>> mixed_int(16)
    0
    >>> mixed_int(17)
    1
    """
    obj1 = (obj2 ^ 0x10) | (obj2 & 0x01)
    return obj1
Cython-0.26.1/tests/run/qualname.pyx0000664000175000017500000000361712542002467020154 0ustar  stefanstefan00000000000000# cython: binding=True
# mode: run
# tag: cyfunction,qualname

import sys


def test_qualname():
    """
    >>> test_qualname.__qualname__
    'test_qualname'
    >>> test_qualname.__qualname__ = 123 #doctest:+ELLIPSIS
    Traceback (most recent call last):
    TypeError: __qualname__ must be set to a ... object
    >>> test_qualname.__qualname__ = 'foo'
    >>> test_qualname.__qualname__
    'foo'
    """


def test_builtin_qualname():
    """
    >>> test_builtin_qualname()
    list.append
    len
    """
    if sys.version_info >= (3, 3):
        print([1, 2, 3].append.__qualname__)
        print(len.__qualname__)
    else:
        print('list.append')
        print('len')


def test_nested_qualname():
    """
    >>> outer, lambda_func, XYZ = test_nested_qualname()

    >>> outer().__qualname__
    'test_nested_qualname.<locals>.outer.<locals>.Test'
    >>> outer().test.__qualname__
    'test_nested_qualname.<locals>.outer.<locals>.Test.test'
    >>> outer()().test.__qualname__
    'test_nested_qualname.<locals>.outer.<locals>.Test.test'

    >>> outer()().test().__qualname__
    'XYZinner'
    >>> outer()().test().Inner.__qualname__
    'XYZinner.Inner'
    >>> outer()().test().Inner.inner.__qualname__
    'XYZinner.Inner.inner'

    >>> lambda_func.__qualname__
    'test_nested_qualname.<locals>.<lambda>'

    >>> XYZ.__qualname__
    'XYZ'
    >>> XYZ.Inner.__qualname__
    'XYZ.Inner'
    >>> XYZ.Inner.inner.__qualname__
    'XYZ.Inner.inner'
    """
    def outer():
        class Test(object):
            def test(self):
                global XYZinner
                class XYZinner:
                    class Inner:
                        def inner(self):
                            pass

                return XYZinner
        return Test

    global XYZ
    class XYZ(object):
        class Inner(object):
            def inner(self):
                pass

    return outer, lambda:None, XYZ
Cython-0.26.1/tests/run/dictintindex.pyx0000664000175000017500000001063212542002467021032 0ustar  stefanstefan00000000000000def test_get_char_neg():
    """
    >>> test_get_char_neg()
    0
    """
    cdef char key = -1
    if -1 < 0:
        d = {-1:0}
    else:
        d = {255:0}
    return d[key]
def test_get_char_zero():
    """
    >>> test_get_char_zero()
    1
    """
    cdef char key = 0
    d = {0:1}
    return d[key]
def test_get_char_pos():
    """
    >>> test_get_char_pos()
    2
    """
    cdef char key = 1
    d = {1:2}
    return d[key]


def test_get_uchar_zero():
    """
    >>> test_get_uchar_zero()
    1
    """
    cdef unsigned char key = 0
    d = {0:1}
    return d[key]
def test_get_uchar_pos():
    """
    >>> test_get_uchar_pos()
    2
    """
    cdef unsigned char key = 1
    d = {1:2}
    return d[key]


def test_get_int_neg():
    """
    >>> test_get_int_neg()
    0
    """
    cdef int key = -1
    d = {-1:0}
    return d[key]
def test_get_int_zero():
    """
    >>> test_get_int_zero()
    1
    """
    cdef int key = 0
    d = {0:1}
    return d[key]
def test_get_int_pos():
    """
    >>> test_get_int_pos()
    2
    """
    cdef int key = 1
    d = {1:2}
    return d[key]


def test_get_uint_zero():
    """
    >>> test_get_uint_zero()
    1
    """
    cdef unsigned int key = 0
    d = {0:1}
    return d[key]
def test_get_uint_pos():
    """
    >>> test_get_uint_pos()
    2
    """
    cdef unsigned int key = 1
    d = {1:2}
    return d[key]


def test_get_longlong_neg():
    """
    >>> test_get_longlong_neg()
    0
    """
    cdef long long key = -1
    d = {-1:0}
    return d[key]
def test_get_longlong_zero():
    """
    >>> test_get_longlong_zero()
    1
    """
    cdef long long key = 0
    d = {0:1}
    return d[key]
def test_get_longlong_pos():
    """
    >>> test_get_longlong_pos()
    2
    """
    cdef long long key = 1
    d = {1:2}
    return d[key]
def test_get_longlong_big():
    """
    >>> test_get_longlong_big()
    3
    """
    cdef unsigned int shift = sizeof(long)+2
    cdef long long big = 1
    cdef long long key = big<<shift
    d = {big<<shift:3}
    return d[key]


def test_get_ulonglong_zero():
    """
    >>> test_get_ulonglong_zero()
    1
    """
    cdef unsigned long long key = 0
    d = {0:1}
    return d[key]
def test_get_ulonglong_pos():
    """
    >>> test_get_ulonglong_pos()
    2
    """
    cdef unsigned long long key = 1
    d = {1:2}
    return d[key]
def test_get_ulonglong_big():
    """
    >>> test_get_ulonglong_big()
    3
    """
    cdef unsigned int shift = sizeof(long)+2
    cdef unsigned long long big = 1
    cdef unsigned long long key = big<<shift
    d = {big<<shift:3}
    return d[key]

def test_del_char():
    """
    >>> test_del_char()
    Traceback (most recent call last):
    KeyError: 0
    """
    cdef char key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_uchar():
    """
    >>> test_del_uchar()
    Traceback (most recent call last):
    KeyError: 0
    """
    cdef unsigned char key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_int():
    """
    >>> test_del_int()
    Traceback (most recent call last):
    KeyError: 0
    """
    cdef int key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_uint():
    """
    >>> test_del_uint()  #doctest: +ELLIPSIS
    Traceback (most recent call last):
    KeyError: 0...
    """
    cdef unsigned int key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_longlong():
    """
    >>> test_del_longlong() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    KeyError: 0...
    """
    cdef long long key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_ulonglong():
    """
    >>> test_del_ulonglong() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    KeyError: 0...
    """
    cdef unsigned long long key = 0
    d = {0:1}
    del d[key]
    return d[key]

def test_del_longlong_big():
    """
    >>> test_del_longlong_big() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    KeyError: ...
    """
    cdef int shift = sizeof(long)+2
    cdef long long big = 1
    cdef long long key = big<<shift
    d = {0:1}
    del d[key]
    return d[key]

def test_del_ulonglong_big():
    """
    >>> test_del_ulonglong_big() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    KeyError: ...
    """
    cdef unsigned int shift = sizeof(long)+2
    cdef unsigned long long big = 1
    cdef unsigned long long key = big<<shift
    d = {0:1}
    del d[key]
    return d[key]
Cython-0.26.1/tests/run/classmethod.pyx
__doc__ = u"""
>>> class1.plus(1)
6
>>> class1.view()
class1
>>> class1().view()
class1
>>> class1.bview()
class1
>>> class1().bview()
class1

>>> class2.view()
class2
>>> class2.plus(1)
7

>>> class3.view()
class3
>>> class3.bview()
class3
>>> class3().bview()
class3
>>> class3.plus(1)
8

>>> class4.view()
class4
>>> class5.view()
class5
"""

cimport cython

def f_plus(cls, a):
    return cls.a + a


class class1:
    a = 5
    plus = classmethod(f_plus)
    def view(cls):
        print cls.__name__
    view = classmethod(view)

    @classmethod
    @cython.binding(True)
    def bview(cls):
        print cls.__name__


class class2(object):
    a = 6
    plus = classmethod(f_plus)
    def view(cls):
        print cls.__name__
    view = classmethod(view)


cdef class class3:
    a = 7
    plus = classmethod(f_plus)
    def view(cls):
        print cls.__name__
    view = classmethod(view)

    @classmethod
    @cython.binding(True)
    def bview(cls):
        print cls.__name__


class class4:
    @classmethod
    def view(cls):
        print cls.__name__


class class5(class4):
    pass
Cython-0.26.1/tests/run/cpdef_enums.pxd0000664000175000017500000000037412574327400020613 0ustar  stefanstefan00000000000000cdef extern from *:
    cpdef enum: # ExternPxd
        FOUR "4"
        EIGHT "8"

    cdef enum: # ExternSecretPxd
        SIXTEEN "16"

cpdef enum PxdEnum:
    RANK_0 = 11
    RANK_1 = 37
    RANK_2 = 389

cdef enum PxdSecretEnum:
    RANK_3 = 5077
Cython-0.26.1/tests/run/array_address.pyx0000664000175000017500000000043512542002467021167 0ustar  stefanstefan00000000000000ctypedef int five_ints[5]

def test_array_address(int ix, int x):
    """
    >>> test_array_address(0, 100)
    100
    >>> test_array_address(2, 200)
    200
    """
    cdef five_ints a
    a[:] = [1, 2, 3, 4, 5]
    cdef five_ints *a_ptr = &a
    a_ptr[0][ix] = x
    return a[ix]
Cython-0.26.1/tests/run/class_func_in_control_structures_T87.pyx0000664000175000017500000000201412542002467025652 0ustar  stefanstefan00000000000000# ticket: 87

__doc__ = u"""
>>> d = Defined()
>>> n = NotDefined()         # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'NotDefined' is not defined
"""

if True:
    class Defined(object):
        """
        >>> isinstance(Defined(), Defined)
        True
        """

if False:
    class NotDefined(object):
        """
        >>> NotDefined() # fails when defined
        """

def test_class_cond(x):
    """
    >>> Test, test = test_class_cond(True)
    >>> test.A
    1
    >>> Test().A
    1
    >>> Test, test = test_class_cond(False)
    >>> test.A
    2
    >>> Test().A
    2
    """
    if x:
        class Test(object):
            A = 1
    else:
        class Test(object):
            A = 2
    return Test, Test()

def test_func_cond(x):
    """
    >>> func = test_func_cond(True)
    >>> func()
    1
    >>> func = test_func_cond(False)
    >>> func()
    2
    """
    if x:
        def func():
            return 1
    else:
        def func():
            return 2
    return func
Cython-0.26.1/tests/run/withstat.pyx0000664000175000017500000001336612542002467020222 0ustar  stefanstefan00000000000000from __future__ import with_statement

import sys

def typename(t):
    name = type(t).__name__
    if sys.version_info < (2,5):
        if name == 'classobj' and issubclass(t, MyException):
            name = 'type'
        elif name == 'instance' and isinstance(t, MyException):
            name = 'MyException'
    return u"" % name

class MyException(Exception):
    pass

class ContextManager(object):
    def __init__(self, value, exit_ret = None):
        self.value = value
        self.exit_ret = exit_ret

    def __exit__(self, a, b, tb):
        print u"exit", typename(a), typename(b), typename(tb)
        return self.exit_ret

    def __enter__(self):
        print u"enter"
        return self.value

def no_as():
    """
    >>> no_as()
    enter
    hello
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager(u"value"):
        print u"hello"

def basic():
    """
    >>> basic()
    enter
    value
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager(u"value") as x:
        print x

def with_pass():
    """
    >>> with_pass()
    enter
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager(u"value") as x:
        pass

def with_exception(exit_ret):
    """
    >>> with_exception(None)
    enter
    value
    exit <type> <MyException> <traceback>
    outer except
    >>> with_exception(True)
    enter
    value
    exit <type> <MyException> <traceback>
    """
    try:
        with ContextManager(u"value", exit_ret=exit_ret) as value:
            print value
            raise MyException()
    except:
        print u"outer except"

def multitarget():
    """
    >>> multitarget()
    enter
    1 2 3 4 5
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
        print a, b, c, d, e

def tupletarget():
    """
    >>> tupletarget()
    enter
    (1, 2, (3, (4, 5)))
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager((1, 2, (3, (4, 5)))) as t:
        print t

def typed():
    """
    >>> typed()
    enter
    10
    exit <NoneType> <NoneType> <NoneType>
    """
    cdef unsigned char i
    c = ContextManager(255)
    with c as i:
        i += 11
        print i

def multimanager():
    """
    >>> multimanager()
    enter
    enter
    enter
    enter
    enter
    enter
    2
    value
    1 2 3 4 5
    nested
    exit <NoneType> <NoneType> <NoneType>
    exit <NoneType> <NoneType> <NoneType>
    exit <NoneType> <NoneType> <NoneType>
    exit <NoneType> <NoneType> <NoneType>
    exit <NoneType> <NoneType> <NoneType>
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager(1), ContextManager(2) as x, ContextManager(u'value') as y,\
            ContextManager(3), ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
        with ContextManager(u'nested') as nested:
            print x
            print y
            print a, b, c, d, e
            print nested

# Tests borrowed from pyregr test_with.py,
# modified to follow the constraints of Cython.
import unittest

class Dummy(object):
    def __init__(self, value=None, gobble=False):
        if value is None:
            value = self
        self.value = value
        self.gobble = gobble
        self.enter_called = False
        self.exit_called = False

    def __enter__(self):
        self.enter_called = True
        return self.value

    def __exit__(self, *exc_info):
        self.exit_called = True
        self.exc_info = exc_info
        if self.gobble:
            return True

class InitRaises(object):
    def __init__(self): raise RuntimeError()

class EnterRaises(object):
    def __enter__(self): raise RuntimeError()
    def __exit__(self, *exc_info): pass

class ExitRaises(object):
    def __enter__(self): pass
    def __exit__(self, *exc_info): raise RuntimeError()

class NestedWith(unittest.TestCase):
    """
    >>> NestedWith().runTest()
    """

    def runTest(self):
        self.testNoExceptions()
        self.testExceptionInExprList()
        self.testExceptionInEnter()
        self.testExceptionInExit()
        self.testEnterReturnsTuple()

    def testNoExceptions(self):
        with Dummy() as a, Dummy() as b:
            self.assertTrue(a.enter_called)
            self.assertTrue(b.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(b.exit_called)

    def testExceptionInExprList(self):
        try:
            with Dummy() as a, InitRaises():
                pass
        except:
            pass
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInEnter(self):
        try:
            with Dummy() as a, EnterRaises():
                self.fail('body of bad with executed')
        except RuntimeError:
            pass
        else:
            self.fail('RuntimeError not reraised')
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInExit(self):
        body_executed = False
        with Dummy(gobble=True) as a, ExitRaises():
            body_executed = True
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(body_executed)
        self.assertNotEqual(a.exc_info[0], None)

    def testEnterReturnsTuple(self):
        with Dummy(value=(1,2)) as (a1, a2), \
             Dummy(value=(10, 20)) as (b1, b2):
            self.assertEqual(1, a1)
            self.assertEqual(2, a2)
            self.assertEqual(10, b1)
            self.assertEqual(20, b2)
Cython-0.26.1/tests/run/set.pyx0000664000175000017500000002112013143605603017130 0ustar  stefanstefan00000000000000
cimport cython


def cython_set():
    """
    >>> cython_set() is set
    True
    """
    assert set is cython.set
    return cython.set


def cython_frozenset():
    """
    >>> cython_frozenset() is frozenset
    True
    """
    assert frozenset is cython.frozenset
    return cython.frozenset


def cython_set_override():
    """
    >>> cython_set_override() is set
    True
    """
    set = 1
    return cython.set


def cython_frozenset_override():
    """
    >>> cython_frozenset_override() is frozenset
    True
    """
    frozenset = 1
    return cython.frozenset


def test_set_literal():
    """
    >>> type(test_set_literal()) is set
    True
    >>> sorted(test_set_literal())
    ['a', 'b', 1]
    """
    cdef set s1 = {1,'a',1,'b','a'}
    return s1


def test_set_add():
    """
    >>> type(test_set_add()) is set
    True
    >>> sorted(test_set_add())
    ['a', 1, (1, 2)]
    """
    cdef set s1
    s1 = set([1, (1, 2)])
    s1.add(1)
    s1.add('a')
    s1.add(1)
    s1.add((1,2))
    return s1


def test_set_update(v=None):
    """
    >>> type(test_set_update()) is set
    True
    >>> sorted(test_set_update())
    ['a', 'b', 'c', 1, 2, (1, 2)]
    >>> sorted(test_set_update([]))
    ['a', 'b', 'c', 1, 2, (1, 2)]
    >>> try: test_set_update(object())
    ... except TypeError: pass
    ... else: print("NOT RAISED!")
    """
    cdef set s1
    s1 = set([1, (1, 2)])
    s1.update((1,))
    s1.update('abc')
    s1.update(set([1]))
    s1.update(frozenset((1,2)))
    if v is not None:
        s1.update(v)
    return s1


def test_set_multi_update():
    """
    >>> type(test_set_multi_update()) is set
    True
    >>> sorted(test_set_multi_update())
    ['a', 'b', 'c', 1, 2, 3]
    """
    cdef set s1 = set()
    s1.update('abc', set([1, 3]), frozenset([1, 2]))
    return s1


def test_object_update(v=None):
    """
    >>> type(test_object_update()) is set
    True
    >>> sorted(test_object_update())
    ['a', 'b', 'c', 1, 2, (1, 2)]
    >>> sorted(test_object_update([]))
    ['a', 'b', 'c', 1, 2, (1, 2)]
    >>> try: test_object_update(object())
    ... except TypeError: pass
    ... else: print("NOT RAISED!")
    """
    cdef object s1
    s1 = set([1, (1, 2)])
    s1.update((1,))
    s1.update('abc')
    s1.update(set([1]))
    s1.update(frozenset((1,2)))
    if v is not None:
        s1.update(v)
    return s1


def test_set_clear():
    """
    >>> type(test_set_clear()) is set
    True
    >>> list(test_set_clear())
    []
    """
    cdef set s1
    s1 = set([1])
    s1.clear()
    return s1


def test_set_clear_None():
    """
    >>> test_set_clear_None()
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'clear'
    """
    cdef set s1 = None
    s1.clear()


def test_set_list_comp():
    """
    >>> type(test_set_list_comp()) is set
    True
    >>> sorted(test_set_list_comp())
    [0, 1, 2]
    """
    cdef set s1
    s1 = set([i%3 for i in range(5)])
    return s1


def test_frozenset_list_comp():
    """
    >>> type(test_frozenset_list_comp()) is frozenset
    True
    >>> sorted(test_frozenset_list_comp())
    [0, 1, 2]
    """
    cdef frozenset s1
    s1 = frozenset([i%3 for i in range(5)])
    return s1


def test_set_pop():
    """
    >>> type(test_set_pop()) is set
    True
    >>> list(test_set_pop())
    []
    """
    cdef set s1
    s1 = set()
    s1.add('2')
    two = s1.pop()
    return s1


@cython.test_fail_if_path_exists("//SimpleCallNode//NameNode")
def test_object_pop(s):
    """
    >>> s = set([2])
    >>> test_object_pop(s)
    2
    >>> list(s)
    []
    """
    return s.pop()


def test_noop_pop():
    """
    >>> test_noop_pop()
    """
    set([0]).pop()


def test_noop_pop_exception():
    """
    >>> try: test_noop_pop_exception()
    ... except KeyError: pass
    ... else: print("KeyError expected but not raised!")
    """
    set([]).pop()


def test_set_discard():
    """
    >>> type(test_set_discard()) is set
    True
    >>> sorted(test_set_discard())
    ['12', 233]
    """
    cdef set s1
    s1 = set()
    s1.add('12')
    s1.add(3)
    s1.add(233)
    s1.discard('3')
    s1.discard(3)
    return s1


def test_set_sideeffect_unhashable_failure():
    """
    >>> test_set_sideeffect_unhashable_failure()
    [2, 4, 5]
    """
    L = []
    def sideeffect(x):
        L.append(x)
        return x
    def unhashable_value(x):
        L.append(x)
        return set()
    try:
        s = set([1,sideeffect(2),3,unhashable_value(4),sideeffect(5)])
    except TypeError: pass
    else: assert False, "expected exception not raised"
    return L


def test_set_sideeffect_unhashable_failure_literal():
    """
    >>> test_set_sideeffect_unhashable_failure_literal()
    [2, 4, 5]
    """
    L = []
    def sideeffect(x):
        L.append(x)
        return x
    def unhashable_value(x):
        L.append(x)
        return set()
    try:
        s = {1,sideeffect(2),3,unhashable_value(4),sideeffect(5)}
    except TypeError: pass
    else: assert False, "expected exception not raised"
    return L


def test_frozenset_sideeffect_unhashable_failure():
    """
    >>> test_frozenset_sideeffect_unhashable_failure()
    [2, 4, 5]
    """
    L = []
    def sideeffect(x):
        L.append(x)
        return x
    def unhashable_value(x):
        L.append(x)
        return set()
    try:
        s = frozenset([1,sideeffect(2),3,unhashable_value(4),sideeffect(5)])
    except TypeError: pass
    else: assert False, "expected exception not raised"
    return L


@cython.test_assert_path_exists("//SetNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//PythonCapiCallNode"
)
def test_set_of_list():
    """
    >>> s = test_set_of_list()
    >>> isinstance(s, set)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return set([1, 2, 3])


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//SetNode")
def test_frozenset_of_list():
    """
    >>> s = test_frozenset_of_list()
    >>> isinstance(s, frozenset)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return frozenset([1, 2, 3])


@cython.test_assert_path_exists("//SetNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def test_set_of_tuple():
    """
    >>> s = test_set_of_tuple()
    >>> isinstance(s, set)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return set((1, 2, 3))


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//SetNode")
def test_frozenset_of_tuple():
    """
    >>> s = test_frozenset_of_tuple()
    >>> isinstance(s, frozenset)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return frozenset((1, 2, 3))


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//SetNode"
)
def test_set_of_iterable(x):
    """
    >>> s = test_set_of_iterable([1, 2, 3])
    >>> isinstance(s, set)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return set(x)


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//SetNode"
)
def test_frozenset_of_iterable(x):
    """
    >>> s = test_frozenset_of_iterable([1, 2, 3])
    >>> isinstance(s, frozenset)
    True
    >>> sorted(s)
    [1, 2, 3]

    >>> s = test_frozenset_of_iterable(frozenset([1, 2, 3]))
    >>> isinstance(s, frozenset)
    True
    >>> sorted(s)
    [1, 2, 3]
    """
    return frozenset(x)


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//SetNode"
)
def test_empty_frozenset():
    """
    >>> s = test_empty_frozenset()
    >>> isinstance(s, frozenset)
    True
    >>> len(s)
    0
    >>> s is frozenset()   # singleton!
    True
    """
    return frozenset()


@cython.test_fail_if_path_exists(
    '//ListNode//ListNode',
    '//ListNode//PythonCapiCallNode//PythonCapiCallNode',
    '//ListNode//SimpleCallNode//SimpleCallNode',
)
def test_singleton_empty_frozenset():
    """
    >>> test_singleton_empty_frozenset()  # from CPython's test_set.py
    1
    """
    f = frozenset()
    efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''),
           frozenset(), frozenset([]), frozenset(()), frozenset(''),
           frozenset(range(0)), frozenset(frozenset()),
           frozenset(f), f]
    return len(set(map(id, efs)))


def sorted(it):
    # Py3 can't compare different types
    chars = []
    nums = []
    tuples = []
    for item in it:
        if type(item) is int:
            nums.append(item)
        elif type(item) is tuple:
            tuples.append(item)
        else:
            chars.append(item)
    nums.sort()
    chars.sort()
    tuples.sort()
    return chars+nums+tuples
Cython-0.26.1/tests/run/cython2_bytes.pyx0000664000175000017500000000030012542002467021127 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-
# cython: language_level=2

b = b'abcüöä \x12'

cdef char* cs = 'abcüöä \x12'

def compare_cs():
    """
    >>> b == compare_cs()
    True
    """
    return cs
Cython-0.26.1/tests/run/call_py_cy.pyx0000664000175000017500000001542613023021033020451 0ustar  stefanstefan00000000000000# mode: run
# tag: cyfunction,call,python
# cython: binding=True

#######
# Test that Cython and Python functions can call each other in various signature combinations.
#######

py_call_noargs = eval("lambda: 'noargs'")
py_call_onearg = eval("lambda arg: arg")
py_call_twoargs = eval("lambda arg, arg2: (arg, arg2)")
py_call_starargs = eval("lambda *args: args")
py_call_pos_and_starargs = eval("lambda arg, *args: (arg, args)")
py_call_starstarargs = eval("lambda **kw: sorted(kw.items())")
py_call_args_and_starstarargs = eval("lambda *args, **kw: (args, sorted(kw.items()))")


def cy_call_noargs():
    """
    >>> cy_call_noargs()
    'noargs'
    """
    return py_call_noargs()


def cy_call_onearg(f):
    """
    >>> cy_call_onearg(py_call_onearg)
    'onearg'
    >>> try: cy_call_onearg(py_call_noargs)
    ... except TypeError: pass
    ... else: print("FAILED!")
    >>> try: cy_call_onearg(py_call_twoargs)
    ... except TypeError: pass
    ... else: print("FAILED!")

    >>> class Class(object):
    ...     def method(self, arg): return arg

    >>> cy_call_onearg(Class().method)
    'onearg'
    """
    return f('onearg')


def cy_call_twoargs(f, arg):
    """
    >>> cy_call_twoargs(py_call_twoargs, 132)
    (132, 'twoargs')

    >>> class Class2(object):
    ...     def method(self, arg, arg2): return arg, arg2
    >>> cy_call_twoargs(Class2().method, 123)
    (123, 'twoargs')

    >>> class Class1(object):
    ...     def method(self, arg): return arg
    >>> cy_call_twoargs(Class1.method, Class1())
    'twoargs'
    """
    return f(arg, 'twoargs')


def cy_call_two_kwargs(f, arg):
    """
    >>> cy_call_two_kwargs(py_call_twoargs, arg=132)
    (132, 'two-kwargs')
    >>> cy_call_two_kwargs(f=py_call_twoargs, arg=132)
    (132, 'two-kwargs')
    >>> cy_call_two_kwargs(arg=132, f=py_call_twoargs)
    (132, 'two-kwargs')

    >>> class Class(object):
    ...     def method(self, arg, arg2): return arg, arg2

    >>> cy_call_two_kwargs(Class().method, 123)
    (123, 'two-kwargs')
    """
    return f(arg2='two-kwargs', arg=arg)


def cy_call_starargs(*args):
    """
    >>> cy_call_starargs()
    ()
    >>> cy_call_starargs(1)
    (1,)
    >>> cy_call_starargs(1, 2)
    (1, 2)
    >>> cy_call_starargs(1, 2, 3)
    (1, 2, 3)
    """
    return py_call_starargs(*args)


def cy_call_pos_and_starargs(f, *args):
    """
    >>> cy_call_pos_and_starargs(py_call_onearg)
    'no-arg'
    >>> cy_call_pos_and_starargs(py_call_onearg, 123)
    123
    >>> cy_call_pos_and_starargs(py_call_twoargs, 123, 321)
    (123, 321)
    >>> cy_call_pos_and_starargs(py_call_starargs)
    ('no-arg',)
    >>> cy_call_pos_and_starargs(py_call_starargs, 123)
    (123,)
    >>> cy_call_pos_and_starargs(py_call_starargs, 123, 321)
    (123, 321)
    >>> cy_call_pos_and_starargs(py_call_pos_and_starargs)
    ('no-arg', ())
    >>> cy_call_pos_and_starargs(py_call_pos_and_starargs, 123)
    (123, ())
    >>> cy_call_pos_and_starargs(py_call_pos_and_starargs, 123, 321)
    (123, (321,))
    >>> cy_call_pos_and_starargs(py_call_pos_and_starargs, 123, 321, 234)
    (123, (321, 234))

    >>> class Class(object):
    ...     def method(self, arg, arg2): return arg, arg2

    >>> cy_call_pos_and_starargs(Class().method, 123, 321)
    (123, 321)
    >>> cy_call_pos_and_starargs(Class.method, Class(), 123, 321)
    (123, 321)
    """
    return f(args[0] if args else 'no-arg', *args[1:])


def cy_call_starstarargs(**kw):
    """
    >>> kw = {}
    >>> cy_call_starstarargs(**kw)
    []
    >>> kw = {'a': 123}
    >>> cy_call_starstarargs(**kw)
    [('a', 123)]
    >>> kw = {'a': 123, 'b': 321}
    >>> cy_call_starstarargs(**kw)
    [('a', 123), ('b', 321)]
    """
    return py_call_starstarargs(**kw)


def cy_call_kw_and_starstarargs(f=None, arg1=None, **kw):
    """
    >>> kw = {}
    >>> cy_call_kw_and_starstarargs(**kw)
    [('arg', None)]
    >>> try: cy_call_kw_and_starstarargs(py_call_noargs, **kw)
    ... except TypeError: pass
    >>> try: cy_call_kw_and_starstarargs(py_call_twoargs, **kw)
    ... except TypeError: pass
    ... else: print("FAILED!")
    >>> cy_call_kw_and_starstarargs(py_call_onearg, **kw)
    >>> cy_call_kw_and_starstarargs(f=py_call_onearg, **kw)
    >>> cy_call_kw_and_starstarargs(py_call_pos_and_starargs, **kw)
    (None, ())

    >>> kw = {'arg1': 123}
    >>> cy_call_kw_and_starstarargs(**kw)
    [('arg', 123)]
    >>> cy_call_kw_and_starstarargs(py_call_onearg, **kw)
    123
    >>> cy_call_kw_and_starstarargs(f=py_call_onearg, **kw)
    123
    >>> cy_call_kw_and_starstarargs(py_call_twoargs, arg2=321, **kw)
    (123, 321)
    >>> cy_call_kw_and_starstarargs(f=py_call_twoargs, arg2=321, **kw)
    (123, 321)
    >>> try: cy_call_kw_and_starstarargs(py_call_twoargs, **kw)
    ... except TypeError: pass
    ... else: print("FAILED!")
    >>> try: cy_call_kw_and_starstarargs(py_call_twoargs, arg2=321, other=234, **kw)
    ... except TypeError: pass
    ... else: print("FAILED!")
    >>> cy_call_kw_and_starstarargs(py_call_pos_and_starargs, **kw)
    (123, ())

    >>> try: cy_call_kw_and_starstarargs(arg=321, **kw)   # duplicate kw in Python call
    ... except TypeError: pass
    ... else: print("FAILED!")

    >>> kw = {'a': 123}
    >>> cy_call_kw_and_starstarargs(**kw)
    [('a', 123), ('arg', None)]
    >>> cy_call_kw_and_starstarargs(arg1=321, **kw)
    [('a', 123), ('arg', 321)]

    >>> kw = {'a': 123, 'b': 321}
    >>> cy_call_kw_and_starstarargs(**kw)
    [('a', 123), ('arg', None), ('b', 321)]
    >>> cy_call_kw_and_starstarargs(arg1=234, **kw)
    [('a', 123), ('arg', 234), ('b', 321)]

    >>> class Class2(object):
    ...     def method(self, arg, arg2): return arg, arg2

    >>> cy_call_kw_and_starstarargs(Class2().method, arg1=123, arg2=321)
    (123, 321)
    """
    return (f or py_call_starstarargs)(arg=arg1, **kw)


def cy_call_pos_and_starstarargs(f=None, arg1=None, **kw):
    """
    >>> cy_call_pos_and_starstarargs(arg=123)
    ((None,), [('arg', 123)])
    >>> cy_call_pos_and_starstarargs(arg1=123)
    ((123,), [])
    >>> cy_call_pos_and_starstarargs(arg=123, arg2=321)
    ((None,), [('arg', 123), ('arg2', 321)])
    >>> cy_call_pos_and_starstarargs(arg1=123, arg2=321)
    ((123,), [('arg2', 321)])

    >>> class Class2(object):
    ...     def method(self, arg, arg2=None): return arg, arg2

    >>> cy_call_pos_and_starstarargs(Class2().method, 123)
    (123, None)
    >>> cy_call_pos_and_starstarargs(Class2().method, 123, arg2=321)
    (123, 321)
    >>> cy_call_pos_and_starstarargs(Class2().method, arg1=123, arg2=321)
    (123, 321)
    >>> cy_call_pos_and_starstarargs(Class2.method, Class2(), arg=123)
    (123, None)
    >>> cy_call_pos_and_starstarargs(Class2.method, Class2(), arg=123, arg2=321)
    (123, 321)
    >>> cy_call_pos_and_starstarargs(Class2.method, arg1=Class2(), arg=123, arg2=321)
    (123, 321)
    """
    return (f or py_call_args_and_starstarargs)(arg1, **kw)
Cython-0.26.1/tests/run/tryfinallychaining.pyx0000664000175000017500000000205612542002467022243 0ustar  stefanstefan00000000000000# mode: run
# tag: exceptions, tryfinally

import sys
IS_PY3 = sys.version_info[0] >= 3


def test_finally_c():
    """
    >>> def test_finally_py():
    ...     try:
    ...         raise AttributeError()
    ...     finally:
    ...         raise KeyError()

    >>> try:
    ...     test_finally_py()
    ... except KeyError:
    ...     print(sys.exc_info()[0] is KeyError or sys.exc_info()[0])
    ...     if IS_PY3:
    ...         print(isinstance(sys.exc_info()[1].__context__, AttributeError)
    ...               or sys.exc_info()[1].__context__)
    ...     else:
    ...         print(True)
    True
    True

    >>> try:
    ...     test_finally_c()
    ... except KeyError:
    ...     print(sys.exc_info()[0] is KeyError or sys.exc_info()[0])
    ...     if IS_PY3:
    ...         print(isinstance(sys.exc_info()[1].__context__, AttributeError)
    ...               or sys.exc_info()[1].__context__)
    ...     else:
    ...         print(True)
    True
    True
    """
    try:
        raise AttributeError()
    finally:
        raise KeyError()
Cython-0.26.1/tests/run/isnot.pyx0000664000175000017500000000216512542002467017502 0ustar  stefanstefan00000000000000# mode: run
# tag: is_not

cimport cython

@cython.test_fail_if_path_exists('//NotNode')
def is_not(a, b):
    """
    >>> is_not(1, 2)
    True
    >>> x = 1
    >>> is_not(x, x)
    False
    """
    return a is not b


@cython.test_fail_if_path_exists('//NotNode')
def not_is_not(a, b):
    """
    >>> not_is_not(1, 2)
    False
    >>> x = 1
    >>> not_is_not(x, x)
    True
    """
    return not a is not b


@cython.test_fail_if_path_exists('//NotNode')
def not_is(a, b):
    """
    >>> not_is(1, 2)
    True
    >>> x = 1
    >>> not_is(x, x)
    False
    """
    return not a is b


@cython.test_fail_if_path_exists('//NotNode')
def is_not_None(a):
    """
    >>> is_not_None(1)
    True
    >>> is_not_None(None)
    False
    """
    return a is not None


@cython.test_fail_if_path_exists('//NotNode')
def not_is_not_None(a):
    """
    >>> not_is_not_None(1)
    False
    >>> not_is_not_None(None)
    True
    """
    return not a is not None


@cython.test_fail_if_path_exists('//NotNode')
def not_is_None(a):
    """
    >>> not_is_None(1)
    True
    >>> not_is_None(None)
    False
    """
    return not a is None
Cython-0.26.1/tests/run/posix_resource.pyx0000664000175000017500000000115012542002467021410 0ustar  stefanstefan00000000000000# tag: posix

from posix.unistd cimport *
from posix.resource cimport *


def test_getpriority():
    """
    >>> test_getpriority()
    0
    """
    ret = getpriority(PRIO_PROCESS, getpid())
    # DISABLED - does not work on current test server
    return 0  # ret


def test_getrlimit():
    """
    >>> test_getrlimit()
    0
    True
    """
    cdef rlimit rlim
    rlim.rlim_cur = 0

    ret = getrlimit(RLIMIT_CPU, &rlim)
    print(ret)
    return rlim.rlim_cur != 0


def test_getrusage():
    """
    >>> test_getrusage()
    0
    """
    cdef rusage r
    ret = getrusage(RUSAGE_SELF, &r)
    return ret
Cython-0.26.1/tests/run/kwonlyargs.pyx0000664000175000017500000000722112542002467020544 0ustar  stefanstefan00000000000000def b(a, b, c):
    """
    >>> b(1,2,3)
    >>> b(1,2,3,4)
    Traceback (most recent call last):
    TypeError: b() takes exactly 3 positional arguments (4 given)
    """
    a, b, c = b, c, a

def c(a, b, c=1):
    """
    >>> c(1,2)
    >>> c(1,2,3)
    >>> c(1,2,3,4)
    Traceback (most recent call last):
    TypeError: c() takes at most 3 positional arguments (4 given)
    """
    a, b, c = b, c, a

def d(a, b, *, c = 88):
    """
    >>> d(1,2)
    >>> d(1,2, c=1)
    >>> d(1,2,3)
    Traceback (most recent call last):
    TypeError: d() takes exactly 2 positional arguments (3 given)
    >>> d(1,2, d=1)
    Traceback (most recent call last):
    TypeError: d() got an unexpected keyword argument 'd'
    """
    a, b, c = b, c, a

def e(a, b, c = 88, **kwds):
    """
    >>> e(1,2)
    >>> e(1,2, c=1)
    >>> e(1,2, d=1)
    >>> e(1,2, c=1, d=2, e=3)
    >>> e(1,2,3)
    >>> e(1,2,3,4)
    Traceback (most recent call last):
    TypeError: e() takes at most 3 positional arguments (4 given)
    """
    a, b, c = b, c, a

def f(a, b, *, c, d = 42):
    """
    >>> f(1,2, c=1)
    >>> f(1,2, c=1, d=2)
    >>> f(1,2,3)
    Traceback (most recent call last):
    TypeError: f() takes exactly 2 positional arguments (3 given)
    >>> f(1,2)
    Traceback (most recent call last):
    TypeError: f() needs keyword-only argument c
    >>> f(1,2, c=1, e=2)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'e'
    """
    a, b, c, d = b, c, d, a

def g(a, b, *, c, d = 42, e = 17, f, **kwds):
    """
    >>> g(1,2, c=1, f=2)
    >>> g(1,2, c=1, e=0, f=2, d=11)
    >>> g(1,2, c=1, f=2, e=0, x=25)
    >>> g(1,2,3)
    Traceback (most recent call last):
    TypeError: g() takes exactly 2 positional arguments (3 given)
    >>> g(1,2)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument c
    >>> g(1,2, c=1)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument f
    """
    a, b, c, d, e, f = b, c, d, e, f, a

def h(a, b, *args, c, d = 42, e = 17, f, **kwds):
    """
    >>> h(1,2, c=1, f=2)
    >>> h(1,2, c=1, f=2, e=3)
    >>> h(1,2,3,4,5,6, c=1, f=2)
    >>> h(1,2,3,4,5,6, c=1, f=2, e=3, x=25, y=11)
    >>> h(1,2,3)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    >>> h(1,2, d=1)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    """
    a, b, c, d, e, f = b, c, d, e, f, a

def k(a, b, c=1, *args, d = 42, e = 17, f, **kwds):
    """
    >>> k(1,2, c=1, f=2)
    >>> k(1,2, c=1, f=2, e=3)
    >>> k(1,2,3,4,5,6, d=1, f=2)
    >>> k(1,2,3,4,5,6, d=1, f=2, e=3, x=25, y=11)
    >>> k(1,2,3)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    >>> k(1,2, d=1)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    """
    a, b, c, d, e, f = b, c, d, e, f, a

def l(*, a, b, c = 88):
    """
    >>> l(a=1, b=2)
    >>> l(a=1, b=2, c=1)
    >>> l(1,2,3)
    Traceback (most recent call last):
    TypeError: l() takes exactly 0 positional arguments (3 given)
    >>> l(1,2, d=1)
    Traceback (most recent call last):
    TypeError: l() takes exactly 0 positional arguments (2 given)
    >>> l(1,2,3)
    Traceback (most recent call last):
    TypeError: l() takes exactly 0 positional arguments (3 given)
    >>> l(1,2, d=1)
    Traceback (most recent call last):
    TypeError: l() takes exactly 0 positional arguments (2 given)
    """
    a, b, c = b, c, a

def m(a, *, b, c = 88):
    """
    >>> m(1, b=2)
    >>> m(a=1, b=2)
    >>> m(a=1, b=2, c=1)
    """
    a, b, c = b, c, a

def n(a, *, b, c = 88):
    a, b, c = b, c, a
Cython-0.26.1/tests/run/defnode_err_val.pyx0000664000175000017500000000047412542002467021465 0ustar  stefanstefan00000000000000# mode: run

cdef class TestErrVal(object):
    def __cinit__(self, TestErrVal a):
        pass


def test_errval():
    """
    >>> test_errval()
    Traceback (most recent call last):
    ...
    TypeError: Argument 'a' has incorrect type (expected defnode_err_val.TestErrVal, got int)
    """
    TestErrVal(123)
Cython-0.26.1/tests/run/cdef_members_T517.pyx0000664000175000017500000000541612542002467021503 0ustar  stefanstefan00000000000000# ticket: 517
#cython: embedsignature=True

__doc__ = u"""
>>> a = A()
>>> a.h = 7
>>> a.i = 127
>>> a.l = 255
>>> a.q = 255
>>> a.f = 1.0/2.0
>>> a.d = 1/2.0 + 1/4.0
>>> a.g = 1/2.0 + 1/4.0 + 1/8.0
>>> a.Zf = 1+2j
>>> a.Zd = 3+4j
>>> a.Zg = 5+6j

>>> a.h, a.i, a.l
(7, 127, 255)
>>> a.ro_h, a.ro_i, a.ro_l
(7, 127, 255)
>>> a.f, a.d, a.g
(0.5, 0.75, 0.875)
>>> a.ro_f, a.ro_d, a.ro_g
(0.5, 0.75, 0.875)
>>> a.Zf, a.Zd, a.Zg
((1+2j), (3+4j), (5+6j))
>>> a.ro_Zf, a.ro_Zd, a.ro_Zg
((1+2j), (3+4j), (5+6j))

>>> b = B()
>>> b.a0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> b.b0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> b.c0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> isinstance(b.a1, type(None))
True
>>> isinstance(b.a2, type(None))
True
>>> isinstance(b.b1, list)
True
>>> isinstance(b.b2, list)
True
>>> isinstance(b.c1, A)
True
>>> isinstance(b.c2, A)
True

>>> b.a1 = a
>>> b.a1 is not b.a2
True

TYPE_FIXES_REQUIRED:

>>> try: b.b1 = 1
... except (TypeError, AttributeError): pass

>>> try: b.c1 = 1
... except (TypeError, AttributeError): pass

>>> try: b.a2 = None
... except (TypeError, AttributeError): pass

>>> try: b.b2 = []
... except (TypeError, AttributeError): pass

>>> try: b.c2 = A()
... except (TypeError, AttributeError): pass
"""

import sys
if sys.version_info < (2,5):
    __doc__ = (__doc__.split('TYPE_FIXES_REQUIRED')[0] +
               __doc__.split('TYPE_FIXES_REQUIRED')[1].replace('\nAttributeError: ...', '\nTypeError: ...'))


cdef class A:

    cdef public short h
    cdef public int i
    cdef public long l
    cdef public long long q
    cdef public float f
    cdef public double d
    cdef public long double g
    cdef public float complex Zf
    cdef public double complex Zd
    cdef public long double complex Zg

    cdef readonly short ro_h
    cdef readonly int ro_i
    cdef readonly long ro_l
    cdef readonly long long ro_q
    cdef readonly float ro_f
    cdef readonly double ro_d
    cdef readonly long double ro_g
    cdef readonly float complex ro_Zf
    cdef readonly double complex ro_Zd
    cdef readonly long double complex ro_Zg

    def __cinit__(self):
        self.ro_h = 7
        self.ro_i = 127
        self.ro_l = 255
        self.ro_q = 255
        self.ro_f = 1.0/2.0
        self.ro_d = 1/2.0 + 1/4.0
        self.ro_g = 1/2.0 + 1/4.0 + 1/8.0
        self.ro_Zf = 1+2j
        self.ro_Zd = 3+4j
        self.ro_Zg = 5+6j


cdef class B:

    cdef object a0
    cdef public object a1
    cdef readonly object a2

    cdef list b0
    cdef public list b1
    cdef readonly list b2

    cdef A c0
    cdef public A c1
    cdef readonly A c2

    def __cinit__(self):
        self.b0 = self.b1 = self.b2 = []
        self.c0 = self.c1 = self.c2 = A()
Cython-0.26.1/tests/run/genexpr_iterable_lookup_T600.pyx0000664000175000017500000000406012542002467023763 0ustar  stefanstefan00000000000000# mode: run
# ticket: 600
# tag: genexpr
# cython: language_level=3

cimport cython

#@cython.test_assert_path_exists('//ComprehensionNode')
#@cython.test_fail_if_path_exists('//SimpleCallNode')
def list_genexpr_iterable_lookup():
    """
    >>> x = (0,1,2,3,4,5)
    >>> [ x*2 for x in x if x % 2 == 0 ]  # leaks in Py2 but finds the right 'x'
    [0, 4, 8]

    >>> list_genexpr_iterable_lookup()
    [0, 4, 8]
    """
    x = (0,1,2,3,4,5)
    result = list( x*2 for x in x if x % 2 == 0 )
    assert x == (0,1,2,3,4,5)
    return result


#@cython.test_assert_path_exists('//ComprehensionNode')
#@cython.test_fail_if_path_exists('//SingleAssignmentNode//SimpleCallNode')
def genexpr_iterable_in_closure():
    """
    >>> genexpr_iterable_in_closure()
    ['aa', 'cc']
    """
    x = 'abc'
    def f():
        return x
    result = list( x*2 for x in x if x != 'b' )
    assert x == 'abc' # don't leak in Py3 code
    assert f() == 'abc' # don't leak in Py3 code
    return result


def genexpr_over_complex_arg(func, L):
    """
    >>> class wrapper(object):
    ...     value = 5
    >>> genexpr_over_complex_arg(list, wrapper())
    [5]
    """
    return func(d for d in set([type(L).value, L.__class__.value, L.value]))


def listcomp():
    """
    >>> listcomp()
    """
    data = [('red', 5), ('blue', 1), ('yellow', 8), ('black', 0)]
    data.sort(key=lambda r: r[1])
    keys = [r[1] for r in data]
    return keys


def genexpr_in_listcomp(L):
    """
    >>> genexpr_in_listcomp( [[1,2,3]]*2 )
    [[1, 2, 3], [1, 2, 3]]
    """
    return list(d for d in [list(d for d in d) for d in L])


@cython.test_assert_path_exists('//ForFromStatNode')
def genexpr_range_in_listcomp(L):
    """
    >>> genexpr_range_in_listcomp( [1,2,3] )
    [[0], [0, 1], [0, 1, 2]]
    """
    cdef int z,d
    return [list(d for d in range(z)) for z in L]


@cython.test_fail_if_path_exists('//ForInStatNode')
def genexpr_in_dictcomp_dictiter():
    """
    >>> sorted(genexpr_in_dictcomp_dictiter())
    [1, 5]
    """
    d = {1:2, 3:4, 5:6}
    return {k:d for k,d in d.iteritems() if d != 4}
Cython-0.26.1/tests/run/cpp_smart_ptr_helper.h0000664000175000017500000000074013023021033022147 0ustar  stefanstefan00000000000000class CountAllocDealloc {
  public:
      CountAllocDealloc(int* alloc_count, int* dealloc_count)
          : _alloc_count(alloc_count), _dealloc_count(dealloc_count) {
        (*_alloc_count)++;
      }
      ~CountAllocDealloc() {
        (*_dealloc_count)++;
      }
  private:
    int* _alloc_count;
    int* _dealloc_count;
};

template <typename T>
struct FreePtr {
  void operator()( T * t ) noexcept
  {
    if(t != nullptr) {
      delete t;
      t=nullptr;
    }
  }
};
Cython-0.26.1/tests/run/yield_from_py33.pyx0000664000175000017500000000065512542002467021357 0ustar  stefanstefan00000000000000# mode: run
# tag: generator

def yield_from_gen(values):
    """
    >>> def yf(x): yield from x
    >>> list(yf(yield_from_gen([1, 2, 3, 4])))
    [1, 2, 3, 4]
    """
    for value in values:
        yield value


def yield_from_gen_return(values):
    """
    >>> def yf(x): yield from x
    >>> list(yf(yield_from_gen_return([1, 2, 3, 4])))
    [1, 2, 3, 4]
    """
    for value in values:
        yield value
    return 5
Cython-0.26.1/tests/run/r_starargcall.pyx0000664000175000017500000000027412542002467021165 0ustar  stefanstefan00000000000000def spam(a, b, c):
    print u"Args:", a, b, c

def eggs():
    """
    >>> eggs()
    Args: 1 2 3
    Args: buckle my shoe
    """
    spam(*(1,2,3))
    spam(*[u"buckle",u"my",u"shoe"])
Cython-0.26.1/tests/run/cimport.srctree0000664000175000017500000000114413143605603020645 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import a"

######## setup.py ########


from Cython.Build import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## other.pxd ########

cdef class A:
    pass

cdef int foo(int)

######## other.pyx ########

cdef class A:
    pass

cdef int foo(int a):
     return a**2

######## pkg/__init__.py ########


######## pkg/sub.pxd ########

ctypedef int my_int

######## a.pyx ########

from other cimport A, foo
print A, foo(10)

cimport other
print other.A, other.foo(10)

from pkg cimport sub
cdef sub.my_int a = 100
Cython-0.26.1/tests/run/slice2_T636.py0000664000175000017500000000154012542002467020055 0ustar  stefanstefan00000000000000# mode: run
# ticket: 636
# tag: slicing, getitem

class Sliceable(object):
    """
    >>> sl = Sliceable()

    >>> sl[1:2]
    (1, 2, None)
    >>> py_slice2(sl, 1, 2)
    (1, 2, None)

    >>> sl[1:None]
    (1, None, None)
    >>> py_slice2(sl, 1, None)
    (1, None, None)

    >>> sl[None:2]
    (None, 2, None)
    >>> py_slice2(sl, None, 2)
    (None, 2, None)

    >>> sl[None:None]
    (None, None, None)
    >>> py_slice2(sl, None, None)
    (None, None, None)
    """
    def __getitem__(self, sl):
        return (sl.start, sl.stop, sl.step)

def py_slice2(obj,a,b):
    """
    >>> [1,2,3][1:2]
    [2]
    >>> py_slice2([1,2,3], 1, 2)
    [2]

    >>> [1,2,3][None:2]
    [1, 2]
    >>> py_slice2([1,2,3], None, 2)
    [1, 2]

    >>> [1,2,3][None:None]
    [1, 2, 3]
    >>> py_slice2([1,2,3], None, None)
    [1, 2, 3]
    """
    return obj[a:b]
Cython-0.26.1/tests/run/cpp_template_functions_helper.h0000664000175000017500000000256613143605603024065 0ustar  stefanstefan00000000000000template <typename T>
T no_arg() {
    return T();
}

template <typename T>
T one_param(T value) {
    return value;
}

template <typename T, typename U>
std::pair<T, U> two_params(T a, U b) {
    return std::pair<T, U>(a, b);
}

template <typename T>
class A {
    public:
        template <typename U>
        std::pair<T, U> method(T a, U b) {
            return std::pair<T, U>(a, b);
        }
        template <typename U>
        U part_method(std::pair<T, U> p) {
            return p.second;
        }
        template <typename U>
        U part_method_ref(const std::pair<T, U>& p) {
            return p.second;
        }

        int overloaded(double d) {
            return (int) d;
        }
        T overloaded(std::pair<T, T> p) {
            return p.first;
        }
        template <typename U>
        U overloaded(std::vector<U> v) {
            return v[0];
        }
        template <typename U>
        U overloaded(char* c, std::vector<U> v) {
            return v[0];
        }
};

template <typename T>
T nested_deduction(const T *a) {
    return *a;
}

template <typename T, typename U>
std::pair<T, U> pair_arg(std::pair<T, U> a) {
    return a;
}

template <typename T>
T* pointer_param(T* param) {
    return param;
}

class double_pair : public std::pair<double, double> {
  public:
    double_pair(double x, double y) : std::pair<double, double>(x, y) { };
};
Cython-0.26.1/tests/run/withstat_py.py0000664000175000017500000001027112542002467020532 0ustar  stefanstefan00000000000000import sys


def typename(t):
    name = type(t).__name__
    if sys.version_info < (2,5):
        if name == 'classobj' and issubclass(t, MyException):
            name = 'type'
        elif name == 'instance' and isinstance(t, MyException):
            name = 'MyException'
    return "" % name


class MyException(Exception):
    pass


class ContextManager(object):
    def __init__(self, value, exit_ret = None):
        self.value = value
        self.exit_ret = exit_ret

    def __exit__(self, a, b, tb):
        print("exit %s %s %s" % (typename(a), typename(b), typename(tb)))
        return self.exit_ret

    def __enter__(self):
        print("enter")
        return self.value


def no_as():
    """
    >>> no_as()
    enter
    hello
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager("value"):
        print("hello")


def basic():
    """
    >>> basic()
    enter
    value
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager("value") as x:
        print(x)


def with_pass():
    """
    >>> with_pass()
    enter
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager("value") as x:
        pass


def with_return():
    """
    >>> print(with_return())
    enter
    exit <NoneType> <NoneType> <NoneType>
    value
    """
    with ContextManager("value") as x:
        return x


def with_break():
    """
    >>> print(with_break())
    enter
    exit <NoneType> <NoneType> <NoneType>
    a
    """
    for c in list("abc"):
        with ContextManager("value") as x:
            break
        print("FAILED")
    return c


def with_continue():
    """
    >>> print(with_continue())
    enter
    exit <NoneType> <NoneType> <NoneType>
    enter
    exit <NoneType> <NoneType> <NoneType>
    enter
    exit <NoneType> <NoneType> <NoneType>
    c
    """
    for c in list("abc"):
        with ContextManager("value") as x:
            continue
        print("FAILED")
    return c


def with_exception(exit_ret):
    """
    >>> with_exception(None)
    enter
    value
    exit <type> <MyException> <traceback>
    outer except
    >>> with_exception(True)
    enter
    value
    exit <type> <MyException> <traceback>
    """
    try:
        with ContextManager("value", exit_ret=exit_ret) as value:
            print(value)
            raise MyException()
    except:
        print("outer except")


def with_real_lock():
    """
    >>> with_real_lock()
    about to acquire lock
    holding lock
    lock no longer held
    """
    from threading import Lock
    lock = Lock()

    print("about to acquire lock")

    with lock:
        print("holding lock")

    print("lock no longer held")


def functions_in_with():
    """
    >>> f = functions_in_with()
    enter
    exit <type> <MyException> <traceback>
    outer except
    >>> f(1)[0]
    1
    >>> print(f(1)[1])
    value
    """
    try:
        with ContextManager("value") as value:
            def f(x): return x, value
            make = lambda x:x()
            raise make(MyException)
    except:
        print("outer except")
    return f


def multitarget():
    """
    >>> multitarget()
    enter
    1 2 3 4 5
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
        print('%s %s %s %s %s' % (a, b, c, d, e))


def tupletarget():
    """
    >>> tupletarget()
    enter
    (1, 2, (3, (4, 5)))
    exit <NoneType> <NoneType> <NoneType>
    """
    with ContextManager((1, 2, (3, (4, 5)))) as t:
        print(t)


class GetManager(object):
    def get(self, *args):
        return ContextManager(*args)


def manager_from_expression():
    """
    >>> manager_from_expression()
    enter
    1
    exit <NoneType> <NoneType> <NoneType>
    enter
    2
    exit <NoneType> <NoneType> <NoneType>
    """
    with GetManager().get(1) as x:
        print(x)
    g = GetManager()
    with g.get(2) as x:
        print(x)
Cython-0.26.1/tests/run/dynamic_attributes.pxd0000664000175000017500000000005413023021033022167 0ustar  stefanstefan00000000000000cdef class MegaSpam:
    cdef dict __dict__
Cython-0.26.1/tests/run/autotestdict_skip.pyx0000664000175000017500000000051712542002467022107 0ustar  stefanstefan00000000000000#cython: autotestdict=True

"""
Tests that autotestdict doesn't come into effect when
a __test__ is defined manually.

If this doesn't work, then the function doctest should fail.

>>> True
True
"""

import sys

def func():
    """
    >>> sys.version_info < (3, 4)
    False
    """

__test__ = {
    u"one" : """
>>> True
True
"""
}
Cython-0.26.1/tests/run/scanner_trace.srctree0000664000175000017500000000055512542531613022005 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace

######## setup.py ###########

from distutils.core import setup
from Cython.Build import cythonize

import Cython.Compiler.Scanning

Cython.Compiler.Scanning.trace_scanner = 1

setup(
    ext_modules = cythonize("*.pyx")
)

import simple
assert simple.test() == 123


######## simple.pyx ###########

def test():
    return 123
Cython-0.26.1/tests/run/inlinepxd.pyx0000664000175000017500000000052313143605603020333 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> f()
3
>>> g()
6
>>> h()
6
>>> i()
6
>>> j()
6
"""

cimport inlinepxd_support
from inlinepxd_support cimport my_add as my_add3

def f():
    return my_add(1, 2)

def g():
    return inlinepxd_support.my_add(1, 2, 3)

def h():
    return my_add3(1, 2, 3)

def i():
    return my_add3(5)

def j():
    return my_add3(2, 4)
Cython-0.26.1/tests/run/nonlocal_T490.pyx0000664000175000017500000000442312542002467020672 0ustar  stefanstefan00000000000000def simple():
    """
    >>> simple()
    1
    2
    """
    x = 1
    y = 2
    def f():
        nonlocal x
        nonlocal x, y
        print(x)
        print(y)
    f()

def assign():
    """
    >>> assign()
    1
    """
    xx = 0
    def ff():
        nonlocal xx
        xx += 1
        print(xx)
    ff()

def nested():
    """
    >>> nested()
    1
    """
    x = 0
    def fx():
        def gx():
            nonlocal x
            x=1
            print(x)
        return gx
    fx()()

def arg(x):
    """
    >>> arg('x')
    xyy
    """
    def appendy():
        nonlocal x
        x += 'y'
    x+='y'
    appendy()
    print x
    return

def argtype(int n):
    """
    >>> argtype(0)
    1
    """
    def inc():
        nonlocal n
        n += 1
    inc()
    print n
    return

def ping_pong():
    """
    >>> f = ping_pong()
    >>> inc, dec = f(0)
    >>> inc()
    1
    >>> inc()
    2
    >>> dec()
    1
    >>> inc()
    2
    >>> dec()
    1
    >>> dec()
    0
    """
    def f(x):
        def inc():
            nonlocal x
            x += 1
            return x
        def dec():
            nonlocal x
            x -= 1
            return x
        return inc, dec
    return f

def methods():
    """
    >>> f = methods()
    >>> c = f(0)
    >>> c.inc()
    1
    >>> c.inc()
    2
    >>> c.dec()
    1
    >>> c.dec()
    0
    """
    def f(x):
        class c:
            def inc(self):
                nonlocal x
                x += 1
                return x
            def dec(self):
                nonlocal x
                x -= 1
                return x
        return c()
    return f

def class_body(int x, y):
    """
    >>> c = class_body(2,99)
    >>> c.z
    (3, 2)
    >>> c.x     #doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...
    >>> c.y     #doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...
    """
    class c(object):
        nonlocal x
        nonlocal y
        y = 2
        x += 1
        z = x,y
    return c()

def nested_nonlocals(x):
    """
    >>> g = nested_nonlocals(1)
    >>> h = g()
    >>> h()
    3
    """
    def g():
        nonlocal x
        x -= 2
        def h():
            nonlocal x
            x += 4
            return x
        return h
    return g
Cython-0.26.1/tests/run/unreachable.pyx0000664000175000017500000000024212542002467020611 0ustar  stefanstefan00000000000000# mode: run
# tag: generators unreachable

def with_yield_removed():
    """
    >>> o = with_yield_removed()
    >>> list(o)
    []
    """
    return
    yield
Cython-0.26.1/tests/run/isnonebool.pyx0000664000175000017500000000123412542002467020511 0ustar  stefanstefan00000000000000def test_and(a,b):
    """
    >>> test_and(None, None)
    True
    >>> test_and(None, 1)
    False
    >>> test_and(1, None)
    False
    """
    return a is None and b is None

def test_more(a,b):
    """
    >>> test_more(None, None)
    True
    >>> test_more(None, 1)
    True
    >>> test_more(1, None)
    False
    >>> test_more(None, 0)
    False
    """
    return a is None and (b is None or b == 1)

def test_more_c(a,b):
    """
    >>> test_more_c(None, None)
    True
    >>> test_more_c(None, 1)
    True
    >>> test_more_c(1, None)
    False
    >>> test_more_c(None, 0)
    False
    """
    return (a is None or 1 == 2) and (b is None or b == 1)
Cython-0.26.1/tests/run/parallel_swap_assign_T425.pyx0000664000175000017500000001625512542002467023263 0ustar  stefanstefan00000000000000# ticket: 425

cimport cython

@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=False]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=True]",
    )
def swap(a,b):
    """
    >>> swap(1,2)
    (2, 1)
    """
    a,b = b,a
    return a,b


@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=False]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=True]",
    )
def swap5(a,b,c,d,e):
    """
    >>> swap5(1,2,3,4,5)
    (5, 4, 3, 2, 1)
    """
    a,b,c,d,e = e,d,c,b,a
    return a,b,c,d,e


@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=False]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=True]",
    )
cdef bint c_swap_cmp5(a, b, c, d, e):
    a,b,c,d,e = e,d,c,b,a
    return a > b > c > d > e

def swap_cmp5(a,b,c,d,e):
    """
    >>> swap_cmp5(1,2,3,4,5)
    True
    """
    return c_swap_cmp5(a,b,c,d,e)


@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=True]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//CoerceToTempNode[@use_managed_ref=False]",
    )
def swap_py(a,b):
    """
    >>> swap_py(1,2)
    (1, 2)
    """
    a,a = b,a
    return a,b


cdef class A:
    cdef readonly object x
    cdef readonly object y
    def __init__(self, x, y):
        self.x, self.y = x, y

@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode",
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode[@use_managed_ref=False]",
    "//ParallelAssignmentNode/SingleAssignmentNode//AttributeNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//AttributeNode[@use_managed_ref=False]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode[@use_managed_ref=True]",
    "//ParallelAssignmentNode/SingleAssignmentNode/AttributeNode[@use_managed_ref=True]",
    )
def swap_attr_values(A a, A b):
    """
    >>> a, b = A(1,2), A(3,4)
    >>> a.x, a.y, b.x, b.y
    (1, 2, 3, 4)
    >>> swap_attr_values(a,b)
    >>> a.x, a.y, b.x, b.y
    (3, 2, 1, 4)
    """
    a.x, a.y, b.x, b.y = a.y, b.x, b.y, a.x # shift by one
    a.x, a.y, b.x, b.y = b.x, b.y, a.x, a.y # shift by two
    a.x, a.y, b.x, b.y = b.y, b.x, a.y, a.x # reverse


cdef class B:
    cdef readonly A a1
    cdef readonly A a2
    def __init__(self, x1, y1, x2, y2):
        self.a1, self.a2 = A(x1, y1), A(x2, y2)

@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode",
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode[@use_managed_ref=False]",
    "//ParallelAssignmentNode/SingleAssignmentNode//AttributeNode/NameNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//AttributeNode[@use_managed_ref=False]/NameNode",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode/CoerceToTempNode[@use_managed_ref=True]",
    "//ParallelAssignmentNode/SingleAssignmentNode/AttributeNode[@use_managed_ref=True]",
    )
def swap_recursive_attr_values(B a, B b):
    """
    >>> a, b = B(1,2,3,4), B(5,6,7,8)
    >>> a.a1.x, a.a1.y, a.a2.x, a.a2.y
    (1, 2, 3, 4)
    >>> b.a1.x, b.a1.y, b.a2.x, b.a2.y
    (5, 6, 7, 8)
    >>> swap_recursive_attr_values(a,b)
    >>> a.a1.x, a.a1.y, a.a2.x, a.a2.y
    (2, 1, 4, 4)
    >>> b.a1.x, b.a1.y, b.a2.x, b.a2.y
    (6, 5, 8, 8)

    # compatibility test
    >>> class A:
    ...     def __init__(self, x, y):
    ...         self.x, self.y = x, y
    >>> class B:
    ...     def __init__(self, x1, y1, x2, y2):
    ...         self.a1, self.a2 = A(x1, y1), A(x2, y2)
    >>> a, b = B(1,2,3,4), B(5,6,7,8)
    >>> a.a1, a.a2 = a.a2, a.a1
    >>> b.a1, b.a2 = b.a2, b.a1
    >>> a.a1, a.a1.x, a.a2.y, a.a2, a.a1.y, a.a2.x = a.a2, a.a2.y, a.a1.x, a.a1, a.a2.x, a.a1.y
    >>> b.a1, b.a1.x, b.a2.y, b.a2, b.a1.y, b.a2.x = b.a2, b.a2.y, b.a1.x, b.a1, b.a2.x, b.a1.y
    >>> a.a1.x, a.a1.y, a.a2.x, a.a2.y
    (2, 1, 4, 4)
    >>> b.a1.x, b.a1.y, b.a2.x, b.a2.y
    (6, 5, 8, 8)
    """
    a.a1, a.a2 = a.a2, a.a1
    b.a1, b.a2 = b.a2, b.a1
    a.a1, a.a1.x, a.a2.y, a.a2, a.a1.y, a.a2.x = a.a2, a.a2.y, a.a1.x, a.a1, a.a2.x, a.a1.y
    b.a1, b.a1.x, b.a2.y, b.a2, b.a1.y, b.a2.x = b.a2, b.a2.y, b.a1.x, b.a1, b.a2.x, b.a1.y


@cython.test_assert_path_exists(
#    "//ParallelAssignmentNode",
#    "//ParallelAssignmentNode/SingleAssignmentNode",
#    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode",
#    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=False]",
    )
@cython.test_fail_if_path_exists(
#    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=True]",
    )
def swap_list_items(list a, int i, int j):
    """
    >>> l = [1,2,3,4]
    >>> swap_list_items(l, 1, 2)
    >>> l
    [1, 3, 2, 4]
    >>> swap_list_items(l, 3, 0)
    >>> l
    [4, 3, 2, 1]
    >>> swap_list_items(l, 0, 5)
    Traceback (most recent call last):
    IndexError: list index out of range
    >>> l
    [4, 3, 2, 1]
    """
    a[i], a[j] = a[j], a[i]


@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=True]",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=False]",
    )
def swap_list_items_py1(list a, int i, int j):
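    # Not a plain swap: the RHS reads a[j+1], so the optimisation that borrows
    # unmanaged references does not apply here (hence use_managed_ref=True above).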
    a[i], a[j] = a[j+1], a[i]


@cython.test_assert_path_exists(
    "//ParallelAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode",
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=True]",
    )
@cython.test_fail_if_path_exists(
    "//ParallelAssignmentNode/SingleAssignmentNode//IndexNode[@use_managed_ref=False]",
    )
def swap_list_items_py2(list a, int i, int j):
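    # Not a plain swap either: a[i] appears twice on the RHS, so managed
    # references must be kept (use_managed_ref=True above).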
    a[i], a[j] = a[i], a[i]
Cython-0.26.1/tests/run/cpp_namespaces.pyx0000664000175000017500000000156213023021033021310 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

cdef extern from "cpp_namespaces_helper.h" namespace "A":
    ctypedef int A_t
    cdef struct S:
        double x
        A_t k
    A_t A_func(A_t first, A_t)
    cdef void f(A_t)

cdef extern from "cpp_namespaces_helper.h" namespace "outer":
    int outer_value

cdef extern from "cpp_namespaces_helper.h" namespace "outer::inner":
    int inner_value

def test_function(x, y):
    """
    >>> test_function(1, 2)
    3
    >>> test_function(9, 16)
    25
    """
    return A_func(x, y)

def test_nested():
    """
    >>> test_nested()
    10
    100
    """
    print outer_value
    print inner_value

def test_typedef(A_t a):
    """
    >>> test_typedef(3)
    3
    """
    return a

def test_convert_struct(S s):
    """
    >>> py_value = {'x': 3.5, 'k': 10}
    >>> test_convert_struct(py_value) == py_value
    True
    """
    return s
Cython-0.26.1/tests/run/check_fused_types_pxd.pxd0000664000175000017500000000007512542002467022661 0ustar  stefanstefan00000000000000cimport cython

unresolved_t = cython.fused_type(int, float)
Cython-0.26.1/tests/run/addop.pyx0000664000175000017500000000605012574327400017434 0ustar  stefanstefan00000000000000cimport cython


def bigint(x):
    print(str(x).rstrip('L'))


def mixed_test():
    """
    >>> mixed_test()
    (30, 22)
    """
    cdef int int1, int2, int3
    cdef char *ptr1, *ptr2 = "test", *ptr3 = "toast"
    int2 = 10
    int3 = 20
    obj1 = 1
    obj2 = 2
    obj3 = 3
    int1 = int2 + int3
    ptr1 = ptr2 + int3
    ptr1 = int2 + ptr3
    obj1 = obj2 + int3
    return int1, obj1


@cython.test_fail_if_path_exists('//AddNode')
def add_x_1(x):
    """
    >>> add_x_1(0)
    1
    >>> add_x_1(1)
    2
    >>> add_x_1(-1)
    0
    >>> bigint(2**50 + 1)
    1125899906842625
    >>> bigint(add_x_1(2**50))
    1125899906842625
    >>> add_x_1(1.5)
    2.5
    >>> add_x_1(-1.5)
    -0.5
    >>> try: add_x_1("abc")
    ... except TypeError: pass
    """
    return x + 1


@cython.test_fail_if_path_exists('//AddNode')
def add_x_1f(x):
    """
    >>> add_x_1f(0)
    1.0
    >>> add_x_1f(1)
    2.0
    >>> add_x_1f(-1)
    0.0
    >>> add_x_1f(1.5)
    2.5
    >>> add_x_1f(-1.5)
    -0.5
    >>> try: add_x_1f("abc")
    ... except TypeError: pass
    """
    return x + 1.0


@cython.test_fail_if_path_exists('//AddNode')
def add_x_large(x):
    """
    >>> add_x_large(0)
    1073741824
    >>> add_x_large(1)
    1073741825
    >>> add_x_large(-1)
    1073741823
    >>> add_x_large(1.5)
    1073741825.5
    >>> add_x_large(-2.0**31)
    -1073741824.0
    >>> bigint(add_x_large(2**30 + 1))
    2147483649
    >>> bigint(2**50 + 1 + 2**30)
    1125900980584449
    >>> bigint(add_x_large(2**50 + 1))
    1125900980584449
    >>> bigint(2**31 + 2**30)
    3221225472
    >>> bigint(add_x_large(2**31))
    3221225472
    >>> bigint(2**66 + 2**30)
    73786976295911948288
    >>> bigint(add_x_large(2**66))
    73786976295911948288
    >>> try: add_x_large("abc")
    ... except TypeError: pass
    """
    return x + 2**30


@cython.test_fail_if_path_exists('//AddNode')
def add_1_x(x):
    """
    >>> add_1_x(0)
    1
    >>> add_1_x(1)
    2
    >>> add_1_x(-1)
    0
    >>> bigint(2**50 + 1)
    1125899906842625
    >>> bigint(add_1_x(2**50))
    1125899906842625
    >>> add_1_x(1.5)
    2.5
    >>> add_1_x(-1.5)
    -0.5
    >>> try: add_1_x("abc")
    ... except TypeError: pass
    """
    return 1 + x


@cython.test_fail_if_path_exists('//AddNode')
def add_1f_x(x):
    """
    >>> add_1f_x(0)
    1.0
    >>> add_1f_x(1)
    2.0
    >>> add_1f_x(-1)
    0.0
    >>> 1.0 + 2**52
    4503599627370497.0
    >>> add_1f_x(2**52)
    4503599627370497.0
    >>> add_1f_x(2**60) == 1.0 + 2**60 or add_1f_x(2**60)
    True
    >>> add_1f_x(1.5)
    2.5
    >>> add_1f_x(-1.5)
    -0.5
    >>> try: add_1f_x("abc")
    ... except TypeError: pass
    """
    return 1.0 + x


@cython.test_fail_if_path_exists('//AddNode')
def add_large_x(x):
    """
    >>> add_large_x(0)
    1073741824
    >>> add_large_x(1)
    1073741825
    >>> add_large_x(-1)
    1073741823
    >>> add_large_x(1.5)
    1073741825.5
    >>> add_large_x(-2.0**30)
    0.0
    >>> add_large_x(-2.0**31)
    -1073741824.0
    >>> try: add_large_x("abc")
    ... except TypeError: pass
    """
    return 2**30 + x
Cython-0.26.1/tests/run/cpp_bool.pyx0000664000175000017500000000045612542002467020144 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

from libcpp cimport bool

def test_bool(bool a):
    """
    >>> test_bool(True)
    True
    >>> test_bool(1)
    True
    >>> test_bool(0)
    False
    >>> test_bool(100)
    True
    >>> test_bool(None)
    False
    >>> test_bool([])
    False
    """
    return a
Cython-0.26.1/tests/run/importas.pyx0000664000175000017500000000143712542002467020205 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> import sys as sous
>>> import distutils.core as corey
>>> from copy import deepcopy as copey
>>> import distutils.command as commie

>>> sous is _sous
True
>>> corey is _corey
True
>>> copey is _copey
True
>>> _commie is commie
True

>>> _sous is not None
True
>>> _corey is not None
True
>>> _copey is not None
True
>>> _commie is not None
True

>>> print(_sous.__name__)
sys
>>> print(sous.__name__)
sys
>>> print(_corey.__name__)
distutils.core
>>> print(corey.__name__)
distutils.core
>>> print(_copey.__name__)
deepcopy
>>> print(copey.__name__)
deepcopy
>>> print(_commie.__name__)
distutils.command
>>> print(commie.__name__)
distutils.command
"""

import sys as _sous
import distutils.core as _corey
from copy import deepcopy as _copey
import distutils.command as _commie
Cython-0.26.1/tests/run/broken_exception.pyx0000664000175000017500000000065212542002467021703 0ustar  stefanstefan00000000000000
import sys

def exception_creates_invalid_instance():
    """
    >>> print( exception_creates_invalid_instance() )
    OK
    """
    class MyException(Exception):
        def __new__(cls, *args):
            return object()

    if sys.version_info[0] >= 3:
        expected_error = TypeError
    else:
        expected_error = MyException
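
    # Py3 rejects raising a class whose __new__ does not produce an exception
    # instance (TypeError), while Py2 still matches MyException in the except clause.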

    try:
        raise MyException
    except expected_error:
        return "OK"
Cython-0.26.1/tests/run/r_barbieri1.pyx0000664000175000017500000000062212542002467020523 0ustar  stefanstefan00000000000000__doc__ = u"""
  >>> try:
  ...     B()
  ... except Exception, e:
  ...     print("%s: %s" % (e.__class__.__name__, e))
  Exception: crash-me
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"Exception, e", u"Exception as e")

cdef class A:
    def __cinit__(self):
        raise Exception(u"crash-me")

cdef class B(A):
    def __cinit__(self):
        print "hello world"
Cython-0.26.1/tests/run/cyfunction_defaults.pyx0000664000175000017500000001200412542002467022407 0ustar  stefanstefan00000000000000# cython: binding=True
# mode: run
# tag: cyfunction, closures

cimport cython
import sys

def get_defaults(func):
    if sys.version_info >= (2, 6, 0):
        return func.__defaults__
    return func.func_defaults

def test_defaults_none():
    """
    >>> get_defaults(test_defaults_none)
    """

def test_defaults_literal(a=1, b=(1,2,3)):
    """
    >>> get_defaults(test_defaults_literal) is get_defaults(test_defaults_literal)
    True
    >>> get_defaults(test_defaults_literal)
    (1, (1, 2, 3))
    >>> a, b = get_defaults(test_defaults_literal)
    >>> c, d = test_defaults_literal()
    >>> a is c
    True
    >>> b is d
    True
    """
    return a, b

def test_defaults_nonliteral():
    """
    >>> f0, f1 = test_defaults_nonliteral()
    >>> get_defaults(f0) is get_defaults(f0) # cached
    True
    >>> get_defaults(f0)
    (0, {}, (1, 2, 3))
    >>> a, b = get_defaults(f0)[1:]
    >>> c, d = f0(0)
    >>> a is c
    True
    >>> b is d
    True
    >>> get_defaults(f1) is get_defaults(f1) # cached
    True
    >>> get_defaults(f1)
    (0, [], (1, 2, 3))
    >>> a, b = get_defaults(f1)[1:]
    >>> c, d = f1(0)
    >>> a is c
    True
    >>> b is d
    True
    """
    ret = []
    for i in {}, []:
        def foo(a, b=0, c=i, d=(1,2,3)):
            return c, d
        ret.append(foo)
    return ret

_counter = 0
def counter():
    global _counter
    _counter += 1
    return _counter

def test_defaults_nonliteral_func_call(f):
    """
    >>> f = test_defaults_nonliteral_func_call(counter)
    >>> f()
    1
    >>> get_defaults(f)
    (1,)
    >>> f = test_defaults_nonliteral_func_call(lambda: list())
    >>> f()
    []
    >>> get_defaults(f)
    ([],)
    >>> get_defaults(f)[0] is f()
    True
    """
    def func(a=f()):
        return a
    return func


def cy_kwonly_default_args(a, x=1, *, b=2):
    l = m = 1

def test_kwdefaults(value):
    """
    >>> cy_kwonly_default_args.__defaults__
    (1,)
    >>> cy_kwonly_default_args.func_defaults
    (1,)

    >>> cy_kwonly_default_args.__kwdefaults__
    {'b': 2}

    >>> test_kwdefaults.__defaults__
    >>> test_kwdefaults.__kwdefaults__

    >>> f = test_kwdefaults(5)
    >>> f.__defaults__
    (1,)
    >>> f.__kwdefaults__
    {'b': 5}
    >>> f.__kwdefaults__ = ()
    Traceback (most recent call last):
    TypeError: __kwdefaults__ must be set to a dict object
    >>> f.__kwdefaults__ = None
    >>> f.__kwdefaults__
    >>> f.__kwdefaults__ = {}
    >>> f.__kwdefaults__
    {}
    >>> f.__kwdefaults__ = {'a': 2}
    >>> f.__kwdefaults__
    {'a': 2}
    """
    def kwonly_default_args(a, x=1, *, b=value):
        return a, x, b
    return kwonly_default_args


_counter2 = 1.0
def counter2():
    global _counter2
    _counter2 += 1.0
    return _counter2

def test_defaults_fused(cython.floating arg1, cython.floating arg2 = counter2()):
    """
    >>> test_defaults_fused(1.0)
    1.0 2.0
    >>> test_defaults_fused(1.0, 3.0)
    1.0 3.0
    >>> _counter2
    2.0

    >>> get_defaults(test_defaults_fused)
    (2.0,)
    >>> get_defaults(test_defaults_fused[float])
    (2.0,)
    """
    print arg1, arg2

funcs = []
for i in range(10):
    def defaults_fused(cython.floating a, cython.floating b = i):
        return a, b
    funcs.append(defaults_fused)

def test_dynamic_defaults_fused():
    """
    >>> test_dynamic_defaults_fused()
    i 0 func result (1.0, 0.0) defaults (0,)
    i 1 func result (1.0, 1.0) defaults (1,)
    i 2 func result (1.0, 2.0) defaults (2,)
    i 3 func result (1.0, 3.0) defaults (3,)
    i 4 func result (1.0, 4.0) defaults (4,)
    i 5 func result (1.0, 5.0) defaults (5,)
    i 6 func result (1.0, 6.0) defaults (6,)
    i 7 func result (1.0, 7.0) defaults (7,)
    i 8 func result (1.0, 8.0) defaults (8,)
    i 9 func result (1.0, 9.0) defaults (9,)
    """
    for i, f in enumerate(funcs):
        print "i", i, "func result", f(1.0), "defaults", get_defaults(f)


@cython.test_fail_if_path_exists(
    '//NameNode[@entry.in_closure = True]',
    '//NameNode[@entry.from_closure = True]')
def test_func_default_inlined():
    """
    Make sure we don't accidentally generate a closure.

    >>> func = test_func_default_inlined()
    >>> func()
    1
    >>> func(2)
    2
    """
    def default():
        return 1
    def func(arg=default()):
        return arg
    return func


@cython.test_fail_if_path_exists(
    '//NameNode[@entry.in_closure = True]',
    '//NameNode[@entry.from_closure = True]')
def test_func_default_scope():
    """
    Test that the default value expression is evaluated in the outer scope.

    >>> func = test_func_default_scope()
    3
    >>> func()
    [0, 1, 2, 3]
    >>> func(2)
    2
    """
    i = -1
    def func(arg=[ i for i in range(4) ]):
        return arg
    print i  # list comps leak in Py2 mode => i == 3
    return func


def test_func_default_scope_local():
    """
    >>> func = test_func_default_scope_local()
    -1
    >>> func()
    [0, 1, 2, 3]
    >>> func(2)
    2
    """
    i = -1
    def func(arg=list(i for i in range(4))):
        return arg
    print i  # genexprs don't leak
    return func
Cython-0.26.1/tests/run/cpp_template_functions.pyx0000664000175000017500000000570713143605603023117 0ustar  stefanstefan00000000000000# tag: cpp

cimport cython
from libcpp.pair cimport pair
from libcpp.vector cimport vector

cdef extern from "cpp_template_functions_helper.h":
    cdef T no_arg[T]()
    cdef T one_param[T](T)
    cdef pair[T, U] two_params[T, U](T, U)
    cdef cppclass A[T]:
        pair[T, U] method[U](T, U)
        U part_method[U](pair[T, U])
        U part_method_ref[U](pair[T, U]&)
        int overloaded(double x)
        T overloaded(pair[T, T])
        U overloaded[U](vector[U])
        X overloaded[X](char* s, vector[X])
    cdef T nested_deduction[T](const T*)
    pair[T, U] pair_arg[T, U](pair[T, U] a)
    cdef T* pointer_param[T](T*)
    cdef cppclass double_pair(pair[double, double]):
        double_pair(double, double)

def test_no_arg():
    """
    >>> test_no_arg()
    0
    """
    return no_arg[int]()

def test_one_param(int x):
    """
    >>> test_one_param(3)
    (3, 3.0)
    """
    return one_param[int](x), one_param[double](x)

def test_two_params(int x, int y):
    """
    >>> test_two_params(1, 2)
    (1, 2.0)
    """
    return two_params[int, double](x, y)

def test_method(int x, int y):
    """
    >>> test_method(5, 10)
    ((5, 10.0), (5.0, 10), (5, 10), (5.0, 10))
    """
    cdef A[int] a_int
    cdef A[double] a_double
    return (a_int.method[float](x, y), a_double.method[int](x, y),
        a_int.method(x, y), a_double.method(x, y))
#    return a_int.method[double](x, y), a_double.method[int](x, y)

def test_part_method(int x, int y):
    """
    >>> test_part_method(5, 10)
    (10.0, 10, 10.0)
    """
    cdef A[int] a_int
    cdef pair[int, double] p_int = (x, y)
    cdef A[double] a_double
    cdef pair[double, int] p_double = (x, y)
    return (a_int.part_method(p_int),
        a_double.part_method(p_double),
        a_double.part_method_ref(double_pair(x, y)))

def test_simple_deduction(int x, double y):
    """
    >>> test_simple_deduction(1, 2)
    (1, 2.0)
    """
    return one_param(x), one_param(y)

def test_more_deductions(int x, double y):
    """
    >>> test_more_deductions(1, 2)
    (1, 2.0)
    """
    return nested_deduction(&x), nested_deduction(&y)

def test_class_deductions(pair[long, double] x):
    """
    >>> test_class_deductions((1, 1.5))
    (1, 1.5)
    """
    return pair_arg(x)

def test_deduce_through_pointers(int k):
    """
    >>> test_deduce_through_pointers(5)
    (5, 5.0)
    """
    cdef double x = k
    return pointer_param(&k)[0], pointer_param(&x)[0]

def test_inference(int k):
    """
    >>> test_inference(27)
    27
    """
    res = one_param(&k)
    assert cython.typeof(res) == 'int *', cython.typeof(res)
    return res[0]

def test_overload_GH1583():
    """
    >>> test_overload_GH1583()
    """
    cdef A[int] a
    assert a.overloaded(1.5) == 1
    cdef pair[int, int] p = (2, 3)
    assert a.overloaded(p) == 2
    cdef vector[double] v = [0.25, 0.125]
    assert a.overloaded(v) == 0.25
    assert a.overloaded("s", v) == 0.25
    # GH Issue #1584
    # assert a.overloaded[double](v) == 0.25
Cython-0.26.1/tests/run/if_else_expr_cpp_helper.h0000664000175000017500000000032612574327400022621 0ustar  stefanstefan00000000000000class Holder {
public:
    int value;
    Holder() : value(-1) { }
    Holder(int value) : value(value) { }
};

Holder v1(1);
Holder v2(2);

Holder& get_v1() {
    return v1;
}

Holder& get_v2() {
    return v2;
}
Cython-0.26.1/tests/run/unicodeencode.pyx0000664000175000017500000000512712542002467021153 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-

__doc__ = u"""
>>> len(u)
15
"""

cimport cython

_bytes = bytes

cdef unicode text = u'abcäöüöéèâÃÀABC'

u = text

def default():
    """
    >>> default() == 'abcdefg'.encode()
    True
    """
    return u'abcdefg'.encode()

def encode_non_constant(encoding):
    """
    >>> isinstance(encode_non_constant('utf8'), _bytes)
    True
    >>> encode_non_constant('utf8') == u.encode('UTF-8')
    True
    """
    return text.encode(encoding)

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsUTF8String"]')
def utf8():
    """
    >>> isinstance(utf8(), _bytes)
    True
    >>> utf8() == u.encode('UTF-8')
    True
    """
    return text.encode(u'UTF-8')

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsUTF8String"]')
def utf8_strict():
    """
    >>> isinstance(utf8_strict(), _bytes)
    True
    >>> utf8_strict() == u.encode('UTF-8', 'strict')
    True
    """
    return text.encode(u'UTF-8', u'strict')

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsUTF8String"]')
def utf8_str_strict():
    """
    >>> isinstance(utf8_str_strict(), _bytes)
    True
    >>> utf8_str_strict() == u.encode('UTF-8', 'strict')
    True
    """
    return text.encode('UTF-8', 'strict')

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsUTF8String"]')
def utf8_bytes_strict():
    """
    >>> isinstance(utf8_bytes_strict(), _bytes)
    True
    >>> utf8_bytes_strict() == u.encode('UTF-8', 'strict')
    True
    """
    return text.encode(b'UTF-8', b'strict')

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsEncodedString"]')
def ascii_replace():
    """
    >>> isinstance(ascii_replace(), _bytes)
    True
    >>> ascii_replace() == u.encode('ASCII', 'replace')
    True
    """
    return text.encode(u'ASCII', u'replace')

def cp850_strict():
    """
    >>> isinstance(cp850_strict(), _bytes)
    True
    >>> cp850_strict() == u.encode('cp850', 'strict')
    True
    """
    return text.encode(u'cp850', u'strict')

@cython.test_assert_path_exists('//PythonCapiFunctionNode[@cname = "PyUnicode_AsLatin1String"]')
def latin1():
    """
    >>> isinstance(latin1(), _bytes)
    True
    >>> latin1() == u.encode('latin-1')
    True
    """
    return text.encode(u'latin-1')

@cython.test_fail_if_path_exists('//PythonCapiFunctionNode', '//SimpleCallNode')
def latin1_constant():
    """
    >>> isinstance(latin1_constant(), _bytes)
    True
    >>> latin1_constant() == latin1()
    True
    """
    return u'abcäöüöéèâÃÀABC'.encode('latin1')
Cython-0.26.1/tests/run/cdef_members_binding_properties.pyx0000664000175000017500000000552612542002467024733 0ustar  stefanstefan00000000000000# cython: embedsignature=True, binding=True
# mode: run

# same test as "cdef_members_T517.pyx" but "binding=True"

__doc__ = u"""
>>> a = A()
>>> a.h = 7
>>> a.i = 127
>>> a.l = 255
>>> a.q = 255
>>> a.f = 1.0/2.0
>>> a.d = 1/2.0 + 1/4.0
>>> a.g = 1/2.0 + 1/4.0 + 1/8.0
>>> a.Zf = 1+2j
>>> a.Zd = 3+4j
>>> a.Zg = 5+6j

>>> a.h, a.i, a.l
(7, 127, 255)
>>> a.ro_h, a.ro_i, a.ro_l
(7, 127, 255)
>>> a.f, a.d, a.g
(0.5, 0.75, 0.875)
>>> a.ro_f, a.ro_d, a.ro_g
(0.5, 0.75, 0.875)
>>> a.Zf, a.Zd, a.Zg
((1+2j), (3+4j), (5+6j))
>>> a.ro_Zf, a.ro_Zd, a.ro_Zg
((1+2j), (3+4j), (5+6j))

>>> b = B()
>>> b.a0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> b.b0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> b.c0 #doctest: +ELLIPSIS
Traceback (most recent call last):
AttributeError: ...

>>> isinstance(b.a1, type(None))
True
>>> isinstance(b.a2, type(None))
True
>>> isinstance(b.b1, list)
True
>>> isinstance(b.b2, list)
True
>>> isinstance(b.c1, A)
True
>>> isinstance(b.c2, A)
True

>>> b.a1 = a
>>> b.a1 is not b.a2
True

TYPE_FIXES_REQUIRED:

>>> try: b.b1 = 1
... except (TypeError, AttributeError): pass

>>> try: b.c1 = 1
... except (TypeError, AttributeError): pass

>>> try: b.a2 = None
... except (TypeError, AttributeError): pass

>>> try: b.b2 = []
... except (TypeError, AttributeError): pass

>>> try: b.c2 = A()
... except (TypeError, AttributeError): pass
"""

import sys
if sys.version_info < (2,5):
    __doc__ = (__doc__.split('TYPE_FIXES_REQUIRED')[0] +
               __doc__.split('TYPE_FIXES_REQUIRED')[1].replace('\nAttributeError: ...', '\nTypeError: ...'))


cdef class A:

    cdef public short h
    cdef public int i
    cdef public long l
    cdef public long long q
    cdef public float f
    cdef public double d
    cdef public long double g
    cdef public float complex Zf
    cdef public double complex Zd
    cdef public long double complex Zg

    cdef readonly short ro_h
    cdef readonly int ro_i
    cdef readonly long ro_l
    cdef readonly long long ro_q
    cdef readonly float ro_f
    cdef readonly double ro_d
    cdef readonly long double ro_g
    cdef readonly float complex ro_Zf
    cdef readonly double complex ro_Zd
    cdef readonly long double complex ro_Zg

    def __cinit__(self):
        self.ro_h = 7
        self.ro_i = 127
        self.ro_l = 255
        self.ro_q = 255
        self.ro_f = 1.0/2.0
        self.ro_d = 1/2.0 + 1/4.0
        self.ro_g = 1/2.0 + 1/4.0 + 1/8.0
        self.ro_Zf = 1+2j
        self.ro_Zd = 3+4j
        self.ro_Zg = 5+6j


cdef class B:

    cdef object a0
    cdef public object a1
    cdef readonly object a2

    cdef list b0
    cdef public list b1
    cdef readonly list b2

    cdef A c0
    cdef public A c1
    cdef readonly A c2

    def __cinit__(self):
        self.b0 = self.b1 = self.b2 = []
        self.c0 = self.c1 = self.c2 = A()
Cython-0.26.1/tests/run/cintop.pyx0000664000175000017500000000131412542002467017635 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> int2 = 42
    >>> int3 = 7
    >>> char1 = ord('C')

    >>> int1 = int2 | int3
    >>> int1 |= int2 ^ int3
    >>> int1 ^= int2 & int3
    >>> int1 ^= int2 << int3
    >>> int1 ^= int2 >> int3
    >>> int1 ^= int2 << int3 | int2 >> int3
    >>> long1 = char1 | int1
    >>> (int1, long1) == f()
    True

"""

def f():
    """
    >>> f()
    (45, 111)
    """
    cdef int int1, int2, int3
    cdef char char1
    cdef long long1, long2
    int2 = 42
    int3 = 7
    char1 = c'C'

    int1 = int2 | int3
    int1 |= int2 ^ int3
    int1 ^= int2 & int3
    int1 ^= int2 << int3
    int1 ^= int2 >> int3
    int1 ^= int2 << int3 | int2 >> int3
    long1 = char1 | int1
    return int1, long1
Cython-0.26.1/tests/run/autotestdict.pxd0000664000175000017500000000001712542002467021027 0ustar  stefanstefan00000000000000# I just exist
Cython-0.26.1/tests/run/special_methods_T561.pyx0000664000175000017500000006231612542002467022234 0ustar  stefanstefan00000000000000# mode: run
# ticket: 561
# ticket: 3

# The patch in #561 changes code generation for most special methods
# to remove the Cython-generated wrapper and let PyType_Ready()
# generate its own wrapper.  (This wrapper would be used, for instance,
# when using the special method as a bound method.)

# To test this, we go through and verify that each affected special
# method works as a bound method.

# Special methods that are treated the same under Python 2 and 3 are
# tested here; see also special_methods_T561_py2.pyx and
# special_methods_T561_py3.pyx for tests of the differences between
# Python 2 and 3.

# Regarding ticket 3, we should additionally test that unbound method
# calls to these special methods (e.g. ExtType.__init__()) do not use
# a runtime lookup indirection.
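#
# For example (as exercised by the doctests below), binding a slot method and
# calling it explicitly behaves like the corresponding operator syntax:
#
#     vs1 = VerySpecial(1)        # prints "VS __init__ 1"
#     vs2 = VerySpecial(2)        # prints "VS __init__ 2"
#     bound_add = vs1.__add__     # wrapper generated by PyType_Ready()
#     bound_add(vs2)              # prints "VS __add__ 1 2"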

import sys

__doc__ = u"""
    >>> # If you define either setitem or delitem, you get wrapper objects
    >>> # for both methods.  (This behavior is unchanged by #561.)
    >>> si_setitem = SetItem().__setitem__
    >>> si_setitem('foo', 'bar')
    SetItem setitem 'foo' 'bar'
    >>> si_delitem = SetItem().__delitem__
    >>> si_delitem('foo')
    Traceback (most recent call last):
    ...
    NotImplementedError: Subscript deletion not supported by special_methods_T561.SetItem
    >>> di_setitem = DelItem().__setitem__
    >>> di_setitem('foo', 'bar')
    Traceback (most recent call last):
    ...
    NotImplementedError: Subscript assignment not supported by special_methods_T561.DelItem
    >>> di_delitem = DelItem().__delitem__
    >>> di_delitem('foo')
    DelItem delitem 'foo'
    >>> sdi_setitem = SetDelItem().__setitem__
    >>> sdi_setitem('foo', 'bar')
    SetDelItem setitem 'foo' 'bar'
    >>> sdi_delitem = SetDelItem().__delitem__
    >>> sdi_delitem('foo')
    SetDelItem delitem 'foo'
    >>> g01 = object.__getattribute__(GetAttr(), '__getattribute__')
    >>> g01('attr')
    GetAttr getattr 'attr'
    >>> g10 = object.__getattribute__(GetAttribute(), '__getattr__')
    Traceback (most recent call last):
    ...
    AttributeError: 'special_methods_T561.GetAttribute' object has no attribute '__getattr__'
    >>> g11 = object.__getattribute__(GetAttribute(), '__getattribute__')
    >>> g11('attr')
    GetAttribute getattribute 'attr'
    >>> # If you define either setattr or delattr, you get wrapper objects
    >>> # for both methods.  (This behavior is unchanged by #561.)
    >>> sa_setattr = SetAttr().__setattr__
    >>> sa_setattr('foo', 'bar')
    SetAttr setattr 'foo' 'bar'
    >>> sa_delattr = SetAttr().__delattr__
    >>> sa_delattr('foo')
    Traceback (most recent call last):
    ...
    AttributeError: 'special_methods_T561.SetAttr' object has no attribute 'foo'
    >>> da_setattr = DelAttr().__setattr__
    >>> da_setattr('foo', 'bar')
    Traceback (most recent call last):
    ...
    AttributeError: 'special_methods_T561.DelAttr' object has no attribute 'foo'
    >>> da_delattr = DelAttr().__delattr__
    >>> da_delattr('foo')
    DelAttr delattr 'foo'
    >>> sda_setattr = SetDelAttr().__setattr__
    >>> sda_setattr('foo', 'bar')
    SetDelAttr setattr 'foo' 'bar'
    >>> sda_delattr = SetDelAttr().__delattr__
    >>> sda_delattr('foo')
    SetDelAttr delattr 'foo'
    >>> # If you define either set or delete, you get wrapper objects
    >>> # for both methods.  (This behavior is unchanged by #561.)
    >>> s_set = Set().__set__
    >>> s_set('instance', 'val')
    Set set 'instance' 'val'
    >>> s_delete = Set().__delete__
    >>> s_delete('instance')
    Traceback (most recent call last):
    ...
    NotImplementedError: __delete__
    >>> d_set = Delete().__set__
    >>> d_set('instance', 'val')
    Traceback (most recent call last):
    ...
    NotImplementedError: __set__
    >>> d_delete = Delete().__delete__
    >>> d_delete('instance')
    Delete delete 'instance'
    >>> sd_set = SetDelete().__set__
    >>> sd_set('instance', 'val')
    SetDelete set 'instance' 'val'
    >>> sd_delete = SetDelete().__delete__
    >>> sd_delete('instance')
    SetDelete delete 'instance'
    >>> # If you define __long__, you get a wrapper object for __int__.
    >>> # (This behavior is unchanged by #561.)
    >>> Li = Long().__int__
    >>> Li()
    Long __long__
"""
if sys.version_info >= (2,5):
    __doc__ += u"""\
    >>> vs0 = VerySpecial(0)
    VS __init__ 0
    >>> vs0_index = vs0.__index__
    >>> vs0_index()
    VS __index__ 0
"""

cdef class VerySpecial:
    """
    >>> vs0 = VerySpecial(0)
    VS __init__ 0
    >>> vs1 = VerySpecial(1)
    VS __init__ 1

    >>> vs0_add = vs0.__add__
    >>> vs0_add(vs1)
    VS __add__ 0 1
    >>> vs0_sub = vs0.__sub__
    >>> vs0_sub(vs1)
    VS __sub__ 0 1
    >>> vs0_mul = vs0.__mul__
    >>> vs0_mul(vs1)
    VS __mul__ 0 1
    >>> vs0_mod = vs0.__mod__
    >>> vs0_mod(vs1)
    VS __mod__ 0 1
    >>> vs0_divmod = vs0.__divmod__
    >>> vs0_divmod(vs1)
    VS __divmod__ 0 1
    >>> vs0_pow = vs0.__pow__
    >>> vs0_pow(vs1)
    VS __pow__ pow(0, 1, None)
    >>> vs0_pow(vs1, 13)
    VS __pow__ pow(0, 1, 13)
    >>> vs0_neg = vs0.__neg__
    >>> vs0_neg()
    VS __neg__ 0
    >>> vs0_pos = vs0.__pos__
    >>> vs0_pos()
    VS __pos__ 0
    >>> vs0_abs = vs0.__abs__
    >>> vs0_abs()
    VS __abs__ 0
    >>> vs0_invert = vs0.__invert__
    >>> vs0_invert()
    VS __invert__ 0
    >>> vs0_lshift = vs0.__lshift__
    >>> vs0_lshift(vs1)
    VS __lshift__ 0 << 1
    >>> vs0_rshift = vs0.__rshift__
    >>> vs0_rshift(vs1)
    VS __rshift__ 0 >> 1
    >>> vs0_and = vs0.__and__
    >>> vs0_and(vs1)
    VS __and__ 0 & 1
    >>> vs0_xor = vs0.__xor__
    >>> vs0_xor(vs1)
    VS __xor__ 0 ^ 1
    >>> vs0_or = vs0.__or__
    >>> vs0_or(vs1)
    VS __or__ 0 | 1
    >>> vs0_int = vs0.__int__
    >>> vs0_int()
    VS __int__ 0
    >>> vs0_float = vs0.__float__
    >>> vs0_float()
    VS __float__ 0
    >>> vs0_iadd = vs0.__iadd__
    >>> vs0_iadd(vs1)
    VS __iadd__ 0 += 1
    >>> vs0_isub = vs0.__isub__
    >>> vs0_isub(vs1)
    VS __isub__ 0 -= 1
    >>> vs0_imul = vs0.__imul__
    >>> vs0_imul(vs1)
    VS __imul__ 0 *= 1
    >>> vs0_imod = vs0.__imod__
    >>> vs0_imod(vs1)
    VS __imod__ 0 %= 1
    >>> vs0_ipow = vs0.__ipow__
    >>> vs0_ipow(vs1)
    VS __ipow__ 0 1
    >>> vs0_ilshift = vs0.__ilshift__
    >>> vs0_ilshift(vs1)
    VS __ilshift__ 0 <<= 1
    >>> vs0_irshift = vs0.__irshift__
    >>> vs0_irshift(vs1)
    VS __irshift__ 0 >>= 1
    >>> vs0_iand = vs0.__iand__
    >>> vs0_iand(vs1)
    VS __iand__ 0 &= 1
    >>> vs0_ixor = vs0.__ixor__
    >>> vs0_ixor(vs1)
    VS __ixor__ 0 ^= 1
    >>> vs0_ior = vs0.__ior__
    >>> vs0_ior(vs1)
    VS __ior__ 0 |= 1
    >>> vs0_floordiv = vs0.__floordiv__
    >>> vs0_floordiv(vs1)
    VS __floordiv__ 0 / 1
    >>> vs0_truediv = vs0.__truediv__
    >>> vs0_truediv(vs1)
    VS __truediv__ 0 / 1
    >>> vs0_ifloordiv = vs0.__ifloordiv__
    >>> vs0_ifloordiv(vs1)
    VS __ifloordiv__ 0 /= 1
    >>> vs0_itruediv = vs0.__itruediv__
    >>> vs0_itruediv(vs1)
    VS __itruediv__ 0 /= 1

    # If you define an arithmetic method, you get wrapper objects for
    # the reversed version as well.  (This behavior is unchanged by #561.)
    >>> vs0_radd = vs0.__radd__
    >>> vs0_radd(vs1)
    VS __add__ 1 0
    >>> vs0_rsub = vs0.__rsub__
    >>> vs0_rsub(vs1)
    VS __sub__ 1 0
    >>> vs0_rmul = vs0.__rmul__
    >>> vs0_rmul(vs1)
    VS __mul__ 1 0
    >>> vs0_rmod = vs0.__rmod__
    >>> vs0_rmod(vs1)
    VS __mod__ 1 0
    >>> vs0_rdivmod = vs0.__rdivmod__
    >>> vs0_rdivmod(vs1)
    VS __divmod__ 1 0
    >>> vs0_rpow = vs0.__rpow__
    >>> vs0_rpow(vs1)
    VS __pow__ pow(1, 0, None)
    >>> vs0_rlshift = vs0.__rlshift__
    >>> vs0_rlshift(vs1)
    VS __lshift__ 1 << 0
    >>> vs0_rrshift = vs0.__rrshift__
    >>> vs0_rrshift(vs1)
    VS __rshift__ 1 >> 0
    >>> vs0_rand = vs0.__rand__
    >>> vs0_rand(vs1)
    VS __and__ 1 & 0
    >>> vs0_rxor = vs0.__rxor__
    >>> vs0_rxor(vs1)
    VS __xor__ 1 ^ 0
    >>> vs0_ror = vs0.__ror__
    >>> vs0_ror(vs1)
    VS __or__ 1 | 0
    >>> vs0_rfloordiv = vs0.__rfloordiv__
    >>> vs0_rfloordiv(vs1)
    VS __floordiv__ 1 / 0
    >>> vs0_rtruediv = vs0.__rtruediv__
    >>> vs0_rtruediv(vs1)
    VS __truediv__ 1 / 0
    >>> vs0_getitem = vs0.__getitem__
    >>> vs0_getitem('foo')
    VS __getitem__ 0['foo']
    >>> vs0_contains = vs0.__contains__
    >>> vs0_contains(vs1)
    VS __contains__ 0 1
    False
    >>> vs0_len = vs0.__len__
    >>> vs0_len()
    VS __len__ 0
    0
    >>> vs0_repr = vs0.__repr__
    >>> vs0_repr()
    VS __repr__ 0
    >>> vs0_hash = vs0.__hash__
    >>> vs0_hash()
    VS __hash__ 0
    1000
    >>> vs0_call = vs0.__call__
    >>> vs0_call(vs1)
    VS __call__ 0(1)
    >>> vs0_str = vs0.__str__
    >>> vs0_str()
    VS __str__ 0

    # If you define __richcmp__, you get all of __lt__, __le__,
    # __eq__, __ne__, __gt__, __ge__ (this behavior is unchanged by #561).
    # (you don't get a __richcmp__ method, because it doesn't have a
    # Python signature)
    >>> vs0_lt = vs0.__lt__
    >>> vs0_lt(vs1)
    VS richcmp 0 1 (kind=0)
    >>> vs0_le = vs0.__le__
    >>> vs0_le(vs1)
    VS richcmp 0 1 (kind=1)
    >>> vs0_eq = vs0.__eq__
    >>> vs0_eq(vs1)
    VS richcmp 0 1 (kind=2)
    >>> vs0_ne = vs0.__ne__
    >>> vs0_ne(vs1)
    VS richcmp 0 1 (kind=3)
    >>> vs0_gt = vs0.__gt__
    >>> vs0_gt(vs1)
    VS richcmp 0 1 (kind=4)
    >>> vs0_ge = vs0.__ge__
    >>> vs0_ge(vs1)
    VS richcmp 0 1 (kind=5)
    >>> vs0_iter = vs0.__iter__
    >>> vs0_iter()
    VS __iter__ 0
    >>> vs0_next = vs0.__next__
    >>> vs0_next()
    VS next/__next__ 0

    >>> vs0_get = vs0.__get__
    >>> vs0_get('instance', 'owner')
    VS __get__ 0 'instance' 'owner'
    >>> vs0_init = vs0.__init__
    >>> vs0_init(0)
    VS __init__ 0
    """
    cdef readonly int value

    def __init__(self, v):
        self.value = v
        print "VS __init__ %d" % self.value

    def __add__(self, other):
        print "VS __add__ %d %d" % (self.value, other.value)

    def __sub__(self, other):
        print "VS __sub__ %d %d" % (self.value, other.value)

    def __mul__(self, other):
        print "VS __mul__ %d %d" % (self.value, other.value)

    def __div__(self, other):
        print "VS __div__ %d %d" % (self.value, other.value)

    def __mod__(self, other):
        print "VS __mod__ %d %d" % (self.value, other.value)

    def __divmod__(self, other):
        print "VS __divmod__ %d %d" % (self.value, other.value)

    def __pow__(self, other, mod):
        print "VS __pow__ pow(%d, %d, %r)" % (self.value, other.value, mod)

    def __lshift__(self, other):
        print "VS __lshift__ %d << %d" % (self.value, other.value)

    def __rshift__(self, other):
        print "VS __rshift__ %d >> %d" % (self.value, other.value)

    def __and__(self, other):
        print "VS __and__ %d & %d" % (self.value, other.value)

    def __xor__(self, other):
        print "VS __xor__ %d ^ %d" % (self.value, other.value)

    def __or__(self, other):
        print "VS __or__ %d | %d" % (self.value, other.value)

    def __floordiv__(self, other):
        print "VS __floordiv__ %d / %d" % (self.value, other.value)

    def __truediv__(self, other):
        print "VS __truediv__ %d / %d" % (self.value, other.value)

    def __neg__(self):
        print "VS __neg__ %d" % self.value

    def __pos__(self):
        print "VS __pos__ %d" % self.value

    def __abs__(self):
        print "VS __abs__ %d" % self.value

    def __nonzero__(self):
        print "VS __nonzero__ %d" % self.value

    def __invert__(self):
        print "VS __invert__ %d" % self.value

    def __int__(self):
        print "VS __int__ %d" % self.value

    def __long__(self):
        print "VS __long__ %d" % self.value

    def __float__(self):
        print "VS __float__ %d" % self.value

    def __oct__(self):
        print "VS __oct__ %d" % self.value

    def __hex__(self):
        print "VS __hex__ %d" % self.value

    def __iadd__(self, other):
        print "VS __iadd__ %d += %d" % (self.value, other.value)

    def __isub__(self, other):
        print "VS __isub__ %d -= %d" % (self.value, other.value)

    def __imul__(self, other):
        print "VS __imul__ %d *= %d" % (self.value, other.value)

    def __idiv__(self, other):
        print "VS __idiv__ %d /= %d" % (self.value, other.value)

    def __imod__(self, other):
        print "VS __imod__ %d %%= %d" % (self.value, other.value)

    def __ipow__(self, other):
        # We must declare mod as an argument, but we must not touch it
        # or we'll get a segfault.  See #562
        print "VS __ipow__ %d %d" % (self.value, other.value)

    def __ilshift__(self, other):
        print "VS __ilshift__ %d <<= %d" % (self.value, other.value)

    def __irshift__(self, other):
        print "VS __irshift__ %d >>= %d" % (self.value, other.value)

    def __iand__(self, other):
        print "VS __iand__ %d &= %d" % (self.value, other.value)

    def __ixor__(self, other):
        print "VS __ixor__ %d ^= %d" % (self.value, other.value)

    def __ior__(self, other):
        print "VS __ior__ %d |= %d" % (self.value, other.value)

    def __ifloordiv__(self, other):
        print "VS __ifloordiv__ %d /= %d" % (self.value, other.value)

    def __itruediv__(self, other):
        print "VS __itruediv__ %d /= %d" % (self.value, other.value)

    def __index__(self):
        print "VS __index__ %d" % self.value

    def __getitem__(self, index):
        print "VS __getitem__ %d[%r]" % (self.value, index)

    def __contains__(self, other):
        print "VS __contains__ %d %d" % (self.value, other.value)

    def __len__(self):
        print "VS __len__ %d" % (self.value)

    def __cmp__(self, other):
        print "VS __cmp__ %d %d" % (self.value, other.value)

    def __repr__(self):
        print "VS __repr__ %d" % self.value

    def __hash__(self):
        print "VS __hash__ %d" % self.value
        return self.value + 1000

    def __call__(self, other):
        print "VS __call__ %d(%d)" % (self.value, other.value)

    def __str__(self):
        print "VS __str__ %d" % self.value

    def __richcmp__(self, other, kind):
        print "VS richcmp %d %d (kind=%r)" % (self.value, other.value, kind)

    def __iter__(self):
        print "VS __iter__ %d" % self.value

    def __next__(self):
        print "VS next/__next__ %d" % self.value

    def __get__(self, inst, own):
        print "VS __get__ %d %r %r" % (self.value, inst, own)

cdef class SetItem:
    def __setitem__(self, index, value):
        print "SetItem setitem %r %r" % (index, value)

cdef class DelItem:
    def __delitem__(self, index):
        print "DelItem delitem %r" % index

cdef class SetDelItem:
    def __setitem__(self, index, value):
        print "SetDelItem setitem %r %r" % (index, value)

    def __delitem__(self, index):
        print "SetDelItem delitem %r" % index

cdef class GetAttr:
    def __getattr__(self, attr):
        print "GetAttr getattr %r" % attr

cdef class GetAttribute:
    def __getattribute__(self, attr):
        print "GetAttribute getattribute %r" % attr

cdef class SetAttr:
    def __setattr__(self, attr, val):
        print "SetAttr setattr %r %r" % (attr, val)

cdef class DelAttr:
    def __delattr__(self, attr):
        print "DelAttr delattr %r" % attr

cdef class SetDelAttr:
    def __setattr__(self, attr, val):
        print "SetDelAttr setattr %r %r" % (attr, val)

    def __delattr__(self, attr):
        print "SetDelAttr delattr %r" % attr

cdef class Set:
    def __set__(self, inst, val):
        print "Set set %r %r" % (inst, val)

cdef class Delete:
    def __delete__(self, inst):
        print "Delete delete %r" % inst

cdef class SetDelete:
    def __set__(self, inst, val):
        print "SetDelete set %r %r" % (inst, val)

    def __delete__(self, inst):
        print "SetDelete delete %r" % inst

cdef class Long:
    def __long__(self):
        print "Long __long__"

cdef class GetAttrGetItemRedirect:
    """
    >>> o = GetAttrGetItemRedirect()

    >>> assert o.item == o['item']
    >>> source, item_value = o.item
    >>> assert source == 'item', source

    >>> assert o['attr'] == o.attr
    >>> source, attr_value = o['attr']
    >>> assert source == 'attr', source

    >>> assert item_value is attr_value, repr((item_value, attr_value))
    """
    cdef object obj
    def __cinit__(self):
        self.obj = object()

    def __getattr__(self, name):
        if name == 'item':
            return self[name]
        return ('attr', self.obj)

    def __getitem__(self, key):
        if key == 'attr':
            return getattr(self, key)
        return ('item', self.obj)


# test unbound method usage in subtypes
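# The subclass below forwards every special method to the unbound base class
# method (e.g. ``VerySpecial.__add__(self, other)``), while the doctests fetch
# the generated method wrappers from an instance (``vs0.__add__``) and call
# them explicitly, so both lookup paths are exercised.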

cdef class VerySpecialSubType(VerySpecial):
    """
    >>> vs0 = VerySpecialSubType(0)
    VS __init__ 0
    >>> vs1 = VerySpecialSubType(1)
    VS __init__ 1

    >>> vs0_add = vs0.__add__
    >>> vs0_add(vs1)
    VS __add__ 0 1
    >>> vs0_sub = vs0.__sub__
    >>> vs0_sub(vs1)
    VS __sub__ 0 1
    >>> vs0_mul = vs0.__mul__
    >>> vs0_mul(vs1)
    VS __mul__ 0 1
    >>> vs0_mod = vs0.__mod__
    >>> vs0_mod(vs1)
    VS __mod__ 0 1
    >>> vs0_divmod = vs0.__divmod__
    >>> vs0_divmod(vs1)
    VS __divmod__ 0 1
    >>> vs0_pow = vs0.__pow__
    >>> vs0_pow(vs1)
    VS __pow__ pow(0, 1, None)
    >>> vs0_pow(vs1, 13)
    VS __pow__ pow(0, 1, 13)
    >>> vs0_neg = vs0.__neg__
    >>> vs0_neg()
    VS __neg__ 0
    >>> vs0_pos = vs0.__pos__
    >>> vs0_pos()
    VS __pos__ 0
    >>> vs0_abs = vs0.__abs__
    >>> vs0_abs()
    VS __abs__ 0
    >>> vs0_invert = vs0.__invert__
    >>> vs0_invert()
    VS __invert__ 0
    >>> vs0_lshift = vs0.__lshift__
    >>> vs0_lshift(vs1)
    VS __lshift__ 0 << 1
    >>> vs0_rshift = vs0.__rshift__
    >>> vs0_rshift(vs1)
    VS __rshift__ 0 >> 1
    >>> vs0_and = vs0.__and__
    >>> vs0_and(vs1)
    VS __and__ 0 & 1
    >>> vs0_xor = vs0.__xor__
    >>> vs0_xor(vs1)
    VS __xor__ 0 ^ 1
    >>> vs0_or = vs0.__or__
    >>> vs0_or(vs1)
    VS __or__ 0 | 1
    >>> vs0_int = vs0.__int__
    >>> vs0_int()
    VS __int__ 0
    >>> vs0_float = vs0.__float__
    >>> vs0_float()
    VS __float__ 0
    >>> vs0_iadd = vs0.__iadd__
    >>> vs0_iadd(vs1)
    VS __iadd__ 0 += 1
    >>> vs0_isub = vs0.__isub__
    >>> vs0_isub(vs1)
    VS __isub__ 0 -= 1
    >>> vs0_imul = vs0.__imul__
    >>> vs0_imul(vs1)
    VS __imul__ 0 *= 1
    >>> vs0_imod = vs0.__imod__
    >>> vs0_imod(vs1)
    VS __imod__ 0 %= 1
    >>> vs0_ipow = vs0.__ipow__
    >>> vs0_ipow(vs1)
    VS __ipow__ 0 1
    >>> vs0_ilshift = vs0.__ilshift__
    >>> vs0_ilshift(vs1)
    VS __ilshift__ 0 <<= 1
    >>> vs0_irshift = vs0.__irshift__
    >>> vs0_irshift(vs1)
    VS __irshift__ 0 >>= 1
    >>> vs0_iand = vs0.__iand__
    >>> vs0_iand(vs1)
    VS __iand__ 0 &= 1
    >>> vs0_ixor = vs0.__ixor__
    >>> vs0_ixor(vs1)
    VS __ixor__ 0 ^= 1
    >>> vs0_ior = vs0.__ior__
    >>> vs0_ior(vs1)
    VS __ior__ 0 |= 1
    >>> vs0_floordiv = vs0.__floordiv__
    >>> vs0_floordiv(vs1)
    VS __floordiv__ 0 / 1
    >>> vs0_truediv = vs0.__truediv__
    >>> vs0_truediv(vs1)
    VS __truediv__ 0 / 1
    >>> vs0_ifloordiv = vs0.__ifloordiv__
    >>> vs0_ifloordiv(vs1)
    VS __ifloordiv__ 0 /= 1
    >>> vs0_itruediv = vs0.__itruediv__
    >>> vs0_itruediv(vs1)
    VS __itruediv__ 0 /= 1

    # If you define an arithmetic method, you get wrapper objects for
    # the reversed version as well.  (This behavior is unchanged by #561.)
    >>> vs0_radd = vs0.__radd__
    >>> vs0_radd(vs1)
    VS __add__ 1 0
    >>> vs0_rsub = vs0.__rsub__
    >>> vs0_rsub(vs1)
    VS __sub__ 1 0
    >>> vs0_rmul = vs0.__rmul__
    >>> vs0_rmul(vs1)
    VS __mul__ 1 0
    >>> vs0_rmod = vs0.__rmod__
    >>> vs0_rmod(vs1)
    VS __mod__ 1 0
    >>> vs0_rdivmod = vs0.__rdivmod__
    >>> vs0_rdivmod(vs1)
    VS __divmod__ 1 0
    >>> vs0_rpow = vs0.__rpow__
    >>> vs0_rpow(vs1)
    VS __pow__ pow(1, 0, None)
    >>> vs0_rlshift = vs0.__rlshift__
    >>> vs0_rlshift(vs1)
    VS __lshift__ 1 << 0
    >>> vs0_rrshift = vs0.__rrshift__
    >>> vs0_rrshift(vs1)
    VS __rshift__ 1 >> 0
    >>> vs0_rand = vs0.__rand__
    >>> vs0_rand(vs1)
    VS __and__ 1 & 0
    >>> vs0_rxor = vs0.__rxor__
    >>> vs0_rxor(vs1)
    VS __xor__ 1 ^ 0
    >>> vs0_ror = vs0.__ror__
    >>> vs0_ror(vs1)
    VS __or__ 1 | 0
    >>> vs0_rfloordiv = vs0.__rfloordiv__
    >>> vs0_rfloordiv(vs1)
    VS __floordiv__ 1 / 0
    >>> vs0_rtruediv = vs0.__rtruediv__
    >>> vs0_rtruediv(vs1)
    VS __truediv__ 1 / 0
    >>> vs0_getitem = vs0.__getitem__
    >>> vs0_getitem('foo')
    VS __getitem__ 0['foo']
    >>> vs0_contains = vs0.__contains__
    >>> vs0_contains(vs1)
    VS __contains__ 0 1
    False
    >>> vs0_len = vs0.__len__
    >>> vs0_len()
    VS __len__ 0
    0
    >>> vs0_repr = vs0.__repr__
    >>> vs0_repr()
    VS __repr__ 0
    >>> vs0_hash = vs0.__hash__
    >>> vs0_hash()
    VS __hash__ 0
    1000
    >>> vs0_call = vs0.__call__
    >>> vs0_call(vs1)
    VS __call__ 0(1)
    >>> vs0_str = vs0.__str__
    >>> vs0_str()
    VS __str__ 0
    >>> vs0_lt = vs0.__lt__
    >>> vs0_lt(vs1)
    VS richcmp 0 1 (kind=0)
    >>> vs0_le = vs0.__le__
    >>> vs0_le(vs1)
    VS richcmp 0 1 (kind=1)
    >>> vs0_eq = vs0.__eq__
    >>> vs0_eq(vs1)
    VS richcmp 0 1 (kind=2)
    >>> vs0_ne = vs0.__ne__
    >>> vs0_ne(vs1)
    VS richcmp 0 1 (kind=3)
    >>> vs0_gt = vs0.__gt__
    >>> vs0_gt(vs1)
    VS richcmp 0 1 (kind=4)
    >>> vs0_ge = vs0.__ge__
    >>> vs0_ge(vs1)
    VS richcmp 0 1 (kind=5)
    >>> vs0_iter = vs0.__iter__
    >>> vs0_iter()
    VS __iter__ 0
    >>> vs0_next = vs0.__next__
    >>> vs0_next()
    VS next/__next__ 0
    >>> vs0_get = vs0.__get__
    >>> vs0_get('instance', 'owner')
    VS __get__ 0 'instance' 'owner'
    >>> vs0_init = vs0.__init__
    >>> vs0_init(0)
    VS __init__ 0
    """
    def __init__(self, v):
        VerySpecial.__init__(self, v)

    def __add__(self, other):
        return VerySpecial.__add__(self, other)

    def __sub__(self, other):
        return VerySpecial.__sub__(self, other)

    def __mul__(self, other):
        return VerySpecial.__mul__(self, other)

    def __div__(self, other):
        return VerySpecial.__div__(self, other)

    def __mod__(self, other):
        return VerySpecial.__mod__(self, other)

    def __divmod__(self, other):
        return VerySpecial.__divmod__(self, other)

    def __pow__(self, other, mod):
        return VerySpecial.__pow__(self, other, mod)

    def __lshift__(self, other):
        return VerySpecial.__lshift__(self, other)

    def __rshift__(self, other):
        return VerySpecial.__rshift__(self, other)

    def __and__(self, other):
        return VerySpecial.__and__(self, other)

    def __xor__(self, other):
        return VerySpecial.__xor__(self, other)

    def __or__(self, other):
        return VerySpecial.__or__(self, other)

    def __floordiv__(self, other):
        return VerySpecial.__floordiv__(self, other)

    def __truediv__(self, other):
        return VerySpecial.__truediv__(self, other)

    def __neg__(self):
        return VerySpecial.__neg__(self)

    def __pos__(self):
        return VerySpecial.__pos__(self)

    def __abs__(self):
        return VerySpecial.__abs__(self)

    def __nonzero__(self):
        return VerySpecial.__nonzero__(self)

    def __invert__(self):
        return VerySpecial.__invert__(self)

    def __int__(self):
        return VerySpecial.__int__(self)

    def __long__(self):
        return VerySpecial.__long__(self)

    def __float__(self):
        return VerySpecial.__float__(self)

    def __oct__(self):
        return VerySpecial.__oct__(self)

    def __hex__(self):
        return VerySpecial.__hex__(self)

    def __iadd__(self, other):
        return VerySpecial.__iadd__(self, other)

    def __isub__(self, other):
        return VerySpecial.__isub__(self, other)

    def __imul__(self, other):
        return VerySpecial.__imul__(self, other)

    def __idiv__(self, other):
        return VerySpecial.__idiv__(self, other)

    def __imod__(self, other):
        return VerySpecial.__imod__(self, other)

    def __ipow__(self, other):
        return VerySpecial.__ipow__(self, other)

    def __ilshift__(self, other):
        return VerySpecial.__ilshift__(self, other)

    def __irshift__(self, other):
        return VerySpecial.__irshift__(self, other)

    def __iand__(self, other):
        return VerySpecial.__iand__(self, other)

    def __ixor__(self, other):
        return VerySpecial.__ixor__(self, other)

    def __ior__(self, other):
        return VerySpecial.__ior__(self, other)

    def __ifloordiv__(self, other):
        return VerySpecial.__ifloordiv__(self, other)

    def __itruediv__(self, other):
        return VerySpecial.__itruediv__(self, other)

    def __index__(self):
        return VerySpecial.__index__(self)

    def __getitem__(self, index):
        return VerySpecial.__getitem__(self, index)

    def __contains__(self, other):
        return VerySpecial.__contains__(self, other)

    def __len__(self):
        return VerySpecial.__len__(self)

    def __cmp__(self, other):
        return VerySpecial.__cmp__(self, other)

    def __repr__(self):
        return VerySpecial.__repr__(self)

    def __hash__(self):
        return VerySpecial.__hash__(self)

    def __call__(self, arg):
        return VerySpecial.__call__(self, arg)

    def __str__(self):
        return VerySpecial.__str__(self)

# there is no __richcmp__ at the Python level
#    def __richcmp__(self, other, kind):
#        return VerySpecial.__richcmp__(self, other, kind)
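# Instead, the rich comparisons are reachable as __lt__/__le__/__eq__/__ne__/
# __gt__/__ge__; the ``kind`` values 0..5 printed by the doctests above match
# CPython's Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT and Py_GE constants.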

    def __iter__(self):
        return VerySpecial.__iter__(self)

    def __next__(self):
        return VerySpecial.__next__(self)

    def __get__(self, inst, own):
        return VerySpecial.__get__(self, inst, own)
Cython-0.26.1/tests/run/new_style_exceptions.pyx0000664000175000017500000000076512542002467022624 0ustar  stefanstefan00000000000000
import sys, types

def test(obj):
    """
    >>> test(Exception('hi'))
    Raising: Exception('hi',)
    Caught: Exception('hi',)
    """
    print u"Raising: %s%r" % (obj.__class__.__name__, obj.args)
    try:
        raise obj
    except:
        info = sys.exc_info()
        if sys.version_info >= (2,5):
            assert isinstance(info[0], type)
        else:
            assert isinstance(info[0], types.ClassType)
        print u"Caught: %s%r" % (info[1].__class__.__name__, info[1].args)
Cython-0.26.1/tests/run/closure_tests_2.pyx0000664000175000017500000002204512542002467021464 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# preparse: id
# preparse: def_to_cdef
#
# closure_tests_2.pyx
#
# Battery of tests for closures in Cython. Based on the collection of
# compiler tests from P423/B629 at Indiana University, Spring 1999 and
# Fall 2000. Special thanks to R. Kent Dybvig, Dan Friedman, Kevin
# Millikin, and everyone else who helped to generate the original
# tests. Converted into a collection of Python/Cython tests by Craig
# Citro.
#
# Note: This set of tests is split (somewhat randomly) into several
# files, simply because putting all the tests in a single file causes
# gcc and g++ to buckle under the load.
#


def g1526():
    """
    >>> g1526()
    2
    """
    x_1134 = 0
    def g1525():
      x_1136 = 1
      z_1135 = x_1134
      def g1524(y_1137):
        return (x_1136)+((z_1135)+(y_1137))
      return g1524
    f_1138 = g1525()
    return f_1138(f_1138(x_1134))


def g1535():
    """
    >>> g1535()
    3050
    """
    def g1534():
      def g1533():
        def g1531(t_1141):
          def g1532(f_1142):
            return t_1141(f_1142(1000))
          return g1532
        return g1531
      def g1530():
        def g1529(x_1140):
          return (x_1140)+(50)
        return g1529
      return g1533()(g1530())
    def g1528():
      def g1527(y_1139):
        return (y_1139)+(2000)
      return g1527
    return g1534()(g1528())


def g1540():
    """
    >>> g1540()
    2050
    """
    def g1539():
      t_1143 = 50
      def g1538(f_1144):
        return (t_1143)+(f_1144())
      return g1538
    def g1537():
      def g1536():
        return 2000
      return g1536
    return g1539()(g1537())


def g1547():
    """
    >>> g1547()
    2050
    """
    def g1546():
      def g1545():
        def g1543(t_1145):
          def g1544(f_1146):
            return (t_1145)+(f_1146())
          return g1544
        return g1543
      return g1545()(50)
    def g1542():
      def g1541():
        return 2000
      return g1541
    return g1546()(g1542())


def g1550():
    """
    >>> g1550()
    700
    """
    def g1549():
      x_1147 = 300
      def g1548(y_1148):
        return (x_1147)+(y_1148)
      return g1548
    return g1549()(400)


def g1553():
    """
    >>> g1553()
    0
    """
    x_1152 = 3
    def g1552():
      def g1551(x_1150, y_1149):
        return x_1150
      return g1551
    f_1151 = g1552()
    if (f_1151(0, 0)):
      return f_1151(f_1151(0, 0), x_1152)
    else:
      return 0


def g1562():
    """
    >>> g1562()
    False
    """
    def g1561():
      def g1556(x_1153):
        def g1560():
          def g1559():
            return isinstance(x_1153, list)
          if (g1559()):
            def g1558():
              def g1557():
                return (x_1153[0])
              return (g1557() == 0)
            return (not g1558())
          else:
            return False
        if (g1560()):
          return x_1153
        else:
          return False
      return g1556
    f_1154 = g1561()
    def g1555():
      def g1554():
        return [0,[]]
      return [0,g1554()]
    return f_1154(g1555())


def g1570():
    """
    >>> g1570()
    False
    """
    def g1569():
      def g1563(x_1155):
        def g1568():
          if (x_1155):
            def g1567():
              def g1566():
                return isinstance(x_1155, list)
              if (g1566()):
                def g1565():
                  def g1564():
                    return (x_1155[0])
                  return (g1564() == 0)
                return (not g1565())
              else:
                return False
            return (not g1567())
          else:
            return False
        if (g1568()):
          return x_1155
        else:
          return False
      return g1563
    f_1156 = g1569()
    return f_1156(0)


def g1575():
    """
    >>> g1575()
    []
    """
    def g1574():
      def g1571(x_1157):
        def g1573():
          def g1572():
            return isinstance(x_1157, list)
          if (g1572()):
            return True
          else:
            return (x_1157 == [])
        if (g1573()):
          return x_1157
        else:
          return []
      return g1571
    f_1158 = g1574()
    return f_1158(0)


def g1578():
    """
    >>> g1578()
    4
    """
    y_1159 = 4
    def g1577():
      def g1576(y_1160):
        return y_1160
      return g1576
    f_1161 = g1577()
    return f_1161(f_1161(y_1159))


def g1581():
    """
    >>> g1581()
    0
    """
    y_1162 = 4
    def g1580():
      def g1579(x_1164, y_1163):
        return 0
      return g1579
    f_1165 = g1580()
    return f_1165(f_1165(y_1162, y_1162), f_1165(y_1162, y_1162))


def g1584():
    """
    >>> g1584()
    0
    """
    y_1166 = 4
    def g1583():
      def g1582(x_1168, y_1167):
        return 0
      return g1582
    f_1169 = g1583()
    return f_1169(f_1169(y_1166, y_1166), f_1169(y_1166, f_1169(y_1166, y_1166)))


def g1587():
    """
    >>> g1587()
    0
    """
    y_1170 = 4
    def g1586():
      def g1585(x_1172, y_1171):
        return 0
      return g1585
    f_1173 = g1586()
    return f_1173(f_1173(y_1170, f_1173(y_1170, y_1170)), f_1173(y_1170, f_1173(y_1170, y_1170)))


def g1594():
    """
    >>> g1594()
    4
    """
    def g1593():
      def g1588(y_1174):
        def g1592():
          def g1591(f_1176):
            return f_1176(f_1176(y_1174))
          return g1591
        def g1590():
          def g1589(y_1175):
            return y_1175
          return g1589
        return g1592()(g1590())
      return g1588
    return g1593()(4)


def g1598():
    """
    >>> g1598()
    23
    """
    def g1597():
      def g1596(x_1177):
        return x_1177
      return g1596
    f_1178 = g1597()
    def g1595():
      if (False):
        return 1
      else:
        return f_1178(22)
    return (g1595()+1)


def g1603():
    """
    >>> g1603()
    22
    """
    def g1602():
      def g1601(x_1179):
        return x_1179
      return g1601
    f_1180 = g1602()
    def g1600():
      def g1599():
        return 23 == 0
      return f_1180(g1599())
    if (g1600()):
      return 1
    else:
      return 22


def g1611():
    """
    >>> g1611()
    5061
    """
    def g1610():
      def g1609(x_1182):
        if (x_1182):
          return (not x_1182)
        else:
          return x_1182
      return g1609
    f_1185 = g1610()
    def g1608():
      def g1607(x_1181):
        return (10)*(x_1181)
      return g1607
    f2_1184 = g1608()
    x_1183 = 23
    def g1606():
      def g1605():
        def g1604():
          return x_1183 == 0
        return f_1185(g1604())
      if (g1605()):
        return 1
      else:
        return (x_1183)*(f2_1184((x_1183-1)))
    return (g1606()+1)


def g1614():
    """
    >>> g1614()
    1
    """
    def g1613():
      def g1612():
        return 0
      return g1612
    f_1186 = g1613()
    x_1187 = f_1186()
    return 1


def g1617():
    """
    >>> g1617()
    1
    """
    def g1616():
      def g1615():
        return 0
      return g1615
    f_1188 = g1616()
    f_1188()
    return 1


def g1620():
    """
    >>> g1620()
    4
    """
    def g1619():
      def g1618(x_1189):
        return x_1189
      return g1618
    f_1190 = g1619()
    if (True):
      f_1190(3)
      return 4
    else:
      return 5


def g1623():
    """
    >>> g1623()
    6
    """
    def g1622():
      def g1621(x_1191):
        return x_1191
      return g1621
    f_1192 = g1622()
    (f_1192(4)) if (True) else (5)
    return 6


def g1627():
    """
    >>> g1627()
    120
    """
    def g1626():
      def g1624(fact_1195, n_1194, acc_1193):
        def g1625():
          return n_1194 == 0
        if (g1625()):
          return acc_1193
        else:
          return fact_1195(fact_1195, (n_1194-1), (n_1194)*(acc_1193))
      return g1624
    fact_1196 = g1626()
    return fact_1196(fact_1196, 5, 1)


def g1632():
    """
    >>> g1632()
    144
    """
    def g1631():
      def g1628(b_1199, c_1198, a_1197):
        b_1203 = (b_1199)+(a_1197)
        def g1630():
          def g1629():
            a_1201 = (b_1199)+(b_1199)
            c_1200 = (c_1198)+(c_1198)
            return (a_1201)+(a_1201)
          return (a_1197)+(g1629())
        a_1202 = g1630()
        return (a_1202)*(a_1202)
      return g1628
    return g1631()(2, 3, 4)


def g1639():
    """
    >>> g1639()
    3
    """
    def g1638():
      def g1636(x_1204):
        def g1637():
          return x_1204()
        return g1637
      return g1636
    f_1205 = g1638()
    def g1635():
      def g1634():
        def g1633():
          return 3
        return g1633
      return f_1205(g1634())
    return g1635()()


def g1646():
    """
    >>> g1646()
    3628800
    """
    def g1645():
      def g1643(x_1207):
        def g1644():
          return x_1207 == 0
        if (g1644()):
          return 1
        else:
          return (x_1207)*(f_1206((x_1207)-(1)))
      return g1643
    f_1206 = g1645()
    q_1208 = 17
    def g1642():
      def g1640(a_1209):
        q_1208 = 10
        def g1641():
          return a_1209(q_1208)
        return g1641
      return g1640
    g_1210 = g1642()
    return g_1210(f_1206)()

Cython-0.26.1/tests/run/ticket_123.pyx0000664000175000017500000000052012542002467020207 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> ret = repeat_iter()
>>> for s in ret:
...     print(s)
a
a
b
b
c
c
"""

def repeat_iter():
    cdef dict e
    cdef unicode s
    ret = []
    e = {u"A": u"a", u"B": u"b", u"C": u"c"}
    for s in e.itervalues():
        ret.append(s)
    for s in e.itervalues():
        ret.append(s)

    ret.sort()
    return ret
Cython-0.26.1/tests/run/complex_numbers_cxx_T398.pyx0000664000175000017500000000014612542002467023156 0ustar  stefanstefan00000000000000# ticket: 398

cdef extern from "complex_numbers_cxx_T398.h": pass
include "complex_numbers_T305.pyx"
Cython-0.26.1/tests/run/bishop1.pyx0000664000175000017500000000050712542002467017711 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> m = fmatrix()
    >>> m[1] = True
    >>> m.getfoo()
    1
    >>> m[0] = True
    >>> m.getfoo()
    0
"""

cdef class fmatrix:
  cdef int foo

  def __setitem__(self, int key, int value):
    if key:
      self.foo = value
      return
    self.foo = not value

  def getfoo(self):
    return self.foo
Cython-0.26.1/tests/run/r_print.pyx0000664000175000017500000000023412542002467020016 0ustar  stefanstefan00000000000000def frighten():
    """
    >>> frighten()
    NOBODY expects the Spanish Inquisition!
    """
    print u"NOBODY", u"expects", u"the Spanish Inquisition!"
Cython-0.26.1/tests/run/assert.pyx0000664000175000017500000000324112542002467017643 0ustar  stefanstefan00000000000000# mode: run

cimport cython

def f(a, b, int i):
    """
    >>> f(1, 2, 1)
    >>> f(0, 2, 1)
    Traceback (most recent call last):
    AssertionError
    >>> f(1, -1, 1)
    Traceback (most recent call last):
    AssertionError
    >>> f(1, 2, 0)
    Traceback (most recent call last):
    AssertionError
    """
    assert a
    assert a+b
    assert i


@cython.test_assert_path_exists(
    '//AssertStatNode',
    '//AssertStatNode//TupleNode')
def g(a, b):
    """
    >>> g(1, "works")
    >>> g(0, "fails")
    Traceback (most recent call last):
    AssertionError: fails
    >>> g(0, (1, 2))
    Traceback (most recent call last):
    AssertionError: (1, 2)
    """
    assert a, b


@cython.test_assert_path_exists(
    '//AssertStatNode',
    '//AssertStatNode//TupleNode')
def g(a, b):
    """
    >>> g(1, "works")
    >>> g(0, "fails")
    Traceback (most recent call last):
    AssertionError: fails
    >>> g(0, (1, 2))
    Traceback (most recent call last):
    AssertionError: (1, 2)
    """
    assert a, b


@cython.test_assert_path_exists(
    '//AssertStatNode',
    '//AssertStatNode//TupleNode',
    '//AssertStatNode//TupleNode//TupleNode')
def assert_with_tuple_arg(a):
    """
    >>> assert_with_tuple_arg(True)
    >>> assert_with_tuple_arg(False)
    Traceback (most recent call last):
    AssertionError: (1, 2)
    """
    assert a, (1, 2)


@cython.test_assert_path_exists(
    '//AssertStatNode')
@cython.test_fail_if_path_exists(
    '//AssertStatNode//TupleNode')
def assert_with_str_arg(a):
    """
    >>> assert_with_str_arg(True)
    >>> assert_with_str_arg(False)
    Traceback (most recent call last):
    AssertionError: abc
    """
    assert a, 'abc'
Cython-0.26.1/tests/run/class_scope_del_T684.py0000664000175000017500000000042412542002467022021 0ustar  stefanstefan00000000000000# mode:run
# tag: class, scope, del
# ticket: 684

class DelInClass(object):
    """
    >>> DelInClass.y
    5
    >>> DelInClass.x
    Traceback (most recent call last):
    AttributeError: type object 'DelInClass' has no attribute 'x'
    """
    x = 5
    y = x
    del x
Cython-0.26.1/tests/run/cpp_stl_string.pyx0000664000175000017500000001620713023021033021363 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

cimport cython

from libcpp.string cimport string

b_asdf = b'asdf'
b_asdg = b'asdg'
b_s = b's'

def test_conversion(py_obj):
    """
    >>> test_conversion(b_asdf) == b_asdf or test_conversion(b_asdf)
    True
    >>> test_conversion(123)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: expected ..., int found
    """
    cdef string s = py_obj
    return s

def test_indexing(char *py_str):
    """
    >>> test_indexing(b_asdf)
    ('s', 's')
    """
    cdef string s
    s = string(py_str)
    return chr(s[1]), chr(s.at(1))

def test_size(char *py_str):
    """
    >>> test_size(b_asdf)
    (4, 4)
    """
    cdef string s
    s = string(py_str)
    return s.size(), s.length()

def test_compare(char *a, char *b):
    """
    >>> test_compare(b_asdf, b_asdf)
    0

    >>> test_compare(b_asdf, b_asdg) < 0
    True
    """
    cdef string s = string(a)
    cdef string t = string(b)
    return s.compare(t)

def test_empty():
    """
    >>> test_empty()
    (True, False)
    """
    cdef string a = string(b"")
    cdef string b = string(b"aa")
    return a.empty(), b.empty()

def test_push_back(char *a):
    """
    >>> test_push_back(b_asdf) == b_asdf + b_s
    True
    """
    cdef string s = string(a)
    s.push_back(ord('s'))
    return s.c_str()

def test_insert(char *a, char *b, int i):
    """
    >>> test_insert('AAAA'.encode('ASCII'), 'BBBB'.encode('ASCII'), 2) == 'AABBBBAA'.encode('ASCII')
    True
    """
    cdef string s = string(a)
    cdef string t = string(b)
    cdef string u = s.insert(i, t)
    return u.c_str()

def test_copy(char *a):
    """
    >>> test_copy(b_asdf) == b_asdf[1:]
    True
    """
    cdef string t = string(a)
    cdef char[6] buffer
    cdef size_t length = t.copy(buffer, 4, 1)
    buffer[length] = c'\0'
    return buffer

def test_find(char *a, char *b):
    """
    >>> test_find(b_asdf, 'df'.encode('ASCII'))
    2
    """
    cdef string s = string(a)
    cdef string t = string(b)
    cdef size_t i = s.find(t)
    return i

def test_clear():
    """
    >>> test_clear() == ''.encode('ASCII')
    True
    """
    cdef string s = string("asdf")
    s.clear()
    return s.c_str()

def test_assign(char *a):
    """
    >>> test_assign(b_asdf) == 'ggg'.encode('ASCII')
    True
    """
    cdef string s = string(a)
    s.assign("ggg")
    return s.c_str()


def test_substr(char *a):
    """
    >>> test_substr('ABCDEFGH'.encode('ASCII')) == ('BCDEFGH'.encode('ASCII'), 'BCDE'.encode('ASCII'), 'ABCDEFGH'.encode('ASCII'))
    True
    """
    cdef string s = string(a)
    cdef string x, y, z
    x = s.substr(1)
    y = s.substr(1, 4)
    z = s.substr()
    return x.c_str(), y.c_str(), z.c_str()

def test_append(char *a, char *b):
    """
    >>> test_append(b_asdf, '1234'.encode('ASCII')) == b_asdf + '1234'.encode('ASCII')
    True
    """
    cdef string s = string(a)
    cdef string t = string(b)
    cdef string j = s.append(t)
    return j.c_str()

def test_char_compare(py_str):
    """
    >>> test_char_compare(b_asdf)
    True
    """
    cdef char *a = py_str
    cdef string b = string(a)
    return b.compare(b) == 0

def test_cstr(char *a):
    """
    >>> test_cstr(b_asdf) == b_asdf
    True
    """
    cdef string b = string(a)
    return b.c_str()

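# The path assertions on the following functions check that Cython compiles
# ``.decode()`` on a C++ string (or on the char* returned by c_str()) into a
# direct C-API decoding call (PythonCapiCallNode) instead of a generic
# attribute lookup plus Python-level method call (AttributeNode).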
@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode(char* a):
    """
    >>> print(test_decode(b_asdf))
    asdf
    """
    cdef string b = string(a)
    return b.decode('ascii')


@cython.test_assert_path_exists("//ReturnStatNode//PythonCapiCallNode")
def test_cstr_decode(char* a):
    """
    >>> print(test_cstr_decode(b_asdf))
    asdf
    """
    cdef string b = string(a)
    return b.c_str().decode('utf-8')


@cython.test_assert_path_exists("//ReturnStatNode//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//ReturnStatNode//AttributeNode")
def test_cstr_ptr_decode(char* a):
    """
    >>> print(test_cstr_ptr_decode(b_asdf))
    asdf
    """
    cdef string b = string(a)
    s = b.c_str()
    return s.decode('utf-8')


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced(char* a):
    """
    >>> print(test_decode_sliced(b_asdf))
    sd
    """
    cdef string b = string(a)
    return b[1:3].decode('ascii')

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced_negative(char* a):
    """
    >>> a,b,c,d = test_decode_sliced_negative(b_asdf)
    >>> print(a)
    sd
    >>> print(b)
    a
    >>> print(c)
    <BLANKLINE>
    >>> print(d)
    <BLANKLINE>
    """
    cdef string b = string(a)
    return b[-3:-1].decode('ascii'), b[-5:-3].decode('ascii'), b[-20:-4].decode('ascii'), b[-2:-20].decode('ascii')

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced_end(char* a):
    """
    >>> a,b = test_decode_sliced_end(b_asdf)
    >>> print(a)
    asd
    >>> print(b)
    asdf
    """
    cdef string b = string(a)
    return b[:3].decode('ascii'), b[:42].decode('ascii')

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced_end_negative(char* a):
    """
    >>> a,b,c = test_decode_sliced_end_negative(b_asdf)
    >>> print(a)
    asd
    >>> print(b)
    a
    >>> print(c)
    <BLANKLINE>
    """
    cdef string b = string(a)
    return b[:-1].decode('ascii'), b[:-3].decode('ascii'), b[:-4].decode('ascii')

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced_start(char* a):
    """
    >>> print(test_decode_sliced_start(b_asdf))
    df
    """
    cdef string b = string(a)
    return b[2:].decode('ascii')

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def test_decode_sliced_start_negative(char* a):
    """
    >>> a,b = test_decode_sliced_start_negative(b_asdf)
    >>> print(a)
    df
    >>> print(b)
    asdf
    """
    cdef string b = string(a)
    return b[-2:].decode('ascii'), b[-20:].decode('ascii')

def test_equals_operator(char *a, char *b):
    """
    >>> test_equals_operator(b_asdf, b_asdf)
    (True, False)
    """
    cdef string s = string(a)
    cdef string t = string(b)
    return t == s, t != "asdf"

def test_less_than(char *a, char *b):
    """
    >>> test_less_than(b_asdf[:-1], b_asdf)
    (True, True, True)

    >>> test_less_than(b_asdf[:-1], b_asdf[:-1])
    (False, False, True)
    """
    cdef string s = string(a)
    cdef string t = string(b)
    return (s < t, s < b, s <= b)

def test_greater_than(char *a, char *b):
    """
    >>> test_greater_than(b_asdf[:-1], b_asdf)
    (False, False, False)

    >>> test_greater_than(b_asdf[:-1], b_asdf[:-1])
    (False, False, True)
    """
    cdef string s = string(a)
    cdef string t = string(b)
    return (s > t, s > b, s >= b)


def test_iteration(string s):
    """
    >>> test_iteration(b'xyz')
    [120, 121, 122]
    >>> test_iteration(b'')
    []
    """
    return [c for c in s]
Cython-0.26.1/tests/run/pointers.pyx0000664000175000017500000000350212542002467020205 0ustar  stefanstefan00000000000000cimport cython

cdef char* c_string = b'abcdefg'
cdef void* void_ptr = c_string

cdef int i = 42
cdef int* int_ptr = &i

cdef float x = 42.2
cdef float* float_ptr = &x

def compare():
    """
    >>> compare()
    True
    True
    True
    False
    False
    True
    True
    """
    print c_string == c_string
    print c_string == void_ptr
    print c_string is void_ptr
    print c_string != void_ptr
    print c_string is not void_ptr
    print void_ptr != int_ptr
    print void_ptr != float_ptr

def if_tests():
    """
    >>> if_tests()
    True
    True
    """
    if c_string == void_ptr:
        print True
    if c_string != void_ptr:
        print False
    if int_ptr != void_ptr:
        print True

def bool_binop():
    """
    >>> bool_binop()
    True
    """
    if c_string == void_ptr and c_string == c_string and int_ptr != void_ptr and void_ptr != float_ptr:
        print True

def bool_binop_truth(int x):
    """
    >>> bool_binop_truth(1)
    True
    True
    >>> bool_binop_truth(0)
    True
    """
    if c_string and void_ptr and int_ptr and (c_string == c_string or int_ptr != void_ptr):
        print True
    if c_string and x or not (void_ptr or int_ptr and float_ptr) or x:
        print True


def binop_voidptr(int x, long y, char* z):
    """
    >>> binop_voidptr(1, 3, b'abc')
    'void *'
    """
    result = &x and &y and z
    return cython.typeof(result)


def cond_expr_voidptr(int x, long y, char* z):
    """
    >>> cond_expr_voidptr(0, -1, b'abc')
    ('void *', 0)
    >>> cond_expr_voidptr(-1, 0, b'abc')
    ('void *', -1)
    >>> cond_expr_voidptr(-1, 0, b'')
    ('void *', 0)
    >>> cond_expr_voidptr(0, -1, b'')
    ('void *', -1)
    """
    result = &x if len(z) else &y
    assert sizeof(long) >= sizeof(int)
    assert -1 == <int>(-1L)
    return cython.typeof(result), (<int*>result)[0]
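# Note: ``&x`` (int *) and ``&y`` (long *) only share ``void *`` as a common
# type in the conditional expression above, which is why ``cython.typeof``
# reports 'void *' and the value has to be read back through a pointer cast.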
Cython-0.26.1/tests/run/attribute_and_lambda.pyx0000664000175000017500000000035412542002467022471 0ustar  stefanstefan00000000000000# mode: run
# tag: lambda, attribute, regression

class TestClass(object):
    bar = 123


def test_attribute_and_lambda(f):
    """
    >>> test_attribute_and_lambda(lambda _: TestClass())
    123
    """
    return f(lambda x: x).bar
Cython-0.26.1/tests/run/r_hordijk1.pyx0000664000175000017500000000103112542002467020371 0ustar  stefanstefan00000000000000__doc__ = """
  >>> try:
  ...     s = Spam()
  ... except KeyError, e:
  ...     print("Exception: %s" % e)
  ... else:
  ...     print("Did not raise the expected exception")
  Exception: 'This is not a spanish inquisition'
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace("Error, e", "Error as e")
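# Python 3 dropped the ``except KeyError, e`` syntax used in the doctest, so
# the docstring is rewritten to the ``except KeyError as e`` form at import
# time when running on Python 3.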

cdef extern from "Python.h":
    ctypedef class __builtin__.list [object PyListObject]:
        pass

cdef class Spam(list):
    def __init__(self):
        raise KeyError("This is not a spanish inquisition")
Cython-0.26.1/tests/run/self_in_ext_type_closure.pyx0000664000175000017500000000347312542002467023445 0ustar  stefanstefan00000000000000# mode: run
# ticket: 742
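# Regression test for closures and generators defined inside methods of
# extension types: they must be able to capture ``self`` (and the other
# method arguments), just like closures in plain Python classes.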

import cython

@cython.cclass
class ExtType(object):
    def const1(self):
        return 1

    def ext_method0(self):
        """
        >>> x = ExtType()
        >>> x.ext_method0()()
        1
        """
        def func():
            return self.const1()
        return func

    def ext_method1(self, a):
        """
        >>> x = ExtType()
        >>> x.ext_method1(2)()
        (1, 2)
        """
        def func():
            return self.const1(), a
        return func

    def ext_method1_def(self, a=2):
        """
        >>> x = ExtType()
        >>> x.ext_method1_def()()
        (1, 2)
        >>> x.ext_method1_def(3)()
        (1, 3)
        """
        def func():
            return self.const1(), a
        return func

    def ext_method_args(self, *args):
        """
        >>> x = ExtType()
        >>> x.ext_method_args(2)()
        (1, 2)
        """
        def func():
            return self.const1(), args[0]
        return func

    def ext_method_args_only(*args):
        """
        >>> x = ExtType()
        >>> x.ext_method_args_only(2)()
        (1, 2)
        """
        def func():
            return args[0].const1(), args[1]
        return func


@cython.cclass
class GenType(object):
    def const1(self):
        return 1

    def gen0(self):
        """
        >>> x = GenType()
        >>> tuple(x.gen0())
        (1, 2)
        """
        yield self.const1()
        yield 2

    def gen1(self, a):
        """
        >>> x = GenType()
        >>> tuple(x.gen1(2))
        (1, 2)
        """
        yield self.const1()
        yield a

    def gen_default(self, a=2):
        """
        >>> x = GenType()
        >>> tuple(x.gen_default())
        (1, 2)
        >>> tuple(x.gen_default(3))
        (1, 3)
        """
        yield self.const1()
        yield a
Cython-0.26.1/tests/run/c_int_types_T255.pyx0000664000175000017500000004635512574327400021420 0ustar  stefanstefan00000000000000# ticket: 255

__doc__ = u""

# -------------------------------------------------------------------

SCHAR_MAX = ((<unsigned char>-1)>>1)
SCHAR_MIN = (-SCHAR_MAX-1)
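# ``<unsigned char>-1`` is the all-ones value of the type (255), so shifting
# it right by one yields the corresponding signed maximum (127); the minimum
# then follows from two's complement.  The same pattern is used for the wider
# integer types below instead of relying on limits.h.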

def test_schar(signed char x):
   u"""
   >>> test_schar(-129) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_schar(-128)
   -128
   >>> test_schar(0)
   0
   >>> test_schar(127)
   127
   >>> test_schar(128) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to signed char
   """
   return x

def test_add_schar(x, y):
   u"""
   >>> test_add_schar(SCHAR_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_schar(SCHAR_MIN, 0) == SCHAR_MIN
   True
   >>> test_add_schar(SCHAR_MIN, 1) == SCHAR_MIN+1
   True
   >>> test_add_schar(SCHAR_MAX, -1) == SCHAR_MAX-1
   True
   >>> test_add_schar(SCHAR_MAX, 0) == SCHAR_MAX
   True
   >>> test_add_schar(SCHAR_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to signed char
   """
   cdef signed char r = x + y
   return r

UCHAR_MAX = ((<unsigned char>-1))

def test_uchar(unsigned char x):
   u"""
   >>> test_uchar(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to unsigned char
   >>> test_uchar(0)
   0
   >>> test_uchar(1)
   1
   >>> test_uchar(UCHAR_MAX) == UCHAR_MAX
   True
   >>> test_uchar(UCHAR_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to unsigned char
   """
   return x

def test_add_uchar(x, y):
   u"""
   >>> test_add_uchar(UCHAR_MAX, 0) == UCHAR_MAX
   True
   >>> test_add_uchar(UCHAR_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to unsigned char
   """
   cdef unsigned char r = x + y
   return r

# chars may be signed or unsigned
if (<char>-1) < 0:
    CHAR_MAX = SCHAR_MAX
    CHAR_MIN = SCHAR_MIN
else:
    CHAR_MAX = UCHAR_MAX
    CHAR_MIN = 0
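# Whether plain ``char`` is signed is platform dependent, so CHAR_MIN/CHAR_MAX
# are picked at runtime from the ``<char>-1`` check above, mirroring the
# CHAR_MIN/CHAR_MAX definitions in C's limits.h.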

def test_char(char x):
   u"""
   >>> test_char(CHAR_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> if CHAR_MIN < 0:
   ...     assert test_char(-1) == -1
   >>> test_char(CHAR_MIN) == CHAR_MIN
   True
   >>> test_char(0)
   0
   >>> test_char(1)
   1
   >>> test_char(CHAR_MAX) == CHAR_MAX
   True
   >>> test_char(CHAR_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to char
   """
   return x

def test_add_char(x, y):
   u"""
   >>> test_add_char(CHAR_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_char(CHAR_MIN, 0) == CHAR_MIN
   True
   >>> test_add_char(CHAR_MIN, 1) == CHAR_MIN+1
   True
   >>> test_add_char(CHAR_MAX, -1) == CHAR_MAX-1
   True
   >>> test_add_char(CHAR_MAX, 0) == CHAR_MAX
   True
   >>> test_add_char(CHAR_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to char
   """
   cdef char r = x + y
   return r

# -------------------------------------------------------------------

SHORT_MAX = ((<unsigned short>-1)>>1)
SHORT_MIN = (-SHORT_MAX-1)

def test_short(short x):
   u"""
   >>> test_short(SHORT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   >>> test_short(SHORT_MIN) == SHORT_MIN
   True
   >>> test_short(-1)
   -1
   >>> test_short(0)
   0
   >>> test_short(1)
   1
   >>> test_short(SHORT_MAX) == SHORT_MAX
   True
   >>> test_short(SHORT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   """
   return x

def test_add_short(x, y):
   u"""
   >>> test_add_short(SHORT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   >>> test_add_short(SHORT_MIN, 0) == SHORT_MIN
   True
   >>> test_add_short(SHORT_MIN, 1) == SHORT_MIN+1
   True
   >>> test_add_short(SHORT_MAX, -1) == SHORT_MAX-1
   True
   >>> test_add_short(SHORT_MAX, 0) == SHORT_MAX
   True
   >>> test_add_short(SHORT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   """
   cdef short r = x + y
   return r

SSHORT_MAX = ((<unsigned short>-1)>>1)
SSHORT_MIN = (-SSHORT_MAX-1)

def test_sshort(short x):
   u"""
   >>> test_sshort(SSHORT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   >>> test_sshort(SSHORT_MIN) == SSHORT_MIN
   True
   >>> test_sshort(-1)
   -1
   >>> test_sshort(0)
   0
   >>> test_sshort(1)
   1
   >>> test_sshort(SSHORT_MAX) == SSHORT_MAX
   True
   >>> test_short(SSHORT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   """
   return x

def test_add_sshort(x, y):
   u"""
   >>> test_add_sshort(SSHORT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   >>> test_add_sshort(SSHORT_MIN, 0) == SSHORT_MIN
   True
   >>> test_add_sshort(SSHORT_MIN, 1) == SSHORT_MIN+1
   True
   >>> test_add_sshort(SSHORT_MAX, -1) == SSHORT_MAX-1
   True
   >>> test_add_sshort(SSHORT_MAX, 0) == SSHORT_MAX
   True
   >>> test_add_sshort(SSHORT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to short
   """
   cdef signed short r = x + y
   return r

USHORT_MAX = ((<unsigned short>-1))

def test_ushort(unsigned short x):
   u"""
   >>> test_ushort(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to unsigned short
   >>> test_ushort(0)
   0
   >>> test_ushort(1)
   1
   >>> test_ushort(USHORT_MAX) == USHORT_MAX
   True
   >>> test_ushort(USHORT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_ushort(x, y):
   u"""
   >>> test_add_ushort(USHORT_MAX, 0) == USHORT_MAX
   True
   >>> test_add_ushort(USHORT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to unsigned short
   """
   cdef unsigned short r = x + y
   return r

# -------------------------------------------------------------------

INT_MAX = ((<unsigned int>-1)>>1)
INT_MIN = (-INT_MAX-1)

def test_int(int x):
   u"""
   >>> test_int(INT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_int(INT_MIN) == INT_MIN
   True
   >>> test_int(-1)
   -1
   >>> test_int(0)
   0
   >>> test_int(1)
   1
   >>> test_int(INT_MAX) == INT_MAX
   True
   >>> test_int(INT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_int(x, y):
   u"""
   >>> test_add_int(INT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_int(INT_MIN, 0) == INT_MIN
   True
   >>> test_add_int(INT_MIN, 1) == INT_MIN+1
   True
   >>> test_add_int(INT_MAX, -1) == INT_MAX-1
   True
   >>> test_add_int(INT_MAX, 0) == INT_MAX
   True
   >>> test_add_int(INT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef int r = x + y
   return r

SINT_MAX = ((<unsigned int>-1)>>1)
SINT_MIN = (-SINT_MAX-1)

def test_sint(signed int x):
   u"""
   >>> test_sint(SINT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_sint(SINT_MIN) == SINT_MIN
   True
   >>> test_sint(-1)
   -1
   >>> test_sint(0)
   0
   >>> test_sint(1)
   1
   >>> test_sint(SINT_MAX) == SINT_MAX
   True
   >>> test_sint(SINT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_sint(x, y):
   u"""
   >>> test_add_sint(SINT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_sint(SINT_MIN, 0) == SINT_MIN
   True
   >>> test_add_sint(SINT_MIN, 1) == SINT_MIN+1
   True
   >>> test_add_sint(SINT_MAX, -1) == SINT_MAX-1
   True
   >>> test_add_sint(SINT_MAX, 0) == SINT_MAX
   True
   >>> test_add_sint(SINT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef signed int r = x + y
   return r

UINT_MAX = (<unsigned int>-1)

def test_uint(unsigned int x):
   u"""
   >>> test_uint(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to unsigned int
   >>> print(test_uint(0))
   0
   >>> print(test_uint(1))
   1
   >>> test_uint(UINT_MAX) == UINT_MAX
   True
   >>> test_uint(UINT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_uint(x, y):
   u"""
   >>> test_add_uint(UINT_MAX, 0) == UINT_MAX
   True
   >>> test_add_uint(UINT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef unsigned int r = x + y
   return r

# -------------------------------------------------------------------

LONG_MAX = ((<unsigned long>-1)>>1)
LONG_MIN = (-LONG_MAX-1)

def test_long(long x):
   u"""
   >>> test_long(LONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_long(LONG_MIN) == LONG_MIN
   True
   >>> test_long(-1)
   -1
   >>> test_long(0)
   0
   >>> test_long(1)
   1
   >>> test_long(LONG_MAX) == LONG_MAX
   True
   >>> test_long(LONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_long(x, y):
   u"""
   >>> test_add_long(LONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_long(LONG_MIN, 0) == LONG_MIN
   True
   >>> test_add_long(LONG_MIN, 1) == LONG_MIN+1
   True
   >>> test_add_long(LONG_MAX, -1) == LONG_MAX-1
   True
   >>> test_add_long(LONG_MAX, 0) == LONG_MAX
   True
   >>> test_add_long(LONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef long r = x + y
   return r

SLONG_MAX = ((<unsigned long>-1)>>1)
SLONG_MIN = (-SLONG_MAX-1)

def test_slong(signed long x):
   u"""
   >>> test_slong(SLONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_slong(SLONG_MIN) == SLONG_MIN
   True
   >>> test_slong(-1)
   -1
   >>> test_slong(0)
   0
   >>> test_slong(1)
   1
   >>> test_slong(SLONG_MAX) == SLONG_MAX
   True
   >>> test_slong(SLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_slong(x, y):
   u"""
   >>> test_add_slong(SLONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_slong(SLONG_MIN, 0) == SLONG_MIN
   True
   >>> test_add_slong(SLONG_MIN, 1) == SLONG_MIN+1
   True
   >>> test_add_slong(SLONG_MAX, -1) == SLONG_MAX-1
   True
   >>> test_add_slong(SLONG_MAX, 0) == SLONG_MAX
   True
   >>> test_add_slong(SLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef signed long r = x + y
   return r

ULONG_MAX = (<unsigned long>-1)

def test_ulong(unsigned long x):
   u"""
   >>> test_ulong(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to unsigned long
   >>> print(test_ulong(0))
   0
   >>> print(test_ulong(1))
   1
   >>> test_ulong(ULONG_MAX) == ULONG_MAX
   True
   >>> test_ulong(ULONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_ulong(x, y):
   u"""
   >>> test_add_ulong(ULONG_MAX, 0) == ULONG_MAX
   True
   >>> test_add_ulong(ULONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef unsigned long r = x + y
   return r

# -------------------------------------------------------------------

LONGLONG_MAX = ((<unsigned long long>-1)>>1)
LONGLONG_MIN = (-LONGLONG_MAX-1)

def test_longlong(long long x):
   u"""
   >>> test_longlong(LONGLONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_longlong(LONGLONG_MIN) == LONGLONG_MIN
   True
   >>> print(test_longlong(-1))
   -1
   >>> print(test_longlong(0))
   0
   >>> print(test_longlong(1))
   1
   >>> test_longlong(LONGLONG_MAX) == LONGLONG_MAX
   True
   >>> test_longlong(LONGLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_longlong(x, y):
   u"""
   >>> test_add_longlong(LONGLONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_longlong(LONGLONG_MIN, 0) == LONGLONG_MIN
   True
   >>> test_add_longlong(LONGLONG_MIN, 1) == LONGLONG_MIN+1
   True
   >>> test_add_longlong(LONGLONG_MAX, -1) == LONGLONG_MAX-1
   True
   >>> test_add_longlong(LONGLONG_MAX, 0) == LONGLONG_MAX
   True
   >>> test_add_longlong(LONGLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef long long r = x + y
   return r

SLONGLONG_MAX = ((<unsigned long long>-1)>>1)
SLONGLONG_MIN = (-SLONGLONG_MAX-1)

def test_slonglong(long long x):
   u"""
   >>> test_slonglong(SLONGLONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_slonglong(SLONGLONG_MIN) == SLONGLONG_MIN
   True
   >>> print(test_slonglong(-1))
   -1
   >>> print(test_slonglong(0))
   0
   >>> print(test_slonglong(1))
   1
   >>> test_slonglong(SLONGLONG_MAX) == SLONGLONG_MAX
   True
   >>> test_slonglong(SLONGLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_slonglong(x, y):
   u"""
   >>> test_add_slonglong(SLONGLONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_slonglong(SLONGLONG_MIN, 0) == SLONGLONG_MIN
   True
   >>> test_add_slonglong(SLONGLONG_MIN, 1) == SLONGLONG_MIN+1
   True
   >>> test_add_slonglong(SLONGLONG_MAX, -1) == SLONGLONG_MAX-1
   True
   >>> test_add_slonglong(SLONGLONG_MAX, 0) == SLONGLONG_MAX
   True
   >>> test_add_slonglong(SLONGLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef signed long long r = x + y
   return r

ULONGLONG_MAX = (<unsigned long long>-1)

def test_ulonglong(unsigned long long x):
   u"""
   >>> test_ulonglong(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to unsigned PY_LONG_LONG
   >>> print(test_ulonglong(0))
   0
   >>> print(test_ulonglong(1))
   1
   >>> test_ulonglong(ULONGLONG_MAX) == ULONGLONG_MAX
   True
   >>> test_ulonglong(ULONGLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_ulonglong(x, y):
   u"""
   >>> test_add_ulonglong(ULONGLONG_MAX, 0) == ULONGLONG_MAX
   True
   >>> test_add_ulonglong(ULONGLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef unsigned long long r = x + y
   return r

# -------------------------------------------------------------------
import sys

class MyInt(object):
   def __init__(self, value):
      self.value = value
   def __int__(self):
      print(u"MyInt.__int__()")
      return self.value

class MyBadInt(MyInt):
   def __int__(self):
      return u"%s" % self.value

class MyInt2:
   def __init__(self, value):
      self.value = value
   def __int__(self):
      print(u"MyInt.__int__()")
      return self.value

class MyBadInt2(MyInt2):
   def __int__(self):
      return u"%s" % self.value

def test_convert_pyint(x):
   u"""
   >>> test_convert_pyint(None)
   Traceback (most recent call last):
       ...
   TypeError: an integer is required
   >>> test_convert_pyint("123")
   Traceback (most recent call last):
       ...
   TypeError: an integer is required
   >>> test_convert_pyint(MyBadInt(0)) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   TypeError: ... returned non-... (type ...)

   >>> test_convert_pyint(False) == 0
   True
   >>> test_convert_pyint(True) == 1
   True
   >>> test_convert_pyint(3.14) == 3
   True

   >>> test_convert_pyint(MyInt(LONG_MIN)) == LONG_MIN
   MyInt.__int__()
   True
   >>> test_convert_pyint(MyInt(0)) == 0
   MyInt.__int__()
   True
   >>> test_convert_pyint(MyInt(LONG_MAX)) == LONG_MAX
   MyInt.__int__()
   True
   """
   cdef long r = x
   return r

class MyLong(object):
   def __init__(self, value):
      self.value = value
   def __int__(self): # Python 3
      return self.__long__()
   def __long__(self):
      print(u"MyInt.__long__()")
      return self.value

class MyBadLong(MyLong):
   def __long__(self):
      return u"%s" % self.value

def test_convert_pylong(x):
   u"""
   >>> test_convert_pylong(None)
   Traceback (most recent call last):
       ...
   TypeError: an integer is required
   >>> test_convert_pylong("123")
   Traceback (most recent call last):
       ...
   TypeError: an integer is required
   >>> test_convert_pylong(MyBadLong(0)) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   TypeError: ... returned non-... (type ...)

   >>> test_convert_pylong(False) == 0
   True
   >>> test_convert_pylong(True) == 1
   True
   >>> test_convert_pylong(3.14) == 3
   True

   >>> test_convert_pylong(MyLong(LONGLONG_MIN)) == LONGLONG_MIN
   MyInt.__long__()
   True
   >>> test_convert_pylong(MyLong(0)) == 0
   MyInt.__long__()
   True
   >>> test_convert_pylong(MyLong(LONGLONG_MAX)) == LONGLONG_MAX
   MyInt.__long__()
   True
   """
   cdef long long r = x
   return r

# -------------------------------------------------------------------

__doc__ = u"".join([
      f.__doc__ for f in (
         #
         test_char,  test_add_char,
         test_schar, test_add_schar,
         test_uchar, test_add_uchar,
         #
         test_short,  test_add_short,
         test_sshort, test_add_sshort,
         test_ushort, test_add_ushort,
         #
         test_int,  test_add_int,
         test_sint, test_add_sint,
         test_uint, test_add_uint,
         #
         test_long,  test_add_long,
         test_slong, test_add_slong,
         test_ulong, test_add_ulong,
         #
         test_longlong,  test_add_longlong,
         test_slonglong, test_add_slonglong,
         test_ulonglong, test_add_ulonglong,
         #
         test_convert_pyint,
         test_convert_pylong,
         )
    ])

# -------------------------------------------------------------------
Cython-0.26.1/tests/run/crashT245_pxd.pxd0000664000175000017500000000011512542002467020644 0ustar  stefanstefan00000000000000cdef extern from "crashT245.h":
    ctypedef struct MyStruct:
        int x

Cython-0.26.1/tests/run/ifelseexpr_T267.pyx0000664000175000017500000000145512542002467021237 0ustar  stefanstefan00000000000000# mode: run
# tag: condexpr
# ticket: 267

cimport cython

def ident(x): return x

def constants(x):
    """
    >>> constants(4)
    1
    >>> constants(5)
    10
    """
    a = 1 if x < 5 else 10
    return a

def temps(x):
    """
    >>> temps(4)
    1
    >>> temps(5)
    10
    """
    return ident(1) if ident(x) < ident(5) else ident(10)


def nested(x):
    """
    >>> nested(1)
    1
    >>> nested(2)
    2
    >>> nested(3)
    3
    """
    a = 1 if x == 1 else (2 if x == 2 else 3)
    return a


@cython.test_fail_if_path_exists('//CondExprNode')
def const_true(a,b):
    """
    >>> const_true(1,2)
    1
    """
    return a if 1 == 1 else b

@cython.test_fail_if_path_exists('//CondExprNode')
def const_false(a,b):
    """
    >>> const_false(1,2)
    2
    """
    return a if 1 != 1 else b
Cython-0.26.1/tests/run/memoryview_namespace_T775.pyx0000664000175000017500000000057112542002467023312 0ustar  stefanstefan00000000000000
cdef int[10] data
cdef int[:] myslice = data

def test_memoryview_namespace():
    """
    >>> test_memoryview_namespace()
    """
    namespace = dir(__import__(__name__))
    assert 'array' not in namespace, namespace
    assert 'memoryview' not in namespace, namespace
    assert '_memoryviewslice' not in namespace, namespace
    assert 'Enum' not in namespace, namespace
Cython-0.26.1/tests/run/empty_for_loop_T208.pyx0000664000175000017500000000044712542002467022121 0ustar  stefanstefan00000000000000# ticket: 208

def go_py_empty():
    """
    >>> go_py_empty()
    20
    """
    i = 20
    for i in range(4,0):
        print u"Spam!"
    return i

def go_c_empty():
    """
    >>> go_c_empty()
    20
    """
    cdef int i = 20
    for i in range(4,0):
        print u"Spam!"
    return i
Cython-0.26.1/tests/run/unicode_ascii_auto_encoding.pyx0000664000175000017500000000317412542002467024043 0ustar  stefanstefan00000000000000#cython: c_string_type = unicode
#cython: c_string_encoding = ascii
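# With these two directives, C char* values handed back to Python are decoded
# from ASCII into unicode objects, and str/unicode objects assigned to char*
# (as in from_object() below) are encoded to ASCII automatically.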

auto_string_type = unicode

from libc.string cimport strcmp


def _as_string(x):
    try:
        return x.decode('latin1')
    except AttributeError:
        return x


def as_objects(char* ascii_data):
    """
    >>> x = as_objects('abc')
    >>> isinstance(x, auto_string_type) or type(x)
    True
    >>> _as_string(x) == 'abc' or repr(x)
    True
    """
    assert isinstance(ascii_data, auto_string_type)
    assert isinstance(ascii_data, bytes)
    assert isinstance(ascii_data, str)
    assert isinstance(ascii_data, unicode)
    return ascii_data

def from_object():
    """
    >>> from_object()
    """
    cdef bytes b = b"abc"
    cdef str s = "abc"
    cdef unicode u = u"abc"
    assert strcmp(b, "abc") == 0
    assert strcmp(s, "abc") == 0
    assert strcmp(u, "abc") == 0

def slice_as_objects(char* ascii_data, int start, int end):
    """
    >>> x = slice_as_objects('grok', 1, 3)
    >>> isinstance(x, auto_string_type) or type(x)
    True
    >>> _as_string(x) == 'ro' or repr(x)
    True
    """
    assert isinstance(ascii_data[start:end], auto_string_type)
    assert isinstance(ascii_data[start:end], bytes)
    assert isinstance(ascii_data[start:end], str)
    assert isinstance(ascii_data[start:end], unicode)

    assert isinstance(ascii_data[start:], auto_string_type)
    assert isinstance(ascii_data[start:], bytes)
    assert isinstance(ascii_data[start:], str)
    assert isinstance(ascii_data[start:], unicode)

    return ascii_data[start:end]
Cython-0.26.1/tests/run/struct_conversion.pyx0000664000175000017500000001106512542002467022136 0ustar  stefanstefan00000000000000cdef struct Point:
    double x
    double y
    int color

def test_constructor(x, y, int color):
    """
    >>> sorted(test_constructor(1,2,255).items())
    [('color', 255), ('x', 1.0), ('y', 2.0)]
    >>> try: test_constructor(1,None,255)
    ... except TypeError: pass
    """
    cdef Point p = Point(x, y, color)
    return p


def return_constructor(x, y, int color):
    """
    >>> sorted(return_constructor(1,2,255).items())
    [('color', 255), ('x', 1.0), ('y', 2.0)]
    >>> try: return_constructor(1, None, 255)
    ... except TypeError: pass
    """
    return Point(x, y, color)


def test_constructor_kwds(x, y, color):
    """
    >>> sorted(test_constructor_kwds(1.25, 2.5, 128).items())
    [('color', 128), ('x', 1.25), ('y', 2.5)]
    >>> test_constructor_kwds(1.25, 2.5, None)
    Traceback (most recent call last):
    ...
    TypeError: an integer is required
    """
    cdef Point p = Point(x=x, y=y, color=color)
    return p


def return_constructor_kwds(double x, y, color):
    """
    >>> sorted(return_constructor_kwds(1.25, 2.5, 128).items())
    [('color', 128), ('x', 1.25), ('y', 2.5)]
    >>> return_constructor_kwds(1.25, 2.5, None)
    Traceback (most recent call last):
    ...
    TypeError: an integer is required
    """
    return Point(x=x, y=y, color=color)


def test_dict_construction(x, y, color):
    """
    >>> sorted(test_dict_construction(4, 5, 64).items())
    [('color', 64), ('x', 4.0), ('y', 5.0)]
    >>> try: test_dict_construction("foo", 5, 64)
    ... except TypeError: pass
    """
    cdef Point p = {'color': color, 'x': x, 'y': y}
    return p

def test_list_construction(x, y, color):
    """
    >>> sorted(test_list_construction(4, 5, 64).items())
    [('color', 64), ('x', 4.0), ('y', 5.0)]
    >>> try: test_list_construction("foo", 5, 64)
    ... except TypeError: pass
    """
    cdef Point p = [x, y, color]
    return p

'''
# FIXME: make this work
def test_tuple_construction(x, y, color):
    """
    >>> sorted(test_tuple_construction(4, 5, 64).items())
    [('color', 64), ('x', 4.0), ('y', 5.0)]
    >>> try: test_tuple_construction("foo", 5, 64)
    ... except TypeError: pass
    """
    cdef Point p = (x, y, color)
    return p
'''

cdef union int_or_float:
    int n
    double x
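
# The members of a union share storage, so test_union_constructor() only ever
# sets one member per call and reads back that same member.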

def test_union_constructor(n,x):
    """
    >>> test_union_constructor(1, None)
    1
    >>> test_union_constructor(None, 2.0)
    2.0
    """
    cdef int_or_float u
    if n is None:
        u = int_or_float(x=x)
        return u.x
    else:
        u = int_or_float(n=n)
        return u.n

cdef struct with_pointers:
    bint is_integral
    int_or_float data
    void* ptr

def test_pointers(int n, double x):
    """
    >>> test_pointers(100, 2.71828)
    100
    2.71828
    True
    """
    cdef with_pointers a = [True, {'n': n}, NULL]
    cdef with_pointers b = with_pointers(False, {'x': x}, NULL)
    print a.data.n
    print b.data.x
    print a.ptr == b.ptr == NULL

cdef struct MyStruct:
    char c
    int i
    float f
    char *s

bhello = b"hello"  # must hold a C reference in PyPy

def test_obj_to_struct(MyStruct mystruct):
    """
    >>> test_obj_to_struct(dict(c=10, i=20, f=6.7, s=bhello))
    c=10 i=20 f=6.70 s=hello
    >>> test_obj_to_struct(None)
    Traceback (most recent call last):
       ...
    TypeError: Expected a mapping, got NoneType
    >>> test_obj_to_struct(dict(s=b"world"))
    Traceback (most recent call last):
       ...
    ValueError: No value specified for struct attribute 'c'
    >>> test_obj_to_struct(dict(c=b"world"))
    Traceback (most recent call last):
       ...
    TypeError: an integer is required
    """
    print 'c=%d i=%d f=%.2f s=%s' % (mystruct.c, mystruct.i, mystruct.f, mystruct.s.decode('ascii'))

cdef struct NestedStruct:
    MyStruct mystruct
    double d

def test_nested_obj_to_struct(NestedStruct nested):
    """
    >>> test_nested_obj_to_struct(dict(mystruct=dict(c=10, i=20, f=6.7, s=bhello), d=4.5))
    c=10 i=20 f=6.70 s=hello d=4.50
    >>> test_nested_obj_to_struct(dict(d=7.6))
    Traceback (most recent call last):
       ...
    ValueError: No value specified for struct attribute 'mystruct'
    >>> test_nested_obj_to_struct(dict(mystruct={}, d=7.6))
    Traceback (most recent call last):
       ...
    ValueError: No value specified for struct attribute 'c'
    """
    print 'c=%d i=%d f=%.2f s=%s d=%.2f' % (nested.mystruct.c,
                                            nested.mystruct.i,
                                            nested.mystruct.f,
                                            nested.mystruct.s.decode('UTF-8'),
                                            nested.d)

Cython-0.26.1/tests/run/slice3.pyx0000664000175000017500000000210012542002467017515 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> class Test(object):
    ...     def __setitem__(self, key, value):
    ...         print((key, value))
    ...     def __getitem__(self, key):
    ...         print(key)
    ...         return self

    >>> ellipsis(Test())
    Ellipsis

    >>> full(Test())
    slice(None, None, None)

    >>> select(0, Test(), 10, 20, 30)
    slice(10, None, None)
    slice(None, 20, None)
    slice(None, None, 30)
    slice(10, 20, None)
    slice(10, None, 30)
    slice(None, 20, 30)
    slice(10, 20, 30)
    slice(1, 2, 3)

    >>> set(Test(), -11)
    (slice(1, 2, 3), -11)
"""

def ellipsis(o):
    obj1 = o[...]

def full(o):
    obj1 = o[::]

def set(o, v):
    cdef int int3, int4, int5
    int3, int4, int5 = 1,2,3
    o[int3:int4:int5] = v

def select(obj1, obj2, obj3, obj4, obj5):
    cdef int int3, int4, int5
    int3, int4, int5 = 1,2,3

    obj1 = obj2[obj3::]
    obj1 = obj2[:obj4:]
    obj1 = obj2[::obj5]
    obj1 = obj2[obj3:obj4:]
    obj1 = obj2[obj3::obj5]
    obj1 = obj2[:obj4:obj5]
    obj1 = obj2[obj3:obj4:obj5]
    obj1 = obj2[int3:int4:int5]

Cython-0.26.1/tests/run/raise_memory_error_T650.pyx0000664000175000017500000000171212542002467022765 0ustar  stefanstefan00000000000000# ticket: 650

cimport cython


@cython.test_assert_path_exists(
    '//RaiseStatNode',
    '//RaiseStatNode[@builtin_exc_name = "MemoryError"]')
def raise_me_type():
    """
    >>> try: raise_me_type()
    ... except MemoryError: pass
    ... else: print('NOT RAISED!')
    """
    raise MemoryError


@cython.test_assert_path_exists(
    '//RaiseStatNode',
    '//RaiseStatNode[@builtin_exc_name = "MemoryError"]')
def raise_me_instance():
    """
    >>> try: raise_me_instance()
    ... except MemoryError: pass
    ... else: print('NOT RAISED!')
    """
    raise MemoryError()


def raise_me_instance_value():
    """
    >>> raise_me_instance_value()
    Traceback (most recent call last):
        ...
    MemoryError: oom
    """
    raise MemoryError("oom")


def raise_me_instance_value_separate():
    """
    >>> raise_me_instance_value_separate()
    Traceback (most recent call last):
        ...
    MemoryError: oom
    """
    raise MemoryError, "oom"
Cython-0.26.1/tests/run/cpp_exception_declaration_compatibility.srctree0000664000175000017500000000125312542002467027330 0ustar  stefanstefan00000000000000# tag: cpp

"""
PYTHON setup.py build_ext -i
PYTHON test.py
"""

############### setup.py ###################
from distutils.core import setup
from Cython.Build import cythonize

setup(
    name="cython_test",
    ext_modules=cythonize('*.pyx', language="c++")
)


############### test.py ###################

from cpp_exc import TestClass

TestClass().test_func()


############### cpp_exc.pxd ###################

cdef inline void handle_exception():
    pass

cdef class TestClass:
    cpdef test_func(self) except +handle_exception


############### cpp_exc.pyx ###################

cdef class TestClass:
    cpdef test_func(self) except +handle_exception:
        print('test')
Cython-0.26.1/tests/run/cpp_function_lib.h0000664000175000017500000000136613143605603021273 0ustar  stefanstefan00000000000000#ifndef CPP_FUNCTION_LIB_H
#define CPP_FUNCTION_LIB_H

#include <functional>

// Functions, functor and a holder of std::function used by cpp_stl_function.pyx tests.

double add_one(double a, int b);
double add_two(double a, int b);

class AddAnotherFunctor
{
    double to_add;

public:
    AddAnotherFunctor(double to_add);
    double operator()(double a, int b) const;
};


class FunctionKeeper
{
    std::function<double(double, int)> my_function;

public:
    FunctionKeeper(std::function<double(double, int)> user_function);
    virtual ~FunctionKeeper();

    void set_function(std::function<double(double, int)> user_function);
    std::function<double(double, int)> get_function() const;

    double call_function(double a, int b) const;
};

#endif
Cython-0.26.1/tests/run/exarkun.pyx0000664000175000017500000000112112542002467020012 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> p = Point(1,2,3)
    >>> p.gettuple()
    (1.0, 2.0, 3.0)
    >>> q = p + Point(2,3,4)
    >>> q.gettuple()
    (3.0, 5.0, 7.0)
    >>> p.gettuple()
    (1.0, 2.0, 3.0)
"""

cdef class Point:
    cdef double x, y, z
    def __init__(self, double x, double y, double z):
        self.x = x
        self.y = y
        self.z = z

    # XXX: originally, this said "def __add__(self, other)"
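    # For cdef classes, __add__ is used for both operand orders (there is no
    # separate __radd__ slot at the C level), so the first argument is not
    # guaranteed to be the Point instance.  Typing both parameters as Point
    # makes Cython insert the argument type checks.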
    def __add__(Point self, Point other):
        return Point(self.x + other.x, self.y + other.y, self.z + other.z)

    def gettuple(self):
        return (self.x, self.y, self.z)
Cython-0.26.1/tests/run/inherited_final_method.pyx0000664000175000017500000000105512542002467023027 0ustar  stefanstefan00000000000000# mode: run
# tag: exttype, final

cimport cython


cdef class BaseClass:
    """
    >>> obj = BaseClass()
    >>> obj.call_base()
    True
    """
    cdef method(self):
        return True

    def call_base(self):
        return self.method()


@cython.final
cdef class Child(BaseClass):
    """
    >>> obj = Child()
    >>> obj.call_base()
    True
    >>> obj.call_child()
    True
    """
    cdef method(self):
        return True

    def call_child(self):
        # original bug: this requires a proper cast for self
        return self.method()
Cython-0.26.1/tests/run/r_typecast.pyx0000664000175000017500000000035112542002467020516 0ustar  stefanstefan00000000000000cdef class ExtType:
    cdef c_method(self):
        return self

    def method(self):
        return 1

def call_method(ExtType et):
    """
    >>> call_method( ExtType() ).method()
    1
    """
    return et.c_method()
Cython-0.26.1/tests/run/reimport.pyx0000664000175000017500000000032712542002467020205 0ustar  stefanstefan00000000000000# mode: run
# tag: import

# reimports at module init time used to be a problem in Py3
import reimport

def test():
    """
    >>> test()
    True
    """
    import sys
    return reimport in sys.modules.values()
Cython-0.26.1/tests/run/typeof.pyx0000664000175000017500000000204412542002467017650 0ustar  stefanstefan00000000000000from cython cimport typeof

cdef class A:
    pass

cdef class B(A):
    pass

cdef struct X:
    double a
    double complex b

def simple():
    """
    >>> simple()
    int
    long
    long long
    int *
    int **
    A
    B
    X
    Python object
    """
    cdef int i = 0
    cdef long l = 0
    cdef long long ll = 0
    cdef int* iptr = &i
    cdef int** iptrptr = &iptr
    cdef A a = None
    cdef B b = None
    cdef X x = X(a=1, b=2)
    print typeof(i)
    print typeof(l)
    print typeof(ll)
    print typeof(iptr)
    print typeof(iptrptr)
    print typeof(a)
    print typeof(b)
    print typeof(x)
    print typeof(None)
    used = i, l, ll, iptr, iptrptr, a, b, x

def expression():
    """
    >>> expression()
    double
    double complex
    int
    unsigned int
    """
    cdef X x = X(a=1, b=2)
    cdef X *xptr = &x
    cdef short s = 0
    cdef int i = 0
    cdef unsigned int ui = 0
    print typeof(x.a)
    print typeof(xptr.b)
    print typeof(s + i)
    print typeof(i + ui)
    used = x, xptr, s, i, ui
Cython-0.26.1/tests/run/type_inference_T768.pyx0000664000175000017500000000052012542002467022066 0ustar  stefanstefan00000000000000# mode: run
# ticket: 768
from cython cimport typeof
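
# 'del' only works on Python object variables, so deleting x forces the
# inferred type to be an object type: the integer case falls back to a plain
# Python object, while the dict case keeps the more specific 'dict object'.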

def type_inference_del_int():
    """
    >>> type_inference_del_int()
    'Python object'
    """
    x = 1
    del x
    return typeof(x)

def type_inference_del_dict():
    """
    >>> type_inference_del_dict()
    'dict object'
    """
    x = {}
    del x
    return typeof(x)
Cython-0.26.1/tests/run/complex_numbers_cxx_T398.h0000664000175000017500000000013212542002467022560 0ustar  stefanstefan00000000000000#if defined(__cplusplus)
#define CYTHON_CCOMPLEX 1
#else
#define CYTHON_CCOMPLEX 0
#endif
Cython-0.26.1/tests/run/control_flow_except_T725.pyx0000664000175000017500000000044312542002467023143 0ustar  stefanstefan00000000000000def unused_except_capture():
    """
    >>> unused_except_capture()
    """
    try:
        try:
            raise ValueError
        except TypeError, s:
            raise TypeError
        except ValueError, s:
            raise ValueError # segfault
    except ValueError:
        pass
Cython-0.26.1/tests/run/cpp_exceptions.pyx0000664000175000017500000001114612542002467021370 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

cdef int raise_py_error() except *:
    raise TypeError("custom")

cdef extern from "cpp_exceptions_helper.h":
    cdef int raise_int_raw "raise_int"(bint fire) except +
    cdef int raise_int_value "raise_int"(bint fire) except +ValueError
    cdef int raise_int_custom "raise_int"(bint fire) except +raise_py_error
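    # Three flavours of C++ exception translation:
    #  - bare "except +" maps standard C++ exceptions to Python equivalents
    #    (std::bad_alloc -> MemoryError, std::out_of_range -> IndexError, ...)
    #    and anything unrecognised to RuntimeError;
    #  - "except +ValueError" always raises ValueError;
    #  - "except +raise_py_error" calls the handler defined above instead.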

    cdef int raise_index_raw "raise_index"(bint fire) except +
    cdef int raise_index_value "raise_index"(bint fire) except +ValueError
    cdef int raise_index_custom "raise_index"(bint fire) except +raise_py_error

    cdef void raise_domain_error() except +
    cdef void raise_ios_failure() except +
    cdef void raise_memory() except +
    cdef void raise_overflow() except +
    cdef void raise_range_error() except +
    cdef void raise_typeerror() except +
    cdef void raise_underflow() except +

    cdef cppclass Foo:
        int bar_raw "bar"(bint fire) except +
        int bar_value "bar"(bint fire) except +ValueError
        int bar_custom "bar"(bint fire) except +raise_py_error


def test_domain_error():
    """
    >>> test_domain_error()
    Traceback (most recent call last):
    ...
    ValueError: domain_error
    """
    raise_domain_error()

def test_ios_failure():
    """
    >>> try: test_ios_failure()
    ... except (IOError, OSError): pass
    """
    raise_ios_failure()

def test_memory():
    """
    >>> test_memory()
    Traceback (most recent call last):
    ...
    MemoryError
    """
    # Re-raise the exception without a description string because we can't
    # rely on the implementation-defined value of what() in the doctest.
    try:
        raise_memory()
    except MemoryError:
        raise MemoryError

def test_overflow():
    """
    >>> test_overflow()
    Traceback (most recent call last):
    ...
    OverflowError: overflow_error
    """
    raise_overflow()

def test_range_error():
    """
    >>> test_range_error()
    Traceback (most recent call last):
    ...
    ArithmeticError: range_error
    """
    raise_range_error()

def test_typeerror():
    """
    >>> test_typeerror()
    Traceback (most recent call last):
    ...
    TypeError
    """
    # Re-raise the exception without a description string because we can't
    # rely on the implementation-defined value of what() in the doctest.
    try:
        raise_typeerror()
    except TypeError:
        raise TypeError

def test_underflow():
    """
    >>> test_underflow()
    Traceback (most recent call last):
    ...
    ArithmeticError: underflow_error
    """
    raise_underflow()

def test_int_raw(bint fire):
    """
    >>> test_int_raw(False)
    >>> test_int_raw(True)
    Traceback (most recent call last):
    ...
    RuntimeError: Unknown exception
    """
    raise_int_raw(fire)

def test_int_value(bint fire):
    """
    >>> test_int_value(False)
    >>> test_int_value(True)
    Traceback (most recent call last):
    ...
    ValueError
    """
    raise_int_value(fire)

def test_int_custom(bint fire):
    """
    >>> test_int_custom(False)
    >>> test_int_custom(True)
    Traceback (most recent call last):
    ...
    TypeError: custom
    """
    raise_int_custom(fire)

def test_index_raw(bint fire):
    """
    >>> test_index_raw(False)
    >>> test_index_raw(True)
    Traceback (most recent call last):
    ...
    IndexError: c++ error
    """
    raise_index_raw(fire)

def test_index_value(bint fire):
    """
    >>> test_index_value(False)
    >>> test_index_value(True)
    Traceback (most recent call last):
    ...
    ValueError: c++ error
    """
    raise_index_value(fire)

def test_index_custom(bint fire):
    """
    >>> test_index_custom(False)
    >>> test_index_custom(True)
    Traceback (most recent call last):
    ...
    TypeError: custom
    """
    raise_index_custom(fire)

def test_cppclass_method_raw(bint fire):
    """
    >>> test_cppclass_method_raw(False)
    >>> test_cppclass_method_raw(True)
    Traceback (most recent call last):
    ...
    RuntimeError: Unknown exception
    """
    foo = new Foo()
    try:
        foo.bar_raw(fire)
    finally:
        del foo

def test_cppclass_method_value(bint fire):
    """
    >>> test_cppclass_method_value(False)
    >>> test_cppclass_method_value(True)
    Traceback (most recent call last):
    ...
    ValueError
    """
    foo = new Foo()
    try:
        foo.bar_value(fire)
    finally:
        del foo

def test_cppclass_method_custom(bint fire):
    """
    >>> test_cppclass_method_custom(False)
    >>> test_cppclass_method_custom(True)
    Traceback (most recent call last):
    ...
    TypeError: custom
    """
    foo = new Foo()
    try:
        foo.bar_custom(fire)
    finally:
        del foo
Cython-0.26.1/tests/run/behnel3.pyx0000664000175000017500000000013312542002467017657 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> y
    >>> y or {}
    {}
    >>> x
    {}
"""

y = None
x = y or {}
Cython-0.26.1/tests/run/rodriguez_1.pyx0000664000175000017500000000043012542002467020571 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> b = B()
    >>> sorted(b.t.items())
    [(1, ((1, 2, 3),)), (2, (1, 2, 3))]
"""

class B:
    def __init__(self):
        self.t = {
            1 : (
                (1, 2, 3)
                ,
                )

            , 2 : ( 1, 2, 3)
            }
Cython-0.26.1/tests/run/define_macro_helper.h0000664000175000017500000000016313023021033021704 0ustar  stefanstefan00000000000000#pragma once
#ifdef DEFINE_NO_VALUE
#define VAL (DEFINE_WITH_VALUE + 1)
#else
#define VAL DEFINE_WITH_VALUE
#endif
Cython-0.26.1/tests/run/autotestdict.pyx0000664000175000017500000000613412542002467021062 0ustar  stefanstefan00000000000000# cython: autotestdict=True
# Directive defaults to True, but not when testing in Py3.4
"""
Tests autotestdict compiler directive.

Both module test and individual tests are run; finally,
all_tests_run() is executed which does final validation.

>>> items = list(__test__.items())
>>> items.sort()
>>> for key, value in items:
...     print('%s ; %s' % (key, value))
MyCdefClass.cpdef_method (line 77) ; >>> add_log("cpdef class method")
MyCdefClass.method (line 74) ; >>> add_log("cdef class method")
MyClass.method (line 63) ; >>> add_log("class method")
mycpdeffunc (line 50) ; >>> add_log("cpdef")
myfunc (line 40) ; >>> add_log("def")
"""

import sys
log = []

cdef cdeffunc():
    """
    >>> True
    False
    """
cdeffunc() # make sure it's being used

def all_tests_run():
    assert sorted(log) == sorted([u'cdef class', u'class'] + (
        (1 if sys.version_info < (3, 4) else 2) * [u'cdef class method', u'class method', u'cpdef', u'cpdef class method', u'def'])), sorted(log)

def add_log(s):
    log.append(unicode(s))
    if len(log) == len(__test__) + (2 if sys.version_info < (3, 4) else 7):
        # Final per-function doctest executed
        all_tests_run()

def myfunc():
    """>>> add_log("def")"""
    x = lambda a:1 # no docstring here ...

def doc_without_test():
    """Some docs"""

def nodocstring():
    pass

cpdef mycpdeffunc():
    """>>> add_log("cpdef")"""


class MyClass:
    """
    Needs no hack

    >>> add_log("class")
    >>> True
    True
    """

    def method(self):
        """>>> add_log("class method")"""

cdef class MyCdefClass:
    """
    Needs no hack

    >>> add_log("cdef class")
    >>> True
    True
    """
    def method(self):
        """>>> add_log("cdef class method")"""

    cpdef cpdef_method(self):
        """>>> add_log("cpdef class method")"""

    cdef cdef_method(self):
        """>>> add_log("cdef class cmethod")"""

    def __cinit__(self):
        """
        Should not be included, as it can't be looked up with getattr

        >>> True
        False
        """

    def __dealloc__(self):
        """
        Should not be included, as it can't be looked up with getattr

        >>> True
        False
        """

    def __richcmp__(self, other, int op):
        """
        Should not be included, as it can't be looked up with getattr in Py 2

        >>> True
        False
        """

    def __nonzero__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> True
        False
        """

    def __len__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> sys.version_info < (3, 4)
        False
        """

    def __contains__(self, value):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> sys.version_info < (3, 4)
        False
        """

cdef class MyOtherCdefClass:
    """
    Needs no hack

    >>> True
    True
    """

    def __bool__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 2

        >>> True
        False
        """
Cython-0.26.1/tests/run/charencoding.pyx0000664000175000017500000000230412542002467020765 0ustar  stefanstefan00000000000000# coding: ASCII

import sys
if sys.version_info[0] < 3:
    __doc__ = u"""
>>> expected = ''.join([chr(i) for i in range(0x10,0xFF,0x11)] + [chr(0xFF)])

>>> s = test_assign()
>>> assert s == expected, repr(s)

>>> s = test_array()
>>> assert s == expected, repr(s)
"""
else:
    __doc__ = u"""
>>> expected = bytes(list(range(0x10,0xFF,0x11)) + [0xFF])

>>> s = test_assign()
>>> assert s == expected, repr(s)

>>> s = test_array()
>>> assert s == expected, repr(s)
"""

def test_assign():
    cdef char[17] s

    s[ 0] = c'\x10'
    s[ 1] = c'\x21'
    s[ 2] = c'\x32'
    s[ 3] = c'\x43'
    s[ 4] = c'\x54'
    s[ 5] = c'\x65'
    s[ 6] = c'\x76'
    s[ 7] = c'\x87'
    s[ 8] = c'\x98'
    s[ 9] = c'\xA9'
    s[10] = c'\xBA'
    s[11] = c'\xCB'
    s[12] = c'\xDC'
    s[13] = c'\xED'
    s[14] = c'\xFE'
    s[15] = c'\xFF'

    s[16] = c'\x00'

    return s

def test_array():
    cdef char* s = [
        c'\x10',
        c'\x21',
        c'\x32',
        c'\x43',
        c'\x54',
        c'\x65',
        c'\x76',
        c'\x87',
        c'\x98',
        c'\xA9',
        c'\xBA',
        c'\xCB',
        c'\xDC',
        c'\xED',
        c'\xFE',
        c'\xFF',
        c'\x00',
        ]

    return s
Cython-0.26.1/tests/run/extcmethod.pyx0000664000175000017500000000246112542002467020511 0ustar  stefanstefan00000000000000# mode: run


cdef class Spam:

    cdef int tons

    cdef void add_tons(self, int x):
        self.tons += x

    cdef void eat(self):
        self.tons = 0

    def lift(self):
        print self.tons


cdef class SubSpam(Spam):

    cdef void add_tons(self, int x):
        self.tons += 2 * x
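
# cdef methods are dispatched through the extension type's vtab, so the
# SubSpam override above is used even when the call goes through a variable
# or argument that is statically typed as the base class (see describe_dish).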


def test_spam():
    """
    >>> test_spam()
    5
    0
    20
    5
    """
    cdef Spam s
    cdef SubSpam ss
    s = Spam()
    s.eat()
    s.add_tons(5)
    s.lift()

    ss = SubSpam()
    ss.eat()
    ss.lift()

    ss.add_tons(10)
    ss.lift()

    s.lift()


cdef class SpamDish:
    cdef int spam

    cdef void describe(self):
        print "This dish contains", self.spam, "tons of spam."


cdef class FancySpamDish(SpamDish):
    cdef int lettuce

    cdef void describe(self):
        print "This dish contains", self.spam, "tons of spam",
        print "and", self.lettuce, "milligrams of lettuce."


cdef void describe_dish(SpamDish d):
    d.describe()


def test_spam_dish():
    """
    >>> test_spam_dish()
    This dish contains 42 tons of spam.
    This dish contains 88 tons of spam and 5 milligrams of lettuce.
    """
    cdef SpamDish s
    cdef FancySpamDish ss
    s = SpamDish()
    s.spam = 42
    ss = FancySpamDish()
    ss.spam = 88
    ss.lettuce = 5
    describe_dish(s)
    describe_dish(ss)
Cython-0.26.1/tests/run/mangle_c_keywords.pyx0000664000175000017500000000040412542002467022034 0ustar  stefanstefan00000000000000# Tests that illegal member and vtab entries are mangled.
cdef class A:
    """
    >>> a = A(100)
    >>> a.case()
    100
    """
    def __init__(self, value):
        self.switch = value
    cdef int switch
    cpdef case(self):
        return self.switch
Cython-0.26.1/tests/run/exttype.pyx0000664000175000017500000000142212542002467020043 0ustar  stefanstefan00000000000000
cdef gobble(a, b):
    print a, b

cdef class Spam:
    """
    >>> s = Spam(12)
    >>> s.eat()
    12 42
    """
    cdef eggs
    cdef int ham

    def __cinit__(self, eggs):
        self.eggs = eggs
        self.ham = 42

    def __dealloc__(self):
        self.ham = 0

    def eat(self):
        gobble(self.eggs, self.ham)

def f(Spam spam):
    """
    >>> s = Spam(12)
    >>> f(s)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: '...Spam' object has no attribute 'foo'
    >>> s.eat()
    12 42
    >>> class Spam2(Spam):
    ...     foo = 1
    >>> s = Spam2(12)
    >>> s.eat()
    12 42
    >>> f(s)
    >>> s.eat()
    12 42
    """
    x = spam.eggs
    y = spam.ham
    z = spam.foo
    spam.eggs = x
    spam.ham = y
    spam.foo = z
Cython-0.26.1/tests/run/cpp_assignment_overload.srctree0000664000175000017500000000250612574327400024103 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp

"""
PYTHON setup.py build_ext --inplace
PYTHON -c "from assignment_overload import test; test()"
"""

######## setup.py ########

from distutils.core import setup
from Cython.Build import cythonize
setup(ext_modules=cythonize("*.pyx", language='c++'))


######## assign.cpp ########

class wrapped_int {
public:
  long long val;
  wrapped_int() { val = 0; }
  wrapped_int(long long val) { this->val = val; }
  wrapped_int &operator=(const wrapped_int &other) {
    this->val = other.val;
    return *this;
  }
  wrapped_int &operator=(const long long val) {
    this->val = val;
    return *this;
  }
};


######## assign.pxd ########

cdef extern from "assign.cpp" nogil:
    cppclass wrapped_int:
        long long val
        wrapped_int()
        wrapped_int(long long val)
        wrapped_int& operator=(const wrapped_int &other)
        wrapped_int& operator=(const long long &other)


######## assignment_overload.pyx ########

from assign cimport wrapped_int

def test():
    cdef wrapped_int a = wrapped_int(2)
    cdef wrapped_int b = wrapped_int(3)
    cdef long long c = 4

    assert &a != &b
    assert a.val != b.val

    a = b
    assert &a != &b
    assert a.val == b.val
    a = c
    assert a.val == c

    a, b, c = 2, 3, 4
    a = b = c
    assert &a != &b
    assert a.val == b.val
    assert b.val == c
Cython-0.26.1/tests/run/dict_get.pyx0000664000175000017500000000360612542002467020131 0ustar  stefanstefan00000000000000
cimport cython
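
# The tree-path assertions below check that d.get(...) is compiled into a
# direct C-level dict lookup (PythonCapiCallNode) and that no Python-level
# 'get' attribute lookup (AttributeNode) is left in the generated code.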

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def get(dict d, key):
    """
    >>> d = { 1: 10 }
    >>> d.get(1)
    10
    >>> get(d, 1)
    10

    >>> d.get(2) is None
    True
    >>> get(d, 2) is None
    True

    >>> d.get((1,2)) is None
    True
    >>> get(d, (1,2)) is None
    True

    >>> class Unhashable:
    ...    def __hash__(self):
    ...        raise ValueError

    >>> d.get(Unhashable())
    Traceback (most recent call last):
    ValueError
    >>> get(d, Unhashable())
    Traceback (most recent call last):
    ValueError

    >>> None.get(1)
    Traceback (most recent call last):
    ...
    AttributeError: 'NoneType' object has no attribute 'get'
    >>> get(None, 1)
    Traceback (most recent call last):
    ...
    AttributeError: 'NoneType' object has no attribute 'get'
    """
    return d.get(key)


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def get_default(dict d, key, default):
    """
    >>> d = { 1: 10 }

    >>> d.get(1, 2)
    10
    >>> get_default(d, 1, 2)
    10

    >>> d.get(2, 2)
    2
    >>> get_default(d, 2, 2)
    2

    >>> d.get((1,2), 2)
    2
    >>> get_default(d, (1,2), 2)
    2

    >>> class Unhashable:
    ...    def __hash__(self):
    ...        raise ValueError

    >>> d.get(Unhashable(), 2)
    Traceback (most recent call last):
    ValueError
    >>> get_default(d, Unhashable(), 2)
    Traceback (most recent call last):
    ValueError
    """
    return d.get(key, default)


@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def get_in_condition(dict d, key, expected_result):
    """
    >>> d = dict(a=1, b=2)
    >>> get_in_condition(d, 'a', 1)
    True
    """
    return d.get(key) is expected_result or d.get(key) == expected_result
Cython-0.26.1/tests/run/cdef_cpdef_override_GH543.srctree0000664000175000017500000000170613023021033023731 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON test.py

######## setup.py ########
from Cython.Build import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## test.py ########
from base import A, B
from derived import C

assert B().foo() == 'B.foo'
assert C().foo() == 'B.foo'

assert A().foo1() == 'A.foo'
assert B().foo1() == 'B.foo'
assert C().foo1() == 'B.foo'

assert B().foo2() == 'B.foo'
assert C().foo2() == 'B.foo'

assert C().bar() == 'C.bar'

######## base.pxd ########
cdef class A(object):
    cdef foo(self)

cdef class B(A):
    cpdef foo(self)

######## base.pyx ########
cdef class A(object):
    cdef foo(self):
        return "A.foo"

    def foo1(self):
        return self.foo()

cdef class B(A):
    cpdef foo(self):
        return "B.foo"

    def foo2(self):
        return self.foo()

######## derived.pyx ########
from base cimport B

cdef class C(B):
    cpdef bar(self):
        return "C.bar"
Cython-0.26.1/tests/run/builtin_globals.py0000664000175000017500000000033512542002467021324 0ustar  stefanstefan00000000000000# mode: run
# tag: allow_unknown_names

assert "NEW" not in globals()

globals().update(NEW=True)

assert "NEW" in globals()


def default_args(value=NEW):
    """
    >>> default_args()
    True
    """
    return value
Cython-0.26.1/tests/run/hasattr.pyx0000664000175000017500000000156213143605603020013 0ustar  stefanstefan00000000000000class Foo:
    @property
    def foo(self):
        return None
    @property
    def bar(self):
        raise AttributeError
    @property
    def baz(self):
        return int(1)/int(0)


unicode_foo = u"foo"


def wrap_hasattr(obj, name):
    """
    >>> wrap_hasattr(None, "abc")
    False
    >>> wrap_hasattr(list, "append")
    True
    >>> wrap_hasattr(Foo(), "foo")
    True
    >>> wrap_hasattr(Foo(), unicode_foo)
    True
    >>> wrap_hasattr(Foo(), "spam")
    False
    >>> wrap_hasattr(Foo(), "bar")
    False
    >>> Foo().baz   #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    ZeroDivisionError: ...
    >>> wrap_hasattr(Foo(), "baz")
    False
    >>> hasattr(Foo(), None)   #doctest: +ELLIPSIS
    Traceback (most recent call last):
       ...
    TypeError: hasattr(): attribute name must be string
    """
    return hasattr(obj, name)
Cython-0.26.1/tests/run/exceptions_nogil.pyx0000664000175000017500000001201412542002467021711 0ustar  stefanstefan00000000000000cdef void foo(int i) except * with gil:
    if i != 0: raise ValueError

cdef int bar(int i) except? -1 with gil:
    if i != 0: raise ValueError
    return 0

cdef int spam(int i) except? -1 with gil:
    if i != 0: raise TypeError
    return -1
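
# "except *" makes every caller check for a raised exception after the call;
# "except? -1" only triggers that check when the function returns -1 (the '?'
# marks -1 as a value that may also occur without an error); "with gil"
# re-acquires the GIL inside the function so it can raise from nogil blocks.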

def test_foo():
    """
    >>> test_foo()
    """
    #
    foo(0)
    foo(0)
    with nogil:
        foo(0)
        foo(0)
    #
    try:
        with nogil:
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
        with nogil:
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
        with nogil:
            foo(1)
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
            foo(0)
    finally:
        pass
    #
    try:
        with nogil:
            foo(0)
            foo(1)
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
        try:
            with nogil:
                foo(1)
        except:
            with nogil:
                foo(1)
        finally:
            with nogil:
                foo(0)
            pass
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #
    try:
        with nogil:
            foo(0)
        try:
            with nogil:
                foo(1)
        except:
            with nogil:
                foo(1)
        finally:
            with nogil:
                foo(1)
            pass
    except:
        with nogil:
            foo(0)
    finally:
        with nogil:
            foo(0)
        pass
    #

def test_bar():
    """
    >>> test_bar()
    """
    #
    bar(0)
    bar(0)
    with nogil:
        bar(0)
        bar(0)
    #
    try:
        with nogil:
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
        with nogil:
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
        with nogil:
            bar(1)
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
            bar(0)
    finally:
        pass
    #
    try:
        with nogil:
            bar(0)
            bar(1)
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
        try:
            with nogil:
                bar(1)
        except ValueError:
            with nogil:
                bar(1)
        finally:
            with nogil:
                bar(0)
            pass
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #
    try:
        with nogil:
            bar(0)
        try:
            with nogil:
                bar(1)
        except ValueError:
            with nogil:
                bar(1)
        finally:
            with nogil:
                bar(1)
            pass
    except ValueError:
        with nogil:
            bar(0)
    finally:
        with nogil:
            bar(0)
        pass
    #

def test_spam():
    """
    >>> test_spam()
    """
    #
    spam(0)
    spam(0)
    with nogil:
        spam(0)
        spam(0)
    #
    try:
        with nogil:
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
        with nogil:
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
        with nogil:
            spam(1)
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
            spam(0)
    finally:
        pass
    #
    try:
        with nogil:
            spam(0)
            spam(1)
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
        try:
            with nogil:
                spam(1)
        except TypeError:
            with nogil:
                spam(1)
        finally:
            with nogil:
                spam(0)
            pass
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
    try:
        with nogil:
            spam(0)
        try:
            with nogil:
                spam(1)
        except TypeError:
            with nogil:
                spam(1)
        finally:
            with nogil:
                spam(1)
            pass
    except TypeError:
        with nogil:
            spam(0)
    finally:
        with nogil:
            spam(0)
        pass
    #
Cython-0.26.1/tests/run/generators_pep479.pyx0000664000175000017500000000601712542002467021627 0ustar  stefanstefan00000000000000# mode: run
# tag: generators, pep479

from __future__ import generator_stop
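
# Under PEP 479 (enabled here by the generator_stop __future__ import), a
# StopIteration that escapes a generator body no longer ends the iteration
# silently; it is turned into a RuntimeError instead, e.g.:
#
#     def g(it):
#         yield next(it)   # StopIteration from next() becomes RuntimeError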

import sys
if sys.version_info[0] >= 3:
    # additionally test exception chaining
    __doc__ = u"""
>>> g = test_raise_StopIteration_value()
>>> next(g)
1
>>> try: next(g)
... except RuntimeError as exc:
...     print(type(exc.__context__) is StopIteration or type(exc.__context__), exc.__context__)
... else:
...     print("NOT RAISED!")
True huhu
"""


def test_raise_StopIteration():
    """
    >>> g = test_raise_StopIteration()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration
    """
    yield 1
    raise StopIteration


def test_raise_StopIteration_value():
    """
    >>> g = test_raise_StopIteration_value()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration
    """
    yield 1
    raise StopIteration('huhu')


def test_return():
    """
    >>> g = test_return()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
    StopIteration
    """
    yield 1
    return


def test_return_value():
    """
    >>> g = test_return_value()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
    StopIteration: 2
    """
    yield 1
    return 2


def test_propagate_StopIteration(it):
    """
    >>> results = []
    >>> for x in test_propagate_StopIteration(iter([])):
    ...     results.append(x)
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration
    >>> results
    []

    >>> for x in test_propagate_StopIteration(iter([1, 2])):
    ...     results.append(x)
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration
    >>> results
    [1, 2]
    """
    while True:
       yield next(it)


def test_catch_StopIteration(it):
    """
    >>> for x in test_catch_StopIteration(iter([])):
    ...     print(x)

    >>> for x in test_catch_StopIteration(iter([1, 2])):
    ...     print(x)
    1
    2
    """
    try:
        while True:
           yield next(it)
    except StopIteration:
        pass
    else:
        print("NOT RAISED!")


def test_yield_from(it):
    """
    >>> for x in test_yield_from(iter([])):
    ...     print(x)

    >>> for x in test_yield_from(iter([1, 2])):
    ...     print(x)
    1
    2
    """
    yield from it


def test_yield_from_gen():
    """
    >>> for x in test_yield_from_gen():
    ...     print(x)
    1
    RETURN: 2
    """
    x = yield from test_return_value()
    print("RETURN: %s" % x)


def test_genexpr(it):
    """
    >>> list(test_genexpr(iter([])))
    []
    >>> list(test_genexpr(iter([1, 2])))
    [1]

    >>> list(test_genexpr(iter([1])))
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration

    >>> list(test_genexpr(iter([1, 2, 3])))
    Traceback (most recent call last):
    RuntimeError: generator raised StopIteration

    >>> list(test_genexpr(iter([1, 2])))
    [1]
    """
    return (x for x in it if next(it))
Cython-0.26.1/tests/run/moduletryexcept.pyx0000664000175000017500000000345312542002467021604 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> a
2
>>> b
3
>>> exc[0].__class__.__name__
'AttributeError'
>>> exc[1].__class__.__name__
'KeyError'
>>> exc[2].__class__.__name__
'IndexError'
>>> exc[3].__class__.__name__
'ValueError'
>>> exc[3] is val
True

>>> except_as_deletes   # Py2 behaviour
False
>>> no_match_does_not_touch_target
True
"""

a = 0

try:
    raise KeyError
except AttributeError:
    a = 1
except KeyError:
    a = 2
except:
    a = 3

b = 0

try:
    raise IndexError
except AttributeError:
    b = 1
except KeyError:
    b = 2
except:
    b = 3

exc = [None]*4

try:
    raise AttributeError
except AttributeError as e:
    exc[0] = e
except KeyError       as e:
    exc[0] = e
except IndexError     as e:
    exc[0] = e
except:
    exc[0] = 'SOMETHING ELSE'

e = None
try:
    raise KeyError
except AttributeError as e:
    exc[1] = e
except KeyError       as e:
    exc[1] = e
except IndexError     as e:
    exc[1] = e
except:
    exc[1] = 'SOMETHING ELSE'

try:
    e
except NameError:
    except_as_deletes = True
else:
    except_as_deletes = False

e = 123
try:
    raise TypeError
except NameError as e:
    pass
except TypeError:
    pass
no_match_does_not_touch_target = (e == 123)

try:
    raise IndexError
except AttributeError as e:
    exc[2] = e
except KeyError       as e:
    exc[2] = e
except IndexError     as e:
    exc[2] = e
except:
    exc[2] = 'SOMETHING ELSE'

val = None
try:
    try:
        try:
            raise ValueError
        except AttributeError as e:
            exc[3] = e
        except KeyError       as e:
            exc[3] = e
        except IndexError     as e:
            exc[3] = e
        except:
            raise
    except (AttributeError,
            KeyError,
            IndexError,
            ValueError) as e:
        val = e
        raise e
except Exception as e:
    exc[3] = e
Cython-0.26.1/tests/run/final_method_T586.pyx0000664000175000017500000000323212542002467021521 0ustar  stefanstefan00000000000000# mode: run
# ticket: 586

cimport cython
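
# Declaring a method or class @cython.final tells Cython that it cannot be
# overridden, so calls can be generated as direct C calls (marked
# is_final_cmethod) without the usual cpdef override check; the tree-path
# assertions below verify exactly that.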

@cython.final
cdef class FinalType(object):
    """
    >>> obj = FinalType()
    >>> obj.test_cdef()
    >>> obj.test_cpdef()
    """

    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    cdef cdef_method(self):
        pass

    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    @cython.test_fail_if_path_exists("//CFuncDefNode//OverrideCheckNode")
    cpdef cpdef_method(self):
        pass

    @cython.test_assert_path_exists("//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cdef(self):
        self.cdef_method()

    @cython.test_assert_path_exists("//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cpdef(self):
        self.cpdef_method()


def test_external_call():
    """
    >>> test_external_call()
    """
    f = FinalType()
    return f.cpdef_method()

def test_external_call_in_temp():
    """
    >>> test_external_call_in_temp()
    """
    return FinalType().cpdef_method()


cdef class BaseTypeWithFinalMethods(object):
    """
    >>> obj = BaseTypeWithFinalMethods()
    >>> obj.test_cdef()
    """

    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    @cython.final
    cdef cdef_method(self):
        pass

    @cython.test_assert_path_exists("//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cdef(self):
        self.cdef_method()


cdef class SubType(BaseTypeWithFinalMethods):
    """
    >>> obj = SubType()
    >>> obj.test_cdef()
    """
    @cython.test_assert_path_exists("//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cdef(self):
        self.cdef_method()
Cython-0.26.1/tests/run/simpcall.pyx0000664000175000017500000000132613023021033020131 0ustar  stefanstefan00000000000000# mode: test

def f(x, y):
    x = y


cdef void g(int i, float f, char *p):
    f = i


cdef h(int i, obj):
    i = obj


def z(a, b, c):
    """
    >>> z(1,9.2, b'test')
    """
    f(a, b)
    f(a, b,)
    g(1, 2.0, "spam")
    g(a, b, c)


def fail0(a, b):
    """
    >>> fail0(1,2)
    Traceback (most recent call last):
    TypeError: f() takes exactly 2 positional arguments (0 given)
    """
    f()


def fail1(a, b):
    """
    >>> fail1(1,2)
    Traceback (most recent call last):
    TypeError: f() takes exactly 2 positional arguments (1 given)
    """
    f(a)


def failtype():
    """
    >>> failtype()
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    h(42, "eggs")
Cython-0.26.1/tests/run/r_lepage_3.pyx0000664000175000017500000000034112542002467020340 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> g = Grail()
>>> g("spam", 42, ["tomato", "sandwich"])
Grail called with: spam 42 ['tomato', 'sandwich']
"""

cdef class Grail:

    def __call__(self, x, y, z):
        print u"Grail called with:", x, y, z
Cython-0.26.1/tests/run/for_from_float_T254.pyx0000664000175000017500000000212012542002467022051 0ustar  stefanstefan00000000000000# ticket: 254

def double_target(a, b):
    """
    >>> double_target(0, 4)
    at 0.0
    at 1.0
    at 2.0
    at 3.0
    4.0
    """
    cdef double x
    for x from a <= x < b:
        print u"at", x
    return x

def double_step(a, b, dx):
    """
    >>> double_step(0, 2, .5)
    at 0.0
    at 0.5
    at 1.0
    at 1.5
    2.0
    """
    cdef double x
    for x from a <= x < b by dx:
        print u"at", x
    return x

def double_step_typed(a, b, double dx):
    """
    >>> double_step_typed(0, 2, .5)
    at 0.0
    at 0.5
    at 1.0
    at 1.5
    2.0
    """
    cdef double x
    for x from a <= x < b by dx:
        print u"at", x
    return x

def double_step_py_target(a, b, double dx):
    """
    >>> double_step_py_target(0, 2, .5)
    at 0.0
    at 0.5
    at 1.0
    at 1.5
    2.0
    """
    cdef object x
    for x from a <= x < b by dx:
        print u"at", x
    return x

def int_step_py_target(a, b, int dx):
    """
    >>> int_step_py_target(0, 2, 1)
    at 0
    at 1
    2
    """
    cdef object x
    for x from a <= x < b by dx:
        print u"at", x
    return x
Cython-0.26.1/tests/run/directive_locals_in_pxd.pxd0000664000175000017500000000026512542002467023174 0ustar  stefanstefan00000000000000cimport cython

@cython.locals(egg=double)
cdef foo(egg)

@cython.locals(egg=cython.double)
cdef foo_defval(egg=*)

@cython.locals(egg=cython.bint, v=cython.int)
cpdef cpfoo(egg=*)
Cython-0.26.1/tests/run/closure_inlining.pyx0000664000175000017500000000754512542002467021720 0ustar  stefanstefan00000000000000# cython: optimize.inline_defnode_calls=True
# mode: run
cimport cython

@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists('//InlinedDefNodeCallNode')
def simple_noargs():
    """
    >>> simple_noargs()
    123
    """
    def inner():
        return 123
    return inner()


@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists('//InlinedDefNodeCallNode')
def test_coerce(a, int b):
    """
    >>> test_coerce(2, 2)
    4
    """
    def inner(int a, b):
        return a * b
    return inner(a, b)


cdef class Foo(object):
    def __repr__(self):
        return '<Foo>'


@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists('//InlinedDefNodeCallNode')
def test_func_signature(a):
    """
    >>> test_func_signature(Foo())
    <Foo>
    >>> test_func_signature(123)
    Traceback (most recent call last):
    TypeError: Cannot convert int to closure_inlining.Foo
    """

    def inner(Foo a):
        return a
    return inner(a)

@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists('//InlinedDefNodeCallNode')
def test_func_signature2(a, b):
    """
    >>> test_func_signature2(Foo(), 123)
    (<Foo>, 123)
    >>> test_func_signature2(321, 123)
    Traceback (most recent call last):
    TypeError: Cannot convert int to closure_inlining.Foo
    """

    def inner(Foo a, b):
        return a, b
    return inner(a, b)

# Starred args and default values are not yet supported for inlining
@cython.test_assert_path_exists('//SimpleCallNode')
def test_defaults(a, b):
    """
    >>> test_defaults(1, 2)
    (1, 2, 123)
    """
    def inner(a, b=b, c=123):
        return a, b, c
    return inner(a)

@cython.test_assert_path_exists('//SimpleCallNode')
def test_kwonly_args(a, b):
    """
    >>> test_kwonly_args(1, 2)
    (1, 2, 123)
    """
    def inner(a, b=b, *, c=123):
        return a, b, c
    return inner(a)

@cython.test_assert_path_exists('//SimpleCallNode')
def test_kwonly_args_missing(a, b):
    """
    >>> test_kwonly_args_missing(1, 2)
    Traceback (most recent call last):
    TypeError: inner() needs keyword-only argument c
    """
    def inner(a, b=b, *, c):
        return a, b, c
    return inner(a)

@cython.test_assert_path_exists('//SimpleCallNode')
def test_starred(a):
    """
    >>> test_starred(123)
    (123, (), {})
    """
    def inner(a, *args, **kwargs):
        return a, args, kwargs
    return inner(a)


def test_global_calls_still_work():
    """
    >>> global_call_result
    123
    """
    return 123

global_call_result = test_global_calls_still_work()


@cython.test_fail_if_path_exists(
    '//InlinedDefNodeCallNode//SimpleCallNode')
@cython.test_assert_path_exists(
    '//InlinedDefNodeCallNode',
    '//InlinedDefNodeCallNode[@function_name.name = "call"]',
    '//InlinedDefNodeCallNode//InlinedDefNodeCallNode')
def test_sideeffect_call_order():
    """
    >>> test_sideeffect_call_order()
    [2, 4, 5]
    """
    L = []
    def sideeffect(x):
        L.append(x)
        return x
    def call(x1, x2, x3, x4, x5):
        pass
    call(1, sideeffect(2), 3, sideeffect(4), sideeffect(5))
    return L


def test_redef(redefine):
    """
    >>> test_redef(False)
    1
    >>> test_redef(True)
    2
    """
    def inner():
        return 1
    def inner2():
        return 2
    def redef():
        nonlocal inner
        inner = inner2
    if redefine:
        redef()
        assert inner == inner2
    else:
        assert inner != inner2
    return inner()


def test_with_statement():
    """
    >>> test_with_statement()
    enter
    running
    exit
    """
    def make_context_manager():
        class CM(object):
            def __enter__(self):
                print "enter"
            def __exit__(self, *args):
                print "exit"
        return CM()

    with make_context_manager():
        print "running"
Cython-0.26.1/tests/run/type_slots_int_long_T287.pyx0000664000175000017500000000147512542002467023173 0ustar  stefanstefan00000000000000# ticket: 287

__doc__ = u"""
>>> print( "%d" % Int() )
2
>>> print( "%d" % Long() )
3
>>> print( "%d" % IntLongA() )
2
>>> print( "%d" % IntLongB() )
2

"""


def getint(int i):
    """
    >>> getint( Int() )
    2
    >>> getint( Long() )
    3
    >>> getint( IntLongA() )
    2
    >>> getint( IntLongB() )
    2
    """
    return i

def getlong(long long i):
    """
    >>> getlong( Int() )
    2
    >>> getlong( Long() )
    3
    >>> getlong( IntLongA() )
    2
    >>> getlong( IntLongB() )
    2
    """
    return i


cdef class Int:
   def __int__(self):
       return 2

cdef class Long:
   def __long__(self):
       return 3

cdef class IntLongA:
   def __int__(self):
       return 2
   def __long__(self):
       return 3

cdef class IntLongB:
   def __int__(self):
       return 2
   __long__ = __int__
Cython-0.26.1/tests/run/charptr_comparison_T582.pyx0000664000175000017500000000761212542002467022767 0ustar  stefanstefan00000000000000# ticket: 582

cimport cython

################################################################################
## plain char*

@cython.test_assert_path_exists('//SingleAssignmentNode')
#@cython.test_fail_if_path_exists('//SingleAssignmentNode//CoerceFromPyTypeNode')
def charptr_equals_literal(char* s):
    """
    >>> charptr_equals_literal('abc'.encode('ASCII'))
    True
    >>> charptr_equals_literal('aabc'.encode('ASCII'))
    False
    >>> charptr_equals_literal('abcx'.encode('ASCII'))
    False
    >>> charptr_equals_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s == b"abc")
    return result

def charptr_gt_literal(char* s):
    """
    >>> charptr_gt_literal('abc'.encode('ASCII'))
    False
    >>> charptr_gt_literal('aabc'.encode('ASCII'))
    False
    >>> charptr_gt_literal('abcx'.encode('ASCII'))
    True
    >>> charptr_gt_literal('bcx'.encode('ASCII'))
    True
    """
    cdef bint result = (s > b"abc")
    return result

def charptr_lt_literal(char* s):
    """
    >>> charptr_lt_literal('abc'.encode('ASCII'))
    False
    >>> charptr_lt_literal('aabc'.encode('ASCII'))
    True
    >>> charptr_lt_literal('abcx'.encode('ASCII'))
    False
    >>> charptr_lt_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s < b"abc")
    return result

def charptr_ge_literal(char* s):
    """
    >>> charptr_ge_literal('abc'.encode('ASCII'))
    True
    >>> charptr_ge_literal('aabc'.encode('ASCII'))
    False
    >>> charptr_ge_literal('abcx'.encode('ASCII'))
    True
    >>> charptr_ge_literal('bcx'.encode('ASCII'))
    True
    """
    cdef bint result = (s >= b"abc")
    return result

def charptr_le_literal(char* s):
    """
    >>> charptr_le_literal('abc'.encode('ASCII'))
    True
    >>> charptr_le_literal('aabc'.encode('ASCII'))
    True
    >>> charptr_le_literal('abcx'.encode('ASCII'))
    False
    >>> charptr_le_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s <= b"abc")
    return result


################################################################################
## slices

@cython.test_assert_path_exists('//SingleAssignmentNode')
#FIXME: optimise me!
#@cython.test_fail_if_path_exists('//SingleAssignmentNode//CoerceFromPyTypeNode')
def slice_equals_literal(char* s):
    """
    >>> slice_equals_literal('abc'.encode('ASCII'))
    True
    >>> slice_equals_literal('aabc'.encode('ASCII'))
    False
    >>> slice_equals_literal('abcx'.encode('ASCII'))
    True
    >>> slice_equals_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s[:3] == b"abc")
    return result

def slice_gt_literal(char* s):
    """
    >>> slice_gt_literal('abc'.encode('ASCII'))
    False
    >>> slice_gt_literal('aabc'.encode('ASCII'))
    False
    >>> slice_gt_literal('abcx'.encode('ASCII'))
    False
    >>> slice_gt_literal('bcx'.encode('ASCII'))
    True
    """
    cdef bint result = (s[:3] > b"abc")
    return result

def slice_lt_literal(char* s):
    """
    >>> slice_lt_literal('abc'.encode('ASCII'))
    False
    >>> slice_lt_literal('aabc'.encode('ASCII'))
    True
    >>> slice_lt_literal('abcx'.encode('ASCII'))
    False
    >>> slice_lt_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s[:3] < b"abc")
    return result

def slice_ge_literal(char* s):
    """
    >>> slice_ge_literal('abc'.encode('ASCII'))
    True
    >>> slice_ge_literal('aabc'.encode('ASCII'))
    False
    >>> slice_ge_literal('abcx'.encode('ASCII'))
    True
    >>> slice_ge_literal('bcx'.encode('ASCII'))
    True
    """
    cdef bint result = (s[:3] >= b"abc")
    return result

def slice_le_literal(char* s):
    """
    >>> slice_le_literal('abc'.encode('ASCII'))
    True
    >>> slice_le_literal('aabc'.encode('ASCII'))
    True
    >>> slice_le_literal('abcx'.encode('ASCII'))
    True
    >>> slice_le_literal('bcx'.encode('ASCII'))
    False
    """
    cdef bint result = (s[:3] <= b"abc")
    return result
Cython-0.26.1/tests/run/builtin_subtype_methods_T653.pyx0000664000175000017500000001030612542002467024027 0ustar  stefanstefan00000000000000#cython: language_level=2
# mode: run
# ticket: 653

cimport cython

cdef class MyList(list):
    def test_append(self, x):
        """
        >>> l = MyList()
        >>> type(l) is MyList
        True
        >>> list(l)
        []
        >>> l.test_append(5)
        >>> list(l)
        [5]
        """
        self.append(x)

cdef class MyDict(dict):
    @cython.test_assert_path_exists("//ComprehensionNode//AttributeNode",
                                    "//ComprehensionNode//AttributeNode[@attribute='items']")
    @cython.test_fail_if_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDict(a=1, b=2).test_items()
        [('a', 1), ('b', 2)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDict(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l

@cython.final
cdef class MyDictFinal(dict):
    @cython.test_assert_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDictFinal(a=1, b=2).test_items()
        [('a', 1), ('b', 2)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDictFinal(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l

cdef class MyDict2(MyDict):
    @cython.test_assert_path_exists("//ComprehensionNode//AttributeNode",
                                    "//ComprehensionNode//AttributeNode[@attribute='items']")
    @cython.test_fail_if_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDict2(a=1, b=2).test_items()
        [('a', 1), ('b', 2)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDict2(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l

@cython.final
cdef class MyDict2Final(MyDict):
    @cython.test_assert_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDict2Final(a=1, b=2).test_items()
        [('a', 1), ('b', 2)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDict2Final(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l

@cython.final
cdef class MyDictOverride(dict):
    def items(self):
        return [(1,2), (3,4)]

    @cython.test_assert_path_exists("//ComprehensionNode//AttributeNode",
                                    "//ComprehensionNode//AttributeNode[@attribute='items']")
    @cython.test_fail_if_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDictOverride(a=1, b=2).test_items()
        [(1, 2), (3, 4)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDictOverride(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l

@cython.final
cdef class MyDictOverride2(MyDict):
    def items(self):
        return [(1,2), (3,4)]

    @cython.test_assert_path_exists("//ComprehensionNode//AttributeNode",
                                    "//ComprehensionNode//AttributeNode[@attribute='items']")
    @cython.test_fail_if_path_exists("//ComprehensionNode//CMethodSelfCloneNode")
    def test_items(self):
        """
        >>> MyDictOverride2(a=1, b=2).test_items()
        [(1, 2), (3, 4)]
        """
        l = [ (key, value) for key, value in self.items() ]
        l.sort()
        return l

    def test_values(self):
        """
        >>> MyDictOverride2(a=1, b=2).test_values()
        [1, 2]
        """
        l = [ v for v in self.values() ]
        l.sort()
        return l
Cython-0.26.1/tests/run/extinherit.pyx0000664000175000017500000000077312542002467020534 0ustar  stefanstefan00000000000000cdef class Parrot:
    cdef object name
    cdef int alive

cdef class Norwegian(Parrot):
    cdef object plumage_colour

def create():
    cdef Parrot p
    p = Norwegian()
    p.alive = 1
    return p

def rest(Norwegian polly):
    """
    >>> p = create()
    >>> rest(p)
    0
    """
    cdef Parrot fred
    cdef object spam
    spam = None

    fred = polly
    polly = fred
    polly = spam
    assert polly is None
    assert fred.alive

    spam = polly
    fred.alive = 0

    return fred.alive
Cython-0.26.1/tests/run/large_consts_T237.pyx0000664000175000017500000000067012542002467021547 0ustar  stefanstefan00000000000000# ticket: 237
#def add_large_c():
#    cdef unsigned long long val = 2**30 + 2**30
#    return val

def add_large():
    """
    >>> add_large() == 2147483647 + 2147483647
    True

    #>>> add_large_c() == 2147483647 + 2147483647
    #True
    """
    return 2147483647 + 2147483647

def add_large_pow():
    """
    >>> add_large_pow() == 2**31 + 2**31
    True
    >>> add_large_pow() == 2**32
    True
    """
    return 2**31 + 2**31
Cython-0.26.1/tests/run/closure_name_mangling_T537.pyx0000664000175000017500000000055612542002467023422 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# ticket: 537

__doc__ = u"""
>>> f1 = nested1()
>>> f2 = nested2()
>>> f1 == f2      # inner functions (f)
False
>>> f1() == f2()  # inner-inner functions (g)
False
"""

def nested1():
   def f():
      def g():
         pass
      return g
   return f

def nested2():
   def f():
      def g():
         pass
      return g
   return f
Cython-0.26.1/tests/run/yield_inside_lambda.py0000664000175000017500000000041212542002467022110 0ustar  stefanstefan00000000000000# mode: run
# tag: generators, lambda


def test_inside_lambda():
    """
    >>> obj = test_inside_lambda()()
    >>> next(obj)
    1
    >>> next(obj)
    2
    >>> try: next(obj)
    ... except StopIteration: pass
    """
    return lambda:((yield 1), (yield 2))
Cython-0.26.1/tests/run/pure_py.py0000664000175000017500000001472112542002467017642 0ustar  stefanstefan00000000000000import cython

is_compiled = cython.compiled

NULL = 5
_NULL = NULL


def test_sizeof():
    """
    >>> test_sizeof()
    True
    True
    True
    True
    True
    """
    x = cython.declare(cython.bint)
    print(cython.sizeof(x) == cython.sizeof(cython.bint))
    print(cython.sizeof(cython.char) <= cython.sizeof(cython.short) <= cython.sizeof(cython.int) <= cython.sizeof(cython.long) <= cython.sizeof(cython.longlong))
    print(cython.sizeof(cython.uint) == cython.sizeof(cython.int))
    print(cython.sizeof(cython.p_int) == cython.sizeof(cython.p_double))
    if cython.compiled:
        print(cython.sizeof(cython.char) < cython.sizeof(cython.longlong))
    else:
        print(cython.sizeof(cython.char) == 1)


def test_declare(n):
    """
    >>> test_declare(100)
    (100, 100)
    >>> test_declare(100.5)
    (100, 100)
    >>> test_declare(None) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ...
    """
    x = cython.declare(cython.int)
    y = cython.declare(cython.int, n)
    if cython.compiled:
        cython.declare(xx=cython.int, yy=cython.long)
        i = cython.sizeof(xx)
    ptr = cython.declare(cython.p_int, cython.address(y))
    return y, ptr[0]


@cython.locals(x=cython.double, n=cython.int)
def test_cast(x):
    """
    >>> test_cast(1.5)
    1
    """
    n = cython.cast(cython.int, x)
    return n


@cython.locals(x=cython.int, y=cython.p_int)
def test_address(x):
    """
    >>> test_address(39)
    39
    """
    y = cython.address(x)
    return y[0]


@cython.wraparound(False)
def test_wraparound(x):
    """
    >>> test_wraparound([1, 2, 3])
    [1, 2, 1]
    """
    with cython.wraparound(True):
        x[-1] = x[0]
    return x


@cython.boundscheck(False)
def test_boundscheck(x):
    """
    >>> test_boundscheck([1, 2, 3])
    3
    >>> try: test_boundscheck([1, 2])
    ... except IndexError: pass
    """
    with cython.boundscheck(True):
        return x[2]


## CURRENTLY BROKEN - FIXME!!
## Does this test make sense? Implicit conversion in pure Python??

## @cython.locals(x=cython.int)
## @cython.locals(y=cython.bint)
## def test_locals(x):
##     """
##     >>> test_locals(5)
##     True
##     """
##     y = x
##     return y


def test_with_nogil(nogil):
    """
    >>> raised = []
    >>> class nogil(object):
    ...     def __enter__(self):
    ...         pass
    ...     def __exit__(self, exc_class, exc, tb):
    ...         raised.append(exc)
    ...         return exc_class is None

    >>> test_with_nogil(nogil())
    WORKS
    True
    >>> raised
    [None]
    """
    result = False
    with nogil:
        print("WORKS")
        with cython.nogil:
            result = True
    return result

MyUnion = cython.union(n=cython.int, x=cython.double)
MyStruct = cython.struct(is_integral=cython.bint, data=MyUnion)
MyStruct2 = cython.typedef(MyStruct[2])
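
# cython.struct() and cython.union() build C struct/union types from keyword
# arguments, and indexing a type (MyStruct[2]) denotes a C array of two such
# structs; cython.typedef() just binds that array type to a new name.  The
# pure-Python shadow implementations of these helpers keep the declarations
# usable when this module runs uncompiled, which lets test_struct() below run
# in both modes.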

def test_struct(n, x):
    """
    >>> test_struct(389, 1.64493)
    (389, 1.64493)
    """
    a = cython.declare(MyStruct2)
    a[0] = MyStruct(is_integral=True, data=MyUnion(n=n))
    a[1] = MyStruct(is_integral=False, data={'x': x})
    return a[0].data.n, a[1].data.x

import cython as cy
from cython import declare, cast, locals, address, typedef, p_void, compiled
from cython import declare as my_declare, locals as my_locals, p_void as my_void_star, typedef as my_typedef, compiled as my_compiled

@my_locals(a=cython.p_void)
def test_imports():
    """
    >>> test_imports()
    (True, True)
    """
    a = cython.NULL
    b = declare(p_void, cython.NULL)
    c = my_declare(my_void_star, cython.NULL)
    d = cy.declare(cy.p_void, cython.NULL)

    return a == d, compiled == my_compiled

## CURRENTLY BROKEN - FIXME!!

# MyStruct3 = typedef(MyStruct[3])
# MyStruct4 = my_typedef(MyStruct[4])
# MyStruct5 = cy.typedef(MyStruct[5])

def test_declare_c_types(n):
    """
    >>> test_declare_c_types(0)
    >>> test_declare_c_types(1)
    >>> test_declare_c_types(2)
    """
    #
    b00 = cython.declare(cython.bint, 0)
    b01 = cython.declare(cython.bint, 1)
    b02 = cython.declare(cython.bint, 2)
    #
    i00 = cython.declare(cython.uchar, n)
    i01 = cython.declare(cython.char, n)
    i02 = cython.declare(cython.schar, n)
    i03 = cython.declare(cython.ushort, n)
    i04 = cython.declare(cython.short, n)
    i05 = cython.declare(cython.sshort, n)
    i06 = cython.declare(cython.uint, n)
    i07 = cython.declare(cython.int, n)
    i08 = cython.declare(cython.sint, n)
    i09 = cython.declare(cython.slong, n)
    i10 = cython.declare(cython.long, n)
    i11 = cython.declare(cython.ulong, n)
    i12 = cython.declare(cython.slonglong, n)
    i13 = cython.declare(cython.longlong, n)
    i14 = cython.declare(cython.ulonglong, n)

    i20 = cython.declare(cython.Py_ssize_t, n)
    i21 = cython.declare(cython.size_t, n)
    #
    f00 = cython.declare(cython.float, n)
    f01 = cython.declare(cython.double, n)
    f02 = cython.declare(cython.longdouble, n)
    #
    #z00 = cython.declare(cython.complex, n+1j)
    #z01 = cython.declare(cython.floatcomplex, n+1j)
    #z02 = cython.declare(cython.doublecomplex, n+1j)
    #z03 = cython.declare(cython.longdoublecomplex, n+1j)


@cython.ccall
@cython.returns(cython.double)
def c_call(x):
    """
    Test that a declared return type is honoured when compiled.

    >>> result, return_type = call_ccall(1)

    >>> (not is_compiled and 'double') or return_type
    'double'
    >>> (is_compiled and 'int') or return_type
    'int'

    >>> (not is_compiled and 1.0) or result
    1.0
    >>> (is_compiled and 1) or result
    1
    """
    return x


def call_ccall(x):
    ret = c_call(x)
    return ret, cython.typeof(ret)
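
# The doctests above use the pattern `(not is_compiled and X) or value`: when
# the module runs uncompiled, the left operand short-circuits to the literal X,
# and when compiled it evaluates to False so the actual runtime value is shown
# instead.  One doctest therefore checks both execution modes.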


@cython.cfunc
@cython.inline
@cython.returns(cython.double)
def cdef_inline(x):
    """
    >>> result, return_type = call_cdef_inline(1)
    >>> (not is_compiled and 'float') or type(return_type).__name__
    'float'
    >>> (not is_compiled and 'double') or return_type
    'double'
    >>> (is_compiled and 'int') or return_type
    'int'
    >>> result == 2.0  or  result
    True
    """
    return x + 1


def call_cdef_inline(x):
    ret = cdef_inline(x)
    return ret, cython.typeof(ret)


@cython.locals(counts=cython.int[10], digit=cython.int)
def count_digits_in_carray(digits):
    """
    >>> digits = '37692837651902834128342341'
    >>> ''.join(sorted(digits))
    '01112222333334445667788899'
    >>> count_digits_in_carray(map(int, digits))
    [1, 3, 4, 5, 3, 1, 2, 2, 3, 2]
    """
    counts = [0] * 10
    for digit in digits:
        assert 0 <= digit <= 9
        counts[digit] += 1
    return counts
Cython-0.26.1/tests/run/if_const.pyx0000664000175000017500000001015212542002467020145 0ustar  stefanstefan00000000000000
cimport cython

DEF INT_VAL = 1

def _not_constant_but_False():
    return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def int_bool_result():
    """
    >>> int_bool_result()
    True
    """
    if 5:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//IfStatNode")
def constant_if_elif_else():
    """
    >>> constant_if_elif_else()
    True
    """
    if 0:
        return False
    elif 5:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrintStatNode")
@cython.test_assert_path_exists("//IfStatNode",
                                "//IfClauseNode")
def non_constant_if_elif_else1():
    """
    >>> non_constant_if_elif_else1()
    True
    """
    if _not_constant_but_False():
        return False
    elif 5:
        return True
    else:
        print(False)

@cython.test_fail_if_path_exists("//PrintStatNode")
@cython.test_assert_path_exists("//IfStatNode",
                                "//IfClauseNode")
def non_constant_if_elif_else2():
    """
    >>> non_constant_if_elif_else2()
    True
    """
    if _not_constant_but_False():
        return False
    elif 0:
        print(False)
    else:
        return True

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_not_compare_true():
    """
    >>> if_not_compare_true()
    False
    """
    if not 0 == 0:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_true():
    """
    >>> if_compare_true()
    True
    """
    if 0 == 0:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_false():
    """
    >>> if_compare_false()
    False
    """
    if 0 == 1:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_or_true():
    """
    >>> if_compare_or_true()
    True
    """
    if 0 == 1 or 1 == 1:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_or_false():
    """
    >>> if_compare_or_false()
    False
    """
    if 0 == 1 or 1 == 0:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_and_true():
    """
    >>> if_compare_and_true()
    True
    """
    if 0 == 0 and 1 == 1:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_and_false():
    """
    >>> if_compare_and_false()
    False
    """
    if 1 == 1 and 1 == 0:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def if_compare_cascaded():
    """
    >>> if_compare_cascaded()
    True
    """
    if 0 < 1 < 2 < 3:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//CoerceToBooleanNode",
                                 "//ListNode",
                                 "//IfStatNode")
def list_bool_result_true():
    """
    >>> list_bool_result_true()
    True
    """
    if [1,2,3]:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//CoerceToBooleanNode",
                                 "//ListNode",
                                 "//IfStatNode")
def list_bool_result_false():
    """
    >>> list_bool_result_false()
    False
    """
    if []:
        return True
    else:
        return False

@cython.test_fail_if_path_exists("//PrimaryCmpNode",
                                 "//IfStatNode")
def compile_time_DEF_if():
    """
    >>> compile_time_DEF_if()
    True
    """
    if INT_VAL != 0:
        return True
    else:
        return False
Cython-0.26.1/tests/run/py_hash_t.pyx0000664000175000017500000000064212542002467020322 0ustar  stefanstefan00000000000000
cimport cython


def assign_py_hash_t(x):
    """
    >>> assign_py_hash_t(12)
    12
    >>> assign_py_hash_t(-12)
    -12
    """
    cdef Py_hash_t h = x
    return h


def infer_hash_type(x):
    """
    >>> infer_hash_type(123)
    'Py_hash_t'
    """
    h = hash(x)
    return cython.typeof(h)


def assign_to_name(x):
    """
    >>> assign_to_name(321)
    321
    """
    Py_hash_t = x
    return Py_hash_t
Cython-0.26.1/tests/run/ct_IF.pyx0000664000175000017500000000072512542002467017332 0ustar  stefanstefan00000000000000DEF NO = 0
DEF YES = 1

def f():
    """
    >>> f()
    1
    """
    cdef int i
    IF YES:
        i = 1
    ELIF NO:
        i = 2
    ELSE:
        i = 3
    return i
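
# DEF constants and IF/ELIF/ELSE blocks are evaluated by Cython at compile
# time, so only the selected branch ends up in the generated C code; the other
# branches are discarded before code generation.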

def g():
    """
    >>> g()
    2
    """
    cdef int i
    IF NO:
        i = 1
    ELIF YES:
        i = 2
    ELSE:
        i = 3
    return i

def h():
    """
    >>> h()
    3
    """
    cdef int i
    IF NO:
        i = 1
    ELIF NO:
        i = 2
    ELSE:
        i = 3
    return i
Cython-0.26.1/tests/run/dict_values_in_expression.pyx0000664000175000017500000000060612542002467023613 0ustar  stefanstefan00000000000000
def values_in_expression(**kwargs):
    """
    >>> sorted(values_in_expression(a=3, b=4))
    [1, 2, 3, 4]
    """
    return [ arg for arg in [1,2] + list(kwargs.values()) ]


cdef dict make_dict(d):
    return dict(d)

def values_of_expression(**kwargs):
    """
    >>> sorted(values_of_expression(a=3, b=4))
    [3, 4]
    """
    return [ arg for arg in make_dict(kwargs).values() ]
Cython-0.26.1/tests/run/arithmetic_analyse_types.pyx0000664000175000017500000000272612542002467023442 0ustar  stefanstefan00000000000000# ticket: 676
# tag: cpp

from cython cimport typeof

cdef extern from "arithmetic_analyse_types_helper.h":
    cdef struct short_return:
        char *msg
    cdef struct int_return:
        char *msg
    cdef struct longlong_return:
        char *msg
    cdef short_return f(short)
    cdef int_return f(int)
    cdef longlong_return f(long long)

def short_binop(short val):
    """
    Arithmetic in C is always done with at least int precision.
    
    >>> print(short_binop(3))
    int called
    """
    assert typeof(val + val) == "int", typeof(val + val)
    assert typeof(val - val) == "int", typeof(val - val)
    assert typeof(val & val) == "int", typeof(val & val)
    cdef int_return x = f(val + val)
    return x.msg.decode('ASCII')
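
# The "at least int precision" behaviour above is C's integer promotion rule:
# operands narrower than int are converted to int before arithmetic, so
# `val + val` on a C short has type int and picks the int overload of f().
# A minimal sketch of the C equivalent (assuming 16-bit short, 32-bit int):
#
#     short a = 3, b = 3;
#     int r = a + b;   /* both operands promoted to int before the addition */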

def short_unnop(short val):
    """
    Arithmetic in C is always done with at least int precision.
    
    >>> print(short_unnop(3))
    int called
    """
    cdef int_return x = f(-val)
    return x.msg.decode('ASCII')

def longlong_binop(long long val):
    """
    >>> print(longlong_binop(3))
    long long called
    """
    cdef longlong_return x = f(val * val)
    return x.msg.decode('ASCII')

def longlong_unnop(long long val):
    """
    >>> print(longlong_unnop(3))
    long long called
    """
    cdef longlong_return x = f(~val)
    return x.msg.decode('ASCII')


def test_bint(bint a):
    """
    >>> test_bint(True)
    """
    assert typeof(a + a) == "int", typeof(a + a)
    assert typeof(a & a) == "bint", typeof(a & a)
Cython-0.26.1/tests/run/typetest_T417.pyx0000664000175000017500000000462312542002467020747 0ustar  stefanstefan00000000000000# ticket: 417
#cython: autotestdict=True

cdef class Foo:
    cdef int i
    def __cinit__(self):
        self.i = 1

cdef class SubFoo(Foo):
    pass

cdef class Bar:
    pass

def foo1(arg):
    """
    >>> foo1(Foo())
    >>> foo1(SubFoo())
    >>> foo1(None)
    >>> foo1(123)
    >>> foo1(Bar())
    """
    cdef Foo val = <Foo>arg

def foo2(arg):
    """
    >>> foo2(Foo())
    >>> foo2(SubFoo())
    >>> foo2(None)
    >>> foo2(123)
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert int to typetest_T417.Foo
    >>> foo2(Bar())
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert typetest_T417.Bar to typetest_T417.Foo
    """
    cdef Foo val = arg

def foo3(arg):
    """
    >>> foo3(Foo())
    >>> foo3(SubFoo())
    >>> foo3(None)
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert NoneType to typetest_T417.Foo
    >>> foo3(123)
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert int to typetest_T417.Foo
    >>> foo2(Bar())
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert typetest_T417.Bar to typetest_T417.Foo
    """
    cdef Foo val = <Foo?>arg

def attribute_access(arg):
    """
    >>> attribute_access(Foo())
    >>> attribute_access(SubFoo())
    >>> attribute_access(None)
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert NoneType to typetest_T417.Foo
    >>> attribute_access(123)
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert int to typetest_T417.Foo
    >>> attribute_access(Bar())
    Traceback (most recent call last):
       ...
    TypeError: Cannot convert typetest_T417.Bar to typetest_T417.Foo
    """
    cdef int val = (<Foo?>arg).i


cdef int count = 0

cdef object getFoo():
     global count
     count += 1
     return Foo()

def test_getFoo():
    """
    >>> test_getFoo()
    1
    """
    cdef int old_count = count
    cdef Foo x = getFoo()
    return count - old_count

def test_getFooCast():
    """
    >>> test_getFooCast()
    1
    """
    cdef int old_count = count
    cdef Foo x = getFoo()
    return count - old_count

def test_builtin_typecheck_cast(maybe_list):
    """
    >>> test_builtin_typecheck_cast([])
    []
    >>> test_builtin_typecheck_cast({})
    Traceback (most recent call last):
       ...
    TypeError: Expected list, got dict
    """
    return <list?>maybe_list
Cython-0.26.1/tests/run/strfunction.pyx0000664000175000017500000000114412542002467020720 0ustar  stefanstefan00000000000000__doc__ = u"""
   >>> str('test')
   'test'
   >>> z
   'test'
"""

s = str
z = str('test')

def c(string):
    """
    >>> c('testing')
    'testing'
    """
    return str(string)

class subs(str):
    """
    >>> subs('testing a subtype')
    'testing a subtype'

    #   >>> csub('testing a subtype')
    #   'testing a subtype'
    #   >>> csubs('testing a subtype')
    #   'testing a subtype'
    """
    pass

def sub(string):
    """
    >>> sub('testing a subtype')
    'testing a subtype'
    """
    return subs(string)

#cdef class subs(str):
#    pass

#def csub(string):
#    return csubs(string)
Cython-0.26.1/tests/run/autotestdict_all.pyx0000664000175000017500000000627212542002467021715 0ustar  stefanstefan00000000000000# cython: autotestdict=True, autotestdict.all=True

"""
Tests the autotestdict compiler directive.

Both the module doctest and the individual tests are run; finally,
all_tests_run() is executed, which does the final validation.

>>> items = list(__test__.items())
>>> items.sort()
>>> for key, value in items:
...     print('%s ; %s' % (key, value))
MyCdefClass.cdef_method (line 79) ; >>> add_log("cdef class cmethod")
MyCdefClass.cpdef_method (line 76) ; >>> add_log("cpdef class method")
MyCdefClass.method (line 73) ; >>> add_log("cdef class method")
MyClass.method (line 62) ; >>> add_log("class method")
cdeffunc (line 26) ; >>> add_log("cdef")
doc_without_test (line 43) ; Some docs
mycpdeffunc (line 49) ; >>> add_log("cpdef")
myfunc (line 40) ; >>> add_log("def")
"""

import sys
log = []

cdef cdeffunc():
    """>>> add_log("cdef")"""
cdeffunc() # make sure it's being used

def all_tests_run():
    assert sorted(log) == sorted([u'cdef', u'cdef class', u'class', u'cdef class cmethod'] + (
        (1 if sys.version_info < (3, 4) else 2) * [u'cdef class method', u'class method', u'cpdef', u'cpdef class method', u'def'])), sorted(log)

def add_log(s):
    log.append(unicode(s))
    if len(log) == len(__test__) + (1 if sys.version_info < (3, 4) else 6):
        # Final per-function doctest executed
        all_tests_run()

def myfunc():
    """>>> add_log("def")"""

def doc_without_test():
    """Some docs"""

def nodocstring():
    pass

cpdef mycpdeffunc():
    """>>> add_log("cpdef")"""


class MyClass:
    """
    Needs no hack

    >>> add_log("class")
    >>> True
    True
    """

    def method(self):
        """>>> add_log("class method")"""

cdef class MyCdefClass:
    """
    Needs no hack

    >>> add_log("cdef class")
    >>> True
    True
    """
    def method(self):
        """>>> add_log("cdef class method")"""

    cpdef cpdef_method(self):
        """>>> add_log("cpdef class method")"""

    cdef cdef_method(self):
        """>>> add_log("cdef class cmethod")"""

    def __cinit__(self):
        """
        Should not be included, as it can't be looked up with getattr

        >>> True
        False
        """

    def __dealloc__(self):
        """
        Should not be included, as it can't be looked up with getattr

        >>> True
        False
        """

    def __richcmp__(self, other, int op):
        """
        Should not be included, as it can't be looked up with getattr in Py 2

        >>> True
        False
        """

    def __nonzero__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> True
        False
        """

    def __len__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> sys.version_info < (3, 4)
        False
        """

    def __contains__(self, value):
        """
        Should not be included, as it can't be looked up with getattr in Py 3.1

        >>> sys.version_info < (3, 4)
        False
        """

cdef class MyOtherCdefClass:
    """
    Needs no hack

    >>> True
    True
    """

    def __bool__(self):
        """
        Should not be included, as it can't be looked up with getattr in Py 2

        >>> True
        False
        """
Cython-0.26.1/tests/run/sizeof.pyx0000664000175000017500000000046712542002467017650 0ustar  stefanstefan00000000000000cdef struct Spam:
    char *grail

def f():
    """
    >>> f()
    """
    cdef int i, j, k
    cdef char *p
    i = sizeof(p)
    i = sizeof(j + k)
    i = sizeof(int)
    i = sizeof(long int)
    i = sizeof(void*)
    i = sizeof(Spam)
    i = sizeof(Spam*)
    i = sizeof(Spam[5])
    i = sizeof(Spam (*)())
Cython-0.26.1/tests/run/locals_T732.pyx0000664000175000017500000000237712542002467020347 0ustar  stefanstefan00000000000000# mode: run
# ticket: 731
# tag: locals, vars, dir

cimport cython

LOCALS = locals()
GLOBALS = globals()
DIR_SAME = sorted(dir()) == sorted(globals().keys())


def test_module_locals_and_dir():
    """
    >>> LOCALS is GLOBALS
    True
    >>> DIR_SAME
    True
    """


def test_class_locals_and_dir():
    """
    >>> klass = test_class_locals_and_dir()
    >>> 'visible' in klass.locs and 'not_visible' not in klass.locs
    True
    >>> klass.names
    ['__module__', '__qualname__', 'visible']
    """
    not_visible = 1234
    class Foo:
        visible = 4321
        names = dir()
        locs = locals()
    return Foo
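
# Inside a class body, dir() and locals() expose the class namespace only:
# names bound so far in that body (plus the implicit __module__ and
# __qualname__ entries), not the locals of the enclosing function - hence
# 'visible' shows up above while 'not_visible' does not.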


@cython.test_fail_if_path_exists('//SortedDictKeysNode')
def test_class_dir_contains():
    """
    >>> klass = test_class_dir_contains()
    True
    False
    True
    False
    True
    False
    True
    True
    True
    """
    not_visible = 1234
    class Foo:
        visible = 4321
        print('visible' in dir())
        print('not_visible' in dir())
        print('not_visible' not in dir())
        print('locs' in dir())
        print('visible' in locals())
        print('locs' in locals())
        locs = locals()
        print('visible' in dir())
        print('locs' in dir())
        print('locs' in locals())
    return Foo
Cython-0.26.1/tests/run/lambda_T723.pyx0000664000175000017500000000022412542002467020277 0ustar  stefanstefan00000000000000# mode: run
# ticket: 723
# tag: lambda

def t723(a):
    """
    >>> t723(2)()
    4
    >>> t723(2)(3)
    9
    """
    return lambda x=a: x * x
Cython-0.26.1/tests/run/class_scope.py0000664000175000017500000000027412542002467020453 0ustar  stefanstefan00000000000000# mode:run
# tag: class, scope

class MethodRedef(object):
    """
    >>> MethodRedef().a(5)
    7
    """

    def a(self, i):
        return i+1

    def a(self, i):
        return i+2
Cython-0.26.1/tests/run/static_methods.pxd0000664000175000017500000000010312576726720021335 0ustar  stefanstefan00000000000000cdef class FromPxd:
    @staticmethod
    cdef static_cdef(int* x)
Cython-0.26.1/tests/run/funcexcept.pyx0000664000175000017500000000146312542002467020512 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> import sys
>>> if not IS_PY3: sys.exc_clear()

>>> def test_py():
...   try:
...     raise AttributeError
...   except AttributeError:
...     print(sys.exc_info()[0] == AttributeError or sys.exc_info()[0])
...   print((IS_PY3 and sys.exc_info()[0] is None) or
...         (not IS_PY3 and sys.exc_info()[0] == AttributeError) or
...         sys.exc_info()[0])

>>> print(sys.exc_info()[0]) # 0
None
>>> test_py()
True
True

>>> print(sys.exc_info()[0]) # test_py()
None

>>> test_c()
True
True
>>> print(sys.exc_info()[0]) # test_c()
None
"""

import sys

IS_PY3 = sys.version_info[0] >= 3

def test_c():
    try:
        raise AttributeError
    except AttributeError:
        print(sys.exc_info()[0] == AttributeError or sys.exc_info()[0])
    print(sys.exc_info()[0] is None or sys.exc_info()[0])
Cython-0.26.1/tests/run/addloop.pyx0000664000175000017500000000100012542002467017753 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> x = 1
    >>> for i in range(10):
    ...     x = x + i
    >>> x
    46

"""

def add_pyrange(max):
    """
    >>> add_pyrange(10)
    46
    """
    x = 1
    for i in range(max):
        x = x + i
    return x

def add_py(max):
    """
    >>> add_py(10)
    46
    """
    x = 1
    for i from 0 <= i < max:
        x = x + i
    return x

def add_c(max):
    """
    >>> add_c(10)
    46
    """
    cdef int x,i
    x = 1
    for i from 0 <= i < max:
        x = x + i
    return x
Cython-0.26.1/tests/run/cpp_nonstdint.pyx0000664000175000017500000000570112542002467021227 0ustar  stefanstefan00000000000000# tag: cpp

cdef extern from "cpp_nonstdint.h":
    ctypedef int Int24
    ctypedef int Int56
    ctypedef int Int88
    ctypedef int Int512

cdef object one = 1

# ---

INT24_MAX = (one<<(sizeof(Int24)*8-1))-one
INT24_MIN = (-INT24_MAX-one)
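
# Assuming the usual two's-complement layout, a signed type of n bytes spans
#   max = 2**(8*n - 1) - 1,   min = -2**(8*n - 1)
# which is what the shift expressions above and below compute.  `one` is a
# Python int, so the intermediate values never overflow a C integer while the
# constants are being built.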

def test_int24(Int24 i):
    """
    >>> str(test_int24(-1))
    '-1'
    >>> str(test_int24(0))
    '0'
    >>> str(test_int24(1))
    '1'

    >>> test_int24(INT24_MAX) == INT24_MAX
    True
    >>> test_int24(INT24_MIN) == INT24_MIN
    True

    >>> test_int24(INT24_MIN-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    >>> test_int24(INT24_MAX+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    >>> test_int24("123") #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ...
    """
    return i

# ---

INT56_MAX = (one<<(sizeof(Int56)*8-1))-one
INT56_MIN = (-INT56_MAX-one)

def test_int56(Int56 i):
    """
    >>> str(test_int56(-1))
    '-1'
    >>> str(test_int56(0))
    '0'
    >>> str(test_int56(1))
    '1'

    >>> test_int56(INT56_MAX) == INT56_MAX
    True
    >>> test_int56(INT56_MIN) == INT56_MIN
    True

    >>> test_int56(INT56_MIN-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    >>> test_int56(INT56_MAX+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    >>> test_int56("123") #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ...
    """
    return i

# ---

INT88_MAX = (one<<(sizeof(Int88)*8-1))-one
INT88_MIN = (-INT88_MAX-one)

def test_int88(Int88 i):
    """
    >>> str(test_int88(-1))
    '-1'
    >>> str(test_int88(0))
    '0'
    >>> str(test_int88(1))
    '1'

    >>> test_int88(INT88_MAX) == INT88_MAX
    True
    >>> test_int88(INT88_MIN) == INT88_MIN
    True

    >>> test_int88(INT88_MIN-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    >>> test_int88(INT88_MAX+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    >>> test_int88("123") #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ...
    """
    return i

# ---

INT512_MAX = (one<<(sizeof(Int512)*8-1))-one
INT512_MIN = (-INT512_MAX-one)

def test_int512(Int512 i):
    """
    >>> str(test_int512(-1))
    '-1'
    >>> str(test_int512(0))
    '0'
    >>> str(test_int512(1))
    '1'

    >>> test_int512(INT512_MAX) == INT512_MAX
    True
    >>> test_int512(INT512_MIN) == INT512_MIN
    True

    >>> test_int512(INT512_MIN-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    >>> test_int512(INT512_MAX+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    >>> test_int512("123") #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: ...
    """
    return i

# ---
Cython-0.26.1/tests/run/pinard7.pyx0000664000175000017500000000051212542002467017704 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> c = build()
    >>> c.method()
    Traceback (most recent call last):
    AssertionError: 1
"""

cdef enum Mode:
    a = 1
    b = 2

cdef class Curseur:
    cdef Mode mode

    def method(self):
        assert False, self.mode

def build():
    cdef Curseur c
    c = Curseur()
    c.mode = a
    return c
Cython-0.26.1/tests/run/temps_corner1.pyx0000664000175000017500000000042612542002467021125 0ustar  stefanstefan00000000000000cdef class A:
    def numerator(self):
        return self

cdef int  bitsize(A a):
    return 1

coeffs = [A()]

class B:
    """
    >>> B().coeffs_bitsize()
    [2]
    """
    def coeffs_bitsize(self):
        r = [bitsize(c.numerator())+1 for c in coeffs]
        return r
Cython-0.26.1/tests/run/final_cdef_class.pyx0000664000175000017500000000141412542002467021601 0ustar  stefanstefan00000000000000
cimport cython

@cython.final
cdef class FinalClass:
    """
    >>> f = FinalClass()
    >>> test_final_class(f)
    Type tested

    >>> try:
    ...     class SubType(FinalClass): pass
    ... except TypeError:
    ...     print('PASSED!')
    PASSED!
    """

cdef class NonFinalClass:
    """
    >>> class SubType(NonFinalClass): pass
    >>> s = SubType()
    """

@cython.final
cdef class FinalSubClass(NonFinalClass):
    """
    >>> f = FinalSubClass()
    >>> test_non_final_class(f)
    Type tested

    >>> try:
    ...     class SubType(FinalSubClass): pass
    ... except TypeError:
    ...     print('PASSED!')
    PASSED!
    """


def test_final_class(FinalClass c):
    print u"Type tested"

def test_non_final_class(NonFinalClass c):
    print u"Type tested"
Cython-0.26.1/tests/run/r_bowden1.pyx0000664000175000017500000000040712542002467020223 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> f(100)
101L
>>> g(3000000000)
3000000001L
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"L", u"")

def f(x):
    cdef unsigned long long ull
    ull = x
    return ull + 1

def g(unsigned long x):
    return x + 1
Cython-0.26.1/tests/run/any.pyx0000664000175000017500000001245112574327400017136 0ustar  stefanstefan00000000000000
cdef class VerboseGetItem(object):
    cdef object sequence
    def __init__(self, seq):
        self.sequence = seq
    def __getitem__(self, i):
        print i
        return self.sequence[i] # may raise IndexError


cimport cython

@cython.test_assert_path_exists("//SimpleCallNode")
@cython.test_fail_if_path_exists("//ForInStatNode")
def any_item(x):
    """
    >>> any_item([0,0,1,0,0])
    True
    >>> any_item([0,0,0,0,1])
    True
    >>> any_item([0,0,0,0,0])
    False

    >>> any(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True
    >>> any_item(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True

    >>> any(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    >>> any_item(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    """
    return any(x)
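
# The test_assert_path_exists / test_fail_if_path_exists decorators in this
# module check Cython's code tree after optimisation: for any_item() the any()
# call must survive as a regular call node (SimpleCallNode), while for the
# generator-expression variants below the call is expected to be inlined into
# a plain loop (ForInStatNode / InlinedGeneratorExpressionNode) with no
# generator (YieldExprNode) left behind.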


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode"
)
def any_in_simple_gen(seq):
    """
    >>> any_in_simple_gen([0,1,0])
    True
    >>> any_in_simple_gen([0,0,0])
    False

    >>> any_in_simple_gen(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True
    >>> any_in_simple_gen(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    """
    return any(x for x in seq)


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode"
)
def any_in_simple_gen_scope(seq):
    """
    >>> any_in_simple_gen_scope([0,1,0])
    True
    >>> any_in_simple_gen_scope([0,0,0])
    False

    >>> any_in_simple_gen_scope(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True
    >>> any_in_simple_gen_scope(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    """
    x = 'abc'
    result = any(x for x in seq)
    assert x == 'abc'
    return result


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode"
)
def any_in_conditional_gen(seq):
    """
    >>> any_in_conditional_gen([3,6,9])
    False
    >>> any_in_conditional_gen([0,3,7])
    True
    >>> any_in_conditional_gen([1,0,1])
    True

    >>> any_in_conditional_gen(VerboseGetItem([0,0,3,0,0]))
    0
    1
    2
    3
    4
    5
    False
    >>> any_in_conditional_gen(VerboseGetItem([0,3,0,1,1]))
    0
    1
    2
    3
    True
    """
    return any(x%3 for x in seq if x%2 == 1)

mixed_ustring = u'AbcDefGhIjKlmnoP'
lower_ustring = mixed_ustring.lower()
upper_ustring = mixed_ustring.upper()


@cython.test_assert_path_exists(
    '//PythonCapiCallNode',
    '//ForFromStatNode',
    "//InlinedGeneratorExpressionNode"
)
@cython.test_fail_if_path_exists(
    '//SimpleCallNode',
    '//ForInStatNode'
)
def any_lower_case_characters(unicode ustring):
    """
    >>> any_lower_case_characters(upper_ustring)
    False
    >>> any_lower_case_characters(mixed_ustring)
    True
    >>> any_lower_case_characters(lower_ustring)
    True
    """
    return any(uchar.islower() for uchar in ustring)


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode",
    "//InlinedGeneratorExpressionNode//IfStatNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode",
#    "//IfStatNode//CoerceToBooleanNode"
)
def any_in_typed_gen(seq):
    """
    >>> any_in_typed_gen([0,1,0])
    True
    >>> any_in_typed_gen([0,0,0])
    False

    >>> any_in_typed_gen(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True
    >>> any_in_typed_gen(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    """
    cdef int x
    return any(x for x in seq)


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode",
    "//InlinedGeneratorExpressionNode//IfStatNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode"
)
def any_in_gen_builtin_name(seq):
    """
    >>> any_in_gen_builtin_name([0,1,0])
    True
    >>> any_in_gen_builtin_name([0,0,0])
    False

    >>> any_in_gen_builtin_name(VerboseGetItem([0,0,1,0,0]))
    0
    1
    2
    True
    >>> any_in_gen_builtin_name(VerboseGetItem([0,0,0,0,0]))
    0
    1
    2
    3
    4
    5
    False
    """
    return any(type for type in seq)


@cython.test_assert_path_exists(
    "//ForInStatNode",
    "//InlinedGeneratorExpressionNode",
    "//InlinedGeneratorExpressionNode//IfStatNode"
)
@cython.test_fail_if_path_exists(
    "//SimpleCallNode",
    "//YieldExprNode",
#    "//IfStatNode//CoerceToBooleanNode"
)
def any_in_double_gen(seq):
    """
    >>> any(x for L in [[0,0,0],[0,0,1],[0,0,0]] for x in L)
    True
    >>> any_in_double_gen([[0,0,0],[0,0,1],[0,0,0]])
    True

    >>> any(x for L in [[0,0,0],[0,0,0],[0,0,0]] for x in L)
    False
    >>> any_in_double_gen([[0,0,0],[0,0,0],[0,0,0]])
    False

    >>> any_in_double_gen([VerboseGetItem([0,0,0]), VerboseGetItem([0,0,1,0,0])])
    0
    1
    2
    3
    0
    1
    2
    True
    >>> any_in_double_gen([VerboseGetItem([0,0,0]),VerboseGetItem([0,0]),VerboseGetItem([0,0,0])])
    0
    1
    2
    3
    0
    1
    2
    0
    1
    2
    3
    False
    """
    cdef int x
    return any(x for L in seq for x in L)
Cython-0.26.1/tests/run/libc_time.pyx0000664000175000017500000000152412542002467020273 0ustar  stefanstefan00000000000000# tag: posix
from libc.stdlib  cimport getenv
from posix.stdlib cimport setenv, unsetenv
from libc.time    cimport *


def test_time():
    """
    >>> test_time()
    """
    cdef time_t t1, t2
    t1 = time(NULL)
    assert t1 != 0
    t1 = time(&t2)
    assert t1 == t2


def test_mktime():
    """
    >>> test_mktime()  # doctest:+ELLIPSIS
    (986138177, ...'Sun Apr  1 15:16:17 2001\\n')
    """
    cdef tm t, gmt
    cdef time_t tt
    cdef char *ct
    cdef char *tz

    tz = getenv("TZ")
    setenv("TZ", "UTC", 1)
    tzset()
    t.tm_sec = 17
    t.tm_min = 16
    t.tm_hour = 15
    t.tm_year = 101
    t.tm_mon = 3
    t.tm_mday = 1
    t.tm_isdst = 0
    tt = mktime(&t)
    assert tt != -1
    ct = ctime(&tt)
    assert ct != NULL
    if tz:
        setenv("TZ", tz, 1)
    else:
        unsetenv("TZ")
    tzset()
    return tt, ct
Cython-0.26.1/tests/run/tryfinally.pyx0000664000175000017500000003105712574327400020547 0ustar  stefanstefan00000000000000# mode: run
# tag: tryfinally

import string
import sys
IS_PY3 = sys.version_info[0] >= 3

cimport cython

try:
    next
except NameError:
    def next(it): return it.next()


def finally_except():
    """
    >>> try:
    ...     raise ValueError
    ... finally:
    ...     raise TypeError
    Traceback (most recent call last):
    TypeError
    >>> finally_except()
    Traceback (most recent call last):
    TypeError
    """
    try:
        raise ValueError
    finally:
        raise TypeError


def finally_pass():
    """
    >>> finally_pass()
    Traceback (most recent call last):
    ValueError
    """
    try:
        raise ValueError()
    finally:
        pass


def except_finally_reraise():
    """
    >>> def py_check():
    ...     try: raise ValueError
    ...     except ValueError:
    ...         for i in range(2):
    ...             try: raise TypeError
    ...             finally:
    ...                 break
    ...         assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...         raise
    ...
    >>> py_check()
    Traceback (most recent call last):
    ValueError
    >>> except_finally_reraise()
    Traceback (most recent call last):
    ValueError
    """
    try:
        raise ValueError
    except ValueError:
        for i in range(2):
            try:
                raise TypeError
            finally:
                break
        assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
        raise


def except_finally_reraise_new():
    """
    >>> def py_check():
    ...     try: raise ValueError
    ...     except ValueError:
    ...         try: raise TypeError
    ...         finally:
    ...             raise
    >>> try: py_check()
    ... except ValueError: assert not IS_PY3
    ... except TypeError: assert IS_PY3
    ... else: assert False
    >>> try: except_finally_reraise_new()
    ... except TypeError: pass  # currently only Py3 semantics implemented
    ... else: assert False
    """
    try:
        raise ValueError
    except ValueError:
        try:
            raise TypeError
        finally:
            raise


def finally_exception_check_return():
    """
    >>> if not IS_PY3:
    ...     sys.exc_clear()
    >>> def py_check():
    ...     try: raise ValueError()
    ...     finally:
    ...         if IS_PY3:
    ...             assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...         else:
    ...             assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...         return 1
    >>> py_check()
    1
    >>> finally_exception_check_return()
    1
    """
    try:
        raise ValueError()
    finally:
        if IS_PY3:
            assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
        else:
            assert sys.exc_info() == (None, None, None), str(sys.exc_info())
        return 1


cdef void swallow():
    try:
        raise TypeError()
    except:
        return


def finally_exception_check_swallow():
    """
    >>> if not IS_PY3:
    ...     sys.exc_clear()
    >>> def swallow():
    ...     try: raise TypeError()
    ...     except: return
    >>> def py_check():
    ...     try: raise ValueError()
    ...     finally:
    ...         if IS_PY3:
    ...             assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...         else:
    ...             assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...         swallow()
    ...         if IS_PY3:
    ...             assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...         else:
    ...             assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    >>> py_check()
    Traceback (most recent call last):
    ValueError
    >>> if not IS_PY3:
    ...     sys.exc_clear()
    >>> finally_exception_check_swallow()
    Traceback (most recent call last):
    ValueError
    """
    try:
        raise ValueError()
    finally:
        if IS_PY3:
            assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
        else:
            assert sys.exc_info() == (None, None, None), str(sys.exc_info())
        swallow()
        if IS_PY3:
            assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
        else:
            assert sys.exc_info() == (None, None, None), str(sys.exc_info())


def finally_exception_break_check():
    """
    >>> if not IS_PY3:
    ...     sys.exc_clear()
    >>> def py_check():
    ...     i = None
    ...     for i in range(2):
    ...         try: raise ValueError()
    ...         finally:
    ...             if IS_PY3:
    ...                 assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...             else:
    ...                 assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...             break
    ...     assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...     return i
    >>> py_check()
    0
    >>> finally_exception_break_check()
    0
    """
    i = None
    for i in range(2):
        try:
            raise ValueError()
        finally:
            if IS_PY3:
                assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
            else:
                assert sys.exc_info() == (None, None, None), str(sys.exc_info())
            break
    assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    return i


def finally_exception_break_check_with_swallowed_raise():
    """
    >>> if not IS_PY3:
    ...     sys.exc_clear()
    >>> def swallow():
    ...     try: raise TypeError()
    ...     except: return
    >>> def py_check():
    ...     i = None
    ...     for i in range(2):
    ...         try: raise ValueError()
    ...         finally:
    ...             if IS_PY3:
    ...                 assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...             else:
    ...                 assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...             swallow()
    ...             if IS_PY3:
    ...                 assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
    ...             else:
    ...                 assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...             break
    ...     assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    ...     return i
    >>> py_check()
    0
    >>> finally_exception_break_check_with_swallowed_raise()
    0
    """
    i = None
    for i in range(2):
        try:
            raise ValueError()
        finally:
            if IS_PY3:
                assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
            else:
                assert sys.exc_info() == (None, None, None), str(sys.exc_info())
            swallow()
            if IS_PY3:
                assert sys.exc_info()[0] == ValueError, str(sys.exc_info())
            else:
                assert sys.exc_info() == (None, None, None), str(sys.exc_info())
            break
    assert sys.exc_info() == (None, None, None), str(sys.exc_info())
    return i


def try_return_cy():
    """
    >>> def try_return_py():
    ...    try:
    ...        return 1
    ...    finally:
    ...        return 2
    >>> try_return_py()
    2
    >>> try_return_cy()
    2
    """
    try:
        return 1
    finally:
        return 2

cdef int try_return_c():
    try:
        return 1
    finally:
        return 2

def call_try_return_c():
    """
    >>> call_try_return_c()
    2
    """
    return try_return_c()

cdef int try_return_with_exception():
    try:
        raise TypeError
    finally:
        return 1

def call_try_return_with_exception():
    """
    >>> call_try_return_with_exception()
    1
    """
    return try_return_with_exception()
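
# As in plain Python, a `return` inside a `finally` block replaces whatever was
# pending - the value returned from the `try` body or an in-flight exception -
# which is why try_return_with_exception() above swallows the TypeError and
# returns 1.  A pure-Python sketch of the same rule:
#
#     def swallows():
#         try:
#             raise TypeError()
#         finally:
#             return 1      # discards the pending TypeError
#
#     assert swallows() == 1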

def try_return_temp(a):
    b = a+2
    try:
        c = a+b
        return c
    finally:
        print b-a

def try_continue(a):
    """
    >>> i=1
    >>> for i in range(3):
    ...     try:
    ...         continue
    ...     finally:
    ...         i+=1
    >>> i
    3
    >>> try_continue(3)
    3
    """
    i=1
    for i in range(a):
        try:
            continue
        finally:
            i+=1
    return i


def try_return_none_1():
    """
    >>> try_return_none_1()
    """
    try:
        return
    finally:
        return

cdef extern from *:
    ctypedef struct PyObject
    void Py_INCREF(object)

cdef PyObject* _none():
    ret = None
    Py_INCREF(ret)
    return <PyObject*> ret

def try_return_none_2():
    """
    >>> try_return_none_2()
    """
    try:
        return <object> _none()
    finally:
        return <object> _none()

def try_break():
    """
    >>> try_break()
    """
    for a in "abcd":
        try:
            if a == 'c':
                break
        except:
            break


def empty_try():
    """
    >>> empty_try()
    1
    """
    try:
        pass
    finally:
        return 1


def empty_try_in_except_raise(raise_in_finally):
    """
    >>> empty_try_in_except_raise(False)
    Traceback (most recent call last):
    ValueError: HUHU
    >>> empty_try_in_except_raise(True)
    Traceback (most recent call last):
    TypeError: OLA
    """
    try:
        raise ValueError("HUHU")
    except ValueError:
        try:
            pass
        finally:
            if raise_in_finally:
                raise TypeError('OLA')
        raise


def try_all_cases(x):
    """
    >>> try_all_cases(None)
    2
    >>> try_all_cases('break')
    4
    >>> try_all_cases('raise')
    Traceback (most recent call last):
    ValueError
    >>> try_all_cases('return')
    3
    >>> try_all_cases('tryraise')
    Traceback (most recent call last):
    TypeError
    >>> try_all_cases('trybreak')
    4
    """
    for i in range(3):
        try:
            if i == 0:
                pass
            elif i == 1:
                continue
            elif x == 'trybreak':
                break
            elif x == 'tryraise':
                raise TypeError()
            else:
                return 2
        finally:
            if x == 'raise':
                raise ValueError()
            elif x == 'break':
                break
            elif x == 'return':
                return 3
    return 4


def finally_yield(x):
    """
    >>> g = finally_yield(None)
    >>> next(g)  # 1
    1
    >>> next(g)  # 2
    1
    >>> next(g)  # 3
    Traceback (most recent call last):
    StopIteration

    >>> g = finally_yield('raise')
    >>> next(g)  # raise 1
    1
    >>> next(g)  # raise 2
    1
    >>> next(g)  # raise 3
    Traceback (most recent call last):
    TypeError

    >>> g = finally_yield('break')
    >>> next(g)   # break 1
    1
    >>> next(g)   # break 2
    1
    >>> next(g)   # break 3
    Traceback (most recent call last):
    StopIteration
    """
    for i in range(3):
        try:
            if i == 0:
                continue
            elif x == 'raise':
                raise TypeError()
            elif x == 'break':
                break
            else:
                return
        finally:
            yield 1


def complex_finally_clause(x, obj):
    """
    >>> class T(object):
    ...     def method(self, value):
    ...         print(value)

    >>> complex_finally_clause('finish', T())
    module.py
    module.py
    module.py
    99
    >>> complex_finally_clause('tryreturn', T())
    module.py
    module.py
    module.py
    2
    >>> complex_finally_clause('trybreak', T())
    module.py
    module.py
    module.py
    99
    >>> complex_finally_clause('tryraise', T())
    Traceback (most recent call last):
    TypeError
    """
    name = 'module'
    l = []
    cdef object lobj = l

    for i in range(3):
        l[:] = [1, 2, 3]
        try:
            if i == 0:
                pass
            elif i == 1:
                continue
            elif x == 'trybreak':
                break
            elif x == 'tryraise':
                raise TypeError()
            elif x == 'tryreturn':
                return 2
            else:
                pass
        finally:
            obj.method(name + '.py')
            from contextlib import contextmanager
            with contextmanager(lambda: (yield 1))() as y:
                assert y == 1
                a = 1
            with nogil:
                if i > 0:
                    with gil:
                        assert obj.method
                        a = 2
            # FIXME: prevent deep-copying inner functions
            #def closure(l):
            #    assert l == lobj
            #closure()
            assert name[0] in string.ascii_letters
            string.Template("-- huhu $name --").substitute(**{'name': '(%s)' % name})
            if a:
                a = 3
            del l[0], lobj[0]
            assert all(i == 3 for i in l), l
    return 99
Cython-0.26.1/tests/run/type_slots_nonzero_bool.pyx0000664000175000017500000000114612542002467023336 0ustar  stefanstefan00000000000000__doc__ = """

>>> not not BoolA(0)
False
>>> not not BoolA(1)
True

>>> not not BoolB(0)
False
>>> not not BoolB(1)
True

>>> not not BoolX(0)
False
>>> not not BoolX(1)
True

>>> not not BoolY(0)
False
>>> not not BoolY(1)
True

"""

cdef class BoolA:
    cdef bint value
    def __cinit__(self, bint value):
        self.value = value
    def __nonzero__(self):
        return self.value

cdef class BoolB:
    cdef bint value
    def __cinit__(self, bint value):
        self.value = value
    def __bool__(self):
        return self.value

cdef class BoolX(BoolA):
    pass

cdef class BoolY(BoolB):
    pass
Cython-0.26.1/tests/run/fstring.pyx0000664000175000017500000001713113143605603020020 0ustar  stefanstefan00000000000000# mode: run
# tag: f_strings, pep498

####
# Cython specific PEP 498 tests in addition to test_fstring.pyx from CPython
####

import sys
IS_PYPY = hasattr(sys, 'pypy_version_info')

cdef extern from *:
    int INT_MAX
    long LONG_MAX
    long LONG_MIN

max_int = INT_MAX
max_long = LONG_MAX
min_long = LONG_MIN


def escaping():
    """
    >>> escaping()
    """
    assert f'{{{{{"abc"}}}}}{{}}{{' == '{{abc}}{}{'
    assert f'\x7b}}' == '{}'
    assert f'{"{{}}"}' == '{{}}'


def format2(ab, cd):
    """
    >>> a, b, c = format2(1, 2)
    >>> print(a)
    ab2
    >>> print(b)
    1cd
    >>> print(c)
    12

    >>> a, b, c = format2('ab', 'cd')
    >>> print(a)
    abcd
    >>> print(b)
    abcd
    >>> print(c)
    abcd
    """
    a = f"ab{cd}"
    assert isinstance(a, unicode), type(a)
    b = f"{ab}cd"
    assert isinstance(b, unicode), type(b)
    c = f"{ab}{cd}"
    assert isinstance(c, unicode) or (IS_PYPY and isinstance(c, str)), type(c)
    return a, b, c


def format_c_numbers(signed char c, short s, int n, long l, float f, double d):
    """
    >>> s1, s2, s3, s4 = format_c_numbers(123, 135, 12, 12312312, 2.3456, 3.1415926)
    >>> print(s1)
    123 13512312312122.35
    >>> print(s2)
    3.14 2.3
    >>> print(s3)
      12f
    >>> print(s4)
    0C014 3.14

    >>> s1, s2, s3, s4 = format_c_numbers(-123, -135, -12, -12312312, -2.3456, -3.1415926)
    >>> print(s1)
    -123-135-12312312-12-2.35
    >>> print(s2)
    -3.14-2.3
    >>> print(s3)
     -12f
    >>> print(s4)
    -C-14-3.14

    >>> s1, s2, s3, s4 = format_c_numbers(0, 0, 0, 0, -2.3456, -0.1415926)
    >>> print(s1)
    0   000-2.35
    >>> print(s2)
    -0.142-2.3
    >>> print(s3)
       0f
    >>> print(s4)
    00000-0.142

    """
    s1 = f"{c}{s:4}{l}{n}{f:.3}"
    assert isinstance(s1, unicode), type(s1)
    s2 = f"{d:.3}{f:4.2}"
    assert isinstance(s2, unicode), type(s2)
    s3 = f"{n:-4}f"
    assert isinstance(s3, unicode), type(s3)
    s4 = f"{n:02X}{n:03o}{d:5.3}"
    assert isinstance(s4, unicode), type(s4)
    return s1, s2, s3, s4


def format_c_numbers_max(int n, long l):
    """
    >>> n, l = max_int, max_long
    >>> s1, s2 = format_c_numbers_max(n, l)
    >>> s1 == '{n}:{l}'.format(n=n, l=l) or s1
    True
    >>> s2 == '{n:012X}:{l:020X}'.format(n=n, l=l) or s2
    True

    >>> n, l = -max_int-1, -max_long-1
    >>> s1, s2 = format_c_numbers_max(n, l)
    >>> s1 == '{n}:{l}'.format(n=n, l=l) or s1
    True
    >>> s2 == '{n:012X}:{l:020X}'.format(n=n, l=l) or s2
    True
    """
    s1 = f"{n}:{l}"
    assert isinstance(s1, unicode), type(s1)
    s2 = f"{n:012X}:{l:020X}"
    assert isinstance(s2, unicode), type(s2)
    return s1, s2


def format_c_number_range(int n):
    """
    >>> for i in range(-1000, 1000):
    ...     assert format_c_number_range(i) == str(i)
    """
    return f'{n}'


def format_c_number_range_width(int n):
    """
    >>> for i in range(-1000, 1000):
    ...     assert format_c_number_range_width(i) == '%04d' % i, format_c_number_range_width(i)
    """
    return f'{n:04}'


def format_c_number_range_dyn_width(int n, int width):
    """
    >>> for i in range(-1000, 1000):
    ...     assert format_c_number_range_dyn_width(i, 0) == str(i), format_c_number_range_dyn_width(i, 0)
    ...     assert format_c_number_range_dyn_width(i, 1) == '%01d' % i, format_c_number_range_dyn_width(i, 1)
    ...     assert format_c_number_range_dyn_width(i, 4) == '%04d' % i, format_c_number_range_dyn_width(i, 4)
    ...     assert format_c_number_range_dyn_width(i, 5) == '%05d' % i, format_c_number_range_dyn_width(i, 5)
    ...     assert format_c_number_range_dyn_width(i, 6) == '%06d' % i, format_c_number_range_dyn_width(i, 6)
    """
    return f'{n:0{width}}'


def format_bool(bint x):
    """
    >>> a, b, c, d = format_bool(1)
    >>> print(a)  # 1
    True
    >>> print(b)  # 1
    True
    >>> print(c)  # 1
    False
    >>> print(d)  # 1
    False

    >>> a, b, c, d = format_bool(2)
    >>> print(a)  # 2
    True
    >>> print(b)  # 2
    True
    >>> print(c)  # 2
    False
    >>> print(d)  # 2
    False

    >>> a, b, c, d = format_bool(0)
    >>> print(a)  # 3
    False
    >>> print(b)  # 3
    True
    >>> print(c)  # 3
    False
    >>> print(d)  # 3
    False
    """
    return f'{x}', f'{True}', f'{x == 2}', f'{2 > 3}'


def format_c_values(Py_UCS4 uchar, Py_UNICODE pyunicode):
    """
    >>> s, s1, s2, s3 = format_c_values(b'A'.decode('ascii'), b'X'.decode('ascii'))
    >>> print(s)
    AXAX
    >>> print(s1)
    A
    >>> print(s2)
    X
    >>> print(s3)
    None

    """
    s = f"{uchar}{pyunicode}{uchar!s}{pyunicode!s}"
    assert isinstance(s, unicode), type(s)
    s1 = f"{uchar}"
    assert isinstance(s1, unicode), type(s1)
    s2 = f"{pyunicode}"
    assert isinstance(s2, unicode), type(s2)
    l = [1, 2, 3]
    s3 = f"{l.reverse()}"  # C int return value => 'None'
    assert isinstance(s3, unicode), type(s3)
    assert l == [3, 2, 1]
    return s, s1, s2, s3


xyz_ustring = u'xÄyÖz'

def format_strings(str s, unicode u):
    u"""
    >>> a, b, c, d, e, f, g = format_strings('abc', b'xyz'.decode('ascii'))
    >>> print(a)
    abcxyz
    >>> print(b)
    xyzabc
    >>> print(c)
    uxyzsabc
    >>> print(d)
    sabcuxyz
    >>> print(e)
    sabcuÄÄuxyz
    >>> print(f)
    sabcu\N{SNOWMAN}uxyz
    >>> print(g)
    sabcu\N{OLD PERSIAN SIGN A}uxyz\N{SNOWMAN}

    >>> a, b, c, d, e, f, g = format_strings('abc', xyz_ustring)
    >>> print(a)
    abcxÄyÖz
    >>> print(b)
    xÄyÖzabc
    >>> print(c)
    uxÄyÖzsabc
    >>> print(d)
    sabcuxÄyÖz
    >>> print(e)
    sabcuÄÄuxÄyÖz
    >>> print(f)
    sabcu\N{SNOWMAN}uxÄyÖz
    >>> print(g)
    sabcu\N{OLD PERSIAN SIGN A}uxÄyÖz\N{SNOWMAN}
    """
    a = f"{s}{u}"
    assert isinstance(a, unicode), type(a)
    b = f"{u}{s}"
    assert isinstance(b, unicode), type(b)
    c = f"u{u}s{s}"
    assert isinstance(c, unicode), type(c)
    d = f"s{s}u{u}"
    assert isinstance(d, unicode), type(d)
    e = f"s{s}uÄÄu{u}"
    assert isinstance(e, unicode), type(e)
    f = f"s{s}u\N{SNOWMAN}u{u}"
    assert isinstance(f, unicode), type(f)
    g = f"s{s}u\N{OLD PERSIAN SIGN A}u{u}\N{SNOWMAN}"
    assert isinstance(g, unicode), type(g)
    return a, b, c, d, e, f, g


def format_pystr(str s1, str s2):
    """
    >>> a, b, c, d = format_pystr('abc', 'xyz')
    >>> print(a)
    abcxyz
    >>> print(b)
    xyzabc
    >>> print(c)
    uxyzsabc
    >>> print(d)
    sabcuxyz
    """
    a = f"{s1}{s2}"
    assert isinstance(a, unicode) or (IS_PYPY and isinstance(a, str)), type(a)
    b = f"{s2}{s1}"
    assert isinstance(b, unicode) or (IS_PYPY and isinstance(a, str)), type(b)
    c = f"u{s2}s{s1}"
    assert isinstance(c, unicode), type(c)
    d = f"s{s1}u{s2}"
    assert isinstance(d, unicode), type(d)
    return a, b, c, d


def raw_fstring(value):
    """
    >>> print(raw_fstring('abc'))
    abc\\x61
    """
    return fr'{value}\x61'


def format_repr(value):
    """
    >>> a, b = format_repr('abc')
    >>> print('x{value!r}x'.format(value='abc'))
    x'abc'x
    >>> print('x{value!r:6}x'.format(value='abc'))
    x'abc' x
    >>> print(a)
    x'abc'x
    >>> print(b)
    x'abc' x
    """
    a = f'x{value!r}x'
    assert isinstance(a, unicode), type(a)
    b = f'x{value!r:6}x'
    assert isinstance(b, unicode), type(b)
    return a, b


def format_str(value):
    """
    >>> a, b = format_str('abc')
    >>> print('x{value!s}x'.format(value='abc'))
    xabcx
    >>> print('x{value!s:6}x'.format(value='abc'))
    xabc   x
    >>> print(a)
    xabcx
    >>> print(b)
    xabc   x
    """
    a = f'x{value!s}x'
    assert isinstance(a, unicode), type(a)
    b = f'x{value!s:6}x'
    assert isinstance(b, unicode), type(b)
    return a, b
Cython-0.26.1/tests/run/pure_cdef_class_property_decorator_T264.pxd0000664000175000017500000000012713023021033026144 0ustar  stefanstefan00000000000000# mode: run
# ticket: 264
# tag: property, decorator

cdef class Prop:
    cdef _value
Cython-0.26.1/tests/run/altet2.pyx0000664000175000017500000000032512542002467017535 0ustar  stefanstefan00000000000000__doc__ = u"""
  >>> iter(C())    # doctest: +ELLIPSIS
  Traceback (most recent call last):
  TypeError: iter() returned non-iterator...
"""

cdef class C:

    def __iter__(self):
        "This is a doc string."
Cython-0.26.1/tests/run/cpdef_extern_func.pyx0000664000175000017500000000124113143605603022020 0ustar  stefanstefan00000000000000# cython: c_string_type=str
# cython: c_string_encoding=ascii
# distutils: extra_compile_args=-fpermissive

__doc__ = """
>>> sqrt(1)
1.0
>>> pyx_sqrt(4)
2.0
>>> pxd_sqrt(9)
3.0
>>> log(10)  # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'log' is not defined
>>> strchr('abcabc', ord('c'))
'cabc'
>>> strchr(needle=ord('c'), haystack='abcabc')
'cabc'
"""

cdef extern from "math.h":
    cpdef double sqrt(double)
    cpdef double pyx_sqrt "sqrt"(double)
    cdef double log(double) # not wrapped

cdef extern from "string.h":
    # signature must be exact in C++, disagrees with C
    cpdef const char* strchr(const char *haystack, int needle);
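
# Background for the note above (added explanation): the C standard declares
#     char *strchr(const char *s, int c);
# while C++ (<cstring>) provides two overloads instead,
#     const char *strchr(const char *s, int c);
#     char *strchr(char *s, int c);
# so the cpdef declaration uses the const overload to match C++ exactly,
# at the cost of disagreeing with the C return type.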
Cython-0.26.1/tests/run/list.pyx0000664000175000017500000000530412542002467017317 0ustar  stefanstefan00000000000000
cimport cython

def f(obj1, obj2, obj3, obj4, obj5):
    """
    >>> f(1, 2, 3, 4, 5)
    []
    """
    obj1 = []
    return obj1

def g(obj1, obj2, obj3, obj4, obj5):
    """
    >>> g(1, 2, 3, 4, 5)
    [2]
    """
    obj1 = [obj2]
    return obj1

def h(obj1, obj2, obj3, obj4, obj5):
    """
    >>> h(1, 2, 3, 4, 5)
    [2, 3]
    """
    obj1 = [obj2, obj3]
    return obj1

def j(obj1, obj2, obj3, obj4, obj5):
    """
    >>> j(1, 2, 3, 4, 5)
    [2, 3, 4]
    """
    obj1 = [obj2, obj3, obj4]
    return obj1

def k(obj1, obj2, obj3, obj4, obj5):
    """
    >>> k(1, 2, 3, 4, 5)
    [17, 42, 88]
    """
    obj1 = [17, 42, 88]
    return obj1

@cython.test_fail_if_path_exists("//SimpleCallNode")
def test_list_call(ob):
    """
    >>> def f():
    ...     yield 1
    ...     yield 2
    ...
    >>> list(f())
    [1, 2]
    """
    return list(ob)

def test_list_sort():
    """
    >>> test_list_sort()
    [1, 2, 3, 4]
    """
    cdef list l1
    l1 = [2,3,1,4]
    l1.sort()
    return l1

def test_list_sort_reversed():
    cdef list l1
    l1 = [2,3,1,4]
    l1.sort(reversed=True)
    return l1

def test_list_reverse():
    """
    >>> test_list_reverse()
    [1, 2, 3, 4]
    """
    cdef list l1
    l1 = [4,3,2,1]
    l1.reverse()
    return l1

def test_list_append():
    """
    >>> test_list_append()
    [1, 2, 3, 4]
    """
    cdef list l1 = [1,2]
    l1.append(3)
    l1.append(4)
    return l1

def test_list_append_insert():
    """
    >>> test_list_append_insert()
    ['first', 'second']
    """
    cdef list l = []
    l.append("second")
    l.insert(0, "first")
    return l

def test_list_pop():
    """
    >>> test_list_pop()
    (2, [1])
    """
    cdef list l1
    l1 = [1,2]
    two = l1.pop()
    return two, l1

def test_list_pop0():
    """
    >>> test_list_pop0()
    (1, [2])
    """
    cdef list l1
    l1 = [1,2]
    one = l1.pop(0)
    return one, l1

def test_list_pop_all():
    """
    >>> test_list_pop_all()
    True
    """
    cdef list l1
    l1 = [1,2]
    i = 0
    try:
        l1.pop()
        i = 1
        l1.pop(-1)
        i = 2
        l1.pop(0)
        i = 3
    except IndexError:
        return i == 2
    return False

def test_list_extend():
    """
    >>> test_list_extend()
    [1, 2, 3, 4, 5, 6]
    """
    cdef list l = [1,2,3]
    l.extend([])
    l.extend(())
    l.extend(set())
    assert l == [1,2,3]
    assert len(l) == 3
    l.extend([4,5,6])
    return l

def test_none_list_extend(list l):
    """
    >>> test_none_list_extend([])
    [1, 2, 3]
    >>> test_none_list_extend([0, 0, 0])
    [0, 0, 0, 1, 2, 3]
    >>> test_none_list_extend(None)
    123
    """
    try:
        l.extend([1,2,3])
    except AttributeError:
        return 123
    return l
Cython-0.26.1/tests/run/locals_expressions_T430.pyx0000664000175000017500000000156212542002467022777 0ustar  stefanstefan00000000000000# ticket: 430

__doc__ = u"""
>>> sorted( get_locals(1,2,3, k=5) .items())
[('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]

>>> sorted(get_locals_items(1,2,3, k=5))
[('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]

>>> sorted(get_locals_items_listcomp(1,2,3, k=5))
[('args', (2, 3)), ('kwds', {'k': 5}), ('x', 1), ('y', 'hi'), ('z', 5)]
"""

def get_locals(x, *args, **kwds):
    cdef int z = 5
    y = "hi"
    return locals()

def get_locals_items(x, *args, **kwds):
    cdef int z = 5
    y = "hi"
    return locals().items()

def get_locals_items_listcomp(x, *args, **kwds):
    # FIXME: 'item' should *not* appear in locals() yet, as locals()
    # is evaluated before assigning to item !
    cdef int z = 5
    y = "hi"
    return [ item for item in locals().items() ]

def sorted(it):
    l = list(it)
    l.sort()
    return l
Cython-0.26.1/tests/run/in_list_with_side_effects_T544.pyx0000664000175000017500000000100212542002467024252 0ustar  stefanstefan00000000000000# ticket: 544

def count(i=[0]):
    i[0] += 1
    return i[0]

def test(x):
    """
    >>> def py_count(i=[0]):
    ...     i[0] += 1
    ...     return i[0]
    >>> 1 in (py_count(), py_count(), py_count(), py_count())
    True
    >>> 4 in (py_count(), py_count(), py_count(), py_count())
    False
    >>> 12 in (py_count(), py_count(), py_count(), py_count())
    True

    >>> test(1)
    True
    >>> test(4)
    False
    >>> test(12)
    True
    """
    return x in (count(), count(), count(), count())
Cython-0.26.1/tests/run/pyclass_special_methods.pyx0000664000175000017500000000165512542002467023252 0ustar  stefanstefan00000000000000# mode: run
# tag: pyclass, getattr

"""
Python bypasses __getattribute__ overrides for some special method lookups.
"""

lookups = []


class PyClass(object):
    """
    >>> del lookups[:]
    >>> obj = PyClass()
    >>> obj.test
    'getattribute(test)'
    >>> lookups
    ['getattribute(test)']
    """
    def __getattribute__(self, name):
        lookup = 'getattribute(%s)' % name
        lookups.append(lookup)
        return lookup

    def __getattr__(self, name):
        lookup = 'getattr(%s)' % name
        lookups.append(lookup)
        return lookup


def use_as_context_manager(obj):
    """
    >>> del lookups[:]
    >>> class PyCM(PyClass):
    ...     def __enter__(self): return '__enter__(%s)' % (self is obj or self)
    ...     def __exit__(self, *args): pass
    >>> obj = PyCM()
    >>> use_as_context_manager(obj)
    '__enter__(True)'
    >>> lookups
    []
    """
    with obj as x:
        pass
    return x
Cython-0.26.1/tests/run/size_t.pyx0000664000175000017500000000157012542002467017642 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> test(0)
0
>>> test(1)
1
>>> test(2)
2
>>> str(test((1<<32)-1))
'4294967295'

>>> try: test(-1)
... except (OverflowError, TypeError): print("ERROR")
ERROR

>>> test(1<<128) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...

>>> a = A(1,2)
>>> a.a == 1
True
>>> a.b == 2
True
>>> a.foo(5)
5
>>> try: a.foo(-1)
... except (OverflowError, TypeError): print("ERROR")
ERROR
>>> a.foo(1 << 180) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...
"""

# XXX This should generate a warning !!!
cdef extern from *:
    ctypedef unsigned long size_t
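    # Presumably the warning should note that size_t is an
    # implementation-defined unsigned type (e.g. 64 bits on LLP64 Windows,
    # where 'unsigned long' is only 32 bits), so redeclaring it as
    # 'unsigned long' is merely a convenience for this test.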

def test(size_t i):
    return i

cdef class A:
    cdef public size_t a
    cdef readonly size_t b

    def __init__(self, size_t a, object b):
        self.a = a
        self.b = b

    cpdef size_t foo(self, size_t x):
        cdef object o = x
        return o
Cython-0.26.1/tests/run/cimport_from_pyx.srctree0000664000175000017500000000210412542002467022566 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import a"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
import Cython.Compiler.Options
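# cimport_from_pyx lets the cimports in a.pyx below be resolved directly
# from b.pyx, even though no b.pxd file is provided in this test tree.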
Cython.Compiler.Options.cimport_from_pyx = True

from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## a.pyx ########

from b cimport Bclass, Bfunc, Bstruct, Benum, Benum_value, Btypedef, Py_EQ, Py_NE
cdef Bclass b = Bclass(5)
assert Bfunc(&b.value) == b.value
assert b.asStruct().value == b.value
cdef Btypedef b_type = &b.value
cdef Benum b_enum = Benum_value
cdef int tmp = Py_EQ

#from c cimport ClassC
#cdef ClassC c = ClassC()
#print c.value

######## b.pyx ########

from cpython.object cimport Py_EQ, Py_NE

cdef enum Benum:
    Benum_value

cdef struct Bstruct:
    int value

ctypedef long *Btypedef

cdef class Bclass:
    cdef long value
    def __init__(self, value):
        self.value = value
    cdef Bstruct asStruct(self):
        return Bstruct(value=self.value)

cdef long Bfunc(Btypedef x):
    return x[0]

######## c.pxd ########

cdef class ClassC:
    cdef int value
Cython-0.26.1/tests/run/unicodeliteralslatin1.pyx0000664000175000017500000000270112542002467022641 0ustar  stefanstefan00000000000000# -*- coding: latin-1 -*-

__doc__ = br"""
    >>> sa
    'abc'
    >>> ua
    u'abc'
    >>> b
    u'123'
    >>> c
    u'S\xf8k ik'
    >>> d
    u'\xfc\xd6\xe4'
    >>> e
    u'\x03g\xf8\uf8d2S\xf8k ik'
    >>> f
    u'\xf8'
    >>> add
    u'S\xf8k ik\xfc\xd6\xe4abc'
    >>> null
    u'\x00'
""".decode("ASCII") + b"""
    >>> len(sa)
    3
    >>> len(ua)
    3
    >>> len(b)
    3
    >>> len(c)
    6
    >>> len(d)
    3
    >>> len(e)
    10
    >>> len(f)
    1
    >>> len(add)
    12
    >>> len(null)
    1
""".decode("ASCII") + u"""
    >>> ua == u'abc'
    True
    >>> b == u'123'
    True
    >>> c == u'Søk ik'
    True
    >>> d == u'üÖä'
    True
    >>> e == u'\x03\x67\xf8\uf8d2Søk ik'     # unescaped by Cython
    True
    >>> e == u'\\x03\\x67\\xf8\\uf8d2Søk ik' # unescaped by Python
    True
    >>> f == u'\xf8'  # unescaped by Cython
    True
    >>> f == u'\\xf8' # unescaped by Python
    True
    >>> k == u'ä' == u'\\N{LATIN SMALL LETTER A WITH DIAERESIS}'
    True
    >>> add == u'Søk ik' + u'üÖä' + 'abc'
    True
    >>> null == u'\\x00' # unescaped by Python (required by doctest)
    True
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '")
else:
    __doc__ = __doc__.replace(u" b'", u" '")

sa = 'abc'
ua = u'abc'

b = u'123'
c = u'Søk ik'
d = u'üÖä'
e = u'\x03\x67\xf8\uf8d2Søk ik'
f = u'\xf8'
k = u'\N{LATIN SMALL LETTER A WITH DIAERESIS}'

add = u'Søk ik' + u'üÖä' + u'abc'
null = u'\x00'
Cython-0.26.1/tests/run/inlinepxd_support.pxd0000664000175000017500000000010713143605603022100 0ustar  stefanstefan00000000000000
cdef inline int my_add(int a, int b=1, int c=0):
    return a + b + c
Cython-0.26.1/tests/run/cpp_classes_def.pyx0000664000175000017500000000615713143605603021467 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror
# cython: experimental_cpp_class_def=True

cdef double pi
from math import pi
from libc.math cimport sin, cos
from libcpp cimport bool

cdef extern from "shapes.h" namespace "shapes":
    cdef cppclass Shape:
        float area() const

cdef cppclass RegularPolygon(Shape):
    float radius # major
    int n
    __init__(int n, float radius):
        this.n = n
        this.radius = radius
    float area() const:
        cdef double theta = pi / this.n
        return this.radius * this.radius * sin(theta) * cos(theta) * this.n

def test_Poly(int n, float radius=1):
    """
    >>> test_Poly(4)
    2.0
    >>> test_Poly(3)         #doctest: +ELLIPSIS
    1.29903...
    >>> test_Poly(3, 10.0)   #doctest: +ELLIPSIS
    129.903...
    >>> test_Poly(100)       #doctest: +ELLIPSIS
    3.13952...
    >>> test_Poly(1000)      #doctest: +ELLIPSIS
    3.14157...
    """
    cdef RegularPolygon* poly
    try:
        poly = new RegularPolygon(n, radius)
        poly.n = n
        poly.radius = radius
        return poly.area()
    finally:
        del poly

cdef cppclass BaseClass:
    int n
    int method():
        return this.n

cdef cppclass SubClass(BaseClass):
    bool override
    __init__(bool override):
        this.n = 1
        this.override = override
    int method():
        if override:
            return 0
        else:
            return BaseClass.method()

def test_BaseMethods(x):
    """
    >>> test_BaseMethods(True)
    0
    >>> test_BaseMethods(False)
    1
    """
    cdef SubClass* subClass
    try:
        subClass = new SubClass(x)
        return subClass.method()
    finally:
        del subClass

cdef cppclass WithStatic:
    @staticmethod
    double square(double x):
        return x * x

def test_Static(x):
    """
    >>> test_Static(2)
    4.0
    >>> test_Static(0.5)
    0.25
    """
    return WithStatic.square(x)


cdef cppclass InitDealloc:
    __init__():
        try:
            print "Init"
        finally:
            return  # swallow any exceptions
    __dealloc__():
        try:
            print "Dealloc"
        finally:
            return  # swallow any exceptions

def test_init_dealloc():
    """
    >>> test_init_dealloc()
    start
    Init
    live
    Dealloc
    end
    """
    print "start"
    cdef InitDealloc *ptr = new InitDealloc()
    print "live"
    del ptr
    print "end"


cdef cppclass WithTemplate[T]:
    T value
    void set_value(T value):
        this.value = value
    T get_value():
        return this.value

cdef cppclass ResolveTemplate(WithTemplate[long]):
    pass

def test_templates(long value):
    """
    >>> test_templates(10)
    >>> test_templates(-2)
    """
    cdef WithTemplate[long] *base = new WithTemplate[long]()
    del base

    cdef ResolveTemplate *resolved = new ResolveTemplate()
    resolved.set_value(value)
    assert resolved.value == resolved.get_value() == value, resolved.value

    base = resolved
    base.set_value(2 * value)
    assert base.get_value() == base.value == 2 * value, base.value

    del base

cdef cppclass Simple:
  pass

def test_default_init_no_gil():
  with nogil:
    s = new Simple()
    del s
Cython-0.26.1/tests/run/extlen.pyx0000664000175000017500000000015012542002467017635 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> len(Spam())
    0
"""

cdef class Spam:

    def __len__(self):
        return 0
Cython-0.26.1/tests/run/exttype_dealloc.pyx0000664000175000017500000000631312542002467021532 0ustar  stefanstefan00000000000000# mode: run
# tag: dealloc

import gc
import sys


test_results = []


cdef void add_name(obj):
    name = type(obj).__name__.rsplit('.', 1)[-1]
    test_results.append(name)


def find_name(exttype):
    name = exttype.__name__.rsplit('.', 1)[-1]
    return test_results.count(name)


cdef class ExtTypeSimple:
    """
    >>> obj = ExtTypeSimple()
    >>> find_name(ExtTypeSimple)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(ExtTypeSimple)
    1
    """
    cdef int x
    def __dealloc__(self):
        add_name(self)
        self.x = 0


class PySubTypeSimple(ExtTypeSimple):
    """
    >>> obj = PySubTypeSimple()
    >>> find_name(PySubTypeSimple)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(PySubTypeSimple)
    1
    """


class PySubTypeDel(ExtTypeSimple):
    """
    >>> obj = PySubTypeDel()
    >>> find_name(PySubTypeDel)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(PySubTypeDel)
    2
    """
    def __del__(self):
        add_name(self)


cdef class ExtSubTypeObjAttr(ExtTypeSimple):
    """
    >>> obj = ExtSubTypeObjAttr()
    >>> find_name(ExtSubTypeObjAttr)
    0
    >>> obj = None
    >>> _ = gc.collect()

    # both this type and the base class add the same name
    >>> find_name(ExtSubTypeObjAttr)
    2
    """
    cdef object attr
    def __dealloc__(self):
        add_name(self)
        self.x = 1


cdef class ExtTypeRaise:
    """
    >>> obj = ExtTypeRaise()
    >>> find_name(ExtTypeRaise)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(ExtTypeRaise)
    1
    """
    def __dealloc__(self):
        add_name(self)
        raise RuntimeError("HUHU !")


class PySubTypeRaise(ExtTypeRaise):
    """
    >>> obj = PySubTypeRaise()
    >>> obj.ref = obj
    >>> find_name(PySubTypeRaise)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(PySubTypeRaise)
    1
    """


cdef class ExtTypeRefCycle:
    """
    >>> obj = ExtTypeRefCycle()
    >>> obj.ref = obj
    >>> find_name(ExtTypeRefCycle)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(ExtTypeRefCycle)
    1
    """
    cdef public object ref
    cdef int x
    def __dealloc__(self):
        add_name(self)
        self.x = 1


class PySubTypeRefCycleDel(ExtTypeRefCycle):
    """
    >>> obj = PySubTypeRefCycleDel()
    >>> obj.ref = obj
    >>> find_name(PySubTypeRefCycleDel)
    0
    >>> obj = None
    >>> _ = gc.collect()

    >>> count = 2
    >>> if sys.version_info >= (3, 4):
    ...     count = find_name(PySubTypeRefCycleDel)
    >>> count
    2
    """
    def __del__(self):
        add_name(self)


cdef class ExtTypeRefCycleRaise:
    """
    >>> obj = ExtTypeRefCycleRaise()
    >>> obj.ref = obj
    >>> find_name(ExtTypeRefCycleRaise)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(ExtTypeRefCycleRaise)
    1
    """
    cdef public object ref
    def __dealloc__(self):
        add_name(self)
        raise RuntimeError("Cleaning up !")


class PySubTypeRefCycleRaise(ExtTypeRefCycleRaise):
    """
    >>> obj = PySubTypeRefCycleRaise()
    >>> obj.ref = obj
    >>> find_name(PySubTypeRefCycleRaise)
    0
    >>> obj = None
    >>> _ = gc.collect()
    >>> find_name(PySubTypeRefCycleRaise)
    1
    """
Cython-0.26.1/tests/run/complex_cast_T445.pyx0000664000175000017500000000230112542002467021537 0ustar  stefanstefan00000000000000# ticket: 445

def complex_double_cast(double x, double complex z):
    """
    >>> complex_double_cast(1, 4-3j)
    ((1+0j), (4-3j))
    """
    cdef double complex xx = <double complex>x
    cdef double complex zz = <double complex>z
    xx = <double complex>x
    return xx, zz

def complex_double_int_cast(int x, int complex z):
    """
    >>> complex_double_int_cast(2, 2 + 3j)
    ((2+0j), (3+3j))
    """
    cdef double complex xx = <double complex>x
    cdef double complex zz = <double complex>(z+1)
    return xx, zz

def complex_int_double_cast(double x, double complex z):
    """
    >>> complex_int_double_cast(2.5, 2.5 + 3.5j)
    ((2+0j), (2+3j))
    """
    cdef int complex xx = <int complex>x
    cdef int complex zz = <int complex>z
    return xx, zz

cdef int side_effect_counter = 0

cdef double complex side_effect(double complex z):
    global side_effect_counter
    side_effect_counter += 1
    print "side effect", side_effect_counter, z
    return z

def test_side_effect(int complex z):
    """
    >>> test_side_effect(5)
    side effect 1 (5+0j)
    (5+0j)
    >>> test_side_effect(3-4j)
    side effect 2 (3-4j)
    (3-4j)
    """
    cdef int complex zz = <int complex>side_effect(z)
    return zz
Cython-0.26.1/tests/run/r_pyclass.pyx0000664000175000017500000000034412542002467020342 0ustar  stefanstefan00000000000000class Spam:

    def __init__(self, w):
        self.weight = w

    def serve(self):
        print self.weight, u"tons of spam!"

def order():
    """
    >>> order()
    42 tons of spam!
    """
    s = Spam(42)
    s.serve()
Cython-0.26.1/tests/run/classdecorators_T336.pyx0000664000175000017500000000151412542002467022255 0ustar  stefanstefan00000000000000# ticket: 336

__doc__ = u"""
>>> print('\\n'.join(calls))
Py-Honk PyTestClass
PyTestClass
Py-Hello PyTestClass
PyTestClass
Py-Done PyTestClass

>>> c = PyTestClass()
Ho, Ho, Ho!
"""

calls = []

class print_msg(object):
    def __init__(self, message):
        self.msg = message
    def __call__(self, c):
        calls.append( self.msg + c.__name__ )
        return c

def print_name(c):
    calls.append( c.__name__ )
    return c

@print_msg(u"Py-Done ")
@print_name
@print_msg(u"Py-Hello ")
@print_name
@print_msg(u"Py-Honk ")
class PyTestClass(object):
    def __init__(self):
        print u"Ho, Ho, Ho!"

# not currently working:
#
## @print_msg("Cy-Done ")
## @print_name
## @print_msg("Cy-Hello ")
## @print_name
## @print_msg("Cy-Honk ")
## cdef class CyTestClass(object):
##     def __init__(self):
##         print u"Ho, Ho, Ho!"
Cython-0.26.1/tests/run/delete.pyx0000664000175000017500000000357212542002467017613 0ustar  stefanstefan00000000000000
cimport cython

class A(object):
    """
    >>> a = A()
    >>> a.f()
    [2, 1]
    >>> a.g()
    (False, True)
    """
    def f(self):
        self.refs = [3,2,1]
        del self.refs[0]
        return self.refs

    def g(self):
        self.a = 3
        del self.a
        return (hasattr(self, u"a"), hasattr(self, u"g"))

def del_item(L, o):
    """
    >>> del_item({1: 'a', 2: 'b'}, 1)
    {2: 'b'}
    >>> del_item(list(range(10)), 2)
    [0, 1, 3, 4, 5, 6, 7, 8, 9]
    """
    del L[o]
    return L

@cython.test_assert_path_exists('//DelStatNode//IndexNode//NoneCheckNode')
def del_dict(dict D, o):
    """
    >>> del_dict({1: 'a', 2: 'b'}, 1)
    {2: 'b'}
    """
    del D[o]
    return D

@cython.test_fail_if_path_exists('//NoneCheckNode')
def del_dict_from_literal(o):
    """
    >>> del_dict_from_literal(1)
    {2: 'b'}
    """
    D = {1: 'a', 2: 'b'}
    del D[o]
    return D

def del_list(list L, o):
    """
    >>> del_list(list(range(5)), 3)
    [0, 1, 2, 4]
    """
    del L[o]
    return L

def del_int(L, int i):
    """
    >>> del_int(list(range(5)), 3)
    [0, 1, 2, 4]
    >>> del_int({-1: 'neg', 1: 'pos'}, -1)
    {1: 'pos'}
    """
    del L[i]
    return L

def del_list_int(L, int i):
    """
    >>> del_list_int(list(range(5)), 3)
    [0, 1, 2, 4]
    """
    del L[i]
    return L

def del_temp_slice(a):
    """
    >>> class A(object):
    ...     attr = [1,2,3]
    >>> a = A()
    >>> a.attr
    [1, 2, 3]
    >>> del_temp_slice(a)
    []
    >>> a.attr
    []
    >>> del_temp_slice(a)
    []
    >>> a.attr
    []
    """
    while a.attr:
        del a.attr[:]
    return a.attr

def del_local(a):
    """
    >>> del_local(object())
    """
    del a
    assert 'a' not in locals()

def del_seq(a, b, c):
    """
    >>> del_seq(1, 2, 3)
    """
    del a, (b, c)
    assert 'a' not in locals()
    assert 'b' not in locals()
    assert 'c' not in locals()
Cython-0.26.1/tests/run/tp_new_cimport.srctree0000664000175000017500000000343712542002467022231 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import tp_new_tests; tp_new_tests.test_all()"
PYTHON -c "import tp_new_tests; tp_new_tests.test_sub()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
    ext_modules = cythonize("**/*.pyx"),
    )

######## tp_new_tests.py ########

def test_all():
    test_a()
    test_b()
    test_a_in_b()
    test_sub()

def test_a():
    import a
    assert isinstance(a.tpnew_ExtTypeA(), a.ExtTypeA)
    assert a.tpnew_ExtTypeA().attrA == 123

def test_b():
    import b
    assert isinstance(b.tpnew_ExtTypeB(), b.ExtTypeB)
    assert b.tpnew_ExtTypeB().attrB == 234

def test_a_in_b():
    import a,b
    assert isinstance(b.tpnew_ExtTypeA(), a.ExtTypeA)
    assert b.tpnew_ExtTypeA().attrA == 123

def test_sub():
    import b
    assert isinstance(b.tpnew_SubExtTypeA(), b.SubExtTypeA)
    assert b.tpnew_SubExtTypeA().attrAB == 345
    assert b.tpnew_SubExtTypeA().attrA == 123

######## a.pxd ########

cdef api class ExtTypeA[type ExtTypeA_Type, object ExtTypeAObject]:
    cdef readonly attrA

######## a.pyx ########

cdef class ExtTypeA:
    def __cinit__(self):
        self.attrA = 123

def tpnew_ExtTypeA():
    return ExtTypeA.__new__(ExtTypeA)

######## b.pxd ########

from a cimport ExtTypeA

cdef class ExtTypeB:
    cdef readonly attrB

cdef class SubExtTypeA(ExtTypeA):
    cdef readonly attrAB

######## b.pyx ########

from a cimport ExtTypeA

cdef class ExtTypeB:
    def __cinit__(self):
        self.attrB = 234

cdef class SubExtTypeA(ExtTypeA):
    def __cinit__(self):
        self.attrAB = 345

def tpnew_ExtTypeA():
    return ExtTypeA.__new__(ExtTypeA)

def tpnew_ExtTypeB():
    return ExtTypeB.__new__(ExtTypeB)

def tpnew_SubExtTypeA():
    return SubExtTypeA.__new__(SubExtTypeA)
Cython-0.26.1/tests/run/py_unicode_type.pyx0000664000175000017500000001430013143605603021536 0ustar  stefanstefan00000000000000# -*- coding: iso-8859-1 -*-
# mode: run
# tag: warnings

cimport cython

cdef Py_UNICODE char_ASCII = u'A'
cdef Py_UNICODE char_KLINGON = u'\uF8D2'

u_A = char_ASCII
u_KLINGON = char_KLINGON


def compare_ASCII():
    """
    >>> compare_ASCII()
    True
    False
    False
    """
    print(char_ASCII == u'A')
    print(char_ASCII == u'B')
    print(char_ASCII == u'\uF8D2')


def compare_klingon():
    """
    >>> compare_klingon()
    True
    False
    False
    """
    print(char_KLINGON == u'\uF8D2')
    print(char_KLINGON == u'A')
    print(char_KLINGON == u'B')


from cpython.unicode cimport PyUnicode_FromOrdinal
import sys

u0 = u'\x00'
u1 = u'\x01'
umax = PyUnicode_FromOrdinal(sys.maxunicode)

def unicode_ordinal(Py_UNICODE i):
    """
    >>> ord(unicode_ordinal(0)) == 0
    True
    >>> ord(unicode_ordinal(1)) == 1
    True
    >>> ord(unicode_ordinal(sys.maxunicode)) == sys.maxunicode
    True

    >>> ord(unicode_ordinal(u0)) == 0
    True
    >>> ord(unicode_ordinal(u1)) == 1
    True
    >>> ord(unicode_ordinal(umax)) == sys.maxunicode
    True

    Value too small:
    >>> unicode_ordinal(-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    Value too large:
    >>> unicode_ordinal(sys.maxunicode+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    Less than one character:
    >>> unicode_ordinal(u0[:0])
    Traceback (most recent call last):
    ...
    ValueError: only single character unicode strings can be converted to Py_UNICODE, got length 0

    More than one character:
    >>> unicode_ordinal(u0+u1)
    Traceback (most recent call last):
    ...
    ValueError: only single character unicode strings can be converted to Py_UNICODE, got length 2
    """
    return i


def ord_pyunicode(Py_UNICODE x):
    """
    >>> ord_pyunicode(u0)
    0
    >>> ord_pyunicode(u_A)
    65
    >>> ord_pyunicode(u_KLINGON)
    63698
    """
    return ord(x)


@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_type_methods(Py_UNICODE uchar):
    """
    >>> unicode_type_methods(ord('A'))
    [True, True, False, False, False, False, False, True, True]
    >>> unicode_type_methods(ord('a'))
    [True, True, False, False, True, False, False, False, False]
    >>> unicode_type_methods(ord('8'))
    [True, False, True, True, False, True, False, False, False]
    >>> unicode_type_methods(ord('\\t'))
    [False, False, False, False, False, False, True, False, False]
    """
    return [
        # character types
        uchar.isalnum(),
        uchar.isalpha(),
        uchar.isdecimal(),
        uchar.isdigit(),
        uchar.islower(),
        uchar.isnumeric(),
        uchar.isspace(),
        uchar.istitle(),
        uchar.isupper(),
        ]

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_methods(Py_UNICODE uchar):
    """
    >>> unicode_methods(ord('A')) == ['a', 'A', 'A']
    True
    >>> unicode_methods(ord('a')) == ['a', 'A', 'A']
    True
    """
    return [
        # character conversion
        uchar.lower(),
        uchar.upper(),
        uchar.title(),
        ]

@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//PythonCapiCallNode')
def len_uchar(Py_UNICODE uchar):
    """
    >>> len_uchar(ord('A'))
    1
    """
    assert uchar  # just to avoid C compiler unused arg warning
    return len(uchar)

def index_uchar(Py_UNICODE uchar, Py_ssize_t i):
    """
    >>> index_uchar(ord('A'), 0) == ('A', 'A', 'A')
    True
    >>> index_uchar(ord('A'), -1) == ('A', 'A', 'A')
    True
    >>> index_uchar(ord('A'), 1)
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return uchar[0], uchar[-1], uchar[i]

mixed_ustring = u'AbcDefGhIjKlmnoP'
lower_ustring = mixed_ustring.lower()
upper_ustring = mixed_ustring.lower()

@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//ForInStatNode')
def count_lower_case_characters(unicode ustring):
    """
    >>> count_lower_case_characters(mixed_ustring)
    10
    >>> count_lower_case_characters(lower_ustring)
    16
    """
    cdef Py_ssize_t count = 0
    for uchar in ustring:
         if uchar.islower():
             count += 1
    return count

@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//ForInStatNode')
def count_lower_case_characters_slice(unicode ustring):
    """
    >>> count_lower_case_characters_slice(mixed_ustring)
    10
    >>> count_lower_case_characters_slice(lower_ustring)
    14
    """
    cdef Py_ssize_t count = 0
    for uchar in ustring[1:-1]:
         if uchar.islower():
             count += 1
    return count

@cython.test_assert_path_exists('//SwitchStatNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def iter_and_in():
    """
    >>> iter_and_in()
    a
    b
    e
    f
    h
    """
    for c in u'abcdefgh':
        if c in u'abCDefGh':
            print c

@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def index_and_in():
    """
    >>> index_and_in()
    1
    3
    4
    7
    8
    """
    cdef int i
    for i in range(1,9):
        if u'abcdefgh'[-i] in u'abCDefGh':
            print i


def uchar_lookup_in_dict(obj, Py_UNICODE uchar):
    """
    >>> d = {u_KLINGON: 1234, u0: 0, u1: 1, u_A: 2}
    >>> uchar_lookup_in_dict(d, u_KLINGON)
    (1234, 1234)
    >>> uchar_lookup_in_dict(d, u_A)
    (2, 2)
    >>> uchar_lookup_in_dict(d, u0)
    (0, 0)
    >>> uchar_lookup_in_dict(d, u1)
    (1, 1)
    """
    cdef dict d = obj
    dval = d[uchar]
    objval = obj[uchar]
    return dval, objval


_WARNINGS = """
250:16: Item lookup of unicode character codes now always converts to a Unicode string. Use an explicit C integer cast to get back the previous integer lookup behaviour.
"""
Cython-0.26.1/tests/run/empty_builtin_constructors.pyx0000664000175000017500000000245112542002467024060 0ustar  stefanstefan00000000000000
cimport cython
import sys

IS_PY3 = sys.version_info[0] >= 3

def _bool():
    """
    >>> _bool() == bool()
    True
    """
    return bool()

def _int():
    """
    >>> _int() == int()
    True
    """
    return int()

def _long():
    """
    >>> IS_PY3 or _long() == long()
    True
    """
    return long()

def _float():
    """
    >>> _float() == float()
    True
    """
    return float()

def _complex():
    """
    >>> _complex() == complex()
    True
    """
    return complex()

def _bytes():
    """
    >>> IS_PY3 and _bytes() == bytes() or _bytes() == str()
    True
    """
    return bytes()

def _str():
    """
    >>> _str() == str()
    True
    """
    return str()

def _unicode():
    """
    >>> IS_PY3 and _unicode() == str() or _unicode() == unicode()
    True
    """
    return unicode()

def _tuple():
    """
    >>> _tuple() == tuple()
    True
    """
    return tuple()

def _list():
    """
    >>> _list() == list()
    True
    """
    return list()

def _dict():
    """
    >>> _dict() == dict()
    True
    """
    return dict()

py_set = cython.set

def _set():
    """
    >>> _set() == py_set()
    True
    """
    return set()

py_frozenset = cython.frozenset

def _frozenset():
    """
    >>> _frozenset() == py_frozenset()
    True
    """
    return frozenset()
Cython-0.26.1/tests/run/nonecheck.pyx0000664000175000017500000001075512542002467020307 0ustar  stefanstefan00000000000000__doc__ = u"""
Tests accessing attributes of extension type variables
set to None
"""

cimport cython

cdef class MyClass:
    cdef int a, b
    def __init__(self, a, b):
        self.a = a
        self.b = b

@cython.nonecheck(True)
def getattr_(MyClass var):
    """
    >>> obj = MyClass(2, 3)
    >>> getattr_(obj)
    2
    >>> getattr_(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'a'
    >>> setattr_(obj)
    >>> getattr_(obj)
    10
    """
    print var.a

@cython.nonecheck(True)
def setattr_(MyClass var):
    """
    >>> obj = MyClass(2, 3)
    >>> setattr_(obj)
    >>> setattr_(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'a'
    """
    var.a = 10

@cython.nonecheck(True)
def getattr_nogil(MyClass var):
    """
    >>> getattr_nogil(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'a'
    """
    with nogil:
        var.a

@cython.nonecheck(True)
def setattr_nogil(MyClass var):
    """
    >>> setattr_nogil(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'a'
    """
    with nogil:
        var.a = 1

def some():
    return MyClass(4, 5)

@cython.nonecheck(True)
def checking(MyClass var):
    """
    >>> obj = MyClass(2, 3)
    >>> checking(obj)
    2
    2
    >>> checking(None)
    var is None
    """
    state = (var is None)
    if not state:
        print var.a
    if var is not None:
        print var.a
    else:
        print u"var is None"

@cython.nonecheck(True)
def check_and_assign(MyClass var):
    """
    >>> obj = MyClass(2, 3)
    >>> check_and_assign(obj)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'a'
    """
    if var is not None:
        print var.a
        var = None
        print var.a

@cython.nonecheck(True)
def check_buffer_get(object[int] buf):
    """
    >>> check_buffer_get(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    return buf[0]

@cython.nonecheck(True)
def check_buffer_set(object[int] buf):
    """
    >>> check_buffer_set(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    buf[0] = 1

@cython.nonecheck(True)
def test_memslice_get(double[:] buf):
    """
    >>> test_memslice_get(None)
    Traceback (most recent call last):
    TypeError: Cannot index None memoryview slice
    """
    return buf[0]

@cython.nonecheck(True)
def test_memslice_set(double[:] buf):
    """
    >>> test_memslice_set(None)
    Traceback (most recent call last):
    TypeError: Cannot index None memoryview slice
    """
    buf[0] = 1.0

@cython.nonecheck(True)
def test_memslice_copy(double[:] buf):
    """
    >>> test_memslice_copy(None)
    Traceback (most recent call last):
    AttributeError: Cannot access 'copy' attribute of None memoryview slice
    """
    cdef double[:] copy = buf.copy()

@cython.nonecheck(True)
def test_memslice_transpose(double[:] buf):
    """
    >>> test_memslice_transpose(None)
    Traceback (most recent call last):
    AttributeError: Cannot transpose None memoryview slice
    """
    cdef double[:] T = buf.T

@cython.nonecheck(True)
def test_memslice_shape(double[:] buf):
    """
    >>> test_memslice_shape(None)
    Traceback (most recent call last):
    AttributeError: Cannot access 'shape' attribute of None memoryview slice
    """
    cdef Py_ssize_t extent = buf.shape[0]

@cython.nonecheck(True)
def test_memslice_slice(double[:] buf):
    """
    >>> test_memslice_slice(None)
    Traceback (most recent call last):
    TypeError: Cannot slice None memoryview slice
    """
    cdef double[:] sliced = buf[1:]

@cython.nonecheck(True)
def test_memslice_slice2(double[:] buf):
    """
    Should this raise an error? It may not slice at all.
    >>> test_memslice_slice(None)
    Traceback (most recent call last):
    TypeError: Cannot slice None memoryview slice
    """
    cdef double[:] sliced = buf[:]

@cython.nonecheck(True)
def test_memslice_slice_assign(double[:] buf):
    """
    >>> test_memslice_slice_assign(None)
    Traceback (most recent call last):
    TypeError: Cannot assign to None memoryview slice
    """
    buf[...] = 2

@cython.nonecheck(True)
def test_memslice_slice_assign2(double[:] buf):
    """
    >>> test_memslice_slice_assign2(None)
    Traceback (most recent call last):
    TypeError: Cannot slice None memoryview slice
    """
    buf[:] = buf[::-1]
Cython-0.26.1/tests/run/cyfunction.pyx0000664000175000017500000001740413143605603020530 0ustar  stefanstefan00000000000000# cython: binding=True
# mode: run
# tag: cyfunction

import sys
IS_PY3 = sys.version_info[0] >= 3
IS_PY34 = sys.version_info > (3, 4, 0, 'beta', 3)


def inspect_isroutine():
    """
    >>> inspect_isroutine()
    True
    """
    import inspect
    return inspect.isroutine(inspect_isroutine)


def inspect_isfunction():
    """
    >>> inspect_isfunction()
    False
    False
    """
    import inspect, types
    print isinstance(inspect_isfunction, types.FunctionType)
    return inspect.isfunction(inspect_isfunction)


def inspect_isbuiltin():
    """
    >>> inspect_isbuiltin()
    False
    False
    """
    import inspect, types
    print isinstance(inspect_isfunction, types.BuiltinFunctionType)
    return inspect.isbuiltin(inspect_isbuiltin)


def inspect_signature(a, b, c=123, *, d=234):
    """
    >>> sig = inspect_signature(1, 2)
    >>> if IS_PY34: list(sig.parameters)
    ... else: ['a', 'b', 'c', 'd']
    ['a', 'b', 'c', 'd']
    >>> if IS_PY34: sig.parameters['c'].default == 123
    ... else: True
    True
    >>> if IS_PY34: sig.parameters['d'].default == 234
    ... else: True
    True
    """
    import inspect
    return inspect.signature(inspect_signature) if IS_PY34 else None


# def test___signature__(a, b, c=123, *, d=234):
#     """
#     >>> sig = test___signature__(1, 2)
#     >>> if IS_PY34: list(sig.parameters)
#     ... else: ['a', 'b', 'c', 'd']
#     ['a', 'b', 'c', 'd']
#     >>> if IS_PY34: sig.parameters['c'].default == 123
#     ... else: True
#     True
#     >>> if IS_PY34: sig.parameters['d'].default == 234
#     ... else: True
#     True
#     """
#     return inspect_signature.__signature__ if IS_PY34 else None


def test_dict():
    """
    >>> test_dict.foo = 123
    >>> test_dict.__dict__
    {'foo': 123}
    >>> test_dict.__dict__ = {'bar': 321}
    >>> test_dict.__dict__
    {'bar': 321}
    >>> test_dict.func_dict
    {'bar': 321}
    """

def test_name():
    """
    >>> test_name.__name__
    'test_name'
    >>> test_name.func_name
    'test_name'
    >>> test_name.__name__ = 123 #doctest:+ELLIPSIS
    Traceback (most recent call last):
    TypeError: __name__ must be set to a ... object
    >>> test_name.__name__ = 'foo'
    >>> test_name.__name__
    'foo'
    """

def test_doc():
    """
    >>> del test_doc.__doc__
    >>> test_doc.__doc__
    >>> test_doc.__doc__ = 'docstring'
    >>> test_doc.__doc__
    'docstring'
    >>> test_doc.func_doc
    'docstring'
    """


def test_hash():
    """
    >>> d = {test_hash: 123}
    >>> test_hash in d
    True
    >>> d[test_hash]
    123
    >>> hash(test_hash) == hash(test_hash)
    True
    """


def test_closure():
    """
    >>> test_closure.func_closure is None
    True
    """

def test_globals():
    """
    >>> test_globals.func_globals is not None
    True
    >>> 'test_globals' in test_globals.func_globals or test_globals.func_globals
    True
    >>> 'test_name' in test_globals.func_globals or test_globals.func_globals
    True
    >>> 'not there' not in test_globals.func_globals or test_globals.func_globals
    True
    >>> try: test_globals.func_globals = {}
    ... except (AttributeError, TypeError): pass
    ... else: assert 0, 'FAILED'
    """

def test_reduce():
    """
    >>> import pickle
    >>> pickle.loads(pickle.dumps(test_reduce))()
    'Hello, world!'
    """
    return 'Hello, world!'

def test_method(self):
    return self

class BindingTest:
    """
    >>> BindingTest.test_method = test_method
    >>> BindingTest.test_method() #doctest:+ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    >>> BindingTest().test_method()
    <BindingTest instance>
    """
    def __repr__(self):
        return '<BindingTest instance>'


def codeof(func):
    if IS_PY3:
        return func.__code__
    else:
        return func.func_code

def varnamesof(func):
    code = codeof(func)
    varnames = code.co_varnames
    if sys.version_info < (2,5):
        pos = {'a':0, 'x':1, 'b':2, 'l':3, 'm':4}
        varnames = tuple(sorted(varnames, key=pos.__getitem__))
    return varnames

def namesof(func):
    code = codeof(func)
    names = code.co_names
    if sys.version_info < (2,5):
        names = ()
    return names

def cy_no_arg():
    l = m = 1
def cy_one_arg(a):
    l = m = 1
def cy_two_args(x, b):
    l = m = 1
def cy_default_args(x=1, b=2):
    l = m = 1

def test_code():
    """
    >>> def no_arg(): l = m = 1
    >>> def one_arg(a): l = m = 1
    >>> def two_args(x, b): l = m = 1
    >>> def default_args(x=1, b=2): l = m = 1

    >>> codeof(no_arg).co_argcount
    0
    >>> codeof(cy_no_arg).co_argcount
    0
    >>> print(codeof(no_arg).co_name)
    no_arg
    >>> print(codeof(cy_no_arg).co_name)
    cy_no_arg
    >>> namesof(no_arg)
    ()
    >>> codeof(cy_no_arg).co_names
    ()
    >>> varnamesof(no_arg)
    ('l', 'm')
    >>> codeof(cy_no_arg).co_varnames
    ('l', 'm')

    >>> codeof(one_arg).co_argcount
    1
    >>> codeof(cy_one_arg).co_argcount
    1
    >>> print(codeof(one_arg).co_name)
    one_arg
    >>> print(codeof(cy_one_arg).co_name)
    cy_one_arg
    >>> namesof(one_arg)
    ()
    >>> codeof(cy_one_arg).co_names
    ()
    >>> varnamesof(one_arg)
    ('a', 'l', 'm')
    >>> codeof(cy_one_arg).co_varnames
    ('a', 'l', 'm')

    >>> codeof(two_args).co_argcount
    2
    >>> codeof(cy_two_args).co_argcount
    2
    >>> namesof(two_args)
    ()
    >>> codeof(cy_two_args).co_names
    ()
    >>> varnamesof(two_args)
    ('x', 'b', 'l', 'm')
    >>> codeof(cy_two_args).co_varnames
    ('x', 'b', 'l', 'm')

    >>> codeof(default_args).co_argcount
    2
    >>> codeof(cy_default_args).co_argcount
    2
    >>> namesof(default_args)
    ()
    >>> codeof(cy_default_args).co_names
    ()
    >>> varnamesof(default_args)
    ('x', 'b', 'l', 'm')
    >>> codeof(cy_default_args).co_varnames
    ('x', 'b', 'l', 'm')
    """


def test_annotations(a: "test", b: "other" = 2, c: 123 = 4) -> "ret":
    """
    >>> isinstance(test_annotations.__annotations__, dict)
    True
    >>> sorted(test_annotations.__annotations__.items())
    [('a', 'test'), ('b', 'other'), ('c', 123), ('return', 'ret')]

    >>> def func_b(): return 42
    >>> def func_c(): return 99
    >>> inner = test_annotations(1, func_b, func_c)
    >>> sorted(inner.__annotations__.items())
    [('return', 99), ('x', 'banana'), ('y', 42)]

    >>> inner.__annotations__ = {234: 567}
    >>> inner.__annotations__
    {234: 567}
    >>> inner.__annotations__ = None
    >>> inner.__annotations__
    {}
    >>> inner.__annotations__ = 321
    Traceback (most recent call last):
    TypeError: __annotations__ must be set to a dict object
    >>> inner.__annotations__
    {}

    >>> inner = test_annotations(1, func_b, func_c)
    >>> sorted(inner.__annotations__.items())
    [('return', 99), ('x', 'banana'), ('y', 42)]
    >>> inner.__annotations__['abc'] = 66
    >>> sorted(inner.__annotations__.items())
    [('abc', 66), ('return', 99), ('x', 'banana'), ('y', 42)]

    >>> inner = test_annotations(1, func_b, func_c)
    >>> sorted(inner.__annotations__.items())
    [('return', 99), ('x', 'banana'), ('y', 42)]
    """
    def inner(x: "banana", y: b()) -> c():
        return x,y
    return inner


def add_one(func):
    "Decorator to add 1 to the last argument of the function call"
    def inner(*args):
        args = args[:-1] + (args[-1] + 1,)
        return func(*args)
    return inner

@add_one
def test_decorated(x):
    """
    >>> test_decorated(0)
    1
    """
    return x

@add_one
@add_one
def test_decorated2(x):
    """
    >>> test_decorated2(0)
    2
    """
    return x


cdef class TestDecoratedMethods:
    @add_one
    def test(self, x):
        """
        >>> TestDecoratedMethods().test(0)
        1
        """
        return x

    @add_one
    @add_one
    def test2(self, x):
        """
        >>> TestDecoratedMethods().test2(0)
        2
        """
        return x
Cython-0.26.1/tests/run/reversed_iteration.pyx0000664000175000017500000005316012542002467022244 0ustar  stefanstefan00000000000000# mode: run
# tag: forin, builtins, reversed, enumerate

cimport cython

import sys
IS_PY3 = sys.version_info[0] >= 3
IS_32BIT_PY2 = not IS_PY3 and sys.maxint < 2**32


def unlongify(v):
    # on 32bit Py2.x platforms, 'unsigned int' coerces to a Python long => fix doctest output here.
    s = repr(v)
    if IS_32BIT_PY2:
        assert s.count('L') == s.count(',') + 1, s
        s = s.replace('L', '')
    return s


def _reversed(it):
    return list(it)[::-1]

@cython.test_assert_path_exists('//ForInStatNode',
                                '//ForInStatNode/IteratorNode',
                                '//ForInStatNode/IteratorNode[@reversed = True]',
                                )
@cython.test_fail_if_path_exists('//ForInStatNode/IteratorNode//SimpleCallNode')
def reversed_list(list l):
    """
    >>> [ i for i in _reversed([1,2,3,4]) ]
    [4, 3, 2, 1]
    >>> reversed_list([1,2,3,4])
    [4, 3, 2, 1]
    >>> reversed_list([])
    []
    >>> reversed_list(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not iterable
    """
    result = []
    for item in reversed(l):
        result.append(item)
    return result

@cython.test_assert_path_exists('//ForInStatNode',
                                '//ForInStatNode/IteratorNode',
                                '//ForInStatNode/IteratorNode[@reversed = True]',
                                )
@cython.test_fail_if_path_exists('//ForInStatNode/IteratorNode//SimpleCallNode')
def reversed_tuple(tuple t):
    """
    >>> [ i for i in _reversed((1,2,3,4)) ]
    [4, 3, 2, 1]
    >>> reversed_tuple((1,2,3,4))
    [4, 3, 2, 1]
    >>> reversed_tuple(())
    []
    >>> reversed_tuple(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not iterable
    """
    result = []
    for item in reversed(t):
        result.append(item)
    return result

@cython.test_assert_path_exists('//ForInStatNode',
                                '//ForInStatNode/IteratorNode',
                                '//ForInStatNode/IteratorNode[@reversed = True]',
                                )
@cython.test_fail_if_path_exists('//ForInStatNode/IteratorNode//SimpleCallNode')
def enumerate_reversed_list(list l):
    """
    >>> list(enumerate(_reversed([1,2,3])))
    [(0, 3), (1, 2), (2, 1)]
    >>> enumerate_reversed_list([1,2,3])
    [(0, 3), (1, 2), (2, 1)]
    >>> enumerate_reversed_list([])
    []
    >>> enumerate_reversed_list(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not iterable
    """
    result = []
    cdef Py_ssize_t i
    for i, item in enumerate(reversed(l)):
        result.append((i, item))
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_range(int N):
    """
    >>> [ i for i in _reversed(range(5)) ]
    [4, 3, 2, 1, 0]
    >>> reversed_range(5)
    ([4, 3, 2, 1, 0], 0)

    >>> [ i for i in _reversed(range(0)) ]
    []
    >>> reversed_range(0)
    ([], 99)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(N)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_range_step_pos(int a, int b):
    """
    >>> [ i for i in _reversed(range(0, 5, 1)) ]
    [4, 3, 2, 1, 0]
    >>> reversed_range_step_pos(0, 5)
    ([4, 3, 2, 1, 0], 0)

    >>> [ i for i in _reversed(range(5, 0, 1)) ]
    []
    >>> reversed_range_step_pos(5, 0)
    ([], 99)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(a, b, 1)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_range_step_neg(int a, int b):
    """
    >>> [ i for i in _reversed(range(5, -1, -1)) ]
    [0, 1, 2, 3, 4, 5]
    >>> reversed_range_step_neg(5, -1)
    ([0, 1, 2, 3, 4, 5], 5)

    >>> [ i for i in _reversed(range(0, 5, -1)) ]
    []
    >>> reversed_range_step_neg(0, 5)
    ([], 99)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(a, b, -1)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_step3(int a, int b):
    """
    >>> [ i for i in _reversed(range(-5, 0, 3)) ]
    [-2, -5]
    >>> reversed_range_step3(-5, 0)
    ([-2, -5], -5)

    >>> [ i for i in _reversed(range(0, 5, 3)) ]
    [3, 0]
    >>> reversed_range_step3(0, 5)
    ([3, 0], 0)

    >>> [ i for i in _reversed(range(5, 0, 3)) ]
    []
    >>> reversed_range_step3(5, 0)
    ([], 99)

    >>> [ i for i in _reversed(range(1, 1, 3)) ]
    []
    >>> reversed_range_step3(1, 1)
    ([], 99)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(a, b, 3)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_step3_expr(int a, int b):
    """
    >>> [ i for i in _reversed(range(0, 5, 3)) ]
    [3, 0]
    >>> reversed_range_step3_expr(0, 5)
    ([3, 0], 0)
    """
    cdef int i = 99, c = 100
    result = []
    for i in reversed(range(c-c + a + c-c, c-c + b + c-c, 3)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_step3_neg(int a, int b):
    """
    >>> [ i for i in _reversed(range(0, -5, -3)) ]
    [-3, 0]
    >>> reversed_range_step3_neg(0, -5)
    ([-3, 0], 0)

    >>> [ i for i in _reversed(range(5, 0, -3)) ]
    [2, 5]
    >>> reversed_range_step3_neg(5, 0)
    ([2, 5], 5)

    >>> [ i for i in _reversed(range(0, 5, -3)) ]
    []
    >>> reversed_range_step3_neg(0, 5)
    ([], 99)

    >>> [ i for i in _reversed(range(1, 1, -3)) ]
    []
    >>> reversed_range_step3_neg(1, 1)
    ([], 99)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(a, b, -3)):
        result.append(i)
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_step3_neg_expr(int a, int b):
    """
    >>> [ i for i in _reversed(range(5, 0, -3)) ]
    [2, 5]
    >>> reversed_range_step3_neg_expr(5, 0)
    ([2, 5], 5)
    """
    cdef int i = 99, c = 100
    result = []
    for i in reversed(range(c-c + a + c-c, c-c + b + c-c, -3)):
        result.append(i)
    return result, i

def reversed_range_step3_py_args(a, b):
    """
    >>> [ i for i in _reversed(range(-5, 0, 3)) ]
    [-2, -5]
    >>> reversed_range_step3_py_args(-5, 0)
    ([-2, -5], -5)

    >>> [ i for i in _reversed(range(0, 5, 3)) ]
    [3, 0]
    >>> reversed_range_step3_py_args(0, 5)
    ([3, 0], 0)

    >>> [ i for i in _reversed(range(5, 0, 3)) ]
    []
    >>> reversed_range_step3_py_args(5, 0)
    ([], 99)

    >>> [ i for i in _reversed(range(1, 1, 3)) ]
    []
    >>> reversed_range_step3_py_args(1, 1)
    ([], 99)

    >>> reversed_range_step3_py_args(set(), 1) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...integer...

    >>> reversed_range_step3_py_args(1, set()) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...integer...
    """
    i = 99
    result = []
    for i in reversed(range(a, b, 3)):
        result.append(i)
    return result, i

def reversed_range_step3_neg_py_args(a, b):
    """
    >>> [ i for i in _reversed(range(0, -5, -3)) ]
    [-3, 0]
    >>> reversed_range_step3_neg_py_args(0, -5)
    ([-3, 0], 0)

    >>> [ i for i in _reversed(range(5, 0, -3)) ]
    [2, 5]
    >>> reversed_range_step3_neg_py_args(5, 0)
    ([2, 5], 5)

    >>> [ i for i in _reversed(range(0, 5, -3)) ]
    []
    >>> reversed_range_step3_neg_py_args(0, 5)
    ([], 99)

    >>> [ i for i in _reversed(range(1, 1, -3)) ]
    []
    >>> reversed_range_step3_neg_py_args(1, 1)
    ([], 99)

    >>> reversed_range_step3_neg_py_args(set(), 1) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...integer...

    >>> reversed_range_step3_neg_py_args(1, set()) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...integer...
    """
    i = 99
    result = []
    for i in reversed(range(a, b, -3)):
        result.append(i)
    return result, i

def reversed_range_step3_py_obj_left(a, int b):
    """
    >>> reversed_range_step3_py_obj_left(set(), 0)
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef long i
    result = []
    for i in reversed(range(a, b, 3)):
        result.append(i)

def reversed_range_step3_py_obj_right(int a, b):
    """
    >>> reversed_range_step3_py_obj_right(0, set())
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef long i
    result = []
    for i in reversed(range(a, b, 3)):
        result.append(i)

def reversed_range_step3_neg_py_obj_left(a, int b):
    """
    >>> reversed_range_step3_neg_py_obj_left(set(), 0)
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef long i
    result = []
    for i in reversed(range(a, b, -3)):
        result.append(i)

def reversed_range_step3_neg_py_obj_right(int a, b):
    """
    >>> reversed_range_step3_py_obj_right(0, set())
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef long i
    result = []
    for i in reversed(range(a, b, -3)):
        result.append(i)

@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_constant():
    """
    >>> [ i for i in _reversed(range(-12, -2, 4)) ]
    [-4, -8, -12]
    >>> reversed_range_constant()
    ([-4, -8, -12], -12)
    """
    cdef int i = 99
    result = []
    for i in reversed(range(1, 1, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 1, 4))), result
    assert i == 99

    for i in reversed(range(1, 1, 1)):
        result.append(i)
    assert result == list(reversed(range(1, 1, 1))), result

    result = []
    for i in reversed(range(0, 1, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 1, 4))), result

    result = []
    for i in reversed(range(0, 1, 1)):
        result.append(i)
    assert result == list(reversed(range(0, 1, 1))), result

    result = []
    for i in reversed(range(1, 8, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 8, 4))), result

    result = []
    for i in reversed(range(1, 8, 1)):
        result.append(i)
    assert result == list(reversed(range(1, 8, 1))), result

    result = []
    for i in reversed(range(1, 9, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 9, 4))), result

    result = []
    for i in reversed(range(1, 10, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 10, 4))), result

    result = []
    for i in reversed(range(1, 11, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 11, 4))), result

    result = []
    for i in reversed(range(1, 12, 4)):
        result.append(i)
    assert result == list(reversed(range(1, 12, 4))), result

    result = []
    for i in reversed(range(0, 8, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 8, 4))), result

    result = []
    for i in reversed(range(0, 9, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 9, 4))), result

    result = []
    for i in reversed(range(0, 10, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 10, 4))), result

    result = []
    for i in reversed(range(0, 11, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 11, 4))), result

    result = []
    for i in reversed(range(0, 12, 4)):
        result.append(i)
    assert result == list(reversed(range(0, 12, 4))), result

    i = 99
    result = []
    for i in reversed(range(-12, -2, 4)):
        result.append(i)
    assert result == list(reversed(range(-12, -2, 4))), result
    return result, i

@cython.test_assert_path_exists('//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def reversed_range_constant_neg():
    """
    >>> [ i for i in _reversed(range(-2, -12, -4)) ]
    [-10, -6, -2]
    >>> reversed_range_constant_neg()
    """
    cdef int i = 99
    result = []
    for i in reversed(range(1, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(1, 1, -4))), result
    assert i == 99

    result = []
    for i in reversed(range(1, 1, -1)):
        result.append(i)
    assert result == list(reversed(range(1, 1, -1))), result

    result = []
    for i in reversed(range(1, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(1, 0, -4))), result

    result = []
    for i in reversed(range(1, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(1, 0, -1))), result

    result = []
    for i in reversed(range(8, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(8, 1, -4))), result

    result = []
    for i in reversed(range(8, 1, -1)):
        result.append(i)
    assert result == list(reversed(range(8, 1, -1))), result

    result = []
    for i in reversed(range(9, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(9, 1, -4))), result

    result = []
    for i in reversed(range(9, 1, -1)):
        result.append(i)
    assert result == list(reversed(range(9, 1, -1))), result

    result = []
    for i in reversed(range(10, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(10, 1, -4))), result

    result = []
    for i in reversed(range(11, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(11, 1, -4))), result

    result = []
    for i in reversed(range(11, 1, -1)):
        result.append(i)
    assert result == list(reversed(range(11, 1, -1))), result

    result = []
    for i in reversed(range(12, 1, -4)):
        result.append(i)
    assert result == list(reversed(range(12, 1, -4))), result

    result = []
    for i in reversed(range(12, 1, -1)):
        result.append(i)
    assert result == list(reversed(range(12, 1, -1))), result

    result = []
    for i in reversed(range(8, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(8, 0, -4))), result

    result = []
    for i in reversed(range(8, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(8, 0, -1))), result

    result = []
    for i in reversed(range(9, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(9, 0, -4))), result

    result = []
    for i in reversed(range(9, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(9, 0, -1))), result

    result = []
    for i in reversed(range(10, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(10, 0, -4))), result

    result = []
    for i in reversed(range(10, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(10, 0, -1))), result

    result = []
    for i in reversed(range(11, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(11, 0, -4))), result

    result = []
    for i in reversed(range(11, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(11, 0, -1))), result

    result = []
    for i in reversed(range(12, 0, -4)):
        result.append(i)
    assert result == list(reversed(range(12, 0, -4))), result

    result = []
    for i in reversed(range(12, 0, -1)):
        result.append(i)
    assert result == list(reversed(range(12, 0, -1))), result

    result = []
    for i in reversed(range(-2, -12, -4)):
        result.append(i)
    assert result == list(reversed(range(-2, -12, -4))), result

    result = []
    for i in reversed(range(-2, -12, -1)):
        result.append(i)
    assert result == list(reversed(range(-2, -12, -1))), result

unicode_string = u"abcDEF"

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string)))
    FEDcba
    >>> print(''.join(reversed_unicode(unicode_string)))
    FEDcba
    """
    result = []
    for c in reversed(u):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[1:-2])))
    Dcb
    >>> print(''.join(reversed_unicode_slice(unicode_string)))
    Dcb
    """
    result = []
    for c in reversed(u[1:-2]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_neg_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[-2:1:-1])))
    cDE
    >>> print(''.join(reversed_unicode_slice_neg_step(unicode_string)))
    cDE
    """
    result = []
    for c in reversed(u[-2:1:-1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_pos_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[1:-2:1])))
    Dcb
    >>> print(''.join(reversed_unicode_slice_pos_step(unicode_string)))
    Dcb
    """
    result = []
    for c in reversed(u[1:-2:1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_start_pos_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[2::1])))
    FEDc
    >>> print(''.join(reversed_unicode_slice_start_pos_step(unicode_string)))
    FEDc
    """
    result = []
    for c in reversed(u[2::1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_start_neg_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[3::-1])))
    abcD
    >>> print(''.join(reversed_unicode_slice_start_neg_step(unicode_string)))
    abcD
    """
    result = []
    for c in reversed(u[3::-1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_end_pos_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[:-2:1])))
    Dcba
    >>> print(''.join(reversed_unicode_slice_end_pos_step(unicode_string)))
    Dcba
    """
    result = []
    for c in reversed(u[:-2:1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_end_neg_step(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[:-3:-1])))
    EF
    >>> print(''.join(reversed_unicode_slice_end_neg_step(unicode_string)))
    EF
    """
    result = []
    for c in reversed(u[:-3:-1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_neg_step_only(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[::-1])))
    abcDEF
    >>> print(''.join(reversed_unicode_slice_neg_step_only(unicode_string)))
    abcDEF
    """
    result = []
    for c in reversed(u[::-1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unicode_slice_pos_step_only(unicode u):
    """
    >>> print(''.join(_reversed(unicode_string[::1])))
    FEDcba
    >>> print(''.join(reversed_unicode_slice_pos_step_only(unicode_string)))
    FEDcba
    """
    result = []
    for c in reversed(u[::1]):
        result.append(c)
    return result

bytes_string = b'abcDEF'
join_bytes = b''.join

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_bytes(bytes s):
    """
    >>> b = IS_PY3 and bytes_string or map(ord, bytes_string)
    >>> list(_reversed(b))
    [70, 69, 68, 99, 98, 97]
    >>> reversed_bytes(bytes_string)
    [70, 69, 68, 99, 98, 97]
    """
    cdef char c
    result = []
    for c in reversed(s):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_bytes_slice(bytes s):
    """
    >>> b = IS_PY3 and bytes_string or map(ord, bytes_string)
    >>> list(_reversed(b[1:-2]))
    [68, 99, 98]
    >>> reversed_bytes_slice(bytes_string)
    [68, 99, 98]
    """
    cdef char c
    result = []
    for c in reversed(s[1:-2]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_bytes_slice_step(bytes s):
    """
    >>> b = IS_PY3 and bytes_string or map(ord, bytes_string)
    >>> list(_reversed(b[-2:1:-1]))
    [99, 68, 69]
    >>> reversed_bytes_slice_step(bytes_string)
    [99, 68, 69]
    """
    cdef char c
    result = []
    for c in reversed(s[-2:1:-1]):
        result.append(c)
    return result

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_bytes_slice_step_only(bytes s):
    """
    >>> b = IS_PY3 and bytes_string or map(ord, bytes_string)
    >>> list(_reversed(b[::-1]))
    [97, 98, 99, 68, 69, 70]
    >>> reversed_bytes_slice_step_only(bytes_string)
    [97, 98, 99, 68, 69, 70]
    """
    cdef char c
    result = []
    for c in reversed(s[::-1]):
        result.append(c)
    return result


@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unsigned(int a, int b):
    """
    >>> unlongify(reversed_unsigned(0, 5))
    '[4, 3, 2, 1, 0]'
    >>> unlongify(reversed_unsigned(1, 5))
    '[4, 3, 2, 1]'
    >>> reversed_unsigned(1, 1)
    []
    """
    cdef unsigned int i
    return [i for i in reversed(range(a, b))]

@cython.test_assert_path_exists('//ForFromStatNode')
def reversed_unsigned_by_3(int a, int b):
    """
    >>> unlongify(reversed_unsigned_by_3(0, 5))
    '[3, 0]'
    >>> unlongify(reversed_unsigned_by_3(0, 7))
    '[6, 3, 0]'
    """
    cdef unsigned int i
    return [i for i in reversed(range(a, b, 3))]

@cython.test_assert_path_exists('//ForFromStatNode')
def range_unsigned_by_neg_3(int a, int b):
    """
    >>> unlongify(range_unsigned_by_neg_3(-1, 6))
    '[6, 3, 0]'
    >>> unlongify(range_unsigned_by_neg_3(0, 7))
    '[7, 4, 1]'
    """
    cdef unsigned int i
    return [i for i in range(b, a, -3)]
Cython-0.26.1/tests/run/enumboolctx.pyx0000664000175000017500000000124612542002467020704 0ustar  stefanstefan00000000000000cdef public enum Truth:
   FALSE=0
   TRUE=1

def enum_boolctx(Truth arg):
    """
    >>> enum_boolctx(FALSE)
    False
    >>> enum_boolctx(TRUE)
    True
    """
    if arg:
        return True
    else:
        return False

cdef extern from *:
    enum: FALSE_VALUE "(0)"
    enum: TRUE_VALUE "(1)"

def extern_enum_false():
    """
    >>> extern_enum_false()
    """
    if FALSE_VALUE:
        raise ValueError

def extern_enum_true():
    """
    >>> extern_enum_true()
    """
    if not TRUE_VALUE:
        raise ValueError

def extern_enum_false_true():
    """
    >>> extern_enum_false_true()
    """
    if not TRUE_VALUE or FALSE_VALUE:
        raise ValueError
Cython-0.26.1/tests/run/no_gc.pyx0000664000175000017500000000206513023021033017413 0ustar  stefanstefan00000000000000"""
Check that the @cython.no_gc decorator disables generation of the
tp_clear and tp_traverse slots, that is, disables cycle collection.
"""

cimport cython
from cpython.ref cimport PyObject, Py_TYPE

# Force a non-GC'd PyTypeObject when freedom from reference cycles is
# guaranteed by the user but cannot be proven by the compiler.

cdef extern from *:
    ctypedef struct PyTypeObject:
        void (*tp_clear)(object)
        void (*tp_traverse)(object)


def is_tp_clear_null(obj):
    return (<PyTypeObject*>Py_TYPE(obj)).tp_clear is NULL

def is_tp_traverse_null(obj):
    return (<PyTypeObject*>Py_TYPE(obj)).tp_traverse is NULL


@cython.no_gc
cdef class DisableGC:
    """
    An extension type declared with @cython.no_gc: the tp_clear and tp_traverse
    slots should not be generated for it, so both are expected to be NULL.

    >>> uut = DisableGC()
    >>> is_tp_clear_null(uut)
    True
    >>> is_tp_traverse_null(uut)
    True
    """

    cdef public object requires_cleanup

    def __cinit__(self):
        self.requires_cleanup = (
                "Tuples to strings don't really need cleanup, cannot take part of cycles",)

Cython-0.26.1/tests/run/setcomp.pyx0000664000175000017500000000235712574327400020025 0ustar  stefanstefan00000000000000
cimport cython

# Py2.3 doesn't have the set type, but Cython does :)
_set = set

def setcomp():
    """
    >>> type(setcomp()) is not list
    True
    >>> type(setcomp()) is _set
    True
    >>> sorted(setcomp())
    [0, 4, 8]
    """
    x = 'abc'
    result = { x*2
             for x in range(5)
             if x % 2 == 0 }
    assert x == 'abc' # do not leak
    return result

@cython.test_assert_path_exists(
    "//InlinedGeneratorExpressionNode",
    "//ComprehensionAppendNode")
def genexp_set():
    """
    >>> type(genexp_set()) is _set
    True
    >>> sorted(genexp_set())
    [0, 4, 8]
    """
    x = 'abc'
    result = set( x*2
                  for x in range(5)
                  if x % 2 == 0 )
    assert x == 'abc' # do not leak
    return result

cdef class A:
    def __repr__(self): return u"A"
    def __richcmp__(one, other, int op): return one is other
    def __hash__(self): return id(self) % 65536

def typed():
    """
    >>> list(typed())
    [A, A, A]
    """
    cdef A obj
    return {obj for obj in {A(), A(), A()}}

def iterdict():
    """
    >>> sorted(iterdict())
    [1, 2, 3]
    """
    cdef dict d = dict(a=1,b=2,c=3)
    return {d[key] for key in d}

def sorted(it):
    l = list(it)
    l.sort()
    return l
Cython-0.26.1/tests/run/builtin_callable.pyx0000664000175000017500000000071212542002467021627 0ustar  stefanstefan00000000000000# mode: run
# tag: builtin, callable

cimport cython

@cython.test_assert_path_exists("//SimpleCallNode[@type.is_pyobject = False]")
def test_callable(x):
    """
    >>> test_callable(None)
    False
    >>> test_callable('ABC')
    False

    >>> class C: pass
    >>> test_callable(C)
    True
    >>> test_callable(C())
    False

    >>> test_callable(int)
    True
    >>> test_callable(test_callable)
    True
    """
    b = callable(x)
    return b
Cython-0.26.1/tests/run/generators_py.py0000664000175000017500000001561612542002467021044 0ustar  stefanstefan00000000000000# mode: run
# tag: generators

import cython


def very_simple():
    """
    >>> x = very_simple()
    >>> next(x)
    1
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> x = very_simple()
    >>> x.send(1)
    Traceback (most recent call last):
    TypeError: can't send non-None value to a just-started generator
    """
    yield 1


def simple():
    """
    >>> x = simple()
    >>> list(x)
    [1, 2, 3]
    """
    yield 1
    yield 2
    yield 3

def simple_seq(seq):
    """
    >>> x = simple_seq("abc")
    >>> list(x)
    ['a', 'b', 'c']
    """
    for i in seq:
        yield i

def simple_send():
    """
    >>> x = simple_send()
    >>> next(x)
    >>> x.send(1)
    1
    >>> x.send(2)
    2
    >>> x.send(3)
    3
    """
    i = None
    while True:
        i = yield i

def raising():
    """
    >>> x = raising()
    >>> next(x)
    Traceback (most recent call last):
    KeyError: 'foo'
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    yield {}['foo']

def with_outer(*args):
    """
    >>> x = with_outer(1, 2, 3)
    >>> list(x())
    [1, 2, 3]
    """
    def generator():
        for i in args:
            yield i
    return generator

def with_outer_raising(*args):
    """
    >>> x = with_outer_raising(1, 2, 3)
    >>> list(x())
    [1, 2, 3]
    """
    def generator():
        for i in args:
            yield i
        raise StopIteration
    return generator

def test_close():
    """
    >>> x = test_close()
    >>> x.close()
    >>> x = test_close()
    >>> next(x)
    >>> x.close()
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    while True:
        yield

def test_ignore_close():
    """
    >>> x = test_ignore_close()
    >>> x.close()
    >>> x = test_ignore_close()
    >>> next(x)
    >>> x.close()
    Traceback (most recent call last):
    RuntimeError: generator ignored GeneratorExit
    """
    try:
        yield
    except GeneratorExit:
        yield

def check_throw():
    """
    >>> x = check_throw()
    >>> x.throw(ValueError)
    Traceback (most recent call last):
    ValueError
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> x = check_throw()
    >>> next(x)
    >>> x.throw(ValueError)
    >>> next(x)
    >>> x.throw(IndexError, "oops")
    Traceback (most recent call last):
    IndexError: oops
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    while True:
        try:
            yield
        except ValueError:
            pass

def check_yield_in_except():
    """
    >>> import sys
    >>> orig_exc = sys.exc_info()[0]
    >>> g = check_yield_in_except()
    >>> next(g)
    >>> next(g)
    >>> orig_exc is sys.exc_info()[0] or sys.exc_info()[0]
    True
    """
    try:
        yield
        raise ValueError
    except ValueError:
        yield

def yield_in_except_throw_exc_type():
    """
    >>> import sys
    >>> g = yield_in_except_throw_exc_type()
    >>> next(g)
    >>> g.throw(TypeError)
    Traceback (most recent call last):
    TypeError
    >>> next(g)
    Traceback (most recent call last):
    StopIteration
    """
    try:
        raise ValueError
    except ValueError:
        yield

def yield_in_except_throw_instance():
    """
    >>> import sys
    >>> g = yield_in_except_throw_instance()
    >>> next(g)
    >>> g.throw(TypeError())
    Traceback (most recent call last):
    TypeError
    >>> next(g)
    Traceback (most recent call last):
    StopIteration
    """
    try:
        raise ValueError
    except ValueError:
        yield

def test_swap_assignment():
    """
    >>> gen = test_swap_assignment()
    >>> next(gen)
    (5, 10)
    >>> next(gen)
    (10, 5)
    """
    x,y = 5,10
    yield (x,y)
    x,y = y,x   # no ref-counting here
    yield (x,y)


class Foo(object):
    """
    >>> obj = Foo()
    >>> list(obj.simple(1, 2, 3))
    [1, 2, 3]
    """
    def simple(self, *args):
        for i in args:
            yield i

def test_nested(a, b, c):
    """
    >>> obj = test_nested(1, 2, 3)
    >>> [i() for i in obj]
    [1, 2, 3, 4]
    """
    def one():
        return a
    def two():
        return b
    def three():
        return c
    def new_closure(a, b):
        def sum():
            return a + b
        return sum
    yield one
    yield two
    yield three
    yield new_closure(a, c)


def tolist(func):
    def wrapper(*args, **kwargs):
        return list(func(*args, **kwargs))
    return wrapper

@tolist
def test_decorated(*args):
    """
    >>> test_decorated(1, 2, 3)
    [1, 2, 3]
    """
    for i in args:
        yield i

def test_return(a):
    """
    >>> d = dict()
    >>> obj = test_return(d)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    >>> d['i_was_here']
    True
    """
    yield 1
    a['i_was_here'] = True
    return

def test_copied_yield(foo):
    """
    >>> class Manager(object):
    ...    def __enter__(self):
    ...        return self
    ...    def __exit__(self, type, value, tb):
    ...        pass
    >>> list(test_copied_yield(Manager()))
    [1]
    """
    with foo:
        yield 1

def test_nested_yield():
    """
    >>> obj = test_nested_yield()
    >>> next(obj)
    1
    >>> obj.send(2)
    2
    >>> obj.send(3)
    3
    >>> obj.send(4)
    Traceback (most recent call last):
    StopIteration
    """
    yield (yield (yield 1))

def test_sum_of_yields(n):
    """
    >>> g = test_sum_of_yields(3)
    >>> next(g)
    (0, 0)
    >>> g.send(1)
    (0, 1)
    >>> g.send(1)
    (1, 2)
    """
    x = 0
    x += yield (0, x)
    x += yield (0, x)
    yield (1, x)

def test_nested_gen(n):
    """
    >>> [list(a) for a in test_nested_gen(5)]
    [[], [0], [0, 1], [0, 1, 2], [0, 1, 2, 3]]
    """
    for a in range(n):
        yield (b for b in range(a))

def test_lambda(n):
    """
    >>> [i() for i in test_lambda(3)]
    [0, 1, 2]
    """
    for i in range(n):
        yield lambda : i

def test_generator_cleanup():
    """
    >>> g = test_generator_cleanup()
    >>> del g
    >>> g = test_generator_cleanup()
    >>> next(g)
    1
    >>> del g
    cleanup
    """
    try:
        yield 1
    finally:
        print('cleanup')

def test_del_in_generator():
    """
    >>> [ s for s in test_del_in_generator() ]
    ['abcabcabc', 'abcabcabc']
    """
    x = len('abc') * 'abc'
    a = x
    yield x
    del x
    yield a
    del a

@cython.test_fail_if_path_exists("//IfStatNode", "//PrintStatNode")
def test_yield_in_const_conditional_false():
    """
    >>> list(test_yield_in_const_conditional_false())
    []
    """
    if False:
        print((yield 1))

@cython.test_fail_if_path_exists("//IfStatNode")
@cython.test_assert_path_exists("//PrintStatNode")
def test_yield_in_const_conditional_true():
    """
    >>> list(test_yield_in_const_conditional_true())
    None
    [1]
    """
    if True:
        print((yield 1))
Cython-0.26.1/tests/run/cpp_class_redef.pxd0000664000175000017500000000015312542002467021430 0ustar  stefanstefan00000000000000# tag: cpp

cdef extern cppclass Foo:
    int _foo
    void set_foo(int foo) nogil
    int get_foo() nogil
Cython-0.26.1/tests/run/nononetypecheck.pyx0000664000175000017500000000014612542002467021537 0ustar  stefanstefan00000000000000cdef class Spam:
    pass

cdef f(Spam s):
    pass

def g():
    """
    >>> g()
    """
    f(None)
Cython-0.26.1/tests/run/unop_extras.pyx0000664000175000017500000000115112542002467020707 0ustar  stefanstefan00000000000000cimport cython.operator
from cython.operator cimport dereference
from cython.operator cimport dereference as deref

def test_deref(int x):
    """
    >>> test_deref(3)
    (3, 3, 3)
    >>> test_deref(5)
    (5, 5, 5)
    """
    cdef int* x_ptr = &x
    return cython.operator.dereference(x_ptr), dereference(x_ptr), deref(x_ptr)

def increment_decrement(int x):
    """
    >>> increment_decrement(10)
    11 11 12
    11 11 10
    10
    """
    print cython.operator.preincrement(x), cython.operator.postincrement(x), x
    print cython.operator.predecrement(x), cython.operator.postdecrement(x), x
    return x
Cython-0.26.1/tests/run/double_dealloc_T796.pyx0000664000175000017500000000302412542002467022027 0ustar  stefanstefan00000000000000"""
Initial cleanup and 'calibration':
>>> _ = gc.collect()
>>> old_unreachable = gc.collect()

Test:
>>> x = SimpleGarbage()
SimpleGarbage(1) __cinit__
>>> del x
SimpleGarbage(1) __dealloc__
Collector.__dealloc__

Make sure nothing changed in the environment:
>>> new_unreachable = get_new_unreachable()
>>> new_unreachable == old_unreachable or (old_unreachable, new_unreachable)
True
"""

import gc

cdef Py_ssize_t new_unreachable = 0

def get_new_unreachable():
    return new_unreachable

cdef int counter = 0
cdef int next_counter():
    global counter
    counter += 1
    return counter

cdef class Collector:
    # Indirectly trigger garbage collection in SimpleGarbage deallocation.
    # The __dealloc__ method of SimpleGarbage won't trigger the bug as the
    # refcount is artificially inflated for the duration of that function.
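    # Instead, the collection is triggered from this separate Collector object,
    # whose __dealloc__ runs while the owning SimpleGarbage instance is still
    # being torn down.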
    def __dealloc__(self):
        print "Collector.__dealloc__"
        global new_unreachable
        new_unreachable = gc.collect()

cdef class SimpleGarbage:
    cdef Collector c  # to participate in garbage collection
    cdef int index
    cdef bint deallocated
    def __cinit__(self):
        self.index = next_counter()
        self.c = Collector()
        print self, "__cinit__"
    def __dealloc__(self):
        print self, "__dealloc__"
        if self.deallocated:
            print "Double dealloc!"
        self.deallocated = True
        gc.collect()
    def __str__(self):
        return "SimpleGarbage(%s)" % self.index
    def __repr__(self):
        return "SimpleGarbage(%s)" % self.index
Cython-0.26.1/tests/run/builtin_subtype_methods_cy3.pyx0000664000175000017500000000214112542002467024062 0ustar  stefanstefan00000000000000# cython: language_level=3
# mode: run
# ticket: 653


class DictPySubtype(dict):
    def keys(self):
        """
        >>> d = DictPySubtype(one=42, two=17, three=0)
        >>> for v in sorted(d.keys()):
        ...     print(v)
        three
        two
        """
        for key in dict.keys(self):
            if key != 'one':
                yield key

    def values(self):
        """
        >>> d = DictPySubtype(one=42, two=17, three=0)
        >>> for v in sorted(d.values()):
        ...     print(v)
        17
        42
        """
        for value in dict.values(self):
            if value:
                yield value

    def items(self):
        """
        >>> d = DictPySubtype(one=42, two=17, three=0)
        >>> for v in sorted(d.items()):
        ...     print(v)
        one
        two
        """
        for key, value in dict.items(self):
            if value:
                yield key


class ListPySubtype(list):
    """
    >>> lst = ListPySubtype([1,2,3])
    >>> lst.append(4)
    >>> lst
    [1, 2, 3, 5]
    """
    def append(self, value):
        list.append(self, value+1)
Cython-0.26.1/tests/run/__getattribute__.pyx0000664000175000017500000000423213150045407021637 0ustar  stefanstefan00000000000000# mode: run

# __getattribute__ and __getattr__ special methods for a single class.
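# __getattribute__ intercepts every attribute lookup, whereas __getattr__ only
# runs when the normal lookup fails; the 'called' counters below record how
# often each hook fires.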


cdef class just_getattribute:
    """
    >>> a = just_getattribute()
    >>> a.called
    1
    >>> a.called
    2
    >>> a.bar
    'bar'
    >>> a.called
    4
    >>> a.invalid
    Traceback (most recent call last):
    AttributeError
    >>> a.called
    6
    """
    cdef readonly int called
    def __getattribute__(self,n):
        self.called += 1
        if n == 'bar':
            return n
        elif n == 'called':
            return self.called
        else:
            raise AttributeError


cdef class just_getattr:
    """
    >>> a = just_getattr()
    >>> a.called
    0
    >>> a.called
    0
    >>> a.foo
    10
    >>> a.called
    0
    >>> a.bar
    'bar'
    >>> a.called
    1
    >>> a.invalid
    Traceback (most recent call last):
    AttributeError
    >>> a.called
    2
    """
    cdef readonly int called
    cdef readonly int foo
    def __init__(self):
        self.foo = 10
    def __getattr__(self,n):
        self.called += 1
        if n == 'bar':
            return n
        else:
            raise AttributeError


cdef class both:
    """
    >>> a = both()
    >>> (a.called_getattr, a.called_getattribute)
    (0, 2)
    >>> a.foo
    10
    >>> (a.called_getattr, a.called_getattribute)
    (0, 5)
    >>> a.bar
    'bar'
    >>> (a.called_getattr, a.called_getattribute)
    (1, 8)
    >>> a.invalid
    Traceback (most recent call last):
    AttributeError
    >>> (a.called_getattr, a.called_getattribute)
    (2, 11)
    """
    cdef readonly int called_getattribute
    cdef readonly int called_getattr
    cdef readonly int foo
    def __init__(self):
        self.foo = 10

    def __getattribute__(self,n):
        self.called_getattribute += 1
        if n == 'foo':
            return self.foo
        elif n == 'called_getattribute':
            return self.called_getattribute
        elif n == 'called_getattr':
            return self.called_getattr
        else:
            raise AttributeError

    def __getattr__(self,n):
        self.called_getattr += 1
        if n == 'bar':
            return n
        else:
            raise AttributeError
Cython-0.26.1/tests/run/charptr_decode.pyx0000664000175000017500000001064613023021023021277 0ustar  stefanstefan00000000000000
cimport cython

############################################################
# tests for char* slicing

cdef const char* cstring = "abcABCqtp"

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode():
    """
    >>> print(str(slice_charptr_decode()).replace("u'", "'"))
    ('a', 'abc', 'abcABCqtp')
    """
    return (cstring[:1].decode('UTF-8'),
            cstring[:3].decode('UTF-8'),
            cstring[:9].decode('UTF-8'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_platform_encoding():
    """
    >>> print(str(slice_charptr_decode_platform_encoding()).replace("u'", "'"))
    ('a', 'abc', 'abcABCqtp')
    """
    cdef bytes s = u'abcABCqtp'.encode()
    cdef char* cstr = s
    return (cstr[:1].decode(),
            cstr[:3].decode(),
            cstr[:9].decode())

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_unknown_encoding():
    """
    >>> print(str(slice_charptr_decode_unknown_encoding()).replace("u'", "'"))
    ('abcABCqtp', 'abcABCqtp', 'abc', 'abcABCqt')
    """
    cdef const char* enc = 'UTF-8'
    cdef const char* error_handling = 'strict'
    return (cstring.decode(enc),
            cstring.decode(enc, error_handling),
            cstring[:3].decode(enc),
            cstring[:8].decode(enc, error_handling))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_slice2():
    """
    >>> print(str(slice_charptr_decode_slice2()).replace("u'", "'"))
    ('a', 'bc', 'tp')
    """
    return (cstring[0:1].decode('UTF-8'),
            cstring[1:3].decode('UTF-8'),
            cstring[7:9].decode('UTF-8'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_strlen():
    """
    >>> print(str(slice_charptr_decode_strlen()).replace("u'", "'"))
    ('abcABCqtp', 'bcABCqtp', '', 'BCq', 'abcA', '')
    """
    return (cstring.decode('UTF-8'),
            cstring[1:].decode('UTF-8'),
            cstring[9:].decode('UTF-8'),
            cstring[-5:-2].decode('UTF-8'),
            cstring[:-5].decode('UTF-8'),
            cstring[:-9].decode('UTF-8'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_unbound():
    """
    >>> print(str(slice_charptr_decode_unbound()).replace("u'", "'"))
    ('a', 'abc', 'abcABCqtp')
    """
    return (bytes.decode(cstring[:1], 'UTF-8'),
            bytes.decode(cstring[:3], 'UTF-8', 'replace'),
            bytes.decode(cstring[:9], 'UTF-8'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_decode_errormode():
    """
    >>> print(str(slice_charptr_decode_errormode()).replace("u'", "'"))
    ('a', 'abc', 'abcABCqtp')
    """
    return (cstring[:1].decode('UTF-8', 'strict'),
            cstring[:3].decode('UTF-8', 'replace'),
            cstring[:9].decode('UTF-8', 'unicode_escape'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_dynamic_bounds():
    """
    >>> print(str(slice_charptr_dynamic_bounds()).replace("u'", "'"))
    ('abc', 'abc', 'bcAB', 'BCqtp')
    """
    return (cstring[:return3()].decode('UTF-8'),
            cstring[0:return3()].decode('UTF-8'),
            cstring[return1():return5()].decode('UTF-8'),
            cstring[return4():return9()].decode('UTF-8'))

@cython.test_assert_path_exists("//PythonCapiCallNode")
@cython.test_fail_if_path_exists("//AttributeNode")
def slice_charptr_dynamic_bounds_non_name():
    """
    >>> print(str(slice_charptr_dynamic_bounds_non_name()).replace("u'", "'"))
    ('bcA', 'bcA', 'BCqtp', 'ABCqtp', 'bcABCqtp', 'bcABCqtp', 'cABC')
    """
    return ((cstring+1)[:return3()].decode('UTF-8'),
            (cstring+1)[0:return3()].decode('UTF-8'),
            (cstring+1)[return3():].decode('UTF-8'),
            (cstring+1)[2:].decode('UTF-8'),
            (cstring+1)[0:].decode('UTF-8'),
            (cstring+1)[:].decode('UTF-8'),
            (cstring+1)[return1():return5()].decode('UTF-8'))

cdef return1(): return 1
cdef return3(): return 3
cdef return4(): return 4
cdef return5(): return 5
cdef return9(): return 9
Cython-0.26.1/tests/run/cpdef_enums.pyx0000664000175000017500000000410213143605603020626 0ustar  stefanstefan00000000000000"""
>>> ONE, TEN, HUNDRED
(1, 10, 100)
>>> THOUSAND        # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'THOUSAND' is not defined

>>> TWO == 2 or TWO
True
>>> THREE == 3 or THREE
True
>>> FIVE == 5 or FIVE
True
>>> SEVEN           # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'SEVEN' is not defined

>>> FOUR == 4 or FOUR
True
>>> EIGHT == 8 or EIGHT
True
>>> SIXTEEN        # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'SIXTEEN' is not defined

>>> RANK_0 == 11 or RANK_0
True
>>> RANK_1 == 37 or RANK_1
True
>>> RANK_2 == 389 or RANK_2
True
>>> RANK_3         # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'RANK_3' is not defined

>>> set(PyxEnum) == set([TWO, THREE, FIVE])
True
>>> str(PyxEnum.TWO)
'PyxEnum.TWO'
>>> PyxEnum.TWO + PyxEnum.THREE == PyxEnum.FIVE
True
>>> PyxEnum(2) is PyxEnum["TWO"] is PyxEnum.TWO
True

>>> IntEnum  # not leaking into module namespace
Traceback (most recent call last):
NameError: name 'IntEnum' is not defined
"""


cdef extern from *:
    cpdef enum: # ExternPyx
        ONE "1"
        TEN "10"
        HUNDRED "100"

    cdef enum: # ExternSecretPyx
        THOUSAND "1000"

cpdef enum PyxEnum:
    TWO = 2
    THREE = 3
    FIVE = 5

cdef enum SecretPyxEnum:
    SEVEN = 7

def test_as_variable_from_cython():
    """
    >>> test_as_variable_from_cython()
    """
    import sys
    if sys.version_info >= (2, 7):
        assert list(PyxEnum) == [TWO, THREE, FIVE], list(PyxEnum)
        assert list(PxdEnum) == [RANK_0, RANK_1, RANK_2], list(PxdEnum)
    else:
        # No OrderedDict.
        assert set(PyxEnum) == {TWO, THREE, FIVE}, list(PyxEnum)
        assert set(PxdEnum) == {RANK_0, RANK_1, RANK_2}, list(PxdEnum)

cdef int verify_pure_c() nogil:
    cdef int x = TWO
    cdef int y = PyxEnum.THREE
    cdef int z = SecretPyxEnum.SEVEN
    return x + y + z

# Use it to suppress warning.
verify_pure_c()

def verify_resolution_GH1533():
    """
    >>> verify_resolution_GH1533()
    3
    """
    THREE = 100
    return int(PyxEnum.THREE)
Cython-0.26.1/tests/run/async_iter_pep492.pyx0000664000175000017500000001475313023021033021600 0ustar  stefanstefan00000000000000# mode: run
# tag: pep492, asyncfor, await

import sys

if sys.version_info >= (3, 5, 0, 'beta'):
    # pass a Cython-implemented AsyncIter() into a Python async-for loop
    __doc__ = u"""
>>> def test_py35(AsyncIterClass):
...     buffer = []
...     async def coro():
...         async for i1, i2 in AsyncIterClass(1):
...             buffer.append(i1 + i2)
...     return coro, buffer

>>> testfunc, buffer = test_py35(AsyncIterOld if sys.version_info < (3, 5, 2) else AsyncIter)
>>> buffer
[]

>>> yielded, _ = run_async(testfunc(), check_type=False)
>>> yielded == [i * 100 for i in range(1, 11)] or yielded
True
>>> buffer == [i*2 for i in range(1, 101)] or buffer
True
"""


cdef class AsyncYieldFrom:
    cdef object obj
    def __init__(self, obj):
        self.obj = obj

    def __await__(self):
        yield from self.obj


cdef class AsyncYield:
    cdef object value
    def __init__(self, value):
        self.value = value

    def __await__(self):
        yield self.value


def run_async(coro, check_type='coroutine'):
    if check_type:
        assert coro.__class__.__name__ == check_type, \
            'type(%s) != %s' % (coro.__class__, check_type)

    buffer = []
    result = None
    while True:
        try:
            buffer.append(coro.send(None))
        except StopIteration as ex:
            result = ex.args[0] if ex.args else None
            break
    return buffer, result


cdef class AsyncIter:
    cdef long i
    cdef long aiter_calls
    cdef long max_iter_calls

    def __init__(self, long max_iter_calls=1):
        self.i = 0
        self.aiter_calls = 0
        self.max_iter_calls = max_iter_calls

    def __aiter__(self):
        self.aiter_calls += 1
        return self

    async def __anext__(self):
        self.i += 1
        assert self.aiter_calls <= self.max_iter_calls

        if not (self.i % 10):
            await AsyncYield(self.i * 10)

        if self.i > 100:
            raise StopAsyncIteration

        return self.i, self.i


cdef class AsyncIterOld(AsyncIter):
    """
    Same as AsyncIter, but with the old async-def interface for __aiter__().
    """
    async def __aiter__(self):
        self.aiter_calls += 1
        return self


def test_for_1():
    """
    >>> testfunc, buffer = test_for_1()
    >>> buffer
    []
    >>> yielded, _ = run_async(testfunc())
    >>> yielded == [i * 100 for i in range(1, 11)] or yielded
    True
    >>> buffer == [i*2 for i in range(1, 101)] or buffer
    True
    """
    buffer = []
    async def test1():
        async for i1, i2 in AsyncIter(1):
            buffer.append(i1 + i2)
    return test1, buffer


def test_for_2():
    """
    >>> testfunc, buffer = test_for_2()
    >>> buffer
    []
    >>> yielded, _ = run_async(testfunc())
    >>> yielded == [100, 200] or yielded
    True
    >>> buffer == [i for i in range(1, 21)] + ['end'] or buffer
    True
    """
    buffer = []
    async def test2():
        nonlocal buffer
        async for i in AsyncIter(2):
            buffer.append(i[0])
            if i[0] == 20:
                break
        else:
            buffer.append('what?')
        buffer.append('end')
    return test2, buffer



def test_for_3():
    """
    >>> testfunc, buffer = test_for_3()
    >>> buffer
    []
    >>> yielded, _ = run_async(testfunc())
    >>> yielded == [i * 100 for i in range(1, 11)] or yielded
    True
    >>> buffer == [i for i in range(1, 21)] + ['what?', 'end'] or buffer
    True
    """
    buffer = []
    async def test3():
        nonlocal buffer
        async for i in AsyncIter(3):
            if i[0] > 20:
                continue
            buffer.append(i[0])
        else:
            buffer.append('what?')
        buffer.append('end')
    return test3, buffer


cdef class NonAwaitableFromAnext:
    def __aiter__(self):
        return self

    def __anext__(self):
        return 123


def test_broken_anext():
    """
    >>> testfunc = test_broken_anext()
    >>> try: run_async(testfunc())
    ... except TypeError as exc:
    ...     assert ' int ' in str(exc)
    ... else:
    ...     print("NOT RAISED!")
    """
    async def foo():
        async for i in NonAwaitableFromAnext():
            print('never going to happen')
    return foo


cdef class Manager:
    cdef readonly list counter
    def __init__(self, counter):
        self.counter = counter

    async def __aenter__(self):
        self.counter[0] += 10000

    async def __aexit__(self, *args):
        self.counter[0] += 100000


cdef class Iterable:
    cdef long i
    def __init__(self):
        self.i = 0

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.i > 10:
            raise StopAsyncIteration
        self.i += 1
        return self.i


def test_with_for():
    """
    >>> test_with_for()
    111011
    333033
    20555255
    """
    I = [0]

    manager = Manager(I)
    iterable = Iterable()
    mrefs_before = sys.getrefcount(manager)
    irefs_before = sys.getrefcount(iterable)

    async def main():
        async with manager:
            async for i in iterable:
                I[0] += 1
        I[0] += 1000

    run_async(main())
    print(I[0])

    assert sys.getrefcount(manager) == mrefs_before
    assert sys.getrefcount(iterable) == irefs_before

    ##############

    async def main():
        nonlocal I

        async with Manager(I):
            async for i in Iterable():
                I[0] += 1
        I[0] += 1000

        async with Manager(I):
            async for i in Iterable():
                I[0] += 1
        I[0] += 1000

    run_async(main())
    print(I[0])

    ##############

    async def main():
        async with Manager(I):
            I[0] += 100
            async for i in Iterable():
                I[0] += 1
            else:
                I[0] += 10000000
        I[0] += 1000

        async with Manager(I):
            I[0] += 100
            async for i in Iterable():
                I[0] += 1
            else:
                I[0] += 10000000
        I[0] += 1000

    run_async(main())
    print(I[0])


cdef class AI_old:
    async def __aiter__(self):
        1/0


cdef class AI_new:
    def __aiter__(self):
        1/0


def test_aiter_raises(AI):
    """
    >>> test_aiter_raises(AI_old)
    RAISED
    0
    >>> test_aiter_raises(AI_new)
    RAISED
    0
    """
    CNT = 0

    async def foo():
        nonlocal CNT
        async for i in AI():
            CNT += 1
        CNT += 10

    try:
        run_async(foo())
    except ZeroDivisionError:
        print("RAISED")
    else:
        print("NOT RAISED")
    return CNT
Cython-0.26.1/tests/run/closure_names.pyx0000664000175000017500000000113213150045407021173 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# ticket: gh-1797


def func():
    """
    >>> funcs = func()
    >>> [f(1) for f in funcs]
    ['eq', 'str', 'weakref', 'new', 'dict']
    """
    def __eq__(a):
        return 'eq'

    def __str__(a):
        return 'str'

    def __weakref__(a):
        return 'weakref'

    def __new__(a):
        return 'new'

    def __dict__(a):
        return 'dict'

    def list_from_gen(g):
        return list(g)

    # move into closure by using inside of generator expression
    return list_from_gen([__eq__, __str__, __weakref__, __new__, __dict__][i] for i in range(5))
Cython-0.26.1/tests/run/cpp_class_redef.pyx0000664000175000017500000000061012542002467021453 0ustar  stefanstefan00000000000000# tag: cpp

# This gives a warning, but should not give an error.
cdef cppclass Foo:
    int _foo
    int get_foo():
        return this._foo
    void set_foo(int foo):
        this._foo = foo

def test_Foo(n):
    """
    >>> test_Foo(1)
    1
    """
    cdef Foo* foo = NULL
    try:
        foo = new Foo()
        foo.set_foo(n)
        return foo.get_foo()
    finally:
        del foo
Cython-0.26.1/tests/run/exec_noargs.pyx0000664000175000017500000000076412542002467020646 0ustar  stefanstefan00000000000000# mode: run
# tag: exec

exec "GLOBAL = 1234"

def exec_module_scope():
    """
    >>> globals()['GLOBAL']
    1234
    """

def exec_func_scope():
    """
    >>> sorted(exec_func_scope().items())
    [('G', 1234), ('a', 'b')]
    """
    d = {}
    exec "d['a'] = 'b'; d['G'] = GLOBAL"
    return d

def exec_pyclass_scope():
    """
    >>> obj = exec_pyclass_scope()
    >>> obj.a
    'b'
    >>> obj.G
    1234
    """
    class TestExec:
        exec "a = 'b'; G = GLOBAL"
    return TestExec
Cython-0.26.1/tests/run/cpp_operators_helper.h0000664000175000017500000000163313023021033022154 0ustar  stefanstefan00000000000000#define UN_OP(op) const char* operator op () { return "unary "#op; }
#define POST_UN_OP(op) const char* operator op (int x) { x++; return "post "#op; }
#define BIN_OP(op) const char* operator op (int x) { x++; return "binary "#op; }

#define COMMA ,
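
/*
 * Each *_OP macro expands to a stub operator overload that merely reports
 * which operator was invoked (e.g. BIN_OP(+) returns the string "binary +").
 * COMMA exists so that a ',' can be passed through as a single macro argument.
 */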

class TestOps {

public:

    UN_OP(-);
    UN_OP(+);
    UN_OP(*);
    UN_OP(~);
    UN_OP(!);
    UN_OP(&);

    UN_OP(++);
    UN_OP(--);
    POST_UN_OP(++);
    POST_UN_OP(--);

    BIN_OP(+);
    BIN_OP(-);
    BIN_OP(*);
    BIN_OP(/);
    BIN_OP(%);

    BIN_OP(<<);
    BIN_OP(>>);

    BIN_OP(|);
    BIN_OP(&);
    BIN_OP(^);
    BIN_OP(COMMA);

    BIN_OP(==);
    BIN_OP(!=);
    BIN_OP(<=);
    BIN_OP(<);
    BIN_OP(>=);
    BIN_OP(>);

    BIN_OP([]);
    BIN_OP(());

};

class TruthClass {
public:
  TruthClass() : value(false) {}
  TruthClass(bool value) : value(value) {}
  virtual ~TruthClass() {};
  operator bool() { return value; }
  bool value;
};
Cython-0.26.1/tests/run/pure_mode_cmethod_inheritance_T583.py0000664000175000017500000000270312542002467024732 0ustar  stefanstefan00000000000000class Base(object):
    '''
    >>> base = Base()
    >>> print(base.noargs())
    Base
    >>> print(base.int_arg(1))
    Base
    >>> print(base._class())
    Base
    '''
    def noargs(self):
        return "Base"
    def int_arg(self, i):
        return "Base"
    @classmethod
    def _class(tp):
        return "Base"


class Derived(Base):
    '''
    >>> derived = Derived()
    >>> print(derived.noargs())
    Derived
    >>> print(derived.int_arg(1))
    Derived
    >>> print(derived._class())
    Derived
    '''
    def noargs(self):
        return "Derived"
    def int_arg(self, i):
        return "Derived"
    @classmethod
    def _class(tp):
        return "Derived"


class DerivedDerived(Derived):
    '''
    >>> derived = DerivedDerived()
    >>> print(derived.noargs())
    DerivedDerived
    >>> print(derived.int_arg(1))
    DerivedDerived
    >>> print(derived._class())
    DerivedDerived
    '''
    def noargs(self):
        return "DerivedDerived"
    def int_arg(self, i):
        return "DerivedDerived"
    @classmethod
    def _class(tp):
        return "DerivedDerived"


class Derived2(Base):
    '''
    >>> derived = Derived2()
    >>> print(derived.noargs())
    Derived2
    >>> print(derived.int_arg(1))
    Derived2
    >>> print(derived._class())
    Derived2
    '''
    def noargs(self):
        return "Derived2"
    def int_arg(self, i):
        return "Derived2"
    @classmethod
    def _class(tp):
        return "Derived2"
Cython-0.26.1/tests/run/cpp_function_lib.cpp0000664000175000017500000000170113143605603021617 0ustar  stefanstefan00000000000000#include "cpp_function_lib.h"

double add_one(double a, int b)
{
    return a + (double) b + 1.0;
}

double add_two(double a, int b)
{
    return a + (double) b + 2.0;
}


AddAnotherFunctor::AddAnotherFunctor(double to_add)
    : to_add(to_add)
{
}

double AddAnotherFunctor::operator()(double a, int b) const
{
    return a + (double) b + this->to_add;
};


FunctionKeeper::FunctionKeeper(std::function<double(double, int)> user_function)
    : my_function(user_function)
{
}

FunctionKeeper::~FunctionKeeper()
{
}

void FunctionKeeper::set_function(std::function<double(double, int)> user_function)
{
    this->my_function = user_function;
}

std::function<double(double, int)> FunctionKeeper::get_function() const
{
    return this->my_function;
}

double FunctionKeeper::call_function(double a, int b) const
{
    if (!this->my_function) {
        throw std::runtime_error("Trying to call undefined function!");
    }
    return this->my_function(a, b);
};
Cython-0.26.1/tests/run/pinard5.pyx0000664000175000017500000000051712542002467017707 0ustar  stefanstefan00000000000000cdef class Tri:
    def test(self):
        return 1

cdef class Curseur:
    cdef Tri tri
    def detail(self):
        return produire_fiches(self.tri)

cdef produire_fiches(Tri tri):
    return tri.test()

def test():
    """
    >>> test()
    1
    """
    cdef Curseur c
    c = Curseur()
    c.tri = Tri()
    return c.detail()
Cython-0.26.1/tests/run/pyclass_dynamic_bases.pyx0000664000175000017500000000051712542002467022704 0ustar  stefanstefan00000000000000# mode: run
# tag: pyclass

class A(object):
    x = 1

class B(object):
    x = 2


def cond_if_bases(x):
    """
    >>> c = cond_if_bases(True)
    >>> c().p
    5
    >>> c().x
    1
    >>> c = cond_if_bases(False)
    >>> c().p
    5
    >>> c().x
    2
    """
    class PyClass(A if x else B):
        p = 5
    return PyClass
Cython-0.26.1/tests/run/r_pyclassdefault.pyx0000664000175000017500000000121612542002467021706 0ustar  stefanstefan00000000000000__doc__ = u"""
  >>> c = CoconutCarrier()
  >>> c.swallow(name = "Brian")
  This swallow is called Brian
  >>> c.swallow(airspeed = 42)
  This swallow is flying at 42 furlongs per fortnight
  >>> c.swallow(coconuts = 3)
  This swallow is carrying 3 coconuts
"""

class CoconutCarrier:

    def swallow(self, name = None, airspeed = None, coconuts = None):
        if name is not None:
            print u"This swallow is called", name
        if airspeed is not None:
            print u"This swallow is flying at", airspeed, u"furlongs per fortnight"
        if coconuts is not None:
            print u"This swallow is carrying", coconuts, u"coconuts"
Cython-0.26.1/tests/run/numpy_common.pxi0000664000175000017500000000033512542002467021043 0ustar  stefanstefan00000000000000# hack to avoid C compiler warnings about unused functions in the NumPy header files
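# The cname trick below declares FALSE with the C name "0", so the guarded
# calls are compiled (and therefore referenced) but never executed at runtime.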

cdef extern from *:
   bint FALSE "0"
   void import_array()
#   void import_umath()

if FALSE:
    import_array()
#    import_umath()
Cython-0.26.1/tests/run/cfuncdef.pyx0000664000175000017500000000042512542002467020120 0ustar  stefanstefan00000000000000cdef void ftang():
    cdef int x
    x = 0

cdef int foo(int i, char c):
    cdef float f, g
    f = 0
    g = 0

cdef spam(int i, obj, object object):
    cdef char c
    c = 0

def test():
    """
    >>> test()
    """
    ftang()
    foo(0, c'f')
    spam(25, None, None)
Cython-0.26.1/tests/run/iter.pyx0000664000175000017500000000101712542002467017304 0ustar  stefanstefan00000000000000
def call_iter1(x):
    """
    >>> [ i for i in iter([1,2,3]) ]
    [1, 2, 3]
    >>> [ i for i in call_iter1([1,2,3]) ]
    [1, 2, 3]
    """
    return iter(x)

class Ints(object):
    def __init__(self):
        self.i = 0
    def __call__(self):
        self.i += 1
        if self.i > 10:
            raise ValueError
        return self.i

def call_iter2(x, sentinel):
    """
    >>> [ i for i in iter(Ints(), 3) ]
    [1, 2]
    >>> [ i for i in call_iter2(Ints(), 3) ]
    [1, 2]
    """
    return iter(x, sentinel)
Cython-0.26.1/tests/run/exectest.pyx0000664000175000017500000000546212542002467020175 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-

__doc__ = u"""
#>>> a
#Traceback (most recent call last):
#NameError: name 'a' is not defined
#>>> test_module_scope()
#>>> a
"""

#def test_module_scope():
#    exec "a=1+1"
#    return __dict__['a']

def test_dict_scope1():
    """
    >>> test_dict_scope1()
    2
    """
    cdef dict d = {}
    exec u"b=1+1" in d
    return d[u'b']

def test_dict_scope2(d):
    """
    >>> d = {}
    >>> test_dict_scope2(d)
    >>> d['b']
    2
    """
    exec u"b=1+1" in d

def test_dict_scope3(d1, d2):
    """
    >>> d1 = {}
    >>> test_dict_scope3(d1, d1)
    >>> d1['b']
    2

    >>> d1, d2 = {}, {}
    >>> test_dict_scope3(d1, d2)
    >>> (d1.get('b'), d2.get('b'))
    (None, 2)

    >>> d1, d2 = {}, {}
    >>> test_dict_scope3(d1, d2)
    >>> (d1.get('b'), d2.get('b'))
    (None, 2)
    """
    exec u"b=1+1" in d1, d2

def test_dict_scope_ref(d1, d2):
    """
    >>> d1, d2 = dict(a=11), dict(c=5)
    >>> test_dict_scope_ref(d1, d2)
    >>> (d1.get('b'), d2.get('b'))
    (None, 16)

    >>> d = dict(a=11, c=5)
    >>> test_dict_scope_ref(d, d)
    >>> d['b']
    16

    >>> d1, d2 = {}, {}
    >>> test_dict_scope_ref(d1, d2)         # doctest: +ELLIPSIS
    Traceback (most recent call last):
    NameError: ...name 'a' is not defined
    """
    exec u"b=a+c" in d1, d2

def test_dict_scope_tuple2():
    """
    >>> test_dict_scope_tuple2()
    2
    """
    cdef dict d = {}
    exec(u"b=1+1", d)   # Py3 compatibility syntax
    return d[u'b']

def test_dict_scope_tuple3(d1, d2):
    """
    >>> d1, d2 = {}, {}
    >>> test_dict_scope_tuple3(d1, d2)
    >>> (d1.get('b'), d2.get('b'))
    (None, 2)
    """
    exec(u"b=1+1", d1, d2)

def test_def(d, varref):
    """
    >>> d = dict(seq = [1,2,3,4])
    >>> add_iter = test_def(d, 'seq')
    >>> list(add_iter())
    [2, 3, 4, 5]
    """
    exec u"""
def test():
    for x in %s:
        yield x+1
""" % varref in d
    return d[u'test']

import sys

def test_encoding(d1, d2):
    u"""
    >>> d = {}
    >>> test_encoding(d, None)
    >>> print(d['b'])
    üöä
    """
    if sys.version_info[0] >= 3:
        s = "b = 'üöä'"
    else:
        s = "# -*- coding: utf-8 -*-" + "\n" + "b = u'üöä'"
    exec s in d1, d2

def test_encoding_unicode(d1, d2):
    u"""
    >>> d = {}
    >>> test_encoding_unicode(d, None)
    >>> print(d['b'])
    üöä
    """
    if sys.version_info[0] >= 3:
        s = u"b = 'üöä'"
    else:
        s = u"b = u'üöä'"
    exec s in d1, d2

def test_compile(d):
    """
    >>> d = dict(a=1, c=3)
    >>> test_compile(d)
    >>> d['b']
    4
    """
    c = compile(u"b = a+c", u"", u"exec")
    exec c in d

def exec_invalid_type(x):
    """
    >>> exec_invalid_type(42)
    Traceback (most recent call last):
    TypeError: exec: arg 1 must be string, bytes or code object, got int
    """
    exec x in {}
Cython-0.26.1/tests/run/builtin_basestring.pyx0000664000175000017500000000521612542002467022235 0ustar  stefanstefan00000000000000
cimport cython

import sys
IS_PY3 = sys.version_info[0] >= 3

ustring = u'abcdef'
sstring =  'abcdef'
bstring = b'abcdef'


def isinstance_basestring(obj):
    """
    >>> isinstance_basestring(ustring)
    True
    >>> isinstance_basestring(sstring)
    True
    >>> if IS_PY3: print(not isinstance_basestring(bstring))
    ... else: print(isinstance_basestring(bstring))
    True
    """
    return isinstance(obj, basestring)


def basestring_is_unicode_in_py3():
    """
    >>> basestring_is_unicode_in_py3()
    True
    """
    if IS_PY3:
        return basestring is unicode
    else:
        return basestring is not unicode


def unicode_subtypes_basestring():
    """
    >>> unicode_subtypes_basestring()
    True
    """
    return issubclass(unicode, basestring)


def basestring_typed_variable(obj):
    """
    >>> basestring_typed_variable(None) is None
    True
    >>> basestring_typed_variable(ustring) is ustring
    True
    >>> basestring_typed_variable(sstring) is sstring
    True
    >>> if IS_PY3: print(True)
    ... else: print(basestring_typed_variable(bstring) is bstring)
    True
    >>> class S(str): pass
    >>> basestring_typed_variable(S())   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...got S...
    """
    cdef basestring s
    s = u'abc'
    assert s
    s = 'abc'
    assert s
    # make sure coercion also works in conditional expressions
    s = u'abc' if obj else 'abc'
    assert s
    s = obj
    return s


def basestring_typed_argument(basestring obj):
    """
    >>> basestring_typed_argument(None) is None
    True
    >>> basestring_typed_argument(ustring) is ustring
    True
    >>> basestring_typed_argument(sstring) is sstring
    True
    >>> if IS_PY3: print(True)
    ... else: print(basestring_typed_argument(bstring) is bstring)
    True
    >>> class S(str): pass
    >>> basestring_typed_argument(S())   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...got S...
    """
    return obj


@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//NoneCheckNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def basestring_join(basestring s, *values):
    """
    >>> print(basestring_join(ustring, 'a', 'b', 'c'))
    aabcdefbabcdefc
    >>> print(basestring_join(sstring, 'a', 'b', 'c'))
    aabcdefbabcdefc
    >>> if IS_PY3: print('abcdefabcdefabcdef')
    ... else: print(basestring_join(bstring, bstring, bstring).decode('utf8'))
    abcdefabcdefabcdef
    >>> basestring_join(None, 'a', 'b', 'c')
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'join'
    """
    return s.join(values)
Cython-0.26.1/tests/run/unicodeliteralsdefault.pyx0000664000175000017500000000276412542002467023106 0ustar  stefanstefan00000000000000## keep two lines free to make sure PEP 263 does not apply
##

##

# This file is written in UTF-8, but it has no encoding declaration,
# so it just defaults to UTF-8 (PEP 3120).

__doc__ = br"""
    >>> sa
    'abc'
    >>> ua
    u'abc'
    >>> b
    u'123'
    >>> c
    u'S\xf8k ik'
    >>> d
    u'\xfc\xd6\xe4'
    >>> e
    u'\x03g\xf8\uf8d2S\xf8k ik'
    >>> f
    u'\xf8'
    >>> add
    u'S\xf8k ik\xfc\xd6\xe4abc'
    >>> null
    u'\x00'
""".decode("ASCII") + b"""
    >>> len(sa)
    3
    >>> len(ua)
    3
    >>> len(b)
    3
    >>> len(c)
    6
    >>> len(d)
    3
    >>> len(e)
    10
    >>> len(f)
    1
    >>> len(add)
    12
    >>> len(null)
    1
""".decode("ASCII") + u"""
    >>> ua == u'abc'
    True
    >>> b == u'123'
    True
    >>> c == u'Søk ik'
    True
    >>> d == u'üÖä'
    True
    >>> e == u'\x03\x67\xf8\uf8d2Søk ik'     # unescaped by Cython
    True
    >>> e == u'\\x03\\x67\\xf8\\uf8d2Søk ik' # unescaped by Python
    True
    >>> f == u'\xf8'  # unescaped by Cython
    True
    >>> f == u'\\xf8' # unescaped by Python
    True
    >>> add == u'Søk ik' + u'üÖä' + 'abc'
    True
    >>> null == u'\\x00' # unescaped by Python (required by doctest)
    True
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '")
else:
    __doc__ = __doc__.replace(u" b'", u" '")

sa = 'abc'
ua = u'abc'

b = u'123'
c = u'Søk ik'
d = u'üÖä'
e = u'\x03\x67\xf8\uf8d2Søk ik'
f = u'\xf8'

add = u'Søk ik' + u'üÖä' + u'abc'
null = u'\x00'
Cython-0.26.1/tests/run/subclasses.pyx0000664000175000017500000000230712542002467020513 0ustar  stefanstefan00000000000000cdef class Base0:
    pass

cdef class Base(Base0):
    pass

cdef class Foo(Base):
   cdef fooit(self):
       return 42

cdef class Bar(Foo):
   pass

cdef class Bam(Bar):
   pass

cdef class Zoo(Bam):
   pass


def fooit(Foo foo):
    """
    >>> zoo = Zoo()
    >>> for cl in (Zoo, Bam, Bar, Foo, Base, Base0): assert isinstance(zoo, cl)
    >>> fooit(zoo)
    42
    >>> bam = Bam()
    >>> for cl in (Bam, Bar, Foo, Base, Base0): assert isinstance(bam, cl)
    >>> fooit(bam)
    42
    >>> bar = Bar()
    >>> for cl in (Bar, Foo, Base, Base0): assert isinstance(bar, cl)
    >>> fooit(bar)
    42
    >>> foo = Foo()
    >>> for cl in (Foo, Base, Base0): assert isinstance(foo, cl)
    >>> fooit(foo)
    42
    >>> base = Base()
    >>> for cl in (Base, Base0): assert isinstance(base, cl)
    >>> fooit(base)
    Traceback (most recent call last):
    TypeError: Argument 'foo' has incorrect type (expected subclasses.Foo, got subclasses.Base)
    >>> base0 = Base0()
    >>> for cl in (Base0,): assert isinstance(base0, cl)
    >>> fooit(base0)
    Traceback (most recent call last):
    TypeError: Argument 'foo' has incorrect type (expected subclasses.Foo, got subclasses.Base0)
    """
    return foo.fooit()
Cython-0.26.1/tests/run/cpp_stl_function.pyx0000664000175000017500000000470113143605603021714 0ustar  stefanstefan00000000000000# distutils: extra_compile_args=-std=c++0x
# mode: run
# tag: cpp

from libcpp.functional cimport function
cimport cpp_function_lib

def test_simple_function():
    '''
    >>> test_simple_function()
    6.0
    '''
    return cpp_function_lib.add_one(2.0, 3)


def test_AddAnotherFunctor(n):
    '''
    >>> test_AddAnotherFunctor(5.0)
    10.0
    '''
    return cpp_function_lib.AddAnotherFunctor(5.0).call(2.0, 3)


cdef class FunctionKeeper:
    """
    >>> fk = FunctionKeeper('add_one')
    >>> fk(2.0, 3)
    6.0
    >>> fk = FunctionKeeper('add_two')
    >>> fk(2.0, 3)
    7.0
    >>> fk = FunctionKeeper('AddAnotherFunctor5')
    >>> fk(2.0, 3)
    10.0
    >>> fk = FunctionKeeper('default')
    >>> bool(fk)
    False
    >>> fk(2.0, 3)
    Traceback (most recent call last):
    ...
    RuntimeError: Trying to call undefined function!
    >>> fk.set_function('AddAnotherFunctor5')
    >>> fk(2.0, 3)
    10.0
    >>> bool(fk)
    True
    >>> fk.set_function('NULL')
    >>> bool(fk)
    False
    """
    cdef cpp_function_lib.FunctionKeeper* function_keeper
    
    cdef function[double(double, int)]* _get_function_ptr_from_name(self, function_name):
        cdef function[double(double, int)] *f
        
        if function_name == 'add_one':
            f = new function[double(double, int)](cpp_function_lib.add_one)
        elif function_name == 'add_two':
            f = new function[double(double, int)](cpp_function_lib.add_two)
        elif function_name == 'AddAnotherFunctor5':
            f = new function[double(double, int)]()
            f[0] = cpp_function_lib.AddAnotherFunctor(5.0)
        elif function_name == 'NULL':
            f = new function[double(double, int)](NULL)
        elif function_name == 'default':
            f = new function[double(double, int)]()
            
        return f
   
    def __cinit__(self, function_name):
        cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
        self.function_keeper = new cpp_function_lib.FunctionKeeper(f[0])
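        # The C++ FunctionKeeper stores its own copy of the std::function, so the
        # temporary heap allocation from _get_function_ptr_from_name() can be freed.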
        del f

    def __dealloc__(self):
        del self.function_keeper

    def __call__(self, a, b):
        return self.function_keeper.call_function(a, b)

    def __bool__(self):
        return <bint> self.function_keeper.get_function()

    def set_function(self, function_name):
        cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
        self.function_keeper.set_function(f[0])
        del f
Cython-0.26.1/tests/run/jarausch1.pyx0000664000175000017500000000043012542002467020220 0ustar  stefanstefan00000000000000__doc__ = u"""
   >>> b == br'\\\\'
   True
   >>> s ==  r'\\\\'
   True
   >>> u == ur'\\\\'
   True
"""

import sys
if sys.version_info[0] < 3:
    __doc__ = __doc__.replace(u" br'", u" r'")
else:
    __doc__ = __doc__.replace(u" ur'", u" r'")

b = br'\\'
s =  r'\\'
u = ur'\\'
Cython-0.26.1/tests/run/cpp_exceptions_helper.h0000664000175000017500000000232112542002467022331 0ustar  stefanstefan00000000000000#include <ios>
#include <new>
#include <stdexcept>

int raise_int(int fire) {
    if (fire) {
        throw 1;
    }
    return 0;
}

int raise_index(int fire) {
    if (fire) {
        throw std::out_of_range("c++ error");
    }
    return 0;
}

class Foo {
 public:
  int bar(int fire) {
    if (fire) {
      throw 1;
    }
    return 0;
  }
};

void raise_domain_error() {
    throw std::domain_error("domain_error");
}

void raise_ios_failure() {
    throw std::ios_base::failure("iostream failure");
}

void raise_memory() {
    // std::bad_alloc can only be default constructed,
    // so we have no control over the error message
    throw std::bad_alloc();
}

void raise_overflow() {
    throw std::overflow_error("overflow_error");
}

void raise_range_error() {
    throw std::range_error("range_error");
}

struct Base { virtual ~Base() {} };
struct Derived : Base { void use() const { abort(); } };

void raise_typeerror() {
    Base foo;
    Base &bar = foo;    // prevents "dynamic_cast can never succeed" warning
    Derived &baz = dynamic_cast(bar);
    baz.use();          // not reached; prevents "unused variable" warning
}

void raise_underflow() {
    throw std::underflow_error("underflow_error");
}
Cython-0.26.1/tests/run/hash_T326.pyx0000664000175000017500000000107212542002467020003 0ustar  stefanstefan00000000000000# mode: run
# ticket: 326
# tag: hash
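
# CPython reserves the hash value -1 as an error indicator for tp_hash, so a
# __hash__() that returns -1 is silently mapped to -2 (see hash(A(-1)) below).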


cdef class A:
    """
    >>> hash(A(5))
    5
    >>> hash(A(-1))
    -2
    >>> hash(A(-2))
    -2
    >>> hash(A(100))
    Traceback (most recent call last):
    ...
    TypeError: That's kind of a round number...
    """
    cdef long a
    def __init__(self, a):
        self.a = a
    def __hash__(self):
        if self.a == 100:
            raise TypeError, u"That's kind of a round number..."
        else:
            return self.a


cpdef long __hash__(long x):
    """
    >>> __hash__(-1)
    -1
    """
    return x
Cython-0.26.1/tests/run/control_flow_stack_allocation.pyx0000664000175000017500000000143012542002467024441 0ustar  stefanstefan00000000000000# mode: run
# tag: werror, control-flow
# cython: warn.unused=True, warn.unused_arg=True, warn.unused_result=True

cdef struct S:
    int x
    float y


cdef stack_alloc_test(int[2] array_arg, S struct_arg):
    cdef int[2] array_var
    cdef S struct_var, struct_var_by_value

    for i in range(2):
        array_var[i] = array_arg[i]
    struct_var.x, struct_var.y = struct_arg.x, struct_arg.y
    struct_var_by_value = struct_var

    return [ i for i in array_var ], struct_var_by_value


def test():
    """
    >>> a,d = test()
    >>> a
    [0, 1]
    >>> sorted(d.items())
    [('x', 1), ('y', 2.0)]
    """
    cdef int[2] array_var
    cdef S struct_var
    for i in range(2):
        array_var[i] = i
    struct_var = [1, 2.0]

    return stack_alloc_test(array_var, struct_var)
Cython-0.26.1/tests/run/non_future_division.pyx0000664000175000017500000000611613023021033022417 0ustar  stefanstefan00000000000000# Py2.x mixed true-div/floor-div behaviour of '/' operator


def bigints(values):
    for x in values:
        print(repr(x).rstrip('L'))


def doit(x,y):
    """
    >>> doit(1,2)
    (0, 0)
    >>> doit(4,3)
    (1, 1)
    >>> doit(4,3.0)
    (1.3333333333333333, 1.0)
    >>> doit(4,2)
    (2, 2)
    """
    return x/y, x//y

def doit_inplace(x,y):
    """
    >>> doit_inplace(1,2)
    0
    """
    x /= y
    return x

def doit_inplace_floor(x,y):
    """
    >>> doit_inplace_floor(1,2)
    0
    """
    x //= y
    return x

def constants():
    """
    >>> constants()
    (0, 0, 2.5, 2.0, 2, 2)
    """
    return 1/2, 1//2, 5/2.0, 5//2.0, 5/2, 5//2


def py_mix(a):
    """
    >>> py_mix(1)
    (0, 0, 0.5, 0.0, 0, 0)
    >>> py_mix(1.0)
    (0.5, 0.0, 0.5, 0.0, 0.5, 0.0)
    >>> 2**53 / 2.0
    4503599627370496.0
    >>> bigints(py_mix(2**53))
    4503599627370496
    4503599627370496
    4503599627370496.0
    4503599627370496.0
    4503599627370496
    4503599627370496
    >>> bigints(py_mix(2**53 + 1))
    4503599627370496
    4503599627370496
    4503599627370496.0
    4503599627370496.0
    4503599627370496
    4503599627370496
    >>> py_mix(2**53 + 1.0)
    (4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0, 4503599627370496.0)
    """
    return a/2, a//2, a/2.0, a//2.0, a/2, a//2


def py_mix_by_neg1(a):
    """
    >>> py_mix_by_neg1(0)
    (0, 0, -0.0, -0.0, 0, 0)
    >>> py_mix_by_neg1(-1)
    (1, 1, 1.0, 1.0, 1, 1)
    >>> py_mix_by_neg1(int(2**31-1))
    (-2147483647, -2147483647, -2147483647.0, -2147483647.0, -2147483647, -2147483647)
    >>> bigints(py_mix_by_neg1(int(-2**31-1)))
    2147483649
    2147483649
    2147483649.0
    2147483649.0
    2147483649
    2147483649
    >>> results = py_mix_by_neg1(int(2**63-1))
    >>> results[2] == results[3] == float(2**63-1) / -1.0 or results
    True
    >>> results[0] == results[1] == results[4] == results[5] == (2**63-1) // -1 or results
    True
    >>> results = py_mix_by_neg1(int(-2**63-1))
    >>> results[2] == results[3] == float(-2**63-1) / -1.0 or results
    True
    >>> results[0] == results[1] == results[4] == results[5] == (-2**63-1) // -1 or results
    True
    """
    return a/-1, a//-1, a/-1.0, a//-1.0, a/-1, a//-1


def py_mix_rev(a):
    """
    >>> py_mix_rev(4)
    (0, 0, 1.25, 1.0, 1, 1)
    >>> py_mix_rev(4.0)
    (0.25, 0.0, 1.25, 1.0, 1.25, 1.0)
    """
    return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a

def int_mix(int a):
    """
    >>> int_mix(1)
    (0, 0, 0.5, 0.0, 0, 0)
    """
    return a/2, a//2, a/2.0, a//2.0, a/2, a//2

def int_mix_rev(int a):
    """
    >>> int_mix_rev(4)
    (0, 0, 1.25, 1.0, 1, 1)
    """
    return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a

def float_mix(float a):
    """
    >>> float_mix(1.0)
    (0.5, 0.0, 0.5, 0.0, 0.5, 0.0)
    """
    return a/2, a//2, a/2.0, a//2.0, a/2, a//2

def float_mix_rev(float a):
    """
    >>> float_mix_rev(4.0)
    (0.25, 0.0, 1.25, 1.0, 1.25, 1.0)
    """
    return 1/a, 1//a, 5.0/a, 5.0//a, 5/a, 5//a

def int_int(int a, int b):
    """
    >>> int_int(1, 2)
    (0, 2)
    """
    return a/b, b/a
Cython-0.26.1/tests/run/yield_from_pep380.pyx0000664000175000017500000006170512574327400021605 0ustar  stefanstefan00000000000000# -*- coding: utf-8 -*-

"""
Test suite for PEP 380 implementation

adapted from original tests written by Greg Ewing
see 
"""

import sys


def _lines(trace):
    for line in trace:
        print(line)


def test_delegation_of_initial_next_to_subgenerator():
    """
    >>> _lines(test_delegation_of_initial_next_to_subgenerator())
    Starting g1
    Starting g2
    Yielded 42
    Finishing g2
    Finishing g1
    """
    trace = []
    def g1():
        trace.append("Starting g1")
        yield from g2()
        trace.append("Finishing g1")
    def g2():
        trace.append("Starting g2")
        yield 42
        trace.append("Finishing g2")
    for x in g1():
        trace.append("Yielded %s" % (x,))
    return trace

def test_raising_exception_in_initial_next_call():
    """
    >>> _lines(test_raising_exception_in_initial_next_call())
    Starting g1
    Starting g2
    Finishing g2
    Finishing g1
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield from g2()
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            raise ValueError("spanish inquisition occurred")
        finally:
            trace.append("Finishing g2")
    try:
        for x in g1():
            trace.append("Yielded %s" % (x,))
    except ValueError as e:
        pass
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def test_delegation_of_next_call_to_subgenerator():
    """
    >>> _lines(test_delegation_of_next_call_to_subgenerator())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Yielded g2 more spam
    Finishing g2
    Yielded g1 eggs
    Finishing g1
    """
    trace = []
    def g1():
        trace.append("Starting g1")
        yield "g1 ham"
        yield from g2()
        yield "g1 eggs"
        trace.append("Finishing g1")
    def g2():
        trace.append("Starting g2")
        yield "g2 spam"
        yield "g2 more spam"
        trace.append("Finishing g2")
    for x in g1():
        trace.append("Yielded %s" % (x,))
    return trace

def test_raising_exception_in_delegated_next_call():
    """
    >>> _lines(test_raising_exception_in_delegated_next_call())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Finishing g2
    Finishing g1
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield "g1 ham"
            yield from g2()
            yield "g1 eggs"
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            yield "g2 spam"
            raise ValueError("hovercraft is full of eels")
            yield "g2 more spam"
        finally:
            trace.append("Finishing g2")
    try:
        for x in g1():
            trace.append("Yielded %s" % (x,))
    except ValueError:
        pass
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def test_delegation_of_send():
    """
    >>> _lines(test_delegation_of_send())
    Starting g1
    g1 received 1
    Starting g2
    Yielded g2 spam
    g2 received 2
    Yielded g2 more spam
    g2 received 3
    Finishing g2
    Yielded g1 eggs
    g1 received 4
    Finishing g1
    """
    trace = []
    def g1():
        trace.append("Starting g1")
        x = yield "g1 ham"
        trace.append("g1 received %s" % (x,))
        yield from g2()
        x = yield "g1 eggs"
        trace.append("g1 received %s" % (x,))
        trace.append("Finishing g1")
    def g2():
        trace.append("Starting g2")
        x = yield "g2 spam"
        trace.append("g2 received %s" % (x,))
        x = yield "g2 more spam"
        trace.append("g2 received %s" % (x,))
        trace.append("Finishing g2")
    g = g1()
    y = next(g)
    x = 1
    try:
        while 1:
            y = g.send(x)
            trace.append("Yielded %s" % (y,))
            x += 1
    except StopIteration:
        pass
    return trace

def test_handling_exception_while_delegating_send():
    """
    >>> _lines(test_handling_exception_while_delegating_send())
    Starting g1
    g1 received 1
    Starting g2
    Yielded g2 spam
    g2 received 2
    """
    trace = []
    def g1():
        trace.append("Starting g1")
        x = yield "g1 ham"
        trace.append("g1 received %s" % (x,))
        yield from g2()
        x = yield "g1 eggs"
        trace.append("g1 received %s" % (x,))
        trace.append("Finishing g1")
    def g2():
        trace.append("Starting g2")
        x = yield "g2 spam"
        trace.append("g2 received %s" % (x,))
        raise ValueError("hovercraft is full of eels")
        x = yield "g2 more spam"
        trace.append("g2 received %s" % (x,))
        trace.append("Finishing g2")
    def run():
        g = g1()
        y = next(g)
        x = 1
        try:
            while 1:
                y = g.send(x)
                trace.append("Yielded %s" % (y,))
                x += 1
        except StopIteration:
            trace.append("StopIteration")
    try:
        run()
    except ValueError:
        pass # ok
    else:
        trace.append("no ValueError")
    return trace

def test_delegating_close():
    """
    >>> _lines(test_delegating_close())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Finishing g2
    Finishing g1
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield "g1 ham"
            yield from g2()
            yield "g1 eggs"
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            yield "g2 spam"
            yield "g2 more spam"
        finally:
            trace.append("Finishing g2")
    g = g1()
    for i in range(2):
        x = next(g)
        trace.append("Yielded %s" % (x,))
    g.close()
    return trace

def test_handing_exception_while_delegating_close():
    """
    >>> _lines(test_handing_exception_while_delegating_close())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Finishing g2
    Finishing g1
    nybbles have exploded with delight
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield "g1 ham"
            yield from g2()
            yield "g1 eggs"
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            yield "g2 spam"
            yield "g2 more spam"
        finally:
            trace.append("Finishing g2")
            raise ValueError("nybbles have exploded with delight")
    try:
        g = g1()
        for i in range(2):
            x = next(g)
            trace.append("Yielded %s" % (x,))
        g.close()
    except ValueError as e:
        trace.append(e.args[0])
        # FIXME: __context__ is currently not set
        #if sys.version_info[0] >= 3:
        #    assert isinstance(e.__context__, GeneratorExit), 'exception context is %r' % e.__context__
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def test_delegating_throw():
    """
    >>> _lines(test_delegating_throw())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Finishing g2
    Finishing g1
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield "g1 ham"
            yield from g2()
            yield "g1 eggs"
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            yield "g2 spam"
            yield "g2 more spam"
        finally:
            trace.append("Finishing g2")
    try:
        g = g1()
        for i in range(2):
            x = next(g)
            trace.append("Yielded %s" % (x,))
        e = ValueError("tomato ejected")
        g.throw(e)
    except ValueError:
        pass
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def __test_value_attribute_of_StopIteration_exception():
    """
    StopIteration:
    value = None
    StopIteration: spam
    value = spam
    StopIteration: spam
    value = eggs
    """
    trace = []
    def pex(e):
        trace.append("%s: %s" % (e.__class__.__name__, e))
        trace.append("value = %s" % (e.value,))
    e = StopIteration()
    pex(e)
    e = StopIteration("spam")
    pex(e)
    e.value = "eggs"
    pex(e)
    return trace


def test_exception_value_crash():
    """
    >>> test_exception_value_crash()
    ['g2']
    """
    # There used to be a refcount error in CPython when the return value
    # stored in the StopIteration has a refcount of 1.
    def g1():
        yield from g2()
    def g2():
        yield "g2"
        return [42]
    return list(g1())


def test_return_none():
    """
    >>> test_return_none()
    ['g2']
    """
    # There used to be a refcount error in CPython when the return value
    # stored in the StopIteration has a refcount of 1.
    def g1():
        yield from g2()
    def g2():
        yield "g2"
        return None
    return list(g1())


def test_finally_return_none(raise_exc=None):
    """
    >>> gen = test_finally_return_none()
    >>> next(gen)
    'g2'
    >>> next(gen)
    Traceback (most recent call last):
    StopIteration

    >>> gen = test_finally_return_none()
    >>> next(gen)
    'g2'
    >>> try: gen.throw(ValueError())
    ... except StopIteration: pass
    ... else: print("FAILED")
    """
    # There used to be a refcount error in CPython when the return value
    # stored in the StopIteration has a refcount of 1.
    def g1():
        yield from g2()
    def g2():
        try:
            yield "g2"
        finally:
            return None
    return g1()


def test_generator_return_value():
    """
    >>> _lines(test_generator_return_value())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Yielded g2 more spam
    Finishing g2
    g2 returned None
    Starting g2
    Yielded g2 spam
    Yielded g2 more spam
    Finishing g2
    g2 returned 42
    Yielded g1 eggs
    Finishing g1
    """
    trace = []
    def g1():
        trace.append("Starting g1")
        yield "g1 ham"
        ret = yield from g2()
        trace.append("g2 returned %s" % (ret,))
        ret = yield from g2(42)
        trace.append("g2 returned %s" % (ret,))
        yield "g1 eggs"
        trace.append("Finishing g1")
    def g2(v = None):
        trace.append("Starting g2")
        yield "g2 spam"
        yield "g2 more spam"
        trace.append("Finishing g2")
        if v:
            return v
    for x in g1():
        trace.append("Yielded %s" % (x,))
    return trace

def test_delegation_of_next_to_non_generator():
    """
    >>> _lines(test_delegation_of_next_to_non_generator())
    Yielded 0
    Yielded 1
    Yielded 2
    """
    trace = []
    def g():
        yield from range(3)
    for x in g():
        trace.append("Yielded %s" % (x,))
    return trace

def test_conversion_of_sendNone_to_next():
    """
    >>> _lines(test_conversion_of_sendNone_to_next())
    Yielded: 0
    Yielded: 1
    Yielded: 2
    """
    trace = []
    def g():
        yield from range(3)
    gi = g()
    for x in range(3):
        y = gi.send(None)
        trace.append("Yielded: %s" % (y,))
    return trace

def test_delegation_of_close_to_non_generator():
    """
    >>> _lines(test_delegation_of_close_to_non_generator())
    starting g
    finishing g
    """
    trace = []
    def g():
        try:
            trace.append("starting g")
            yield from range(3)
            trace.append("g should not be here")
        finally:
            trace.append("finishing g")
    gi = g()
    next(gi)
    gi.close()
    return trace

def test_delegating_throw_to_non_generator():
    """
    >>> _lines(test_delegating_throw_to_non_generator())
    Starting g
    Yielded 0
    Yielded 1
    Yielded 2
    Yielded 3
    Yielded 4
    Finishing g
    """
    trace = []
    def g():
        try:
            trace.append("Starting g")
            yield from range(10)
        finally:
            trace.append("Finishing g")
    try:
        gi = g()
        for i in range(5):
            x = next(gi)
            trace.append("Yielded %s" % (x,))
        e = ValueError("tomato ejected")
        gi.throw(e)
    except ValueError:
        pass
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def test_attempting_to_send_to_non_generator():
    """
    >>> _lines(test_attempting_to_send_to_non_generator())
    starting g
    finishing g
    """
    trace = []
    def g():
        try:
            trace.append("starting g")
            yield from range(3)
            trace.append("g should not be here")
        finally:
            trace.append("finishing g")
    try:
        gi = g()
        next(gi)
        for x in range(3):
            y = gi.send(42)
            trace.append("Should not have yielded: %s" % y)
    except AttributeError:
        pass
    else:
        trace.append("was able to send into non-generator")
    return trace

def test_broken_getattr_handling():
    """
    >>> test_broken_getattr_handling()
    []
    """
    class Broken:
        def __iter__(self):
            return self
        def __next__(self):
            return 1
        next = __next__
        def __getattr__(self, attr):
            1/0

    def g():
        yield from Broken()

    not_raised = []
    try:
        gi = g()
        assert next(gi) == 1
        gi.send(1)
    except ZeroDivisionError:
        pass
    else:
        not_raised.append(1)

    try:
        gi = g()
        assert next(gi) == 1
        gi.throw(AttributeError)
    except ZeroDivisionError:
        pass
    else:
        not_raised.append(2)

    """
    # this currently only calls PyErr_WriteUnraisable() and doesn't raise ...
    try:
        gi = g()
        assert next(gi) == 1
        gi.close()
    except ZeroDivisionError:
        pass
    else:
        not_raised.append(3)
    """
    gi = g()
    assert next(gi) == 1
    gi.close()

    return not_raised

def test_exception_in_initial_next_call():
    """
    >>> _lines(test_exception_in_initial_next_call())
    g1 about to yield from g2
    """
    trace = []
    def g1():
        trace.append("g1 about to yield from g2")
        yield from g2()
        trace.append("g1 should not be here")
    def g2():
        yield 1/0
    def run():
        gi = g1()
        next(gi)
    try:
        run()
    except ZeroDivisionError:
        pass
    else:
        trace.append("ZeroDivisionError not raised")
    return trace

def test_attempted_yield_from_loop():
    """
    >>> _lines(test_attempted_yield_from_loop())
    g1: starting
    Yielded: y1
    g1: about to yield from g2
    g2: starting
    Yielded: y2
    g2: about to yield from g1
    """
    trace = []
    def g1():
        trace.append("g1: starting")
        yield "y1"
        trace.append("g1: about to yield from g2")
        yield from g2()
        trace.append("g1 should not be here")

    def g2():
        trace.append("g2: starting")
        yield "y2"
        trace.append("g2: about to yield from g1")
        yield from gi
        trace.append("g2 should not be here")
    try:
        gi = g1()
        for y in gi:
            trace.append("Yielded: %s" % (y,))
    except ValueError:
        pass # "generator already executing"
    else:
        trace.append("subgenerator didn't raise ValueError")
    return trace

def test_attempted_reentry():
    """
    >>> _lines(test_attempted_reentry())
    g1: starting
    Yielded: y1
    g1: about to yield from g2
    g2: starting
    Yielded: y2
    g2: about to yield from g1
    g2: caught ValueError
    Yielded: y3
    g1: after delegating to g2
    Yielded: y4
    """
    trace = []
    def g1():
        trace.append("g1: starting")
        yield "y1"
        trace.append("g1: about to yield from g2")
        yield from g2()
        trace.append("g1: after delegating to g2")
        yield "y4"

    def g2():
        trace.append("g2: starting")
        yield "y2"
        trace.append("g2: about to yield from g1")
        try:
            yield from gi
        except ValueError:
            trace.append("g2: caught ValueError")
        else:
            trace.append("g1 did not raise ValueError on reentry")
        yield "y3"
    gi = g1()
    for y in gi:
        trace.append("Yielded: %s" % (y,))
    return trace

def test_returning_value_from_delegated_throw():
    """
    >>> _lines(test_returning_value_from_delegated_throw())
    Starting g1
    Yielded g1 ham
    Starting g2
    Yielded g2 spam
    Caught LunchError in g2
    Yielded g2 yet more spam
    Yielded g1 eggs
    Finishing g1
    """
    trace = []
    def g1():
        try:
            trace.append("Starting g1")
            yield "g1 ham"
            yield from g2()
            yield "g1 eggs"
        finally:
            trace.append("Finishing g1")
    def g2():
        try:
            trace.append("Starting g2")
            yield "g2 spam"
            yield "g2 more spam"
        except LunchError:
            trace.append("Caught LunchError in g2")
            yield "g2 lunch saved"
            yield "g2 yet more spam"
    class LunchError(Exception):
        pass
    g = g1()
    for i in range(2):
        x = next(g)
        trace.append("Yielded %s" % (x,))
    e = LunchError("tomato ejected")
    g.throw(e)
    for x in g:
        trace.append("Yielded %s" % (x,))
    return trace

def test_next_and_return_with_value():
    """
    >>> _lines(test_next_and_return_with_value())
    g starting
    f resuming g
    g returning None
    f caught StopIteration
    g starting
    f resuming g
    g returning 42
    f caught StopIteration
    """
    trace = []
    def f(r):
        gi = g(r)
        next(gi)
        try:
            trace.append("f resuming g")
            next(gi)
            trace.append("f SHOULD NOT BE HERE")
        except StopIteration:
            trace.append("f caught StopIteration")
    def g(r):
        trace.append("g starting")
        yield
        trace.append("g returning %s" % (r,))
        return r
    f(None)
    f(42)
    return trace

def test_send_and_return_with_value():
    """
    >>> _lines(test_send_and_return_with_value())
    g starting
    f sending spam to g
    g received spam
    g returning None
    f caught StopIteration
    g starting
    f sending spam to g
    g received spam
    g returning 42
    f caught StopIteration
    """
    trace = []
    def f(r):
        gi = g(r)
        next(gi)
        try:
            trace.append("f sending spam to g")
            gi.send("spam")
            trace.append("f SHOULD NOT BE HERE")
        except StopIteration:
            trace.append("f caught StopIteration")
    def g(r):
        trace.append("g starting")
        x = yield
        trace.append("g received %s" % (x,))
        trace.append("g returning %s" % (r,))
        return r
    f(None)
    f(42)
    return trace

def test_catching_exception_from_subgen_and_returning():
    """
    Test catching an exception thrown into a
    subgenerator and returning a value

    >>> _lines(test_catching_exception_from_subgen_and_returning())
    1
    inner caught ValueError
    inner returned 2 to outer
    2
    """
    trace = []
    def inner():
        try:
            yield 1
        except ValueError:
            trace.append("inner caught ValueError")
        return 2

    def outer():
        v = yield from inner()
        trace.append("inner returned %r to outer" % v)
        yield v
    g = outer()
    trace.append(next(g))
    trace.append(g.throw(ValueError))
    return trace

def test_throwing_GeneratorExit_into_subgen_that_returns():
    """
    Test throwing GeneratorExit into a subgenerator that
    catches it and returns normally.

    >>> _lines(test_throwing_GeneratorExit_into_subgen_that_returns())
    Enter g
    Enter f
    """
    trace = []
    def f():
        try:
            trace.append("Enter f")
            yield
            trace.append("Exit f")
        except GeneratorExit:
            return
    def g():
        trace.append("Enter g")
        yield from f()
        trace.append("Exit g")
    try:
        gi = g()
        next(gi)
        gi.throw(GeneratorExit)
    except GeneratorExit:
        pass
    else:
        trace.append("subgenerator failed to raise GeneratorExit")
    return trace

def test_throwing_GeneratorExit_into_subgenerator_that_yields():
    """
    Test throwing GeneratorExit into a subgenerator that
    catches it and yields.

    >>> _lines(test_throwing_GeneratorExit_into_subgenerator_that_yields())
    Enter g
    Enter f
    """
    trace = []
    def f():
        try:
            trace.append("Enter f")
            yield
            trace.append("Exit f")
        except GeneratorExit:
            yield
    def g():
        trace.append("Enter g")
        yield from f()
        trace.append("Exit g")
    try:
        gi = g()
        next(gi)
        gi.throw(GeneratorExit)
    except RuntimeError:
        pass # "generator ignored GeneratorExit"
    else:
        trace.append("subgenerator failed to raise GeneratorExit")
    return trace

def test_throwing_GeneratorExit_into_subgen_that_raises():
    """
    Test throwing GeneratorExit into a subgenerator that
    catches it and raises a different exception.

    >>> _lines(test_throwing_GeneratorExit_into_subgen_that_raises())
    Enter g
    Enter f
    """
    trace = []
    def f():
        try:
            trace.append("Enter f")
            yield
            trace.append("Exit f")
        except GeneratorExit:
            raise ValueError("Vorpal bunny encountered")
    def g():
        trace.append("Enter g")
        yield from f()
        trace.append("Exit g")
    try:
        gi = g()
        next(gi)
        gi.throw(GeneratorExit)
    except ValueError:
        pass # "Vorpal bunny encountered"
    else:
        trace.append("subgenerator failed to raise ValueError")
    return trace

def test_yield_from_empty():
    """
    >>> test_yield_from_empty()
    """
    def g():
        yield from ()
    try:
        next(g())
    except StopIteration:
        pass
    else:
        return "FAILED"

# test re-entry guards

def _reentering_gen():
    def one():
        yield 0
        yield from two()
        yield 3
    def two():
        yield 1
        try:
            yield from g1
        except ValueError:
            pass
        yield 2
    g1 = one()
    return g1

def test_delegating_generators_claim_to_be_running_next():
    """
    >>> test_delegating_generators_claim_to_be_running_next()
    [0, 1, 2, 3]
    """
    return list(_reentering_gen())

def test_delegating_generators_claim_to_be_running_send():
    """
    >>> test_delegating_generators_claim_to_be_running_send()
    [0, 1, 2, 3]
    """
    g1 = _reentering_gen()
    res = [next(g1)]
    try:
        while True:
            res.append(g1.send(42))
    except StopIteration:
        pass
    return res

def test_delegating_generators_claim_to_be_running_throw():
    """
    >>> test_delegating_generators_claim_to_be_running_throw()
    [0, 1, 2, 3]
    """
    class MyErr(Exception):
        pass
    def one():
        try:
            yield 0
        except MyErr:
            pass
        yield from two()
        try:
            yield 3
        except MyErr:
            pass
    def two():
        try:
            yield 1
        except MyErr:
            pass
        try:
            yield from g1
        except ValueError:
            pass
        try:
            yield 2
        except MyErr:
            pass
    g1 = one()
    res = [next(g1)]
    try:
        while True:
            res.append(g1.throw(MyErr))
    except StopIteration:
        pass
    return res

def test_delegating_generators_claim_to_be_running_close():
    """
    >>> test_delegating_generators_claim_to_be_running_close()
    42
    """
    class MyIt(object):
        def __iter__(self):
            return self
        def __next__(self):
            return 42
        next = __next__
        def close(self):
            assert g1.gi_running
            try:
                next(g1)
            except ValueError:
                pass # guard worked
            else:
                assert False, "re-entry guard failed to bark"
    def one():
        yield from MyIt()
    g1 = one()
    ret = next(g1)
    g1.close()
    return ret


def yield_in_return(x):
    """
    >>> x = yield_in_return(range(3))
    >>> for _ in range(10):
    ...     try:
    ...         print(next(x))
    ...     except StopIteration:
    ...         if sys.version_info >= (3,3):
    ...             print(sys.exc_info()[1].value is None)
    ...         else:
    ...             print(True)
    ...         break
    0
    1
    2
    True
    """
    return (yield from x)


def gi_yieldfrom(it):
    """
    >>> it = iter([1, 2, 3])
    >>> g = gi_yieldfrom(it)
    >>> g.gi_yieldfrom is None or "ERROR: %r" % g.gi_yieldfrom
    True
    >>> next(g)
    1
    >>> g.gi_yieldfrom is it or "ERROR: %r" % g.gi_yieldfrom
    True
    """
    x = yield from it
    return x
Cython-0.26.1/tests/run/generator_frame_cycle.py0000664000175000017500000000103112542002467022464 0ustar  stefanstefan00000000000000# mode: run
# tag: generator

import sys

def _next(it):
    if sys.version_info[0] >= 3:
        return next(it)
    else:
        return it.next()

def test_generator_frame_cycle():
    """
    >>> test_generator_frame_cycle()
    ("I'm done",)
    """
    testit = []
    def whoo():
        try:
            yield
        except:
            yield
        finally:
            testit.append("I'm done")
    g = whoo()
    _next(g)
    # Frame object cycle
    eval('g.throw(ValueError)', {'g': g})
    del g
    return tuple(testit)
Cython-0.26.1/tests/run/class_redefine.py0000664000175000017500000000041612542002467021121 0ustar  stefanstefan00000000000000
class set(object):
    def __init__(self, x):
        self.x = x

SET = set([1])

class set(object):
    def __init__(self, x):
        self.X = x

def test_class_redef(x):
    """
    >>> SET.x
    [1]
    >>> test_class_redef(2).X
    [2]
    """
    return set([x])
Cython-0.26.1/tests/run/ctruthtests.pyx0000664000175000017500000000301412542002467020734 0ustar  stefanstefan00000000000000def test_ptr():
    """
    >>> test_ptr()
    False
    """
    cdef void* p = NULL
    if p:
        return True
    else:
        return False

def test_ptr2():
    """
    >>> test_ptr2()
    2
    """
    cdef char* p1 = NULL
    cdef char* p2 = NULL
    p1 += 1

    if p1 and p2:
        return 1
    elif p1 or p2:
        return 2
    else:
        return 3

def test_int(int i):
    """
    >>> test_int(0)
    False
    >>> test_int(1)
    True
    """
    if i:
        return True
    else:
        return False

def test_short(short i):
    """
    >>> test_short(0)
    False
    >>> test_short(1)
    True
    """
    if i:
        return True
    else:
        return False

def test_Py_ssize_t(Py_ssize_t i):
    """
    >>> test_Py_ssize_t(0)
    False
    >>> test_Py_ssize_t(1)
    True
    """
    if i:
        return True
    else:
        return False

cdef class TestExtInt:
    cdef int i
    def __init__(self, i): self.i = i

def test_attr_int(TestExtInt e):
    """
    >>> test_attr_int(TestExtInt(0))
    False
    >>> test_attr_int(TestExtInt(1))
    True
    """
    if e.i:
        return True
    else:
        return False

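# _aux reinterprets a size_t as a void*, letting TestExtPtr store an integer
# in its pointer field for the truth tests below.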
ctypedef union _aux:
    size_t i
    void *p

cdef class TestExtPtr:
    cdef void* p
    def __init__(self, int i):
        cdef _aux aux
        aux.i = i
        self.p = aux.p

def test_attr_ptr(TestExtPtr e):
    """
    >>> test_attr_ptr(TestExtPtr(0))
    False
    >>> test_attr_ptr(TestExtPtr(1))
    True
    """
    if e.p:
        return True
    else:
        return False
Cython-0.26.1/tests/run/getattr3call.pyx0000664000175000017500000000175412542002467020742 0ustar  stefanstefan00000000000000
class test(object):
    a = 1
t = test()

def getattr2_literal_unicode(a):
    """
    >>> getattr2_literal_unicode(t)
    1
    >>> getattr2_literal_unicode(object())
    Traceback (most recent call last):
    AttributeError: 'object' object has no attribute 'a'
    """
    return getattr(a, u"a")

def getattr3_literal_unicode(a, b):
    """
    >>> getattr3_literal_unicode(t, 2)
    (1, 2)
    """
    return getattr(a, u"a", b), getattr(a, u"b", b)

def getattr2_simple(a, b):
    """
    >>> getattr2_simple(t, 'a')
    1
    >>> getattr2_simple(t, 'b')
    Traceback (most recent call last):
    AttributeError: 'test' object has no attribute 'b'
    """
    return getattr(a, b)

def getattr3_explicit(a, b, c):
    """
    >>> getattr3_explicit(t, 'a', 2)
    1
    >>> getattr3_explicit(t, 'b', 2)
    2
    """
    return getattr3(a, b, c)

def getattr3_args(a, b, c):
    """
    >>> getattr3_args(t, 'a', 2)
    1
    >>> getattr3_args(t, 'b', 2)
    2
    """
    return getattr(a, b, c)
Cython-0.26.1/tests/run/closure_leak_1.pyx0000664000175000017500000000032713023021033021215 0ustar  stefanstefan00000000000000# mode: run
# tag: closure

def reassign_args(x, *args):
    """
    >>> reassign_args(1, [1,2,3,4])
    """
    a,args = args[0], args[1:]
    b = False
    if b:
        c = x.map_coefficients(lambda c: c(*args))
Cython-0.26.1/tests/run/generators.pyx0000664000175000017500000002263312574327400020523 0ustar  stefanstefan00000000000000# mode: run
# tag: generators

try:
    import backports_abc
except ImportError: pass
else: backports_abc.patch()

try:
    from collections.abc import Generator
except ImportError:
    try:
        from collections import Generator
    except ImportError:
        Generator = object  # easy win


def very_simple():
    """
    >>> x = very_simple()
    >>> next(x)
    1
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> next(x)
    Traceback (most recent call last):
    StopIteration

    >>> x = very_simple()
    >>> x.send(1)
    Traceback (most recent call last):
    TypeError: can't send non-None value to a just-started generator
    """
    yield 1


def attributes():
    """
    >>> x = attributes()
    >>> x.__name__
    'attributes'
    >>> x.__qualname__
    'attributes'
    >>> x.gi_running  # before next()
    False
    >>> inner = next(x)
    >>> x.gi_running  # after next()
    False
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> x.gi_running  # after termination
    False

    >>> y = inner()
    >>> y.__name__
    '<lambda>'
    >>> y.__qualname__
    'attributes.<locals>.inner.<locals>.<lambda>'

    >>> y.__name__ = 123
    Traceback (most recent call last):
    TypeError: __name__ must be set to a string object
    >>> y.__name__
    '<lambda>'
    >>> y.__qualname__ = None
    Traceback (most recent call last):
    TypeError: __qualname__ must be set to a string object
    >>> y.__qualname__
    'attributes.<locals>.inner.<locals>.<lambda>'

    >>> y.__name__ = 'abc'
    >>> y.__name__
    'abc'
    >>> y.__name__ = None
    Traceback (most recent call last):
    TypeError: __name__ must be set to a string object
    >>> y.__name__
    'abc'
    >>> y.__qualname__ = 'huhu'
    >>> y.__qualname__
    'huhu'
    >>> y.__qualname__ = 123
    Traceback (most recent call last):
    TypeError: __qualname__ must be set to a string object
    >>> y.__qualname__
    'huhu'
    """
    def inner():
        return (lambda : (yield 1))
    yield inner()


def simple():
    """
    >>> x = simple()
    >>> list(x)
    [1, 2, 3]
    """
    yield 1
    yield 2
    yield 3

def simple_seq(seq):
    """
    >>> x = simple_seq("abc")
    >>> list(x)
    ['a', 'b', 'c']
    """
    for i in seq:
        yield i

def simple_send():
    """
    >>> x = simple_send()
    >>> next(x)
    >>> x.send(1)
    1
    >>> x.send(2)
    2
    >>> x.send(3)
    3
    """
    i = None
    while True:
        i = yield i

def raising():
    """
    >>> x = raising()
    >>> next(x)
    Traceback (most recent call last):
    KeyError: 'foo'
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    yield {}['foo']

def with_outer(*args):
    """
    >>> x = with_outer(1, 2, 3)
    >>> list(x())
    [1, 2, 3]
    """
    def generator():
        for i in args:
            yield i
    return generator

def with_outer_raising(*args):
    """
    >>> x = with_outer_raising(1, 2, 3)
    >>> list(x())
    [1, 2, 3]
    """
    def generator():
        for i in args:
            yield i
        raise StopIteration
    return generator

def test_close():
    """
    >>> x = test_close()
    >>> x.close()
    >>> x = test_close()
    >>> next(x)
    >>> x.close()
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    while True:
        yield

def test_ignore_close():
    """
    >>> x = test_ignore_close()
    >>> x.close()
    >>> x = test_ignore_close()
    >>> next(x)
    >>> x.close()
    Traceback (most recent call last):
    RuntimeError: generator ignored GeneratorExit
    """
    try:
        yield
    except GeneratorExit:
        yield

def check_throw():
    """
    >>> x = check_throw()
    >>> x.throw(ValueError)
    Traceback (most recent call last):
    ValueError
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    >>> x = check_throw()
    >>> next(x)
    >>> x.throw(ValueError)
    >>> next(x)
    >>> x.throw(IndexError, "oops")
    Traceback (most recent call last):
    IndexError: oops
    >>> next(x)
    Traceback (most recent call last):
    StopIteration
    """
    while True:
        try:
            yield
        except ValueError:
            pass

def test_first_assignment():
    """
    >>> gen = test_first_assignment()
    >>> next(gen)
    5
    >>> next(gen)
    10
    >>> next(gen)
    (5, 10)
    """
    cdef x = 5 # first
    yield x
    cdef y = 10 # first
    yield y
    yield (x,y)

def test_swap_assignment():
    """
    >>> gen = test_swap_assignment()
    >>> next(gen)
    (5, 10)
    >>> next(gen)
    (10, 5)
    """
    x,y = 5,10
    yield (x,y)
    x,y = y,x   # no ref-counting here
    yield (x,y)


class Foo(object):
    """
    >>> obj = Foo()
    >>> list(obj.simple(1, 2, 3))
    [1, 2, 3]
    """
    def simple(self, *args):
        for i in args:
            yield i

def generator_nonlocal():
    """
    >>> g = generator_nonlocal()
    >>> list(g(5))
    [2, 3, 4, 5, 6]
    """
    def f(x):
        def g(y):
            nonlocal x
            for i in range(y):
                x += 1
                yield x
        return g
    return f(1)

def test_nested(a, b, c):
    """
    >>> obj = test_nested(1, 2, 3)
    >>> [i() for i in obj]
    [1, 2, 3, 4]
    """
    def one():
        return a
    def two():
        return b
    def three():
        return c
    def new_closure(a, b):
        def sum():
            return a + b
        return sum
    yield one
    yield two
    yield three
    yield new_closure(a, c)


def tolist(func):
    def wrapper(*args, **kwargs):
        return list(func(*args, **kwargs))
    return wrapper

@tolist
def test_decorated(*args):
    """
    >>> test_decorated(1, 2, 3)
    [1, 2, 3]
    """
    for i in args:
        yield i


def test_return(a):
    """
    >>> d = dict()
    >>> obj = test_return(d)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    >>> d['i_was_here']
    True
    """
    yield 1
    a['i_was_here'] = True
    return


def test_return_in_finally(a):
    """
    >>> d = dict()
    >>> obj = test_return_in_finally(d)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    >>> d['i_was_here']
    True

    >>> d = dict()
    >>> obj = test_return_in_finally(d)
    >>> next(obj)
    1
    >>> obj.send(2)
    Traceback (most recent call last):
    StopIteration
    >>> d['i_was_here']
    True

    >>> obj = test_return_in_finally(None)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration

    >>> obj = test_return_in_finally(None)
    >>> next(obj)
    1
    >>> obj.send(2)
    Traceback (most recent call last):
    StopIteration
    """
    yield 1
    try:
        a['i_was_here'] = True
    finally:
        return


def test_return_none_in_finally(a):
    """
    >>> d = dict()
    >>> obj = test_return_none_in_finally(d)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    >>> d['i_was_here']
    True

    >>> obj = test_return_none_in_finally(None)
    >>> next(obj)
    1
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    """
    yield 1
    try:
        a['i_was_here'] = True
    finally:
        return None


def test_copied_yield(foo):
    """
    >>> class Manager(object):
    ...    def __enter__(self):
    ...        return self
    ...    def __exit__(self, type, value, tb):
    ...        pass
    >>> list(test_copied_yield(Manager()))
    [1]
    """
    with foo:
        yield 1

def test_nested_yield():
    """
    >>> obj = test_nested_yield()
    >>> next(obj)
    1
    >>> obj.send(2)
    2
    >>> obj.send(3)
    3
    >>> obj.send(4)
    Traceback (most recent call last):
    StopIteration
    """
    yield (yield (yield 1))

def test_inside_lambda():
    """
    >>> obj = test_inside_lambda()()
    >>> next(obj)
    1
    >>> next(obj)
    2
    >>> next(obj)
    Traceback (most recent call last):
    StopIteration
    """
    return lambda:((yield 1), (yield 2))

def test_nested_gen(int n):
    """
    >>> [list(a) for a in test_nested_gen(5)]
    [[], [0], [0, 1], [0, 1, 2], [0, 1, 2, 3]]
    """
    for a in range(n):
        yield (b for b in range(a))

def test_lambda(n):
    """
    >>> [i() for i in test_lambda(3)]
    [0, 1, 2]
    """
    for i in range(n):
        yield lambda : i


def test_with_gil_section():
    """
    >>> list(test_with_gil_section())
    [0, 1, 2]
    """
    cdef int i
    with nogil:
        for i in range(3):
            with gil:
                yield i


def test_double_with_gil_section():
    """
    >>> list(test_double_with_gil_section())
    [0, 1, 2, 3]
    """
    cdef int i,j
    with nogil:
        for i in range(2):
            with gil:
                with nogil:
                    for j in range(2):
                        with gil:
                            yield i*2+j
                with nogil:
                    pass
            with gil:
                pass


def test_generator_abc():
    """
    >>> try:
    ...     from collections.abc import Generator
    ... except ImportError:
    ...     try:
    ...         from collections import Generator
    ...     except ImportError:
    ...         Generator = object  # easy win

    >>> isinstance(test_generator_abc(), Generator)
    True
    >>> isinstance((lambda:(yield))(), Generator)
    True
    """
    yield 1
Cython-0.26.1/tests/run/arg_incref.pyx0000664000175000017500000000027112542002467020441 0ustar  stefanstefan00000000000000def f(dict d, x=4):
    """
    >>> f({1:1, 2:2})
    [1, 2]
    """
    cdef dict d_new = {}
    l = []
    for k in d:
        d = d_new
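        # 'd' is rebound here, but iteration continues over the original dict object the loop started with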
        l.append(k)
    l.sort()
    return l
Cython-0.26.1/tests/run/pylistsubtype.pyx0000664000175000017500000000101412542002467021276 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> l1 = Sub1([1,2,3])
    >>> len(l1)
    3

    >>> l2 = Sub2([1,2,3])
    >>> len(l2)
    3

    >>> isinstance(l1, list)
    True
    >>> isinstance(l2, list)
    True
    >>> isinstance(l1, Sub1)
    True
    >>> isinstance(l1, Sub2)
    True
    >>> isinstance(l2, Sub1)
    False
    >>> isinstance(l2, Sub2)
    True
"""

cdef extern from *:
    ctypedef class __builtin__.list [ object PyListObject ]:
        pass
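# the extern ctypedef above re-declares the builtin list type so the cdef classes below can subclass it at the C level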

cdef class Sub2(list):
    cdef char character

cdef class Sub1(Sub2):
    pass
Cython-0.26.1/tests/run/purecdef.py0000664000175000017500000000440412542002467017751 0ustar  stefanstefan00000000000000import cython
from cython import cfunc, cclass, ccall

@cython.test_assert_path_exists('//CFuncDefNode')
@cython.cfunc
def ftang():
    x = 0

@cython.test_assert_path_exists('//CFuncDefNode')
@cfunc
def fpure(a):
    return a*2

def test():
    """
    >>> test()
    4
    """
    ftang()
    return fpure(2)

with cfunc:
    @cython.test_assert_path_exists('//CFuncDefNode')
    def fwith1(a):
        return a*3

    @cython.test_assert_path_exists('//CFuncDefNode')
    def fwith2(a):
        return a*4

with cclass:
    @cython.test_assert_path_exists('//CClassDefNode')
    class Egg(object):
        pass
    @cython.test_assert_path_exists('//CClassDefNode')
    class BigEgg(object):
        @cython.test_assert_path_exists('//CFuncDefNode')
        @cython.cfunc
        def f(self, a):
            return a*10

def test_with():
    """
    >>> test_with()
    (3, 4, 50)
    """
    return fwith1(1), fwith2(1), BigEgg().f(5)

@cython.test_assert_path_exists('//CClassDefNode')
@cython.cclass
class PureFoo(object):
    a = cython.declare(cython.double)

    def __init__(self, a):
        self.a = a

    def __call__(self):
        return self.a

    @cython.test_assert_path_exists('//CFuncDefNode')
    @cython.cfunc
    def puremeth(self, a):
        return a*2

def test_method():
    """
    >>> test_method()
    4
    True
    """
    x = PureFoo(2)
    print(x.puremeth(2))
    if cython.compiled:
        print(isinstance(x(), float))
    else:
        print(True)
    return

@cython.ccall
def ccall_sqr(x):
    return x*x

@cclass
class Overidable(object):
    @ccall
    def meth(self):
        return 0

def test_ccall():
    """
    >>> test_ccall()
    25
    >>> ccall_sqr(5)
    25
    """
    return ccall_sqr(5)

def test_ccall_method(x):
    """
    >>> test_ccall_method(Overidable())
    0
    >>> Overidable().meth()
    0
    >>> class Foo(Overidable):
    ...    def meth(self):
    ...        return 1
    >>> test_ccall_method(Foo())
    1
    >>> Foo().meth()
    1
    """
    return x.meth()

@cython.cfunc
@cython.returns(cython.p_int)
@cython.locals(xptr=cython.p_int)
def typed_return(xptr):
    return xptr

def test_typed_return():
    """
    >>> test_typed_return()
    """
    x = cython.declare(int, 5)
    assert typed_return(cython.address(x))[0] is x
Cython-0.26.1/tests/run/r_addint.pyx0000664000175000017500000000057112542002467020131 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> def test(a, b):
    ...     return (a, b, add(a, b))

    >>> test(1, 2)
    (1, 2, 3)
    >>> [ repr(f) for f in test(17.25, 88.5) ]
    ['17.25', '88.5', '105.75']
    >>> test(u'eggs', u'spam')
    (u'eggs', u'spam', u'eggsspam')
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"u'", u"'")

def add(x, y):
    return x + y
Cython-0.26.1/tests/run/funcexceptreplace.pyx0000664000175000017500000000051412542002467022042 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> try: exc()
... except IndexError:
...     if IS_PY3:
...         print(isinstance(sys.exc_info()[1].__context__, ValueError))
...     else:
...         print(True)
True
"""

import sys
IS_PY3 = sys.version_info[0] >= 3

def exc():
    try:
        raise ValueError
    except ValueError:
        raise IndexError
Cython-0.26.1/tests/run/r_huss3.pyx0000664000175000017500000000100712542002467017726 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> try:
...     foo()
... except Exception, e:
...     print("%s: %s" % (e.__class__.__name__, e))
ValueError: 
>>> try:
...     bar()
... except Exception, e:
...     print("%s: %s" % (e.__class__.__name__, e))
"""

import sys
if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u"Exception, e", u"Exception as e")

def bar():
    try:
        raise TypeError
    except TypeError:
        pass

def foo():
    try:
        raise ValueError
    except ValueError, e:
        bar()
        raise
Cython-0.26.1/tests/run/libcpp_all.pyx0000664000175000017500000000755213023021033020435 0ustar  stefanstefan00000000000000# tag: cpp

import cython

cimport libcpp

cimport libcpp.deque
cimport libcpp.list
cimport libcpp.map
cimport libcpp.pair
cimport libcpp.queue
cimport libcpp.set
cimport libcpp.stack
cimport libcpp.vector
cimport libcpp.complex
cimport libcpp.limits

from libcpp.deque  cimport *
from libcpp.list   cimport *
from libcpp.map    cimport *
from libcpp.pair   cimport *
from libcpp.queue  cimport *
from libcpp.set    cimport *
from libcpp.stack  cimport *
from libcpp.vector cimport *
from libcpp.complex cimport *
from libcpp.limits cimport *

cdef libcpp.deque.deque[int]   d1 = deque[int]()
cdef libcpp.list.list[int]     l1 = list[int]()
cdef libcpp.map.map[int,int]   m1 = map[int,int]()
cdef libcpp.pair.pair[int,int] p1 = pair[int,int](1,2)
cdef libcpp.queue.queue[int]   q1 = queue[int]()
cdef libcpp.set.set[int]       s1 = set[int]()
cdef libcpp.stack.stack[int]   t1 = stack[int]()
cdef libcpp.vector.vector[int] v1 = vector[int]()

cdef deque[int].iterator id1 = d1.begin()
cdef deque[int].iterator id2 = d1.end()
cdef deque[int].reverse_iterator rid1 = d1.rbegin()
cdef deque[int].reverse_iterator rid2 = d1.rend()

cdef list[int].iterator il1 = l1.begin()
cdef list[int].iterator il2 = l1.end()
cdef list[int].reverse_iterator ril1 = l1.rbegin()
cdef list[int].reverse_iterator ril2 = l1.rend()

cdef map[int,int].iterator im1 = m1.begin()
cdef map[int,int].iterator im2 = m1.end()
cdef map[int,int].reverse_iterator rim1 = m1.rbegin()
cdef map[int,int].reverse_iterator rim2 = m1.rend()
cdef pair[map[int,int].iterator, bint] pimb = m1.insert(p1)

cdef set[int].iterator is1 = s1.begin()
cdef set[int].iterator is2 = s1.end()
cdef set[int].reverse_iterator ris1 = s1.rbegin()
cdef set[int].reverse_iterator ris2 = s1.rend()
cdef pair[set[int].iterator, bint] pisb = s1.insert(4)

cdef vector[int].iterator iv1 = v1.begin()
cdef vector[int].iterator iv2 = v1.end()
cdef vector[int].reverse_iterator riv1 = v1.rbegin()
cdef vector[int].reverse_iterator riv2 = v1.rend()

def test_vector_coercion(*args):
    """
    >>> test_vector_coercion(1.75)
    [1.75]
    >>> test_vector_coercion(1, 10, 100)
    [1.0, 10.0, 100.0]
    """
    v = new vector[double]()
    for a in args:
        v.push_back(a)
    return [v[0][i] for i in range(v.size())]

def test_const_vector(*args):
    """
    >>> test_const_vector(1.75)
    [1.75]
    >>> test_const_vector(1, 10, 100)
    [1.0, 10.0, 100.0]
    """
    cdef vector[double] v
    for a in args:
        v.push_back(a)
    return const_vector_to_list(v)

cdef const_vector_to_list(const vector[double]& cv):
    cdef vector[double].const_iterator iter = cv.const_begin()
    cdef lst = []
    while iter != cv.const_end():
        lst.append(cython.operator.dereference(iter))
        cython.operator.preincrement(iter)
    return lst

cdef double dmax = numeric_limits[double].max()
cdef double dmin = numeric_limits[double].min()
cdef double deps = numeric_limits[double].epsilon()
cdef double dqnan = numeric_limits[double].quiet_NaN()
cdef double dsnan = numeric_limits[double].signaling_NaN()
cdef double dinf = numeric_limits[double].infinity()

cdef int imax = numeric_limits[int].max()
cdef int imin = numeric_limits[int].min()
cdef int ieps = numeric_limits[int].epsilon()
cdef int iqnan = numeric_limits[int].quiet_NaN()
cdef int isnan = numeric_limits[int].signaling_NaN()
cdef int iinf = numeric_limits[int].infinity()

#API checks for containers with std::allocator declared
from libcpp.memory cimport allocator

cdef libcpp.vector.vector[int,allocator[int]] vec_alloc_int = libcpp.vector.vector[int,allocator[int]](10,1)
assert vec_alloc_int.size() == 10

cdef libcpp.list.list[int,allocator[int]] list_alloc_int = libcpp.list.list[int,allocator[int]](10,1)
assert list_alloc_int.size() == 10

##Something about the default params breaks the auto-conversion...
def convert_to_vector(I):
    """
    >>> convert_to_vector([1,2,3,4])
    """
    cdef vector[int] x = I

    
Cython-0.26.1/tests/run/cmethod_inline_T474.pyx0000664000175000017500000000135512542002467022051 0ustar  stefanstefan00000000000000# mode: run
# ticket: 474
cimport cython


cdef class TestInlineMethod(object):
    """
    >>> test = TestInlineMethod()
    >>> test.test_cdef_method()
    0
    """

    @cython.test_assert_path_exists(
        "//AttributeNode[@entry.is_inline_cmethod=True]",
        "//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cdef_method(self):
        return self.cdef_inline_method()


cdef class Subtyping(TestInlineMethod):
    """
    >>> test = Subtyping()
    >>> test.test_cdef_subtyping()
    0
    """

    @cython.test_assert_path_exists(
        "//AttributeNode[@entry.is_inline_cmethod=True]",
        "//AttributeNode[@entry.is_final_cmethod=True]")
    def test_cdef_subtyping(self):
        return self.cdef_inline_method()
Cython-0.26.1/tests/run/float_len_T480.pyx0000664000175000017500000000050212542002467021021 0ustar  stefanstefan00000000000000# ticket: 480

def f(x):
    return x

def len_f(x):
    """
    >>> len_f([1,2,3])
    3
    """
    return len(f(x))

def float_len_f(x):
    """
    >>> float_len_f([1,2,3])
    3.0
    """
    return float(len(f(x)))

def cast_len_f(x):
    """
    >>> cast_len_f([1,2,3])
    3.0
    """
    return <double>len(f(x))
Cython-0.26.1/tests/run/call_crash.pyx0000664000175000017500000000064712542002467020444 0ustar  stefanstefan00000000000000cdef class A:
    """
    >>> A().test(3)
    9
    """

    cdef int (*func_ptr)(int)

    def __init__(self):
        self.func_ptr = &func

    cdef int do_it(self, int s):
        cdef int r = first_call(self).func_ptr(s) # the temp for first_call(self) not properly freed
        return r

    def test(self, s):
        return self.do_it(s)

cdef A first_call(A x):
    return x

cdef int func(int s):
    return s*s
Cython-0.26.1/tests/run/exceptionpropagation.pyx0000664000175000017500000000164412542002467022611 0ustar  stefanstefan00000000000000cdef int CHKERR(int ierr) except -1:
    if ierr==0: return 0
    raise RuntimeError

cdef int obj2int(object ob) except *:
    return ob

def foo(a):
    """
    >>> foo(0)
    >>> foo(1)
    Traceback (most recent call last):
    RuntimeError
    """
    cdef int i = obj2int(a)
    CHKERR(i)

cdef int* except_expr(bint fire) except -1:
    if fire:
        raise RuntimeError

def test_except_expr(bint fire):
    """
    >>> test_except_expr(False)
    >>> test_except_expr(True)
    Traceback (most recent call last):
    ...
    RuntimeError
    """
    except_expr(fire)

cdef double except_big_result(bint fire) except 100000000000000000000000000000000:
    if fire:
        raise RuntimeError

def test_except_big_result(bint fire):
    """
    >>> test_except_big_result(False)
    >>> test_except_big_result(True)
    Traceback (most recent call last):
    ...
    RuntimeError
    """
    except_big_result(fire)
Cython-0.26.1/tests/run/ctypedef_int_types_defs_T333.pxd0000664000175000017500000000115412542002467023734 0ustar  stefanstefan00000000000000cdef extern from "ctypedef_int_types_chdr_T333.h":
    ctypedef int SChar     ## "signed char"
    ctypedef int UChar     ## "unsigned char"
    ctypedef int SShort    ## "signed short"
    ctypedef int UShort    ## "unsigned short"
    ctypedef int SInt      ## "signed int"
    ctypedef int UInt      ## "unsigned int"
    ctypedef int SLong     ## "signed long"
    ctypedef int ULong     ## "unsigned long"
    ctypedef int SLongLong ## "signed PY_LONG_LONG"
    ctypedef int ULongLong ## "unsigned PY_LONG_LONG"

cdef extern from *:
    ctypedef int ExtSInt "signed short"
    ctypedef int ExtUInt "unsigned short"
Cython-0.26.1/tests/run/closures_T82.pyx0000664000175000017500000001056012542002467020640 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# ticket: 82
# preparse: id
# preparse: def_to_cdef

cimport cython

def add_n(int n):
    """
    >>> f = add_n(3)
    >>> f(2)
    5

    >>> f = add_n(1000000)
    >>> f(1000000), f(-1000000)
    (2000000, 0)
    """
    def f(int x):
        return x+n
    return f

def a(int x):
    """
    >>> a(5)()
    8
    """
    def b():
        def c():
            return 3+x
        return c()
    return b

def local_x(int arg_x):
    """
    >>> local_x(1)(2)(4)
    4 2 1
    15
    """
    cdef int local_x = arg_x
    def y(arg_y):
        y = arg_y
        def z(long arg_z):
            cdef long z = arg_z
            print z, y, local_x
            return 8+z+y+local_x
        return z
    return y

def x(int x):
    """
    >>> x(1)(2)(4)
    15
    """
    def y(y):
        def z(long z):
            return 8+z+y+x
        return z
    return y

def x2(int x2):
    """
    >>> x2(1)(2)(4)
    4 2 1
    15
    """
    def y2(y2):
        def z2(long z2):
            print z2, y2, x2
            return 8+z2+y2+x2
        return z2
    return y2


def inner_override(a,b):
    """
    >>> inner_override(2,4)()
    5
    """
    def f():
        a = 1
        return a+b
    return f


def reassign(x):
    """
    >>> reassign(4)(2)
    3
    """
    def f(a):
        return a+x
    x = 1
    return f

def reassign_int(x):
    """
    >>> reassign_int(4)(2)
    3
    """
    def f(int a):
        return a+x
    x = 1
    return f

def reassign_int_int(int x):
    """
    >>> reassign_int_int(4)(2)
    3
    """
    def f(int a):
        return a+x
    x = 1
    return f


def cy_twofuncs(x):
    """
    >>> def py_twofuncs(x):
    ...    def f(a):
    ...        return g(x) + a
    ...    def g(b):
    ...        return x + b
    ...    return f

    >>> py_twofuncs(1)(2) == cy_twofuncs(1)(2)
    True
    >>> py_twofuncs(3)(5) == cy_twofuncs(3)(5)
    True
    """
    def f(a):
        return g(x) + a
    def g(b):
        return x + b
    return f

def switch_funcs(a, b, int ix):
    """
    >>> switch_funcs([1,2,3], [4,5,6], 0)([10])
    [1, 2, 3, 10]
    >>> switch_funcs([1,2,3], [4,5,6], 1)([10])
    [4, 5, 6, 10]
    >>> switch_funcs([1,2,3], [4,5,6], 2) is None
    True
    """
    def f(x):
        return a + x
    def g(x):
        return b + x
    if ix == 0:
        return f
    elif ix == 1:
        return g
    else:
        return None

def ignore_func(x):
    def f():
        return x
    return None

def call_ignore_func():
    """
    >>> call_ignore_func()
    """
    ignore_func((1,2,3))

def more_inner_funcs(x):
    """
    >>> inner_funcs = more_inner_funcs(1)(2,4,8)
    >>> inner_funcs[0](16), inner_funcs[1](32), inner_funcs[2](64)
    (19, 37, 73)
    """
    # called with x==1
    def f(a):
        def g(b):
            # called with 16
            return a+b+x
        return g
    def g(b):
        def f(a):
            # called with 32
            return a+b+x
        return f
    def h(b):
        def f(a):
            # called with 64
            return a+b+x
        return f
    def resolve(a_f, b_g, b_h):
        # called with (2,4,8)
        return f(a_f), g(b_g), h(b_h)
    return resolve


@cython.test_assert_path_exists("//DefNode//DefNode//DefNode//DefNode",
                                "//DefNode[@needs_outer_scope = False]", # deep_inner()
                                "//DefNode//DefNode//DefNode//DefNode[@needs_closure = False]", # h()
                                )
@cython.test_fail_if_path_exists("//DefNode//DefNode[@needs_outer_scope = False]")
def deep_inner():
    """
    >>> deep_inner()()
    2
    """
    cdef int x = 1
    def f():
        def g():
            def h():
                return x+1
            return h
        return g()
    return f()


@cython.test_assert_path_exists("//DefNode//DefNode//DefNode",
                                "//DefNode//DefNode//DefNode[@needs_outer_scope = False]",  # a()
                                "//DefNode//DefNode//DefNode[@needs_closure = False]", # a(), g(), h()
                                )
@cython.test_fail_if_path_exists("//DefNode//DefNode//DefNode[@needs_closure = True]") # a(), g(), h()
def deep_inner_sibling():
    """
    >>> deep_inner_sibling()()
    2
    """
    cdef int x = 1
    def f():
        def a():
            return 1
        def g():
            return x+a()
        def h():
            return g()
        return h
    return f()
Cython-0.26.1/tests/run/shapes.h0000664000175000017500000000242112542002467017233 0ustar  stefanstefan00000000000000#ifndef SHAPES_H
#define SHAPES_H

namespace shapes {

    int constructor_count = 0;
    int destructor_count = 0;

    class Shape
    {
    public:
        virtual float area() const = 0;
        Shape() { constructor_count++; }
        virtual ~Shape() { destructor_count++; }
    };

    class Rectangle : public Shape
    {
    public:
    	Rectangle() { }
        Rectangle(int width, int height)
        {
            this->width = width;
            this->height = height;
        }

        float area() const { return width * height; }
        int width;
        int height;

        int method(int arg) {
            return width * height + arg;
        }

    };

    class Square : public Rectangle
    {
    public:
        Square(int side) : Rectangle(side, side) { this->side = side; }
        int side;
    };
    
    class Ellipse : public Shape {
    public:
        Ellipse(int a, int b) { this->a = a; this->b = b; }
        float area() const { return 3.1415926535897931f * a * b; }
        int a, b;
    };
    
    class Circle : public Ellipse {
    public:
        Circle(int radius) : Ellipse(radius, radius) { this->radius = radius; }
        int radius;
    };

    class Empty : public Shape {
    public:
        float area() const { return 0; }
    };

}

#endif
Cython-0.26.1/tests/run/withnogil.pyx0000664000175000017500000000040212542002467020342 0ustar  stefanstefan00000000000000def f(x):
    """
    >>> f(1)
    (1, 17)
    """
    cdef int y
    z = 42
    with nogil:
        y = 17
    z = x
    return z,y

def g():
    """
    >>> g()
    1
    """
    with nogil:
        h()
    return 1

cdef int h() nogil except -1:
    pass
Cython-0.26.1/tests/run/cpdef_extern_func.pxd0000664000175000017500000000020312542002467021771 0ustar  stefanstefan00000000000000# cython: c_string_type=str
# cython: c_string_encoding=ascii

cdef extern from "math.h":
    cpdef double pxd_sqrt "sqrt"(double)
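    # cpdef on an extern function also generates a Python-visible wrapper named pxd_sqrt around C sqrt()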
Cython-0.26.1/tests/run/capiimpl.pyx0000664000175000017500000000156712542002467020151 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> import sys
>>> sys.getrefcount(Foo.__pyx_vtable__)
2
>>> sys.getrefcount(__pyx_capi__['bar'])
2
>>> sys.getrefcount(__pyx_capi__['spam'])
2
>>> sys.getrefcount(__pyx_capi__['ten'])
2
>>> sys.getrefcount(__pyx_capi__['pi'])
2
>>> sys.getrefcount(__pyx_capi__['obj'])
2
>>> sys.getrefcount(__pyx_capi__['dct'])
2
>>> sys.getrefcount(__pyx_capi__['tpl'])
2
>>> sys.getrefcount(__pyx_capi__['one'])
2
>>> sys.getrefcount(__pyx_capi__['two'])
Traceback (most recent call last):
  ...
KeyError: 'two'
"""

cdef public api class Foo [type FooType, object FooObject]:
    cdef void bar(self):
        pass

cdef public api void bar():
    pass
cdef api void spam():
    pass

cdef api int    ten = 10
cdef api double pi = 3.14
cdef api object obj = object()
cdef api dict   dct = {}

cdef public api tuple tpl = ()
cdef public api float one = 1
cdef public     float two = 2

Cython-0.26.1/tests/run/decorators_T593.pyx0000664000175000017500000000372112542002467021236 0ustar  stefanstefan00000000000000# mode: run
# ticket: 593
# tag: property, decorator

"""
>>> am_i_buggy
False
>>> Foo
False
"""
def testme(func):
    try:
        am_i_buggy
        return True
    except NameError:
        return False

@testme
def am_i_buggy():
    pass

def testclass(klass):
    try:
        Foo
        return True
    except NameError:
        return False
@testclass
class Foo:
    pass


def called_deco(a,b,c):
    def count(f):
        a.append( (b,c) )
        return f
    return count

L = []

@called_deco(L, 5, c=6)
@called_deco(L, c=3, b=4)
@called_deco(L, 1, 2)
def wrapped_func(x):
    """
    >>> L
    [(1, 2), (4, 3), (5, 6)]
    >>> wrapped_func(99)
    99
    >>> L
    [(1, 2), (4, 3), (5, 6)]
    """
    return x


def class_in_closure(x):
    """
    >>> C1, c0 = class_in_closure(5)
    >>> C1().smeth1()
    (5, ())
    >>> C1.smeth1(1,2)
    (5, (1, 2))
    >>> C1.smeth1()
    (5, ())
    >>> c0.smeth0()
    1
    >>> c0.__class__.smeth0()
    1
    """
    class ClosureClass1(object):
        @staticmethod
        def smeth1(*args):
            return x, args

    class ClosureClass0(object):
        @staticmethod
        def smeth0():
            return 1

    return ClosureClass1, ClosureClass0()

def class_not_in_closure():
    """
    >>> c = class_not_in_closure()
    >>> c.smeth0()
    1
    >>> c.__class__.smeth0()
    1
    """
    class ClosureClass0(object):
        @staticmethod
        def smeth0():
            return 1

    return ClosureClass0()

class ODict(dict):
   def __init__(self):
       dict.__init__(self)
       self._order = []
       dict.__setitem__(self, '_order', self._order)
   def __setitem__(self, key, value):
       dict.__setitem__(self, key, value)
       self._order.append(key)

class Base(type):
   @staticmethod
   def __prepare__(*args, **kwargs):
       return ODict()

class Bar(metaclass=Base):
   """
   >>> Bar._order
   ['__module__', '__qualname__', '__doc__', 'bar']
   """
   @property
   def bar(self):
       return 0
Cython-0.26.1/tests/run/bint.pyx0000664000175000017500000000114612542002467017300 0ustar  stefanstefan00000000000000from cython cimport typeof

def test(bint value):
    """
    >>> test(True)
    True
    >>> test(False)
    False
    >>> test(None)
    False

    >>> test(0)
    False
    >>> test(1)
    True
    >>> test(-1)
    True
    >>> test(100)
    True

    >>> test(0.0)
    False
    >>> test(0.1)
    True

    >>> test([])
    False
    >>> test([1, 2, 3])
    True
    """
    return value

def test_types(bint a):
    """
    >>> test_types(None)
    """
    cdef bint b = a
    assert typeof(a) == 'bint', typeof(a)
    assert typeof(b) == 'bint', typeof(b)
    c = b
    assert typeof(c) == 'bint', typeof(c)
Cython-0.26.1/tests/run/cpp_operator_exc_handling.pyx0000664000175000017500000002050113023021033023521 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

from cython.operator import (preincrement, predecrement,
                             postincrement, postdecrement)
from libcpp cimport bool

cdef extern from "cpp_operator_exc_handling_helper.hpp" nogil:
    cppclass wrapped_int:
        long long val
        wrapped_int()
        wrapped_int(long long val)
        wrapped_int(long long v1, long long v2) except +
        wrapped_int operator+(wrapped_int &other) except +ValueError
        wrapped_int operator+() except +RuntimeError
        wrapped_int operator-(wrapped_int &other) except +
        wrapped_int operator-() except +
        wrapped_int operator*(wrapped_int &other) except +OverflowError
        wrapped_int operator/(wrapped_int &other) except +
        wrapped_int operator%(wrapped_int &other) except +
        long long operator^(wrapped_int &other) except +
        long long operator&(wrapped_int &other) except +
        long long operator|(wrapped_int &other) except +
        wrapped_int operator~() except +
        long long operator&() except +
        long long operator==(wrapped_int &other) except +
        long long operator!=(wrapped_int &other) except +
        long long operator<(wrapped_int &other) except +
        long long operator<=(wrapped_int &other) except +
        long long operator>(wrapped_int &other) except +
        long long operator>=(wrapped_int &other) except +
        wrapped_int operator<<(long long shift) except +
        wrapped_int operator>>(long long shift) except +
        wrapped_int &operator++() except +
        wrapped_int &operator--() except +
        wrapped_int operator++(int) except +
        wrapped_int operator--(int) except +
        wrapped_int operator!() except +
        bool operator bool() except +
        wrapped_int &operator[](long long &index) except +IndexError
        long long &operator()() except +AttributeError
        wrapped_int &operator=(const wrapped_int &other) except +ArithmeticError
        wrapped_int &operator=(const long long &vao) except +


def assert_raised(f, *args, **kwargs):
    err = kwargs.get('err', None)
    if err is None:
        try:
            f(*args)
            raised = False
        except:
            raised = True
    else:
        try:
            f(*args)
            raised = False
        except err:
            raised = True
    assert raised

def initialization(long long a, long long b):
    cdef wrapped_int w = wrapped_int(a, b)
    return w.val

def addition(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return (wa + wb).val

def subtraction(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return (wa - wb).val

def multiplication(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return (wa * wb).val

def division(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return (wa / wb).val

def mod(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return (wa % wb).val

def minus(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return (-wa).val

def plus(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return (+wa).val

def xor(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa ^ wb

def bitwise_and(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa & wb

def bitwise_or(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa | wb

def bitwise_not(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return (~wa).val

def address(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return &wa

def iseq(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa == wb

def neq(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa != wb

def less(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa < wb

def leq(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa <= wb

def greater(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa > wb

def geq(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    return wa >= wb

def left_shift(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    return (wa << b).val

def right_shift(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    return (wa >> b).val

def cpp_preincrement(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return preincrement(wa).val

def cpp_predecrement(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return predecrement(wa).val

def cpp_postincrement(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return postincrement(wa).val

def cpp_postdecrement(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return postdecrement(wa).val

def negate(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return (not wa).val

def bool_cast(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    if wa:
        return True
    else:
        return False

def index(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    return wa[b].val

def assign_index(long long a, long long b, long long c):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    wb[c] = wa
    return wb.val

def call(long long a):
    cdef wrapped_int wa = wrapped_int(a)
    return wa()

def assign_same(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    cdef wrapped_int wb = wrapped_int(b)
    wa = wb
    return wa.val

def assign_different(long long a, long long b):
    cdef wrapped_int wa = wrapped_int(a)
    wa = b
    return wa.val

def cascaded_assign(long long a, long long b, long long c):
    cdef wrapped_int wa = wrapped_int(a)
    wa = b = c
    return wa.val

def separate_exceptions(long long a, long long b, long long c, long long d, long long e):
    cdef:
        wrapped_int wa = wrapped_int(a)
        wrapped_int wc = wrapped_int(c)
        wrapped_int wd = wrapped_int(d)
        wrapped_int we = wrapped_int(e)
    wa[b] = (+wc) * wd + we
    return wa.val

def call_temp_separation(long long a, long long b, long long c):
    cdef:
        wrapped_int wa = wrapped_int(a)
        wrapped_int wc = wrapped_int(c)
    wa[b] = wc()
    return wa.val

def test_operator_exception_handling():
    """
    >>> test_operator_exception_handling()
    """
    assert_raised(initialization, 1, 4)
    assert_raised(addition, 1, 4)
    assert_raised(subtraction, 1, 4)
    assert_raised(multiplication, 1, 4)
    assert_raised(division, 1, 4)
    assert_raised(mod, 1, 4)
    assert_raised(minus, 4)
    assert_raised(plus, 4)
    assert_raised(xor, 1, 4)
    assert_raised(address, 4)
    assert_raised(iseq, 1, 4)
    assert_raised(neq, 1, 4)
    assert_raised(left_shift, 1, 4)
    assert_raised(right_shift, 1, 4)
    assert_raised(cpp_preincrement, 4)
    assert_raised(cpp_predecrement, 4)
    assert_raised(cpp_postincrement, 4)
    assert_raised(cpp_postdecrement, 4)
    assert_raised(negate, 4)
    assert_raised(bool_cast, 4)
    assert_raised(index, 1, 4)
    assert_raised(assign_index, 1, 4, 4)
    assert_raised(call, 4)
    assert_raised(assign_same, 4, 4)
    assert_raised(assign_different, 4, 4)
    assert_raised(cascaded_assign, 4, 4, 1)
    assert_raised(cascaded_assign, 4, 1, 4)
    assert_raised(separate_exceptions, 1, 1, 1, 1, 4, err=ValueError)
    assert_raised(separate_exceptions, 1, 1, 1, 4, 1, err=OverflowError)
    assert_raised(separate_exceptions, 1, 1, 4, 1, 1, err=RuntimeError)
    assert_raised(separate_exceptions, 1, 4, 1, 1, 1, err=IndexError)
    assert_raised(separate_exceptions, 4, 1, 1, 1, 3, err=ArithmeticError)
    assert_raised(call_temp_separation, 2, 1, 4, err=AttributeError)
    assert_raised(call_temp_separation, 2, 4, 1, err=IndexError)
Cython-0.26.1/tests/run/py_classbody.py0000664000175000017500000000273712542002467020656 0ustar  stefanstefan00000000000000# mode: run
# tag: pyclass, global


pyvar = 2

class TestPyAttr(object):
    """
    >>> TestPyAttr.pyvar    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...TestPyAttr...has no attribute 'pyvar'
    >>> TestPyAttr.pyval1
    3
    >>> TestPyAttr.pyval2
    2
    """
    pyvar = 3
    pyval1 = pyvar
    del pyvar
    pyval2 = pyvar


import cython
cdefvar = cython.declare(int, 10)

class TestCdefAttr(object):
    """
    >>> TestCdefAttr.cdefvar   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...TestCdefAttr...has no attribute 'cdefvar'
    >>> TestCdefAttr.cdefval1
    11

    >>> #TestCdefAttr.cdefval2
    """
    cdefvar = 11
    cdefval1 = cdefvar
    del cdefvar
    # cdefval2 = cdefvar       # FIXME: doesn't currently work ...


class ForLoopInPyClass(object):
    """
    >>> ForLoopInPyClass.i    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...ForLoopInPyClass... has no attribute ...i...
    >>> ForLoopInPyClass.k
    0
    >>> ForLoopInPyClass.m
    1
    """
    for i in range(0):
        pass

    for k in range(1):
        pass

    for m in range(2):
        pass


def del_in_class(x):
    """
    >>> del_in_class(True)
    no error
    >>> del_in_class(False)
    NameError
    """
    try:
        class Test(object):
            if x:
                attr = 1
            del attr
    except NameError:
        print("NameError")
    else:
        print("no error")
Cython-0.26.1/tests/run/boolop.pyx0000664000175000017500000000314712542002467017641 0ustar  stefanstefan00000000000000
def simple_values(obj1, obj2, obj3, obj4):
    """
    >>> simple_values(True, False, 23, 'test')
    (0.0, 1.0, False, False)
    """
    cdef int bool1, bool2
    cdef float bool3, bool4
    cdef char *ptr1, *ptr2, *ptr0
    cdef float f
    bool1 = 1
    bool2 = 0
    ptr1 = ptr2 = NULL
    f = 0.0

    bool3 = bool1 and bool2
    bool3 = bool1 or bool2
    bool3 = obj1 and obj2
    ptr0 = ptr1 and ptr2
    bool3 = bool1 and f
    bool4 = bool1 and bool2 and bool3
    bool4 = bool1 or bool2 and bool3
    obj4 = obj1 and obj2 and obj3
    obj5 = (obj1 + obj2 + obj3) and obj4
    return bool3, bool4, obj4, obj5


def non_simple_values(obj1, obj2, obj3, obj4):
    """
    >>> non_simple_values(1, 2, 3, 4)
    (7, 3, 7, 3, 7, 7, 5, 5)
    >>> non_simple_values(0, 0, 3, 4)
    (0, 7, 4, 4, 4, 4, 4, 4)
    >>> non_simple_values(0, 0, 1, -1)
    (0, 0, -1, 0, -1, -1, 0, 0)
    >>> non_simple_values(1, -1, 1, -1)
    (0, 0, 0, 0, 0, 0, 0, 0)
    >>> non_simple_values(1, 2, 1, -1)
    (0, 3, 0, 3, 0, 0, 1, 1)
    >>> non_simple_values(2, 1, 1, -1)
    (0, 3, 1, 3, 0, 0, 1, 1)
    """
    and1 = obj1 + obj2 and obj3 + obj4
    or1 = obj1 + obj2 or obj3 + obj4
    and_or = obj1 + obj2 and obj3 + obj4 or obj1 + obj4
    or_and = obj1 + obj2 or obj3 + obj4 and obj1 + obj4
    and_or_and = obj1 + obj2 and obj3 + obj4 or obj1 + obj4 and obj2 + obj4
    and1_or_and = (and1 or (obj1 + obj4 and obj2 + obj4))
    or_and_or = (obj1 + obj2 or obj3 + obj4) and (obj1 + obj4 or obj2 + obj4)
    or1_and_or = (or1 and (obj1 + obj4 or obj2 + obj4))
    return (and1, or1, and_or, or_and, and_or_and, and1_or_and, or_and_or, or1_and_or)
Cython-0.26.1/tests/run/temp_sideeffects_T654.pyx0000664000175000017500000000134512542002467022400 0ustar  stefanstefan00000000000000# ticket: 654

# function call arguments

# not really a bug, Cython warns about it now -- C argument evaluation order is undefined

arg_order = []

cdef int f():
    arg_order.append(1)
    return 1

def g():
    arg_order.append(2)
    return 2

cdef call2(int x, object o):
    return x, o

def test_c_call():
    """
    >>> arg_order
    []
    >>> test_c_call()
    (1, 2)
    >>> arg_order
    [1, 2]
    """
    return call2(f(), g())

# module globals

cdef object X = 1
cdef redefine_global():
    global X
    x,X = X,2
    return x

cdef call3(object x1, int o, object x2):
    return (x1, o, x2)

def test_global_redefine():
    """
    >>> test_global_redefine()
    (1, 1, 2)
    """
    return call3(X, redefine_global(), X)
Cython-0.26.1/tests/run/cpp_templates.pyx0000664000175000017500000000604313023021033021166 0ustar  stefanstefan00000000000000# tag: cpp

from cython.operator import dereference as deref

cdef extern from "cpp_templates_helper.h":
    cdef cppclass Wrap[T, AltType=*, UndeclarableAltType=*]:
        Wrap(T)
        void set(T)
        T get()
        bint operator==(Wrap[T])

        AltType get_alt_type()
        void set_alt_type(AltType)

        UndeclarableAltType create()
        bint accept(UndeclarableAltType)

    cdef cppclass Pair[T1,T2]:
        Pair(T1,T2)
        T1 first()
        T2 second()
        bint operator==(Pair[T1,T2])
        bint operator!=(Pair[T1,T2])

    cdef cppclass SuperClass[T1, T2]:
        pass

    cdef cppclass SubClass[T2, T3](SuperClass[T2, T3]):
        pass

    cdef cppclass Div[T]:
        @staticmethod
        T half(T value)

def test_int(int x, int y):
    """
    >>> test_int(3, 4)
    (3, 4, False)
    >>> test_int(100, 100)
    (100, 100, True)
    """
    try:
        a = new Wrap[int](x)
        b = new Wrap[int](0)
        b.set(y)
        return a.get(), b.get(), a[0] == b[0]
    finally:
        del a, b


def test_double(double x, double y):
    """
    >>> test_double(3, 3.5)
    (3.0, 3.5, False)
    >>> test_double(100, 100)
    (100.0, 100.0, True)
    """
    try:
        a = new Wrap[double](x)
        b = new Wrap[double](-1)
        b.set(y)
        return a.get(), b.get(), deref(a) == deref(b)
    finally:
        del a, b


def test_default_template_arguments(double x):
    """
    >>> test_default_template_arguments(3.5)
    (3.5, 3.0)
    """
    try:
        a = new Wrap[double](x)
        b = new Wrap[double, int, long](x)

        ax = a.get_alt_type()
        a.set_alt_type(ax)
        assert a.accept(a.create())  # never declared

        bx = b.get_alt_type()
        b.set_alt_type(bx)

        bc = b.create()              # declaration here is fine
        assert b.accept(bc)

        return a.get(), b.get()
    finally:
        del a


def test_pair(int i, double x):
    """
    >>> test_pair(1, 1.5)
    (1, 1.5, True, False)
    >>> test_pair(2, 2.25)
    (2, 2.25, True, False)
    """
    try:
        pair = new Pair[int, double](i, x)
        return pair.first(), pair.second(), deref(pair) == deref(pair), deref(pair) != deref(pair)
    finally:
        del pair

def test_ptr(int i):
    """
    >>> test_ptr(3)
    3
    >>> test_ptr(5)
    5
    """
    try:
        w = new Wrap[int*](&i)
        return deref(w.get())
    finally:
        del w

cdef double f(double x):
    return x*x

def test_func_ptr(double x):
    """
    >>> test_func_ptr(3)
    9.0
    >>> test_func_ptr(-1.5)
    2.25
    """
    try:
        w = new Wrap[double (*)(double)](&f)
        return w.get()(x)
    finally:
        del w

def test_cast_template_pointer():
    """
    >>> test_cast_template_pointer()
    """
    cdef SubClass[int, float] *sub = new SubClass[int, float]()
    cdef SuperClass[int, float] *sup

    sup = sub
    sup = <SuperClass[int, float]*> sub

def test_static(x):
    """
    >>> test_static(2)
    (1, 1.0)
    >>> test_static(3)
    (1, 1.5)
    """
    return Div[int].half(x), Div[double].half(x)
Cython-0.26.1/tests/run/r_docstrings.pyx0000664000175000017500000000351012542002467021041 0ustar  stefanstefan00000000000000# Some comments first


# More comments

'A module docstring'

doctest = u"""# Python 3 gets all of these right ...
    >>> __doc__
    'A module docstring'

    >>> f.__doc__
    '\\n    This is a function docstring.\\n    '

    >>> C.__doc__
    '\\n    This is a class docstring.\\n    '
    >>> CS.__doc__
    '\\n    This is a subclass docstring.\\n    '
    >>> print(CSS.__doc__)
    None

    >>> T.__doc__
    '\\n    This is an extension type docstring.\\n    '
    >>> TS.__doc__
    '\\n    This is an extension subtype docstring.\\n    '
    >>> TSS.__doc__

Compare with standard Python:

    >>> def Pyf():
    ...     '''
    ...     This is a function docstring.
    ...     '''
    >>> Pyf.__doc__
    '\\n    This is a function docstring.\\n    '

    >>> class PyC:
    ...     '''
    ...     This is a class docstring.
    ...     '''
    >>> class PyCS(C):
    ...     '''
    ...     This is a subclass docstring.
    ...     '''
    >>> class PyCSS(CS):
    ...     pass

    >>> PyC.__doc__
    '\\n    This is a class docstring.\\n    '
    >>> PyCS.__doc__
    '\\n    This is a subclass docstring.\\n    '
    >>> PyCSS.__doc__
"""

__test__ = {"test_docstrings" : doctest}

def f():
    """
    This is a function docstring.
    """

class C:
    """
    This is a class docstring.
    """

class CS(C):
    """
    This is a subclass docstring.
    """

class CSS(CS):
    pass

cdef class T:
    """
    This is an extension type docstring.
    """

cdef class TS(T):
    """
    This is an extension subtype docstring.
    """

cdef class TSS(TS):
    pass


def n():
    "This is not a docstring".lower()

class PyN(object):
    u"This is not a docstring".lower()

cdef class CN(object):
    b"This is not a docstring".lower()


def test_non_docstrings():
    """
    >>> n.__doc__
    >>> PyN.__doc__
    >>> CN.__doc__
    """
Cython-0.26.1/tests/run/coverage_cmd.srctree0000664000175000017500000001335012542002467021611 0ustar  stefanstefan00000000000000# mode: run
# tag: coverage,trace

"""
PYTHON -c 'import shutil; shutil.copy("pkg/coverage_test_pyx.pyx", "pkg/coverage_test_pyx.pxi")'
PYTHON setup.py build_ext -i
PYTHON -m coverage run coverage_test.py
PYTHON collect_coverage.py
"""

######## setup.py ########

from distutils.core import setup
from Cython.Build import cythonize

setup(ext_modules = cythonize([
    'coverage_test_*.py*',
    'pkg/coverage_test_*.py*'
]))


######## .coveragerc ########
[run]
plugins = Cython.Coverage


######## pkg/__init__.py ########

######## pkg/coverage_test_py.py ########
# cython: linetrace=True
# distutils: define_macros=CYTHON_TRACE=1

def func1(a, b):
    x = 1               #  5
    c = func2(a) + b    #  6
    return x + c        #  7


def func2(a):
    return a * 2        # 11


######## pkg/coverage_test_pyx.pyx ########
# cython: linetrace=True
# distutils: define_macros=CYTHON_TRACE=1

def func1(int a, int b):
    cdef int x = 1      #  5
    c = func2(a) + b    #  6
    return x + c        #  7


def func2(int a):
    return a * 2        # 11


######## coverage_test_include_pyx.pyx ########
# cython: linetrace=True
# distutils: define_macros=CYTHON_TRACE=1

cdef int x = 5                                   #  4

cdef int cfunc1(int x):                          #  6
    return x * 3                                 #  7

include "pkg/coverage_test_pyx.pxi"              #  9

def main_func(int x):                            # 11
    return cfunc1(x) + func1(x, 4) + func2(x)    # 12


######## coverage_test.py ########

import os.path
try:
    # io.StringIO in Py2.x cannot handle str ...
    from StringIO import StringIO
except ImportError:
    from io import StringIO

from pkg import coverage_test_py
from pkg import coverage_test_pyx
import coverage_test_include_pyx


for module in [coverage_test_py, coverage_test_pyx, coverage_test_include_pyx]:
    assert not any(module.__file__.endswith(ext) for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()), \
        module.__file__


def run_coverage(module):
    module_name = module.__name__
    module_path = module_name.replace('.', os.path.sep) + '.' + module_name.rsplit('_', 1)[-1]

    assert module.func1(1, 2) == (1 * 2) + 2 + 1
    assert module.func2(2) == 2 * 2
    if '_include_' in module_name:
        assert module.main_func(2) == (2 * 3) + ((2 * 2) + 4 + 1) + (2 * 2)


if __name__ == '__main__':
    run_coverage(coverage_test_py)
    run_coverage(coverage_test_pyx)
    run_coverage(coverage_test_include_pyx)


######## collect_coverage.py ########

import re
import sys
import os
import os.path
import subprocess
from glob import iglob


def run_coverage_command(*command):
    env = dict(os.environ, LANG='', LC_ALL='C')
    process = subprocess.Popen(
        [sys.executable, '-m', 'coverage'] + list(command),
        stdout=subprocess.PIPE, env=env)
    stdout, _ = process.communicate()
    return stdout


def run_report():
    stdout = run_coverage_command('report', '--show-missing')
    stdout = stdout.decode('iso8859-1')  # 'safe' decoding
    lines = stdout.splitlines()
    print(stdout)

    # FIXME:  'coverage_test_pyx.pxi' may not be found if coverage.py requests it before the .pyx file
    for module_path in ('coverage_test_py.py', 'coverage_test_pyx.pyx', 'coverage_test_include_pyx.pyx'):
        assert any(module_path in line for line in lines), "'%s' not found in coverage report:\n\n%s" % (
            module_path, stdout)

    files = {}
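    # 'files' maps basename -> (statements, missed, covered, [missing line numbers]) parsed from the text report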
    line_iter = iter(lines)
    for line in line_iter:
        if line.startswith('---'):
            break
    extend = [''] * 2
    for line in line_iter:
        if not line or line.startswith('---'):
            continue
        name, statements, missed, covered, _missing = (line.split(None, 4) + extend)[:5]
        missing = []
        for start, end in re.findall('([0-9]+)(?:-([0-9]+))?', _missing):
            if end:
                missing.extend(range(int(start), int(end)+1))
            else:
                missing.append(int(start))
        files[os.path.basename(name)] = (statements, missed, covered, missing)

    assert  7 not in files['coverage_test_pyx.pyx'][-1], files['coverage_test_pyx.pyx']
    assert 12 not in files['coverage_test_pyx.pyx'][-1], files['coverage_test_pyx.pyx']


def run_xml_report():
    stdout = run_coverage_command('xml', '-o', '-')
    print(stdout)

    import xml.etree.ElementTree as etree
    data = etree.fromstring(stdout)

    files = {}
    for module in data.iterfind('.//class'):
        files[module.get('filename').replace('\\', '/')] = dict(
            (int(line.get('number')), int(line.get('hits')))
            for line in module.findall('lines/line')
        )

    assert files['pkg/coverage_test_pyx.pyx'][5] > 0, files['pkg/coverage_test_pyx.pyx']
    assert files['pkg/coverage_test_pyx.pyx'][6] > 0, files['pkg/coverage_test_pyx.pyx']
    assert files['pkg/coverage_test_pyx.pyx'][7] > 0, files['pkg/coverage_test_pyx.pyx']


def run_html_report():
    stdout = run_coverage_command('html', '-d', 'html')
    _parse_lines = re.compile(
        r'<p[^>]* id=["\'][^0-9"\']*(?P<line>[0-9]+)[^0-9"\']*["\'][^>]*'
        r' class=["\'][^"\']*(?P<state>mis|run)[^"\']*["\']').findall
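    # each match is a (line number, "mis"/"run") pair scraped from coverage.py's HTML output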

    files = {}
    for file_path in iglob('html/*.html'):
        with open(file_path) as f:
            page = f.read()
        executed = set()
        missing = set()
        for line, has_run in _parse_lines(page):
            (executed if has_run == 'run' else missing).add(int(line))
        files[file_path] = (executed, missing)

    executed, missing = [data for path, data in files.items() if 'coverage_test_pyx' in path][0]
    assert executed
    assert 5 in executed, executed
    assert 6 in executed, executed
    assert 7 in executed, executed


if __name__ == '__main__':
    run_report()
    run_xml_report()
    run_html_report()
Cython-0.26.1/tests/run/ass2local.pyx0000664000175000017500000000010412542002467020220 0ustar  stefanstefan00000000000000def f():
    """
    >>> f()
    42
    """
    a = 42
    return a
Cython-0.26.1/tests/run/strconstinclass.pyx0000664000175000017500000000012112542002467021570 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> c = C()
    >>> c.x
    'foo'
"""

class C:
    x = "foo"
Cython-0.26.1/tests/run/test_raisefrom.pyx0000664000175000017500000000365212542002467021376 0ustar  stefanstefan00000000000000
import sys
import unittest

# adapted from pyregr
class TestCause(unittest.TestCase):
    def test_invalid_cause(self):
        try:
            raise IndexError from 5
        except TypeError as e:
            self.assertTrue("exception cause" in str(e))
        else:
            self.fail("No exception raised")

    def test_raise_from_none_sets_no_cause(self):
        try:
            raise IndexError from None
        except IndexError as e:
            self.assertFalse(e.__cause__)
            if sys.version_info[:2] >= (3,3):
                self.assertTrue(e.__suppress_context__)
        else:
            self.fail("No exception raised")

    def test_raise_from_none_covers_context(self):
        try:
            try:
                raise IndexError("INDEX")
            except IndexError as e:
                raise ValueError("VALUE") from None
            else:
                self.fail("No exception raised")
        except ValueError as e:
            self.assertFalse(e.__cause__)
            self.assertTrue(e.__context__)
            if sys.version_info[:2] >= (3,3):
                self.assertTrue(e.__suppress_context__)

    def test_class_cause(self):
        try:
            raise IndexError from KeyError
        except IndexError as e:
            self.assertTrue(isinstance(e.__cause__, KeyError))
        else:
            self.fail("No exception raised")

    def test_instance_cause(self):
        cause = KeyError()
        try:
            raise IndexError from cause
        except IndexError as e:
            self.assertTrue(e.__cause__ is cause)
        else:
            self.fail("No exception raised")

    def test_erroneous_cause(self):
        class MyException(Exception):
            def __init__(self):
                raise RuntimeError()

        try:
            raise IndexError from MyException
        except RuntimeError:
            pass
        else:
            self.fail("No exception raised")
Cython-0.26.1/tests/run/dict_getitem.pyx0000664000175000017500000000341312542002467021004 0ustar  stefanstefan00000000000000# mode: run
# tag: dict, getitem

cimport cython

def test(dict d, index):
    """
    >>> d = { 1: 10 }
    >>> test(d, 1)
    10

    >>> test(d, 2)
    Traceback (most recent call last):
    KeyError: 2

    >>> test(d, (1,2))
    Traceback (most recent call last):
    KeyError: (1, 2)

    >>> import sys
    >>> try: d[(1,)]
    ... except KeyError:
    ...     args = sys.exc_info()[1].args
    ...     if sys.version_info >= (2,5): print(args)
    ...     else: print((args,))   # fake it for older CPython versions
    ((1,),)

    >>> import sys
    >>> try: test(d, (1,))
    ... except KeyError:
    ...     args = sys.exc_info()[1].args
    ...     if sys.version_info >= (2,5): print(args)
    ...     else: print((args,))   # fake it for older CPython versions
    ((1,),)

    >>> class Unhashable:
    ...    def __hash__(self):
    ...        raise ValueError
    >>> test(d, Unhashable())
    Traceback (most recent call last):
    ValueError

    >>> test(None, 1) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...object...
    """
    return d[index]

def getitem_tuple(dict d, index):
    """
    >>> d = {1: 1, (1,): 2}
    >>> getitem_tuple(d, 1)
    (1, 2)
    """
    return d[index], d[index,]

def getitem_in_condition(dict d, key, expected_result):
    """
    >>> d = dict(a=1, b=2)
    >>> getitem_in_condition(d, 'a', 1)
    True
    """
    return d[key] is expected_result or d[key] == expected_result

@cython.test_fail_if_path_exists('//NoneCheckNode')
def getitem_not_none(dict d not None, key):
    """
    >>> d = { 1: 10 }
    >>> test(d, 1)
    10

    >>> test(d, 2)
    Traceback (most recent call last):
    KeyError: 2

    >>> test(d, (1,2))
    Traceback (most recent call last):
    KeyError: (1, 2)
    """
    return d[key]
Cython-0.26.1/tests/run/cdef_class_order.pyx0000664000175000017500000000025512542002467021625 0ustar  stefanstefan00000000000000cimport cython

cdef class B

cdef class A(object):
    cdef list dealloc1

cdef class B(A):
    cdef list dealloc2

def test():
    """
    >>> test()
    """
    A(), B()
Cython-0.26.1/tests/run/bint_binop_T145.pyx0000664000175000017500000000450712542002467021210 0ustar  stefanstefan00000000000000# ticket: 145

cimport cython

@cython.test_fail_if_path_exists('//BoolBinopNode')
def or_literal_bint():
    """
    >>> True or 5
    True
    >>> or_literal_bint()
    True
    """
    return True or 5

@cython.test_fail_if_path_exists('//BoolBinopNode')
def and_literal_bint():
    """
    >>> 5 and True
    True
    >>> and_literal_bint()
    True
    """
    return 5 and True

@cython.test_fail_if_path_exists('//BoolBinopNode')
def False_and_True_or_0():
    """
    >>> False and True or 0
    0
    >>> False_and_True_or_0()
    0
    """
    return False and True or 0

@cython.test_fail_if_path_exists('//BoolBinopNode')
def True_and_True_or_0():
    """
    >>> True and True or 0
    True
    >>> True_and_True_or_0()
    True
    """
    return True and True or 0

def x_and_True_or_False(x):
    """
    >>> x_and_True_or_False(0)
    False
    >>> x_and_True_or_False(1)
    True
    >>> x_and_True_or_False('abc')
    True
    >>> x_and_True_or_False([])
    False
    """
    return x and True or False

def x_and_True_or_0(x):
    """
    >>> 0 and True or 0
    0
    >>> x_and_True_or_0(0)
    0

    >>> 1 and True or 0
    True
    >>> x_and_True_or_0(1)
    True

    >>> x_and_True_or_0('abc')
    True
    >>> x_and_True_or_0([])
    0
    """
    return x and True or 0

def x_and_True_or_1(x):
    """
    >>> 0 and True or 1
    1
    >>> x_and_True_or_1(0)
    1

    >>> 1 and True or 1
    True
    >>> x_and_True_or_1(1)
    True

    >>> x_and_True_or_1('abc')
    True
    >>> x_and_True_or_1([])
    1
    """
    return x and True or 1

def x_and_1_or_False(x):
    """
    >>> 0 and 1 or False
    False
    >>> x_and_1_or_False(0)
    False

    >>> 1 and 1 or False
    1
    >>> x_and_1_or_False(1)
    1

    >>> x_and_1_or_False('abc')
    1
    >>> x_and_1_or_False([])
    False
    """
    return x and 1 or False

def test_large_int(unsigned long x):
    """
    >>> try: test_large_int(1 << 127)
    ... except OverflowError: print(True)
    True
    >>> try: test_large_int(1 << 63)
    ... except OverflowError: print(True)
    True
    >>> try: test_large_int(1 << 48)
    ... except OverflowError: print(True)
    True
    >>> try: test_large_int(1 << 31)
    ... except OverflowError: print(True)
    True
    >>> test_large_int(0)
    False
    """
    if True and x:
        return True
    else:
        return False
Cython-0.26.1/tests/run/cdef_bool_T227.pyx0000664000175000017500000000101212542002467020766 0ustar  stefanstefan00000000000000# ticket: 227

from cpython.bool cimport bool

def foo(bool a):
    """
    >>> foo(True)
    True
    >>> foo(False)
    False
    >>> foo('abc') # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    """
    return a == True

def call_cfoo(a):
    """
    >>> call_cfoo(True)
    True
    >>> call_cfoo(False)
    False
    >>> call_cfoo('abc') # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    """
    return cfoo(a)

cdef cfoo(bool a):
    return a == True
Cython-0.26.1/tests/run/auto_cpdef.py0000664000175000017500000000243212542002467020264 0ustar  stefanstefan00000000000000# cython: auto_cpdef=True
# mode:run
# tag: directive,auto_cpdef

import cython

def str(arg):
    """
    This is a bit evil - str gets mapped to a C-API function and is
    being redefined here.

    >>> print(str('TEST'))
    STR
    """
    return 'STR'

@cython.test_assert_path_exists('//SimpleCallNode[@function.type.is_cfunction = True]')
@cython.test_fail_if_path_exists('//SimpleCallNode[@function.type.is_builtin_type = True]')
def call_str(arg):
    """
    >>> print(call_str('TEST'))
    STR
    """
    return str(arg)


def stararg_func(*args):
    """
    >>> stararg_func(1, 2)
    (1, 2)
    """
    return args

def starstararg_func(**kwargs):
    """
    >>> starstararg_func(a=1)
    1
    """
    return kwargs['a']

l = lambda x: 1

def test_lambda():
    """
    >>> l(1)
    1
    """


# test classical import fallbacks
try:
    from math import fabs
except ImportError:
    def fabs(x):
        if x < 0:
            return -x
        else:
            return x

try:
    from math import no_such_function
except ImportError:
    def no_such_function(x):
        return x + 1.0


def test_import_fallback():
    """
    >>> fabs(1.0)
    1.0
    >>> no_such_function(1.0)
    2.0
    >>> test_import_fallback()
    (1.0, 2.0)
    """
    return fabs(1.0), no_such_function(1.0)
Cython-0.26.1/tests/run/ass2global.py0000664000175000017500000000103412542002467020201 0ustar  stefanstefan00000000000000"""
    >>> getg()
    5
    >>> setg(42)
    >>> getg()
    42
"""

g = 5


def setg(a):
    global g
    g = a


def getg():
    return g


class Test(object):
    """
    >>> global_in_class
    9
    >>> Test.global_in_class
    Traceback (most recent call last):
    AttributeError: type object 'Test' has no attribute 'global_in_class'
    >>> Test().global_in_class
    Traceback (most recent call last):
    AttributeError: 'Test' object has no attribute 'global_in_class'
    """
    global global_in_class
    global_in_class = 9
Cython-0.26.1/tests/run/tuple.pyx0000664000175000017500000000501312542002467017472 0ustar  stefanstefan00000000000000
cimport cython


def f(obj1, obj2, obj3, obj4, obj5):
    """
    >>> f(1,2,3,4,5)
    ()
    """
    obj1 = ()
    return obj1


def g(obj1, obj2, obj3, obj4, obj5):
    """
    >>> g(1,2,3,4,5)
    (2,)
    """
    obj1 = ()
    obj1 = (obj2,)
    return obj1


def h(obj1, obj2, obj3, obj4, obj5):
    """
    >>> h(1,2,3,4,5)
    (2, 3)
    """
    obj1 = ()
    obj1 = (obj2,)
    obj1 = obj2, obj3
    return obj1


def j(obj1, obj2, obj3, obj4, obj5):
    """
    >>> j(1,2,3,4,5)
    (2, 3, 4)
    """
    obj1 = ()
    obj1 = (obj2,)
    obj1 = obj2, obj3
    obj1 = (obj2, obj3, obj4)
    return obj1


def k(obj1, obj2, obj3, obj4, obj5):
    """
    >>> k(1,2,3,4,5)
    (2, 3, 4)
    """
    obj1 = ()
    obj1 = (obj2,)
    obj1 = obj2, obj3
    obj1 = (obj2, obj3, obj4)
    obj1 = (obj2, obj3, obj4,)
    return obj1


def l(obj1, obj2, obj3, obj4, obj5):
    """
    >>> l(1,2,3,4,5)
    (17, 42, 88)
    """
    obj1 = ()
    obj1 = (obj2,)
    obj1 = obj2, obj3
    obj1 = (obj2, obj3, obj4)
    obj1 = (obj2, obj3, obj4,)
    obj1 = 17, 42, 88
    return obj1


def tuple_none():
    """
    >>> tuple_none()   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...itera...
    """
    return tuple(None)


def tuple_none_list():
    """
    >>> tuple_none_list()   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...iterable...
    """
    cdef list none = None
    return tuple(none)


@cython.test_fail_if_path_exists(
    '//SimpleCallNode',
    '//PythonCapiCallNode'
)
def tuple_of_tuple_literal():
    """
    >>> tuple_of_tuple_literal()
    (1, 2, 3)
    """
    return tuple(tuple(tuple((1,2,3))))


@cython.test_fail_if_path_exists(
    '//SimpleCallNode',
    '//PythonCapiCallNode'
)
def tuple_of_args_tuple(*args):
    """
    >>> tuple_of_args_tuple(1,2,3)
    (1, 2, 3)
    """
    return tuple(tuple(tuple(args)))


@cython.test_fail_if_path_exists('//SimpleCallNode//SimpleCallNode')
def tuple_of_object(ob):
    """
    >>> tuple(type(1))
    Traceback (most recent call last):
    TypeError: 'type' object is not iterable
    >>> sorted(tuple(set([1, 2, 3])))
    [1, 2, 3]
    """
    return tuple(ob)


@cython.test_fail_if_path_exists(
    '//SimpleCallNode',
    '//PythonCapiCallNode//PythonCapiCallNode'
)
def tuple_of_tuple_or_none(tuple x):
    """
    >>> tuple_of_tuple_or_none((1,2,3))
    (1, 2, 3)
    >>> tuple_of_tuple_or_none(None)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...itera...
    """
    return tuple(tuple(tuple(x)))
Cython-0.26.1/tests/run/complex_numbers_T305.pyx0000664000175000017500000001530513143605603022262 0ustar  stefanstefan00000000000000# ticket: 305

from cpython.object cimport Py_EQ, Py_NE

cimport cython

DEF C21 = 2-1j


cdef class Complex3j:
    """
    >>> Complex3j() == 3j
    True
    >>> Complex3j() == Complex3j()
    True
    >>> Complex3j() != 3j
    False
    >>> Complex3j() != 3
    True
    >>> Complex3j() != Complex3j()
    False
    """
    def __richcmp__(a, b, int op):
        if op == Py_EQ or op == Py_NE:
            if isinstance(a, Complex3j):
                eq = isinstance(b, Complex3j) or b == 3j
            else:
                eq = isinstance(b, Complex3j) and a == 3j
            return eq if op == Py_EQ else not eq
        return NotImplemented


def test_object_conversion(o):
    """
    >>> test_object_conversion(2)
    ((2+0j), (2+0j))
    >>> test_object_conversion(2j - 0.5)
    ((-0.5+2j), (-0.5+2j))
    """
    cdef float complex a = o
    cdef double complex b = o
    return (a, b)


def test_arithmetic(double complex z, double complex w):
    """
    >>> test_arithmetic(2j, 4j)
    (2j, -2j, 6j, -2j, (-8+0j), (0.5+0j))
    >>> test_arithmetic(6+12j, 3j)
    ((6+12j), (-6-12j), (6+15j), (6+9j), (-36+18j), (4-2j))
    >>> test_arithmetic(5-10j, 3+4j)
    ((5-10j), (-5+10j), (8-6j), (2-14j), (55-10j), (-1-2j))
    """
    return +z, -z+0, z+w, z-w, z*w, z/w


def test_div(double complex a, double complex b, expected):
    """
    >>> big = 2.0**1023
    >>> test_div(1 + 1j, 1 + big*1j, 1/big - 1j/big)
    >>> test_div(1 + 1j, 1/big + 1j/big, big)
    """
    # Can't count on good c99 complex division :(
    if '_c99_' not in __name__:
        assert a / b == expected, (a / b, expected)


def test_pow(double complex z, double complex w, tol=None):
    """
    Various implementations produce slightly different results...

    >>> a = complex(3, 1)
    >>> test_pow(a, 1, 1e-15)
    True
    >>> test_pow(a, 2, 1e-15)
    True
    >>> test_pow(a, a, 1e-15)
    True
    >>> test_pow(complex(0.5, -.25), complex(3, 4), 1e-15)
    True
    >>> test_pow(-0.5, 1j, tol=1e-15)
    True
    """
    if tol is None:
        return z**w
    else:
        return abs(z**w / complex(z) ** complex(w) - 1) < tol


def test_int_pow(double complex z, int n, tol=None):
    """
    >>> [test_int_pow(complex(0, 1), k, 1e-15) for k in range(-4, 5)]
    [True, True, True, True, True, True, True, True, True]
    >>> [test_int_pow(complex(0, 2), k, 1e-15) for k in range(-4, 5)]
    [True, True, True, True, True, True, True, True, True]
    >>> [test_int_pow(complex(2, 0.5), k, 1e-14) for k in range(0, 10)]
    [True, True, True, True, True, True, True, True, True, True]
    >>> test_int_pow(-0.5, 5, tol=1e-15)
    True
    """
    if tol is None:
        return z**n + 0  # adding integer 0 folds negative zero (-0.0) parts to +0.0 for a stable repr
    else:
        return abs(z**n / complex(z) ** n - 1) < tol


@cython.cdivision(False)
def test_div_by_zero(double complex z):
    """
    >>> test_div_by_zero(4j)
    -0.25j
    >>> test_div_by_zero(0)
    Traceback (most recent call last):
    ...
    ZeroDivisionError: float division
    """
    return 1/z


def test_coercion(int a, float b, double c, float complex d, double complex e):
    """
    >>> test_coercion(1, 1.5, 2.5, 4+1j, 10j)
    (1+0j)
    (1.5+0j)
    (2.5+0j)
    (4+1j)
    10j
    (9+21j)
    """
    cdef double complex z
    z = a; print z
    z = b; print z
    z = c; print z
    z = d; print z
    z = e; print z
    return z + a + b + c + d + e


def test_compare(double complex a, double complex b):
    """
    >>> test_compare(3, 3)
    (True, False, False, False, False, True, False)
    >>> test_compare(3j, 3j)
    (True, False, True, True, True, False, False)
    >>> test_compare(3j, 4j)
    (False, True, True, False, True, True, False)
    >>> test_compare(3, 4)
    (False, True, False, False, False, True, False)
    >>> test_compare(2-1j, 4)
    (False, True, False, False, False, True, True)
    """
    return a == b, a != b, a == 3j, 3j == b, a == Complex3j(), Complex3j() != b, a == C21


def test_compare_coerce(double complex a, int b):
    """
    >>> test_compare_coerce(3, 4)
    (False, True, False, False, False, True)
    >>> test_compare_coerce(4+1j, 4)
    (False, True, False, True, False, True)
    >>> test_compare_coerce(4, 4)
    (True, False, False, False, False, True)
    >>> test_compare_coerce(3j, 4)
    (False, True, True, False, True, False)
    """
    return a == b, a != b, a == 3j, 4+1j == a, a == Complex3j(), Complex3j() != a


def test_literal():
    """
    >>> test_literal()
    (5j, (1-2.5j), (2-1j))
    """
    return 5j, 1-2.5j, C21


def test_real_imag(double complex z):
    """
    >>> test_real_imag(1-3j)
    (1.0, -3.0)
    >>> test_real_imag(5)
    (5.0, 0.0)
    >>> test_real_imag(1.5j)
    (0.0, 1.5)
    """
    return z.real, z.imag

def test_real_imag_assignment(object a, double b):
    """
    >>> test_real_imag_assignment(1, 2)
    (1+2j)
    >>> test_real_imag_assignment(1.5, -3.5)
    (1.5-3.5j)
    """
    cdef double complex z
    z.real = a
    z.imag = b
    return z

def test_conjugate(float complex z):
    """
    >>> test_conjugate(2+3j)
    (2-3j)
    """
    return z.conjugate()

def test_conjugate_double(double complex z):
    """
    >>> test_conjugate_double(2+3j)
    (2-3j)
    """
    return z.conjugate()

ctypedef double complex cdouble
def test_conjugate_typedef(cdouble z):
    """
    >>> test_conjugate_typedef(2+3j)
    (2-3j)
    """
    return z.conjugate()

cdef cdouble test_conjugate_nogil(cdouble z) nogil:
    # Really just a compile test.
    return z.conjugate()
test_conjugate_nogil(0) # use it

## cdef extern from "complex_numbers_T305.h":
##     ctypedef double double_really_float "myfloat"
##     ctypedef float float_really_double "mydouble"
##     ctypedef float real_float "myfloat"
##     ctypedef double real_double "mydouble"

## def test_conjugate_nosizeassumptions(double_really_float x,
##                                      float_really_double y,
##                                      real_float z, real_double w):
##     """
##     >>> test_conjugate_nosizeassumptions(1, 1, 1, 1)
##     (-1j, -1j, -1j, -1j)
##     >>> ["%.2f" % x.imag for x in test_conjugate_nosizeassumptions(2e300, 2e300, 2e300, 2e300)]
##     ['-inf', '-2e+300', '-inf', '-2e+300']
##     """
##     cdef double complex I = 1j
##     return ((x*I).conjugate(), (y*I).conjugate(), (z*I).conjugate(), (w*I).conjugate())

ctypedef double mydouble
def test_coerce_typedef_multiply(mydouble x, double complex z):
    """
    >>> test_coerce_typedef_multiply(3, 1+1j)
    (3+3j)
    """
    return x * z

ctypedef int myint
def test_coerce_typedef_multiply_int(myint x, double complex z):
    """
    >>> test_coerce_typedef_multiply_int(3, 1+1j)
    (3+3j)
    """
    return x * z

cpdef double complex complex_retval():
    """
    >>> complex_retval()
    1j
    """
    return 1j
Cython-0.26.1/tests/run/complex_int_T446_fix.h0000664000175000017500000000011512542002467021656 0ustar  stefanstefan00000000000000#if defined _MSC_VER && defined __cplusplus
#define CYTHON_CCOMPLEX 0
#endif
Cython-0.26.1/tests/run/genexpr_T491.pyx0000664000175000017500000000145112542002467020534 0ustar  stefanstefan00000000000000# ticket: 491

def test_genexpr():
    """
    >>> gen = test_genexpr()
    >>> list(gen)
    [0, 1, 2, 3, 4]
    """
    return (i for i in range(5))

def test_genexpr_typed():
    """
    >>> gen = test_genexpr_typed()
    >>> list(gen)
    [0, 1, 2, 3, 4]
    """
    cdef int i
    return (i for i in range(5))

def test_genexpr_funccall():
    """
    >>> test_genexpr_funccall()
    [0, 1, 2, 3, 4]
    """
    return list(i for i in range(5))

def test_genexpr_scope():
    """
    >>> test_genexpr_scope()
    ([0, 1, 2, 3, 4], 'abc')
    """
    i = 'abc'
    gen = (i for i in range(5))
    lst = list(gen)
    return lst, i

def test_genexpr_closure():
    """
    >>> gen = test_genexpr_closure()
    >>> list(gen)
    ['a', 'b', 'c']
    """
    abc = 'a' + 'b' + 'c'
    return (c for c in abc)
Cython-0.26.1/tests/run/delslice.py0000664000175000017500000000405212542002467017737 0ustar  stefanstefan00000000000000# mode: run
# tag: del, slicing

def del_constant_start_stop(x):
    """
    >>> l = [1,2,3,4]
    >>> del_constant_start_stop(l)
    [1, 2]

    >>> l = [1,2,3,4,5,6,7]
    >>> del_constant_start_stop(l)
    [1, 2, 7]
    """
    del x[2:6]
    return x


def del_start(x, start):
    """
    >>> l = [1,2,3,4]
    >>> del_start(l, 2)
    [1, 2]

    >>> l = [1,2,3,4,5,6,7]
    >>> del_start(l, 20)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_start(l, 8)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_start(l, 4)
    [1, 2, 3, 4]

    >>> del_start(l, -2)
    [1, 2]
    >>> l
    [1, 2]
    >>> del_start(l, -2)
    []
    >>> del_start(l, 2)
    []
    >>> del_start(l, -2)
    []
    >>> del_start(l, 20)
    []

    >>> del_start([1,2,3,4], -20)
    []
    >>> del_start([1,2,3,4], 0)
    []
    """
    del x[start:]
    return x


def del_stop(x, stop):
    """
    >>> l = [1,2,3,4]
    >>> del_stop(l, 2)
    [3, 4]

    >>> l = [1,2,3,4,5,6,7]
    >>> del_stop(l, -20)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_stop(l, -8)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_stop(l, -4)
    [4, 5, 6, 7]

    >>> del_stop(l, -2)
    [6, 7]
    >>> l
    [6, 7]
    >>> del_stop(l, -2)
    [6, 7]
    >>> del_stop(l, 2)
    []
    >>> del_stop(l, -2)
    []
    >>> del_stop(l, 20)
    []

    >>> del_stop([1,2,3,4], -20)
    [1, 2, 3, 4]
    >>> del_stop([1,2,3,4], 0)
    [1, 2, 3, 4]
    """
    del x[:stop]
    return x


def del_start_stop(x, start, stop):
    """
    >>> l = [1,2,3,4]
    >>> del_start_stop(l, 0, 2)
    [3, 4]
    >>> l
    [3, 4]

    >>> l = [1,2,3,4,5,6,7]
    >>> del_start_stop(l, -1, -20)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_start_stop(l, -20, -8)
    [1, 2, 3, 4, 5, 6, 7]
    >>> del_start_stop(l, -6, -4)
    [1, 4, 5, 6, 7]

    >>> del_start_stop(l, -20, -2)
    [6, 7]
    >>> l
    [6, 7]
    >>> del_start_stop(l, -2, 1)
    [7]
    >>> del_start_stop(l, -2, 3)
    []
    >>> del_start_stop(l, 2, 4)
    []

    >>> del_start_stop([1,2,3,4], 20, -20)
    [1, 2, 3, 4]
    >>> del_start_stop([1,2,3,4], 0, 0)
    [1, 2, 3, 4]
    """
    del x[start:stop]
    return x
Cython-0.26.1/tests/run/unsigned_char_ptr_bytes_conversion_T359.pyx0000664000175000017500000000124612542002467026242 0ustar  stefanstefan00000000000000# ticket: 359

cdef unsigned char* some_c_unstring = 'test toast taste'

def test_uchar_conversion():
    """
    >>> py_string1, py_string2, py_string3 = test_uchar_conversion()
    >>> print(py_string1.decode('iso8859-1'))
    test toast taste
    >>> print(py_string2.decode('iso8859-1'))
    test toast taste
    >>> print(py_string3.decode('iso8859-1'))
    test toast taste
    """

    cdef object py_string1 = some_c_unstring

    cdef unsigned char* c_unstring_from_py = py_string1
    cdef object py_string2 = c_unstring_from_py

    cdef char* c_string_from_py = py_string2
    cdef object py_string3 = c_string_from_py

    return py_string1, py_string2, py_string3
Cython-0.26.1/tests/run/switch_transform.pyx0000664000175000017500000000037112542002467021737 0ustar  stefanstefan00000000000000# cython: optimize.use_switch=False

cdef extern from *:
    enum:
        ONE "1"
        ONE_AGAIN "1+0"

def is_not_one(int i):
    """
    >>> is_not_one(1)
    False
    >>> is_not_one(2)
    True
    """
    return i != ONE and i != ONE_AGAIN
Cython-0.26.1/tests/run/r_delgado_1.pyx0000664000175000017500000000050012542002467020475 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> try:
...     eggs().eat()
... except RuntimeError:
...    import sys
...    e = sys.exc_info()[1]
...    print("%s: %s" % (e.__class__.__name__, e))
RuntimeError: I don't like that
"""

cdef class eggs:

  def __dealloc__(self):
    pass

  def eat(self):
    raise RuntimeError(u"I don't like that")
Cython-0.26.1/tests/run/relativeimport_star_T542.pyx0000664000175000017500000000027312542002467023161 0ustar  stefanstefan00000000000000from distutils import core, version
__name__='distutils.core.cytest_relativeimport_T542' # fool Python into thinking we are in distutils
from . import *

__doc__ = """
>>> core.setup == setup
True
"""
Cython-0.26.1/tests/run/builtin_min_max.pyx0000664000175000017500000001546713023021033021516 0ustar  stefanstefan00000000000000# mode: run
# tag: optimisation

cimport cython


class loud_list(list):
    def __len__(self):
        print "calling __len__"
        return super(loud_list, self).__len__()
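
# loud_list reports every __len__() call so that the doctests below can
# verify that the unrolled min()/max() conditionals evaluate each argument
# only once.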


# min()

@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3(a,b,c):
    """
    >>> min3(1,2,3)
    1
    >>> min3(2,3,1)
    1
    >>> min3(2,1,3)
    1
    >>> min3(3,1,2)
    1
    >>> min3(3,2,1)
    1
    """
    return min(a,b,c)


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_list(a,b,c):
    """
    >>> min3_list(1,2,3)
    1
    >>> min3_list(2,3,1)
    1
    >>> min3_list(2,1,3)
    1
    >>> min3_list(3,1,2)
    1
    >>> min3_list(3,2,1)
    1
    """
    return min([a,b,c])


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_tuple(a,b,c):
    """
    >>> min3_tuple(1,2,3)
    1
    >>> min3_tuple(2,3,1)
    1
    >>> min3_tuple(2,1,3)
    1
    >>> min3_tuple(3,1,2)
    1
    >>> min3_tuple(3,2,1)
    1
    """
    return min((a,b,c))


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def min3_typed(int a, int b, int c):
    """
    >>> min3_typed(1,2,3)
    1
    >>> min3_typed(2,3,1)
    1
    >>> min3_typed(2,1,3)
    1
    >>> min3_typed(3,1,2)
    1
    >>> min3_typed(3,2,1)
    1
    """
    return min(a,b,c)


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def literal_min3():
    """
    >>> literal_min3()
    (1, 1, 1, 1, 1)
    """
    return min(1,2,3), min(2,1,3), min(2,3,1), min(3,1,2), min(3,2,1)


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_min2():
    """
    >>> test_min2()
    1
    1
    1
    1
    1
    calling __len__
    1
    calling __len__
    1
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print min(1, 2)
    print min(2, my_int)
    print min(my_int, 2)

    print min(my_int, my_pyint)
    print min(my_pyint, my_int)

    print min(my_int, len(my_list))
    print min(len(my_list), my_int)


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_min3():
    """
    >>> test_min3()
    calling __len__
    1
    calling __len__
    calling __len__
    2
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print min(my_int, my_pyint, len(my_list))
    print min(my_pyint, my_list.__len__(), len(my_list))


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_minN():
    """
    >>> test_minN()
    calling __len__
    0
    calling __len__
    0
    calling __len__
    0
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print min(my_int, 2, my_int, 0, my_pyint, my_int, len(my_list))
    print min(my_int, my_int, 0, my_pyint, my_int, len(my_list))
    print min(my_int, my_int, 2, my_int, 0, my_pyint, my_int, len(my_list))


# max()

@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def max3(a,b,c):
    """
    >>> max3(1,2,3)
    3
    >>> max3(2,3,1)
    3
    >>> max3(2,1,3)
    3
    >>> max3(3,1,2)
    3
    >>> max3(3,2,1)
    3
    """
    return max(a,b,c)


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def max3_typed(int a, int b, int c):
    """
    >>> max3_typed(1,2,3)
    3
    >>> max3_typed(2,3,1)
    3
    >>> max3_typed(2,1,3)
    3
    >>> max3_typed(3,1,2)
    3
    >>> max3_typed(3,2,1)
    3
    """
    return max(a,b,c)


@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def literal_max3():
    """
    >>> literal_max3()
    (3, 3, 3, 3, 3)
    """
    return max(1,2,3), max(2,1,3), max(2,3,1), max(3,1,2), max(3,2,1)


def max1(x):
    """
    >>> max1([1, 2, 3])
    3
    >>> max1([2])
    2
    """
    return max(x)


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_max2():
    """
    >>> test_max2()
    2
    2
    2
    2
    2
    calling __len__
    3
    calling __len__
    3
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print max(1, 2)
    print max(2, my_int)
    print max(my_int, 2)

    print max(my_int, my_pyint)
    print max(my_pyint, my_int)

    print max(my_int, len(my_list))
    print max(len(my_list), my_int)


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_max3():
    """
    >>> test_max3()
    calling __len__
    3
    calling __len__
    calling __len__
    3
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print max(my_int, my_pyint, len(my_list))
    print max(my_pyint, my_list.__len__(), len(my_list))


@cython.test_assert_path_exists(
    '//PrintStatNode//CondExprNode')
@cython.test_fail_if_path_exists(
    '//PrintStatNode//SimpleCallNode//CoerceToPyTypeNode',
    '//PrintStatNode//SimpleCallNode//ConstNode')
def test_maxN():
    """
    >>> test_maxN()
    calling __len__
    3
    calling __len__
    3
    calling __len__
    3
    """
    cdef int my_int = 1
    cdef object my_pyint = 2
    cdef object my_list = loud_list([1,2,3])

    print max(my_int, 2, my_int, 0, my_pyint, my_int, len(my_list))
    print max(my_int, my_int, 0, my_pyint, my_int, len(my_list))
    print max(my_int, my_int, 2, my_int, 0, my_pyint, my_int, len(my_list))


'''
# ticket 772
# FIXME: signed vs. unsigned fails to safely handle intermediate results

@cython.test_assert_path_exists("//CondExprNode")
@cython.test_fail_if_path_exists("//SimpleCallNode")
def max3_typed_signed_unsigned(int a, unsigned int b, int c):
    """
    >>> max3_typed_signed_unsigned(1,2,-3)
    2
    >>> max3_typed_signed_unsigned(-2,3,1)
    3
    >>> max3_typed_signed_unsigned(-2,1,-3)
    1
    >>> max3_typed_signed_unsigned(3,-1,2)
    3
    >>> max3_typed_signed_unsigned(-3,2,1)
    2
    """
    return max(a,b,c)
'''
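
# The disabled test above needs extra care: C's usual arithmetic conversions
# promote the signed int operands to unsigned int in the intermediate
# comparisons, so a value like -3 would compare as a huge positive number.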
Cython-0.26.1/tests/run/datetime_cimport.pyx0000664000175000017500000000210112542002467021665 0ustar  stefanstefan00000000000000# coding: utf-8

from cpython.datetime cimport import_datetime
from cpython.datetime cimport date, time, datetime, timedelta, PyDateTime_IMPORT

import_datetime()
        
def test_date(int year, int month, int day):
    '''
    >>> val = test_date(2012, 12, 31)
    >>> print(val)
    2012-12-31
    '''
    val = date(year, month, day)
    return val

def test_time(int hour, int minute, int second, int microsecond):
    '''
    >>> val = test_time(12, 20, 55, 0)
    >>> print(val)
    12:20:55
    '''
    val = time(hour, minute, second, microsecond)
    return val

def test_datetime(int year, int month, int day, int hour, int minute, int second, int microsecond):
    '''
    >>> val = test_datetime(2012, 12, 31, 12, 20, 55, 0)
    >>> print(val)
    2012-12-31 12:20:55
    '''
    val = datetime(year, month, day, hour, minute, second, microsecond)
    return val

def test_timedelta(int days, int seconds, int useconds):
    '''
    >>> val = test_timedelta(30, 0, 0)
    >>> print(val)
    30 days, 0:00:00
    '''
    val = timedelta(days, seconds, useconds)
    return val
Cython-0.26.1/tests/run/numpy_math.pyx0000664000175000017500000000224113023021033020503 0ustar  stefanstefan00000000000000# tag: numpy
# tag: no-cpp
# Numpy <= 1.7.1 doesn't have a C++ guard in the header file.

cimport numpy.math as npmath


def test_fp_classif():
    """
    >>> test_fp_classif()
    """

    cdef double d_zero
    cdef float f_zero

    d_zero = -1 * 0.
    f_zero = -1 * 0.

    assert d_zero == npmath.NZERO
    assert f_zero == npmath.NZERO

    assert npmath.signbit(d_zero)
    assert npmath.signbit(f_zero)

    d_zero = 1 * 0.
    f_zero = 1 * 0.

    assert d_zero == npmath.PZERO
    assert f_zero == npmath.PZERO

    assert not npmath.signbit(d_zero)
    assert not npmath.signbit(f_zero)

    assert not npmath.isinf(d_zero)
    assert not npmath.isinf(f_zero)

    assert not npmath.isnan(d_zero)
    assert not npmath.isnan(f_zero)

    assert npmath.isinf(-npmath.INFINITY)
    assert npmath.isinf(npmath.INFINITY)
    assert npmath.isnan(npmath.NAN)

    assert npmath.signbit(npmath.copysign(1., -1.))


def test_nextafter():
    """
    >>> test_nextafter()
    """

    x = npmath.nextafter(npmath.EULER, 1)
    assert npmath.isfinite(x)
    assert x > npmath.EULER

    x = npmath.nextafter(npmath.PI_4, -1)
    assert npmath.isfinite(x)
    assert x < npmath.PI_4
Cython-0.26.1/tests/run/pyextattrref.pyx0000664000175000017500000000055412542002467021107 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> s = Spam(Eggs("ham"))
    >>> test(s)
    'ham'
"""

cdef class Eggs:
    cdef object ham
    def __init__(self, ham):
        self.ham = ham

cdef class Spam:
    cdef Eggs eggs
    def __init__(self, eggs):
        self.eggs = eggs

cdef object tomato(Spam s):
    food = s.eggs.ham
    return food

def test(Spam s):
    return tomato(s)
Cython-0.26.1/tests/run/cpp_nonstdint.h0000664000175000017500000000671512542002467020644 0ustar  stefanstefan00000000000000// -*- c++ -*-
#include <string.h>

template<unsigned int N>
class Integral {

  unsigned char bytes[N];

 public:
  Integral() {
    for (unsigned int i=0; i<N; i++)
      bytes[i] = 0;
  }
  Integral(const Integral &I) {
    for (unsigned int i=0; i<N; i++)
      bytes[i] = I.bytes[i];
  }
  Integral(const long long value) {
    resize_signed_int((const unsigned char*)&value, sizeof(value), bytes, N);
  }

  operator long long() const {
    long long value = 0;
    resize_signed_int(bytes, N, (unsigned char*)&value, sizeof(value));
    return value;
  }

  Integral& operator=(const Integral &I) {
    for (unsigned int i=0; i<N; i++)
      bytes[i] = I.bytes[i];
    return *this;
  }

  bool operator<(const Integral &I) const
  { return cmp(I) < 0; }
  bool operator>(const Integral &I) const
  { return cmp(I) > 0; }
  bool operator<=(const Integral &I) const
  { return cmp(I) <= 0; }
  bool operator>=(const Integral &I) const
  { return cmp(I) >= 0; }
  bool operator==(const Integral &I) const
  { return cmp(I) == 0; }
  bool operator!=(const Integral &I) const
  { return cmp(I) != 0; }
  
  bool operator==(const long long value) const {
    size_t len = sizeof(long long) > N ? sizeof(long long) : N;
    unsigned char* extended = new unsigned char[len];
    const unsigned char* other;
    if (sizeof(long long) < N) {
        resize_signed_int((unsigned char*)&value, sizeof(value), extended, len);
        other = bytes;
    } else {
        resize_signed_int(bytes, N, extended, len);
        other = (const unsigned char*)&value;
    }
    bool res = memcmp(extended, other, len) == 0;
    delete[] extended;
    return res;
  }
  bool operator!=(const long long val) const
  { return !(*this == val); }

 private:
  static bool is_le() {
    int one = 1;
    int b = (int)*(unsigned char *)&one;
    return b ? true : false;
  }
  static unsigned int lsb() {
    return is_le() ? 0 : N-1;
  }
  static unsigned int msb() {
    return is_le() ? N-1 : 0;
  }
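  // cmp() compares the sign bits first (a set sign bit sorts lower); for
  // equal signs it compares the remaining bytes from the most significant
  // byte down, flipping the result when both operands are negative.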
  int cmp(const Integral& J) const {
    const Integral& I = *this;
    unsigned char sI = I.bytes[msb()] & 0x80;
    unsigned char sJ = J.bytes[msb()] & 0x80;
    if (sI > sJ) return -1;
    if (sI < sJ) return +1;
    unsigned char bI = I.bytes[msb()] & 0x7F;
    unsigned char bJ = J.bytes[msb()] & 0x7F;
    int cmpabs = 0;
    if (bI < bJ)
      cmpabs = -1;
    else if (bI > bJ)
      cmpabs = +1;
    else {
      int incr = is_le() ? -1 : 1;
      unsigned int i = msb() + incr;
      while (i != lsb()) {
	if (I.bytes[i] < J.bytes[i])
	  { cmpabs = -1;  break; }
	if (I.bytes[i] > J.bytes[i])
	  { cmpabs = +1;  break; }
	i += incr;
      }
    }
    if (sI) return -cmpabs;
    else    return +cmpabs;
  }
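
  // resize_signed_int() copies a little- or big-endian signed integer of
  // src_len bytes into a dst_len byte buffer, sign-extending (0xFF or 0x00
  // fill) when widening and truncating when narrowing.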
  
  static void resize_signed_int(const unsigned char* src, size_t src_len, unsigned char* dst, size_t dst_len) {
    unsigned char msb;
    size_t dst_offset = 0;
    size_t src_offset = 0;
    if (is_le()) {
        dst_offset = 0;
        src_offset = 0;
        msb = ((unsigned char*) src)[src_len - 1];
        if (src_len > dst_len) {
            src_len = dst_len;
        }
    } else {
        if (dst_len > src_len) {
            dst_offset = dst_len - src_len;
        } else {
            src_offset = src_len - dst_len;
            src_len = dst_len;
        }
        msb = ((unsigned char*) src)[0];
    }
    if (msb & 0x80) {
        memset(dst, 0xFF, dst_len);
    } else {
        memset(dst, 0, dst_len);
    }
    memcpy(dst + dst_offset, src + src_offset, src_len);
  }
};

typedef Integral<3> Int24;
typedef Integral<7> Int56;
typedef Integral<11> Int88;
typedef Integral<64> Int512;
Cython-0.26.1/tests/run/str_char_coercion_T412.pyx0000664000175000017500000000246612542002467022552 0ustar  stefanstefan00000000000000# ticket: 412

cdef int   i = 'x'
cdef char  c = 'x'
cdef char* s = 'x'

def test_eq():
    """
    >>> test_eq()
    True
    True
    True
    True
    """
    print i ==  'x'
    print i == c'x'
    print c ==  'x'
    print c == c'x'
#    print s ==  'x' # error
#    print s == c'x' # error

def test_cascaded_eq():
    """
    >>> test_cascaded_eq()
    True
    True
    True
    True
    True
    True
    True
    True
    """
    print  'x' == i ==  'x'
    print  'x' == i == c'x'
    print c'x' == i ==  'x'
    print c'x' == i == c'x'

    print  'x' == c ==  'x'
    print  'x' == c == c'x'
    print c'x' == c ==  'x'
    print c'x' == c == c'x'

def test_cascaded_ineq():
    """
    >>> test_cascaded_ineq()
    True
    True
    True
    True
    True
    True
    True
    True
    """
    print  'a' <= i <=  'z'
    print  'a' <= i <= c'z'
    print c'a' <= i <=  'z'
    print c'a' <= i <= c'z'

    print  'a' <= c <=  'z'
    print  'a' <= c <= c'z'
    print c'a' <= c <=  'z'
    print c'a' <= c <= c'z'

def test_long_ineq():
    """
    >>> test_long_ineq()
    True
    """
    print 'a' < 'b' < 'c' < 'd' < c < 'y' < 'z'

def test_long_ineq_py():
    """
    >>> test_long_ineq_py()
    True
    True
    """
    print 'abcdef' < 'b' < 'c' < 'd' < 'y' < 'z'
    print 'a' < 'b' < 'cde' < 'd' < 'y' < 'z'
Cython-0.26.1/tests/run/test_coroutines_pep492.pyx0000664000175000017500000013327313023021033022670 0ustar  stefanstefan00000000000000# cython: language_level=3, binding=True
# mode: run
# tag: pep492, asyncfor, await

import re
import gc
import sys
import copy
#import types
import pickle
import os.path
#import inspect
import unittest
import warnings
import contextlib

from Cython.Compiler import Errors


try:
    from types import coroutine as types_coroutine
except ImportError:
    # duck typed types.coroutine() decorator copied from types.py in Py3.5
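    # It wraps a generator function so that the generators it returns also
    # expose __await__, which makes them awaitable from 'async def' coroutines.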
    class types_coroutine(object):
        def __init__(self, gen):
            self._gen = gen

        class _GeneratorWrapper(object):
            def __init__(self, gen):
                self.__wrapped__ = gen
                self.send = gen.send
                self.throw = gen.throw
                self.close = gen.close
                self.__name__ = getattr(gen, '__name__', None)
                self.__qualname__ = getattr(gen, '__qualname__', None)
            @property
            def gi_code(self):
                return self.__wrapped__.gi_code
            @property
            def gi_frame(self):
                return self.__wrapped__.gi_frame
            @property
            def gi_running(self):
                return self.__wrapped__.gi_running
            cr_code = gi_code
            cr_frame = gi_frame
            cr_running = gi_running
            def __next__(self):
                return next(self.__wrapped__)
            def __iter__(self):
                return self.__wrapped__
            __await__ = __iter__

        def __call__(self, *args, **kwargs):
            return self._GeneratorWrapper(self._gen(*args, **kwargs))


# compiled exec(): compiles the code string with Cython (via Cython.Shadow.inline)
# instead of interpreting it, so these tests exercise the Cython compiler itself
def exec(code_string, l, g):
    from Cython.Shadow import inline
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO

    old_stderr = sys.stderr
    try:
        sys.stderr = StringIO()
        ns = inline(code_string, locals=l, globals=g, lib_dir=os.path.dirname(__file__))
    finally:
        sys.stderr = old_stderr
    g.update(ns)


class AsyncYieldFrom(object):
    def __init__(self, obj):
        self.obj = obj

    def __await__(self):
        yield from self.obj


class AsyncYield(object):
    def __init__(self, value):
        self.value = value

    def __await__(self):
        yield self.value


def run_async(coro):
    #assert coro.__class__ is types.GeneratorType
    assert coro.__class__.__name__ in ('coroutine', '_GeneratorWrapper'), coro.__class__.__name__

    buffer = []
    result = None
    while True:
        try:
            buffer.append(coro.send(None))
        except StopIteration as ex:
            result = ex.value if sys.version_info >= (3, 5) else ex.args[0] if ex.args else None
            break
    return buffer, result


def run_async__await__(coro):
    assert coro.__class__.__name__ in ('coroutine', '_GeneratorWrapper'), coro.__class__.__name__
    aw = coro.__await__()
    buffer = []
    result = None
    i = 0
    while True:
        try:
            if i % 2:
                buffer.append(next(aw))
            else:
                buffer.append(aw.send(None))
            i += 1
        except StopIteration as ex:
            result = ex.value if sys.version_info >= (3, 5) else ex.args[0] if ex.args else None
            break
    return buffer, result


@contextlib.contextmanager
def silence_coro_gc():
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        yield
        gc.collect()


def min_py27(method):
    return None if sys.version_info < (2, 7) else method


def ignore_py26(manager):
    @contextlib.contextmanager
    def dummy():
        yield
    return dummy() if sys.version_info < (2, 7) else manager


class AsyncBadSyntaxTest(unittest.TestCase):

    @contextlib.contextmanager
    def assertRaisesRegex(self, exc_type, regex):
        # the error messages usually don't match, so we just ignore them
        try:
            yield
        except exc_type:
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    def test_badsyntax_9(self):
        ns = {}
        for comp in {'(await a for a in b)',
                     '[await a for a in b]',
                     '{await a for a in b}',
                     '{await a: a for a in b}'}:

            with self.assertRaisesRegex(Errors.CompileError, 'await.*in comprehen'):
                exec('async def f():\n\t{0}'.format(comp), ns, ns)

    def test_badsyntax_10(self):
        # Tests for issue 24619

        samples = [
            """async def foo():
                   def bar(): pass
                   await = 1
            """,

            """async def foo():

                   def bar(): pass
                   await = 1
            """,

            """async def foo():
                   def bar(): pass
                   if 1:
                       await = 1
            """,

            """def foo():
                   async def bar(): pass
                   if 1:
                       await a
            """,

            """def foo():
                   async def bar(): pass
                   await a
            """,

            """def foo():
                   def baz(): pass
                   async def bar(): pass
                   await a
            """,

            """def foo():
                   def baz(): pass
                   # 456
                   async def bar(): pass
                   # 123
                   await a
            """,

            """async def foo():
                   def baz(): pass
                   # 456
                   async def bar(): pass
                   # 123
                   await = 2
            """,

            """def foo():

                   def baz(): pass

                   async def bar(): pass

                   await a
            """,

            """async def foo():

                   def baz(): pass

                   async def bar(): pass

                   await = 2
            """,

            """async def foo():
                   def async(): pass
            """,

            """async def foo():
                   def await(): pass
            """,

            """async def foo():
                   def bar():
                       await
            """,

            """async def foo():
                   return lambda async: await
            """,

            """async def foo():
                   return lambda a: await
            """,

            """await a()""",

            """async def foo(a=await b):
                   pass
            """,

            """async def foo(a:await b):
                   pass
            """,

            """def baz():
                   async def foo(a=await b):
                       pass
            """,

            """async def foo(async):
                   pass
            """,

            """async def foo():
                   def bar():
                        def baz():
                            async = 1
            """,

            """async def foo():
                   def bar():
                        def baz():
                            pass
                        async = 1
            """,

            """def foo():
                   async def bar():

                        async def baz():
                            pass

                        def baz():
                            42

                        async = 1
            """,

            """async def foo():
                   def bar():
                        def baz():
                            pass\nawait foo()
            """,

            """def foo():
                   def bar():
                        async def baz():
                            pass\nawait foo()
            """,

            """async def foo(await):
                   pass
            """,

            """def foo():

                   async def bar(): pass

                   await a
            """,

            """def foo():
                   async def bar():
                        pass\nawait a
            """]

        for code in samples:
            # assertRaises() differs in Py2.6, so use our own assertRaisesRegex() instead
            with self.subTest(code=code), self.assertRaisesRegex(Errors.CompileError, '.'):
                exec(code, {}, {})

    if not hasattr(unittest.TestCase, 'subTest'):
        @contextlib.contextmanager
        def subTest(self, code, **kwargs):
            try:
                yield
            except Exception:
                print(code)
                raise

    def test_goodsyntax_1(self):
        # Tests for issue 24619

        def foo(await):
            async def foo(): pass
            async def foo():
                pass
            return await + 1
        self.assertEqual(foo(10), 11)

        def foo(await):
            async def foo(): pass
            async def foo(): pass
            return await + 2
        self.assertEqual(foo(20), 22)

        def foo(await):

            async def foo(): pass

            async def foo(): pass

            return await + 2
        self.assertEqual(foo(20), 22)

        def foo(await):
            """spam"""
            async def foo(): \
                pass
            # 123
            async def foo(): pass
            # 456
            return await + 2
        self.assertEqual(foo(20), 22)

        def foo(await):
            def foo(): pass
            def foo(): pass
            async def bar(): return await_
            await_ = await
            try:
                bar().send(None)
            except StopIteration as ex:
                return ex.args[0]
        self.assertEqual(foo(42), 42)

        async def f(z):
            async def g(): pass
            await z
        await = 1
        #self.assertTrue(inspect.iscoroutinefunction(f))


class TokenizerRegrTest(unittest.TestCase):

    def test_oneline_defs(self):
        buf = []
        for i in range(500):
            buf.append('def i{i}(): return {i}'.format(i=i))
        buf = '\n'.join(buf)

        # Test that 500 consecutive one-line defs are OK
        ns = {}
        exec(buf, ns, ns)
        self.assertEqual(ns['i499'](), 499)

        # Test that 500 consecutive one-line defs *and*
        # one 'async def' following them are OK
        buf += '\nasync def foo():\n    return'
        ns = {}
        exec(buf, ns, ns)
        self.assertEqual(ns['i499'](), 499)
        self.assertEqual(type(ns['foo']()).__name__, 'coroutine')
        #self.assertTrue(inspect.iscoroutinefunction(ns['foo']))


class CoroutineTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # never mark warnings as "already seen" to prevent them from being suppressed
        from warnings import simplefilter
        simplefilter("always")

    @contextlib.contextmanager
    def assertRaises(self, exc_type):
        try:
            yield
        except exc_type:
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    @contextlib.contextmanager
    def assertRaisesRegex(self, exc_type, regex):
        # the error messages usually don't match, so we just ignore them
        try:
            yield
        except exc_type:
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    @contextlib.contextmanager
    def assertWarnsRegex(self, exc_type, regex):
        from warnings import catch_warnings
        with catch_warnings(record=True) as log:
            yield

        first_match = None
        for warning in log:
            w = warning.message
            if not isinstance(w, exc_type):
                continue
            if first_match is None:
                first_match = w
            if re.search(regex, str(w)):
                self.assertTrue(True)
                return

        if first_match is None:
            self.assertTrue(False, "no warning was raised of type '%s'" % exc_type.__name__)
        else:
            self.assertTrue(False, "'%s' did not match '%s'" % (first_match, regex))

    if not hasattr(unittest.TestCase, 'assertRegex'):
        def assertRegex(self, value, regex):
            self.assertTrue(re.search(regex, str(value)),
                            "'%s' did not match '%s'" % (value, regex))

    if not hasattr(unittest.TestCase, 'assertIn'):
        def assertIn(self, member, container, msg=None):
            self.assertTrue(member in container, msg)

    if not hasattr(unittest.TestCase, 'assertIsNone'):
        def assertIsNone(self, value, msg=None):
            self.assertTrue(value is None, msg)

    if not hasattr(unittest.TestCase, 'assertIsNotNone'):
        def assertIsNotNone(self, value, msg=None):
            self.assertTrue(value is not None, msg)

    def test_gen_1(self):
        def gen(): yield
        self.assertFalse(hasattr(gen, '__await__'))

    def test_func_attributes(self):
        async def foo():
            return 10

        f = foo()
        self.assertEqual(f.__name__, 'foo')
        self.assertEqual(f.__qualname__, 'CoroutineTest.test_func_attributes.<locals>.foo')
        self.assertEqual(f.__module__, 'test_coroutines_pep492')

    def test_func_1(self):
        async def foo():
            return 10

        f = foo()
        self.assertEqual(f.__class__.__name__, 'coroutine')
        #self.assertIsInstance(f, types.CoroutineType)
        #self.assertTrue(bool(foo.__code__.co_flags & 0x80))
        #self.assertTrue(bool(foo.__code__.co_flags & 0x20))
        #self.assertTrue(bool(f.cr_code.co_flags & 0x80))
        #self.assertTrue(bool(f.cr_code.co_flags & 0x20))
        self.assertEqual(run_async(f), ([], 10))

        self.assertEqual(run_async__await__(foo()), ([], 10))

        def bar(): pass
        self.assertFalse(bool(bar.__code__.co_flags & 0x80))

    # TODO
    def __test_func_2(self):
        async def foo():
            raise StopIteration

        with self.assertRaisesRegex(
                RuntimeError, "coroutine raised StopIteration"):

            run_async(foo())

    def test_func_3(self):
        async def foo():
            raise StopIteration

        with silence_coro_gc():
            self.assertRegex(repr(foo()), '^<coroutine object.* at 0x.*>$')

    def test_func_4(self):
        async def foo():
            raise StopIteration

        check = lambda: self.assertRaisesRegex(
            TypeError, "'coroutine' object is not iterable")

        with check():
            list(foo())

        with check():
            tuple(foo())

        with check():
            sum(foo())

        with check():
            iter(foo())

        with check():
            next(foo())

        with silence_coro_gc(), check():
            for i in foo():
                pass

        with silence_coro_gc(), check():
            [i for i in foo()]

    def test_func_5(self):
        @types_coroutine
        def bar():
            yield 1

        async def foo():
            await bar()

        check = lambda: self.assertRaisesRegex(
            TypeError, "'coroutine' object is not iterable")

        with check():
            for el in foo(): pass

        # the following should pass without an error
        for el in bar():
            self.assertEqual(el, 1)
        self.assertEqual([el for el in bar()], [1])
        self.assertEqual(tuple(bar()), (1,))
        self.assertEqual(next(iter(bar())), 1)

    def test_func_6(self):
        @types_coroutine
        def bar():
            yield 1
            yield 2

        async def foo():
            await bar()

        f = foo()
        self.assertEqual(f.send(None), 1)
        self.assertEqual(f.send(None), 2)
        with self.assertRaises(StopIteration):
            f.send(None)

    # TODO (or not? see test_func_8() below)
    def __test_func_7(self):
        async def bar():
            return 10

        def foo():
            yield from bar()

        with silence_coro_gc(), self.assertRaisesRegex(
            TypeError,
            "cannot 'yield from' a coroutine object in a non-coroutine generator"):

            list(foo())

    def test_func_8(self):
        @types_coroutine
        def bar():
            return (yield from foo())

        async def foo():
            return 'spam'

        self.assertEqual(run_async(bar()), ([], 'spam') )

    def test_func_9(self):
        async def foo(): pass

        with self.assertWarnsRegex(
            RuntimeWarning, "coroutine '.*test_func_9.*foo' was never awaited"):

            foo()
            gc.collect()

    def test_func_10(self):
        N = 0

        @types_coroutine
        def gen():
            nonlocal N
            try:
                a = yield
                yield (a ** 2)
            except ZeroDivisionError:
                N += 100
                raise
            finally:
                N += 1

        async def foo():
            await gen()

        coro = foo()
        aw = coro.__await__()
        self.assertTrue(aw is iter(aw))
        next(aw)
        self.assertEqual(aw.send(10), 100)
        with self.assertRaises(TypeError):
            type(aw).send(None, None)

        self.assertEqual(N, 0)
        aw.close()
        self.assertEqual(N, 1)
        with self.assertRaises(TypeError):   # removed from CPython test suite?
            type(aw).close(None)

        coro = foo()
        aw = coro.__await__()
        next(aw)
        with self.assertRaises(ZeroDivisionError):
            aw.throw(ZeroDivisionError, None, None)
        self.assertEqual(N, 102)
        with self.assertRaises(TypeError):   # removed from CPython test suite?
            type(aw).throw(None, None, None, None)

    def test_func_11(self):
        async def func(): pass
        coro = func()
        # Test that PyCoro_Type and _PyCoroWrapper_Type types were properly
        # initialized
        self.assertIn('__await__', dir(coro))
        self.assertIn('__iter__', dir(coro.__await__()))
        self.assertIn('coroutine_wrapper', repr(coro.__await__()))
        coro.close() # avoid RuntimeWarning

    def test_func_12(self):
        async def g():
            i = me.send(None)
            await None
        me = g()
        with self.assertRaisesRegex(ValueError,
                                    "coroutine already executing"):
            me.send(None)

    def test_func_13(self):
        async def g():
            pass
        with self.assertRaisesRegex(
            TypeError,
            "can't send non-None value to a just-started coroutine"):

            g().send('spam')

    def test_func_14(self):
        @types_coroutine
        def gen():
            yield
        async def coro():
            try:
                await gen()
            except GeneratorExit:
                await gen()
        c = coro()
        c.send(None)
        with self.assertRaisesRegex(RuntimeError,
                                    "coroutine ignored GeneratorExit"):
            c.close()

    def test_cr_await(self):
        @types_coroutine
        def a():
            #self.assertEqual(inspect.getcoroutinestate(coro_b), inspect.CORO_RUNNING)
            self.assertIsNone(coro_b.cr_await)
            yield
            #self.assertEqual(inspect.getcoroutinestate(coro_b), inspect.CORO_RUNNING)
            # FIXME: no idea why the following works in CPython:
            #self.assertIsNone(coro_b.cr_await)

        async def c():
            await a()

        async def b():
            self.assertIsNone(coro_b.cr_await)
            await c()
            self.assertIsNone(coro_b.cr_await)

        coro_b = b()
        #self.assertEqual(inspect.getcoroutinestate(coro_b), inspect.CORO_CREATED)
        self.assertIsNone(coro_b.cr_await)

        coro_b.send(None)
        #self.assertEqual(inspect.getcoroutinestate(coro_b), inspect.CORO_SUSPENDED)
        #self.assertEqual(coro_b.cr_await.cr_await.gi_code.co_name, 'a')
        self.assertIsNotNone(coro_b.cr_await.cr_await)
        self.assertEqual(coro_b.cr_await.cr_await.__name__, 'a')

        with self.assertRaises(StopIteration):
            coro_b.send(None)  # complete coroutine
        #self.assertEqual(inspect.getcoroutinestate(coro_b), inspect.CORO_CLOSED)
        self.assertIsNone(coro_b.cr_await)

    def test_corotype_1(self):
        async def f(): pass
        ct = type(f())
        self.assertIn('into coroutine', ct.send.__doc__)
        self.assertIn('inside coroutine', ct.close.__doc__)
        self.assertIn('in coroutine', ct.throw.__doc__)
        self.assertIn('of the coroutine', ct.__dict__['__name__'].__doc__)
        self.assertIn('of the coroutine', ct.__dict__['__qualname__'].__doc__)
        self.assertEqual(ct.__name__, 'coroutine')

        async def f(): pass
        c = f()
        self.assertIn('coroutine object', repr(c))
        c.close()

    def test_await_1(self):

        async def foo():
            await 1
        with self.assertRaisesRegex(TypeError, "object int can.t.*await"):
            run_async(foo())

    def test_await_2(self):
        async def foo():
            await []
        with self.assertRaisesRegex(TypeError, "object list can.t.*await"):
            run_async(foo())

    def test_await_3(self):
        async def foo():
            await AsyncYieldFrom([1, 2, 3])

        self.assertEqual(run_async(foo()), ([1, 2, 3], None))
        self.assertEqual(run_async__await__(foo()), ([1, 2, 3], None))

    def test_await_4(self):
        async def bar():
            return 42

        async def foo():
            return await bar()

        self.assertEqual(run_async(foo()), ([], 42))

    def test_await_5(self):
        class Awaitable(object):
            def __await__(self):
                return

        async def foo():
            return (await Awaitable())

        with self.assertRaisesRegex(
            TypeError, "__await__.*returned non-iterator of type"):

            run_async(foo())

    def test_await_6(self):
        class Awaitable(object):
            def __await__(self):
                return iter([52])

        async def foo():
            return (await Awaitable())

        self.assertEqual(run_async(foo()), ([52], None))

    def test_await_7(self):
        class Awaitable(object):
            def __await__(self):
                yield 42
                return 100

        async def foo():
            return (await Awaitable())

        self.assertEqual(run_async(foo()), ([42], 100))

    def test_await_8(self):
        class Awaitable(object):
            pass

        async def foo(): return (await Awaitable())

        with self.assertRaisesRegex(
            TypeError, "object Awaitable can't be used in 'await' expression"):

            run_async(foo())

    def test_await_9(self):
        def wrap():
            return bar

        async def bar():
            return 42

        async def foo():
            b = bar()

            db = {'b':  lambda: wrap}

            class DB(object):
                b = staticmethod(wrap)

            return (await bar() + await wrap()() + await db['b']()()() +
                    await bar() * 1000 + await DB.b()())

        async def foo2():
            return -await bar()

        self.assertEqual(run_async(foo()), ([], 42168))
        self.assertEqual(run_async(foo2()), ([], -42))

    def test_await_10(self):
        async def baz():
            return 42

        async def bar():
            return baz()

        async def foo():
            return await (await bar())

        self.assertEqual(run_async(foo()), ([], 42))

    def test_await_11(self):
        def ident(val):
            return val

        async def bar():
            return 'spam'

        async def foo():
            return ident(val=await bar())

        async def foo2():
            return await bar(), 'ham'

        self.assertEqual(run_async(foo2()), ([], ('spam', 'ham')))

    def test_await_12(self):
        async def coro():
            return 'spam'

        class Awaitable(object):
            def __await__(self):
                return coro()

        async def foo():
            return await Awaitable()

        with self.assertRaisesRegex(
            TypeError, "__await__\(\) returned a coroutine"):

            run_async(foo())

    def test_await_13(self):
        class Awaitable(object):
            def __await__(self):
                return self

        async def foo():
            return await Awaitable()

        with self.assertRaisesRegex(
            TypeError, "__await__.*returned non-iterator of type"):

            run_async(foo())

    def test_await_14(self):
        class Wrapper(object):
            # Forces the interpreter to use CoroutineType.__await__
            def __init__(self, coro):
                self.coro = coro
            def __await__(self):
                return self.coro.__await__()

        class FutureLike(object):
            def __await__(self):
                return (yield)

        class Marker(Exception):
            pass

        async def coro1():
            try:
                return await FutureLike()
            except ZeroDivisionError:
                raise Marker
        async def coro2():
            return await Wrapper(coro1())

        c = coro2()
        c.send(None)
        with self.assertRaisesRegex(StopIteration, 'spam'):
            c.send('spam')

        c = coro2()
        c.send(None)
        with self.assertRaises(Marker):
            c.throw(ZeroDivisionError)

    def test_await_iterator(self):
        async def foo():
            return 123

        coro = foo()
        it = coro.__await__()
        self.assertEqual(type(it).__name__, 'coroutine_wrapper')

        with self.assertRaisesRegex(TypeError, "cannot instantiate 'coroutine_wrapper' type"):
            type(it)()  # cannot instantiate

        with self.assertRaisesRegex(StopIteration, "123"):
            next(it)

    def test_with_1(self):
        class Manager(object):
            def __init__(self, name):
                self.name = name

            async def __aenter__(self):
                await AsyncYieldFrom(['enter-1-' + self.name,
                                      'enter-2-' + self.name])
                return self

            async def __aexit__(self, *args):
                await AsyncYieldFrom(['exit-1-' + self.name,
                                      'exit-2-' + self.name])

                if self.name == 'B':
                    return True


        async def foo():
            async with Manager("A") as a, Manager("B") as b:
                await AsyncYieldFrom([('managers', a.name, b.name)])
                1/0

        f = foo()
        result, _ = run_async(f)

        self.assertEqual(
            result, ['enter-1-A', 'enter-2-A', 'enter-1-B', 'enter-2-B',
                     ('managers', 'A', 'B'),
                     'exit-1-B', 'exit-2-B', 'exit-1-A', 'exit-2-A']
        )

        async def foo():
            async with Manager("A") as a, Manager("C") as c:
                await AsyncYieldFrom([('managers', a.name, c.name)])
                1/0

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())

    def test_with_2(self):
        class CM(object):
            def __aenter__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aexit__'):
            run_async(foo())

    def test_with_3(self):
        class CM(object):
            def __aexit__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aenter__'):
            run_async(foo())

    def test_with_4(self):
        class CM(object):
            def __enter__(self):
                pass

            def __exit__(self):
                pass

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(AttributeError, '__aexit__'):
            run_async(foo())

    def test_with_5(self):
        # While this test doesn't make a lot of sense,
        # it's a regression test for an early bug in opcode generation.

        class CM(object):
            async def __aenter__(self):
                return self

            async def __aexit__(self, *exc):
                pass

        async def func():
            async with CM():
                assert (1, ) == 1

        with self.assertRaises(AssertionError):
            run_async(func())

    def test_with_6(self):
        class CM(object):
            def __aenter__(self):
                return 123

            def __aexit__(self, *e):
                return 456

        async def foo():
            async with CM():
                pass

        with self.assertRaisesRegex(
            TypeError, "object int can't be used in 'await' expression"):
            # it's important that __aexit__ wasn't called
            run_async(foo())

    def test_with_7(self):
        class CM(object):
            async def __aenter__(self):
                return self

            def __aexit__(self, *e):
                return 444

        async def foo():
            async with CM():
                1/0

        try:
            run_async(foo())
        except TypeError as exc:
            self.assertRegex(
                exc.args[0], "object int can't be used in 'await' expression")
            if sys.version_info[0] >= 3:
                self.assertTrue(exc.__context__ is not None)
                self.assertTrue(isinstance(exc.__context__, ZeroDivisionError))
        else:
            self.fail('invalid asynchronous context manager did not fail')


    def test_with_8(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                return self

            def __aexit__(self, *e):
                return 456

        async def foo():
            nonlocal CNT
            async with CM():
                CNT += 1


        with self.assertRaisesRegex(
            TypeError, "object int can't be used in 'await' expression"):

            run_async(foo())

        self.assertEqual(CNT, 1)

    def test_with_9(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                CNT += 1

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())

        self.assertEqual(CNT, 1)

    def test_with_10(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                async with CM():
                    raise RuntimeError

        try:
            run_async(foo())
        except ZeroDivisionError as exc:
            pass  # FIXME!
            #if sys.version_info[0] >= 3:
            #    self.assertTrue(exc.__context__ is not None)
            #    self.assertTrue(isinstance(exc.__context__, ZeroDivisionError))
            #    self.assertTrue(isinstance(exc.__context__.__context__, RuntimeError))
        else:
            self.fail('exception from __aexit__ did not propagate')

    def test_with_11(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                raise NotImplementedError

            async def __aexit__(self, *e):
                1/0

        async def foo():
            nonlocal CNT
            async with CM():
                raise RuntimeError

        try:
            run_async(foo())
        except NotImplementedError as exc:
            if sys.version_info[0] >= 3:
                self.assertTrue(exc.__context__ is None)
        else:
            self.fail('exception from __aenter__ did not propagate')

    def test_with_12(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                return self

            async def __aexit__(self, *e):
                return True

        async def foo():
            nonlocal CNT
            async with CM() as cm:
                self.assertIs(cm.__class__, CM)
                raise RuntimeError

        run_async(foo())

    def test_with_13(self):
        CNT = 0

        class CM(object):
            async def __aenter__(self):
                1/0

            async def __aexit__(self, *e):
                return True

        async def foo():
            nonlocal CNT
            CNT += 1
            async with CM():
                CNT += 1000
            CNT += 10000

        with self.assertRaises(ZeroDivisionError):
            run_async(foo())
        self.assertEqual(CNT, 1)

    def test_for_1(self):
        aiter_calls = 0

        class AsyncIter(object):
            def __init__(self):
                self.i = 0

            async def __aiter__(self):
                nonlocal aiter_calls
                aiter_calls += 1
                return self

            async def __anext__(self):
                self.i += 1

                if not (self.i % 10):
                    await AsyncYield(self.i * 10)

                if self.i > 100:
                    raise StopAsyncIteration

                return self.i, self.i


        buffer = []
        async def test1():
            with ignore_py26(self.assertWarnsRegex(PendingDeprecationWarning, "legacy")):
                async for i1, i2 in AsyncIter():
                    buffer.append(i1 + i2)

        yielded, _ = run_async(test1())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 1)
        self.assertEqual(yielded, [i * 100 for i in range(1, 11)])
        self.assertEqual(buffer, [i*2 for i in range(1, 101)])


        buffer = []
        async def test2():
            nonlocal buffer
            with ignore_py26(self.assertWarnsRegex(PendingDeprecationWarning, "legacy")):
                async for i in AsyncIter():
                    buffer.append(i[0])
                    if i[0] == 20:
                        break
                else:
                    buffer.append('what?')
            buffer.append('end')

        yielded, _ = run_async(test2())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 2)
        self.assertEqual(yielded, [100, 200])
        self.assertEqual(buffer, [i for i in range(1, 21)] + ['end'])


        buffer = []
        async def test3():
            nonlocal buffer
            with ignore_py26(self.assertWarnsRegex(PendingDeprecationWarning, "legacy")):
                async for i in AsyncIter():
                    if i[0] > 20:
                        continue
                    buffer.append(i[0])
                else:
                    buffer.append('what?')
            buffer.append('end')

        yielded, _ = run_async(test3())
        # Make sure that __aiter__ was called only once
        self.assertEqual(aiter_calls, 3)
        self.assertEqual(yielded, [i * 100 for i in range(1, 11)])
        self.assertEqual(buffer, [i for i in range(1, 21)] +
                                 ['what?', 'end'])

    def test_for_2(self):
        tup = (1, 2, 3)
        refs_before = sys.getrefcount(tup)

        async def foo():
            async for i in tup:
                print('never going to happen')

        with self.assertRaisesRegex(
                TypeError, "async for' requires an object.*__aiter__.*tuple"):

            run_async(foo())

        self.assertEqual(sys.getrefcount(tup), refs_before)

    def test_for_3(self):
        class I(object):
            def __aiter__(self):
                return self

        aiter = I()
        refs_before = sys.getrefcount(aiter)

        async def foo():
            async for i in aiter:
                print('never going to happen')

        with self.assertRaisesRegex(
                TypeError,
                "async for' received an invalid object.*__aiter.*\: I"):

            run_async(foo())

        self.assertEqual(sys.getrefcount(aiter), refs_before)

    def test_for_4(self):
        class I(object):
            def __aiter__(self):
                return self

            def __anext__(self):
                return ()

        aiter = I()
        refs_before = sys.getrefcount(aiter)

        async def foo():
            async for i in aiter:
                print('never going to happen')

        with self.assertRaisesRegex(
                TypeError,
                "async for' received an invalid object.*__anext__.*tuple"):

            run_async(foo())

        self.assertEqual(sys.getrefcount(aiter), refs_before)

    def test_for_5(self):
        class I(object):
            async def __aiter__(self):
                return self

            def __anext__(self):
                return 123

        async def foo():
            with self.assertWarnsRegex(PendingDeprecationWarning, "legacy"):
                async for i in I():
                    print('never going to happen')

        with self.assertRaisesRegex(
                TypeError,
                "async for' received an invalid object.*__anext.*int"):

            run_async(foo())

    def test_for_6(self):
        I = 0

        class Manager(object):
            async def __aenter__(self):
                nonlocal I
                I += 10000

            async def __aexit__(self, *args):
                nonlocal I
                I += 100000

        class Iterable(object):
            def __init__(self):
                self.i = 0

            def __aiter__(self):
                return self

            async def __anext__(self):
                if self.i > 10:
                    raise StopAsyncIteration
                self.i += 1
                return self.i

        ##############

        manager = Manager()
        iterable = Iterable()
        mrefs_before = sys.getrefcount(manager)
        irefs_before = sys.getrefcount(iterable)

        async def main():
            nonlocal I

            async with manager:
                async for i in iterable:
                    I += 1
            I += 1000

        run_async(main())
        self.assertEqual(I, 111011)

        self.assertEqual(sys.getrefcount(manager), mrefs_before)
        self.assertEqual(sys.getrefcount(iterable), irefs_before)

        ##############

        async def main():
            nonlocal I

            async with Manager():
                async for i in Iterable():
                    I += 1
            I += 1000

            async with Manager():
                async for i in Iterable():
                    I += 1
            I += 1000

        run_async(main())
        self.assertEqual(I, 333033)

        ##############

        async def main():
            nonlocal I

            async with Manager():
                I += 100
                async for i in Iterable():
                    I += 1
                else:
                    I += 10000000
            I += 1000

            async with Manager():
                I += 100
                async for i in Iterable():
                    I += 1
                else:
                    I += 10000000
            I += 1000

        run_async(main())
        self.assertEqual(I, 20555255)

    def test_for_7(self):
        CNT = 0
        class AI(object):
            async def __aiter__(self):
                1/0
        async def foo():
            nonlocal CNT
            with self.assertWarnsRegex(PendingDeprecationWarning, "legacy"):
                async for i in AI():
                    CNT += 1
            CNT += 10
        with self.assertRaises(ZeroDivisionError):
            run_async(foo())
        self.assertEqual(CNT, 0)

    def test_for_8(self):
        CNT = 0
        class AI:
            def __aiter__(self):
                1/0
        async def foo():
            nonlocal CNT
            async for i in AI():
                CNT += 1
            CNT += 10
        with self.assertRaises(ZeroDivisionError):
            run_async(foo())
            with warnings.catch_warnings():
                warnings.simplefilter("error")
                # Test that if __aiter__ raises an exception it propagates
                # without any kind of warning.
                run_async(foo())
        self.assertEqual(CNT, 0)

    @min_py27
    def test_for_9(self):
        # Test that PendingDeprecationWarning can safely be converted into
        # an exception (__aiter__ should not have a chance to raise
        # a ZeroDivisionError.)
        class AI:
            async def __aiter__(self):
                1/0
        async def foo():
            async for i in AI():
                pass

        with self.assertRaises(PendingDeprecationWarning):
            with warnings.catch_warnings():
                warnings.simplefilter("error")
                run_async(foo())

    @min_py27
    def test_for_10(self):
        # Test that PendingDeprecationWarning can safely be converted into
        # an exception.
        class AI:
            async def __aiter__(self):
                pass
        async def foo():
            async for i in AI():
                pass

        with self.assertRaises(PendingDeprecationWarning):
            with warnings.catch_warnings():
                warnings.simplefilter("error")
                run_async(foo())

    def test_copy(self):
        async def func(): pass
        coro = func()
        with self.assertRaises(TypeError):
            copy.copy(coro)

        aw = coro.__await__()
        try:
            with self.assertRaises(TypeError):
                copy.copy(aw)
        finally:
            aw.close()

    def test_pickle(self):
        async def func(): pass
        coro = func()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(coro, proto)

        aw = coro.__await__()
        try:
            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
                with self.assertRaises((TypeError, pickle.PicklingError)):
                    pickle.dumps(aw, proto)
        finally:
            aw.close()


class CoroAsyncIOCompatTest(unittest.TestCase):

    def test_asyncio_1(self):
        import asyncio

        class MyException(Exception):
            pass

        buffer = []

        class CM(object):
            async def __aenter__(self):
                buffer.append(1)
                await asyncio.sleep(0.01)
                buffer.append(2)
                return self

            async def __aexit__(self, exc_type, exc_val, exc_tb):
                await asyncio.sleep(0.01)
                buffer.append(exc_type.__name__)

        async def f():
            async with CM() as c:
                await asyncio.sleep(0.01)
                raise MyException
            buffer.append('unreachable')

        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(f())
        except MyException:
            pass
        finally:
            loop.close()
            asyncio.set_event_loop(None)

        self.assertEqual(buffer, [1, 2, 'MyException'])


class SysSetCoroWrapperTest(unittest.TestCase):
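    # sys.set_coroutine_wrapper() (added in Python 3.5) installs a callable
    # that is invoked for every newly created coroutine object.  These tests
    # check that such a wrapper is also applied to compiled coroutines.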

    def test_set_wrapper_1(self):
        async def foo():
            return 'spam'

        wrapped = None
        def wrap(gen):
            nonlocal wrapped
            wrapped = gen
            return gen

        self.assertIsNone(sys.get_coroutine_wrapper())

        sys.set_coroutine_wrapper(wrap)
        self.assertIs(sys.get_coroutine_wrapper(), wrap)
        try:
            f = foo()
            self.assertTrue(wrapped)

            self.assertEqual(run_async(f), ([], 'spam'))
        finally:
            sys.set_coroutine_wrapper(None)

        self.assertIsNone(sys.get_coroutine_wrapper())

        wrapped = None
        with silence_coro_gc():
            foo()
        self.assertFalse(wrapped)

    def test_set_wrapper_2(self):
        self.assertIsNone(sys.get_coroutine_wrapper())
        with self.assertRaisesRegex(TypeError, "callable expected, got int"):
            sys.set_coroutine_wrapper(1)
        self.assertIsNone(sys.get_coroutine_wrapper())

    def test_set_wrapper_3(self):
        async def foo():
            return 'spam'

        def wrapper(coro):
            async def wrap(coro):
                return await coro
            return wrap(coro)

        sys.set_coroutine_wrapper(wrapper)
        try:
            with silence_coro_gc(), self.assertRaisesRegex(
                RuntimeError,
                "coroutine wrapper.*\.wrapper at 0x.*attempted to "
                "recursively wrap .* wrap .*"):

                foo()
        finally:
            sys.set_coroutine_wrapper(None)

    def test_set_wrapper_4(self):
        @types_coroutine
        def foo():
            return 'spam'

        wrapped = None
        def wrap(gen):
            nonlocal wrapped
            wrapped = gen
            return gen

        sys.set_coroutine_wrapper(wrap)
        try:
            foo()
            self.assertIs(
                wrapped, None,
                "generator-based coroutine was wrapped via "
                "sys.set_coroutine_wrapper")
        finally:
            sys.set_coroutine_wrapper(None)


class CAPITest(unittest.TestCase):

    def test_tp_await_1(self):
        from _testcapi import awaitType as at

        async def foo():
            future = at(iter([1]))
            return (await future)

        self.assertEqual(foo().send(None), 1)

    def test_tp_await_2(self):
        # Test tp_await to __await__ mapping
        from _testcapi import awaitType as at
        future = at(iter([1]))
        self.assertEqual(next(future.__await__()), 1)

    def test_tp_await_3(self):
        from _testcapi import awaitType as at

        async def foo():
            future = at(1)
            return (await future)

        with self.assertRaisesRegex(
                TypeError, "__await__.*returned non-iterator of type 'int'"):
            self.assertEqual(foo().send(None), 1)


# disable some tests that only apply to CPython

# TODO?
if True or sys.version_info < (3, 5):
    SysSetCoroWrapperTest = None
    CAPITest = None

if sys.version_info < (3, 5):  # (3, 4, 4)
    CoroAsyncIOCompatTest = None
else:
    try:
        import asyncio
    except ImportError:
        CoroAsyncIOCompatTest = None

if __name__=="__main__":
    unittest.main()
Cython-0.26.1/tests/run/float_division.pyx0000664000175000017500000000613312542002467021356 0ustar  stefanstefan00000000000000# mode: run
# tag: division


def int_by_float():
    """
    >>> int_by_float()
    0.5
    """
    return 1 / 2.0


def float_by_int():
    """
    >>> float_by_int()
    2.0
    """
    return 2.0 / 1


def float_by_float():
    """
    >>> float_by_float()
    1.5
    """
    return 3.0 / 2.0


def div_1_by(x):
    """
    >>> div_1_by(1.0)
    1.0
    >>> div_1_by(2.0)
    0.5
    >>> div_1_by(0.5)
    2.0
    >>> 1.0 / float('inf')
    0.0
    >>> div_1_by(float('inf'))
    0.0
    >>> div_1_by(float('-inf'))
    -0.0
    >>> div_1_by(float('nan'))
    nan
    """
    return 1.0 / x


def div_by_2(x):
    """
    >>> div_by_2(1.0)
    0.5
    >>> float('inf') / 2.0
    inf
    >>> div_by_2(float('inf'))
    inf
    >>> div_by_2(float('-inf'))
    -inf
    >>> float('nan') / 2.0
    nan
    >>> div_by_2(float('nan'))
    nan
    """
    return x / 2.0


def div_by_neg_2(x):
    """
    >>> div_by_neg_2(1.0)
    -0.5
    >>> div_by_neg_2(-1.0)
    0.5
    >>> (-2**14) / (-2.0)
    8192.0
    >>> div_by_neg_2(-2**14)
    8192.0
    >>> (-2**52) / (-2.0)
    2251799813685248.0
    >>> div_by_neg_2(-2**52)
    2251799813685248.0
    >>> (-2**53-1) / (-2.0)
    4503599627370496.0
    >>> div_by_neg_2(-2**53-1)
    4503599627370496.0
    >>> float('inf') / -2.0
    -inf
    >>> div_by_neg_2(float('inf'))
    -inf
    >>> div_by_neg_2(float('-inf'))
    inf
    >>> float('nan') / -2.0
    nan
    >>> div_by_neg_2(float('nan'))
    nan
    """
    return x / -2.0


def div_neg_2_by(x):
    """
    >>> div_neg_2_by(1.0)
    -2.0
    >>> div_neg_2_by(-1)
    2.0
    >>> div_neg_2_by(-2.0)
    1.0
    >>> div_neg_2_by(-2)
    1.0
    >>> -2.0 / float('inf')
    -0.0
    >>> div_neg_2_by(float('inf'))
    -0.0
    >>> div_neg_2_by(float('-inf'))
    0.0
    >>> float('nan') / -2.0
    nan
    >>> div_neg_2_by(float('nan'))
    nan
    """
    return (-2.0) / x


def div_by_nan(x):
    """
    >>> 1.0 / float('nan')
    nan
    >>> div_by_nan(1.0)
    nan
    >>> float('nan') / float('nan')
    nan
    >>> div_by_nan(float('nan'))
    nan
    >>> float('inf') / float('nan')
    nan
    >>> div_by_nan(float('inf'))
    nan
    """
    return x / float("nan")


def div_nan_by(x):
    """
    >>> float('nan') / 1.0
    nan
    >>> div_nan_by(1.0)
    nan
    >>> float('nan') / float('nan')
    nan
    >>> div_nan_by(float('nan'))
    nan
    """
    return float("nan") / x


def div_by_inf(x):
    """
    >>> 1 / float('inf')
    0.0
    >>> div_by_inf(1)
    0.0
    >>> 1.0 / float('inf')
    0.0
    >>> div_by_inf(1.0)
    0.0
    >>> div_by_inf(float('inf'))
    nan
    """
    return x / float("inf")


def div_inf_by(x):
    """
    >>> float('inf') / 1.0
    inf
    >>> div_inf_by(1.0)
    inf
    >>> float('inf') / float('nan')
    nan
    >>> div_inf_by(float('nan'))
    nan
    >>> float('inf') / float('-inf')
    nan
    >>> div_inf_by(float('-inf'))
    nan
    """
    return float("inf") / x


def div_neg_inf_by(x):
    """
    >>> float('-inf') / 1.0
    -inf
    >>> div_neg_inf_by(1.0)
    -inf
    >>> float('-inf') / -1.0
    inf
    >>> div_neg_inf_by(-1.0)
    inf
    """
    return float("-inf") / x
Cython-0.26.1/tests/run/cpp_stl_conversion.pyx0000664000175000017500000001205413023021033022236 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror
# distutils: extra_compile_args=-std=c++0x

import sys
from libcpp.map cimport map
from libcpp.unordered_map cimport unordered_map
from libcpp.set cimport set as cpp_set
from libcpp.unordered_set cimport unordered_set
from libcpp.string cimport string
from libcpp.pair cimport pair
from libcpp.vector cimport vector
from libcpp.list cimport list as cpp_list
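
# Aliases for builtins whose names differ between Python 2 and 3.  Inside
# Cython code, set/xrange/unicode are always understood, so binding them to
# py_* names lets the doctests below refer to them portably.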

py_set = set
py_xrange = xrange
py_unicode = unicode

cdef string add_strings(string a, string b):
    return a + b

def normalize(bytes b):
    if sys.version_info[0] >= 3:
        return b.decode("ascii")
    else:
        return b

def test_string(o):
    """
    >>> normalize(test_string("abc".encode('ascii')))
    'abc'
    >>> normalize(test_string("abc\\x00def".encode('ascii')))
    'abc\\x00def'
    """
    cdef string s = o
    return s

def test_encode_to_string(o):
    """
    >>> normalize(test_encode_to_string('abc'))
    'abc'
    >>> normalize(test_encode_to_string('abc\\x00def'))
    'abc\\x00def'
    """
    cdef string s = o.encode('ascii')
    return s

def test_encode_to_string_cast(o):
    """
    >>> normalize(test_encode_to_string_cast('abc'))
    'abc'
    >>> normalize(test_encode_to_string_cast('abc\\x00def'))
    'abc\\x00def'
    """
    s = o.encode('ascii')
    return s

def test_unicode_encode_to_string(unicode o):
    """
    >>> normalize(test_unicode_encode_to_string(py_unicode('abc')))
    'abc'
    >>> normalize(test_unicode_encode_to_string(py_unicode('abc\\x00def')))
    'abc\\x00def'
    """
    cdef string s = o.encode('ascii')
    return s

def test_string_call(a, b):
    """
    >>> normalize(test_string_call("abc".encode('ascii'), "xyz".encode('ascii')))
    'abcxyz'
    """
    return add_strings(a, b)

def test_int_vector(o):
    """
    >>> test_int_vector([1, 2, 3])
    [1, 2, 3]
    >>> test_int_vector((1, 10, 100))
    [1, 10, 100]
    >>> test_int_vector(py_xrange(1,10,2))
    [1, 3, 5, 7, 9]
    >>> test_int_vector([10**20])       #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    """
    cdef vector[int] v = o
    return v

def test_string_vector(s):
    """
    >>> list(map(normalize, test_string_vector('ab cd ef gh'.encode('ascii'))))
    ['ab', 'cd', 'ef', 'gh']
    """
    cdef vector[string] cpp_strings = s.split()
    return cpp_strings

cdef list convert_string_vector(vector[string] vect):
    return vect

def test_string_vector_temp_funcarg(s):
    """
    >>> list(map(normalize, test_string_vector_temp_funcarg('ab cd ef gh'.encode('ascii'))))
    ['ab', 'cd', 'ef', 'gh']
    """
    return convert_string_vector(s.split())

def test_double_vector(o):
    """
    >>> test_double_vector([1, 2, 3])
    [1.0, 2.0, 3.0]
    >>> test_double_vector([10**20])
    [1e+20]
    """
    cdef vector[double] v = o
    return v

def test_repeated_double_vector(a, b, int n):
    """
    >>> test_repeated_double_vector(1, 1.5, 3)
    [1.0, 1.5, 1.0, 1.5, 1.0, 1.5]
    """
    cdef vector[double] v = [a, b] * n
    return v

ctypedef int my_int

def test_typedef_vector(o):
    """
    >>> test_typedef_vector([1, 2, 3])
    [1, 2, 3]
    >>> test_typedef_vector([1, 2, 3**100])       #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...
    >>> test_typedef_vector([1, 2, None])       #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: an integer is required
    """
    cdef vector[my_int] v = o
    return v

def test_pair(o):
    """
    >>> test_pair((1, 2))
    (1, 2.0)
    """
    cdef pair[long, double] p = o
    return p

def test_list(o):
    """
    >>> test_list([1, 2, 3])
    [1, 2, 3]
    """
    cdef cpp_list[int] l = o
    return l

def test_set(o):
    """
    >>> sorted(test_set([1, 2, 3]))
    [1, 2, 3]
    >>> sorted(test_set([1, 2, 3, 3]))
    [1, 2, 3]
    >>> type(test_set([])) is py_set
    True
    """
    cdef cpp_set[long] s = o
    return s

def test_unordered_set(o):
   """
   >>> sorted(test_unordered_set([1, 2, 3]))
   [1, 2, 3]
   >>> sorted(test_unordered_set([1, 2, 3, 3]))
   [1, 2, 3]
   >>> type(test_unordered_set([])) is py_set
   True
   """
   cdef unordered_set[long] s = o
   return s

def test_map(o):
    """
    >>> test_map({1: 1.0, 2: 0.5, 3: 0.25})
    {1: 1.0, 2: 0.5, 3: 0.25}
    """
    cdef map[int, double] m = o
    return m

def test_unordered_map(o):
   """
   >>> d = test_map({1: 1.0, 2: 0.5, 3: 0.25})
   >>> sorted(d)
   [1, 2, 3]
   >>> (d[1], d[2], d[3])
   (1.0, 0.5, 0.25)
   """
   cdef unordered_map[int, double] m = o
   return m

def test_nested(o):
    """
    >>> test_nested({})
    {}
    >>> d = test_nested({(1.0, 2.0): [1, 2, 3], (1.0, 0.5): [1, 10, 100]})
    >>> type(d) is dict or type(d)
    True
    >>> sorted(d)
    [(1.0, 0.5), (1.0, 2.0)]
    >>> d[(1.0, 0.5)]
    [1, 10, 100]
    >>> d[(1.0, 2.0)]
    [1, 2, 3]
    """
    cdef map[pair[double, double], vector[int]] m = o
    return m

cpdef enum Color:
    RED = 0
    GREEN
    BLUE

def test_enum_map(o):
    """
    >>> test_enum_map({RED: GREEN})
    {0: 1}
    """
    cdef map[Color, Color] m = o
    return m
Cython-0.26.1/tests/run/unpack_fused.pyx0000664000175000017500000000541212542002467021013 0ustar  stefanstefan00000000000000
ctypedef fused sequence:
    list
    tuple
    object
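
# For each def function below, the fused "sequence" type makes Cython emit a
# specialisation per member type (list, tuple, plain object) and select one at
# call time based on the type of the argument.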

def unpack_one(sequence it):
    """
    >>> items = [1]
    >>> unpack_one(items)
    1
    >>> unpack_one(iter(items))
    1
    >>> unpack_one(list(items))
    1
    >>> unpack_one(tuple(items))
    1
    """
    a, = it
    return a

def unpack_two(sequence it):
    """
    >>> items = [1,2]
    >>> unpack_two(items)
    (1, 2)
    >>> unpack_two(iter(items))
    (1, 2)
    >>> unpack_two(list(items))
    (1, 2)
    >>> unpack_two(tuple(items))
    (1, 2)
    """
    a,b = it
    return a,b

def unpack_two_int(sequence it):
    """
    >>> items = [1,2]
    >>> unpack_two_int(items)
    (1, 2)
    >>> unpack_two_int(iter(items))
    (1, 2)
    >>> unpack_two_int(list(items))
    (1, 2)
    >>> unpack_two_int(tuple(items))
    (1, 2)

    >>> items = [1, object()]
    >>> unpack_two_int(items)
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> unpack_two_int(iter(items))
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> unpack_two_int(list(items))
    Traceback (most recent call last):
    TypeError: an integer is required
    >>> unpack_two_int(tuple(items))
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    cdef int b
    a,b = it
    return a,b

def unpack_many(sequence it):
    """
    >>> items = range(1,13)
    >>> unpack_many(items)
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many(iter(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many(list(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many(tuple(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    """
    a,b,c,d,e,f,g,h,i,j,k,l = it
    return a,b,c,d,e,f,g,h,i,j,k,l

def unpack_many_int(sequence it):
    """
    >>> items = range(1,13)
    >>> unpack_many_int(items)
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many_int(iter(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many_int(list(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    >>> unpack_many_int(tuple(items))
    (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)

    >>> items = range(1,10)
    >>> unpack_many_int(items)
    Traceback (most recent call last):
    ValueError: need more than 9 values to unpack
    >>> unpack_many_int(iter(items))
    Traceback (most recent call last):
    ValueError: need more than 9 values to unpack
    >>> unpack_many_int(list(items))
    Traceback (most recent call last):
    ValueError: need more than 9 values to unpack
    >>> unpack_many_int(tuple(items))
    Traceback (most recent call last):
    ValueError: need more than 9 values to unpack
    """
    cdef int b
    cdef long f
    cdef Py_ssize_t h
    a,b,c,d,e,f,g,h,i,j,k,l = it
    return a,b,c,d,e,f,g,h,i,j,k,l
Cython-0.26.1/tests/run/kwargproblems.pyx0000664000175000017500000000057412542002467021227 0ustar  stefanstefan00000000000000
def test(**kw):
    """
    >>> d = {1 : 2}
    >>> test(**d)       # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    >>> d
    {1: 2}
    >>> d = {}
    >>> test(**d)
    {'arg': 3}
    >>> d
    {}
    >>> d = {'arg' : 2}
    >>> test(**d)
    {'arg': 3}
    >>> d
    {'arg': 2}
    """
    kw['arg'] = 3
    return kw
Cython-0.26.1/tests/run/cdef_opt.pxd0000664000175000017500000000006112542002467020075 0ustar  stefanstefan00000000000000cdef class A:
    cpdef foo(self, bint a=*, b=*)
Cython-0.26.1/tests/run/temp_alloc_T409.pyx0000664000175000017500000000037412542002467021205 0ustar  stefanstefan00000000000000# ticket: 409
# Extracted from sage/plot/plot3d/index_face_set.pyx:502
# Turns out to be a bug in implementation of PEP 3132 (Extended Iterable Unpacking)

def foo():
    """
    >>> foo()
    ([0, 0], [0, 0])
    """
    a = b = [0,0]
    return a, b
Cython-0.26.1/tests/run/funcexceptreraise.pyx0000664000175000017500000000227212542002467022064 0ustar  stefanstefan00000000000000import sys
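
# These tests check that a bare "raise" inside an except clause re-raises the
# exception currently being handled, even after another exception has been
# raised and caught in between.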

def reraise(f, exc):
    """
    >>> def f(exc): raise exc
    >>> reraise(f, TypeError)
    Traceback (most recent call last):
    TypeError

    >>> def f(exc): raise exc('hiho')
    >>> reraise(f, TypeError)
    Traceback (most recent call last):
    TypeError: hiho
    """
    try:
        f(exc)
    except:
        assert sys.exc_info()[0] is exc, str(sys.exc_info()[1])
        raise

def reraise_original(f, exc, raise_catch):
    """
    >>> def f(exc): raise exc
    >>> def raise_catch_py():
    ...     try: raise ValueError
    ...     except: pass

    >>> reraise_original(f, TypeError, raise_catch_py)
    Traceback (most recent call last):
    TypeError

    >>> reraise_original(f, TypeError, raise_catch_cy)
    Traceback (most recent call last):
    TypeError

    >>> reraise_original(f, TypeError, raise_catch_cy_non_empty)
    Traceback (most recent call last):
    TypeError
    """
    try:
        f(exc)
    except:
        raise_catch()
        assert sys.exc_info()[0] is exc, str(sys.exc_info()[1])
        raise


def raise_catch_cy():
    try: raise ValueError
    except: pass

def raise_catch_cy_non_empty():
    try: raise ValueError
    except:
        a = 1+1
Cython-0.26.1/tests/run/pep448_extended_unpacking.pyx0000664000175000017500000002604112542002467023310 0ustar  stefanstefan00000000000000
cimport cython
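
# PEP 448 "Additional Unpacking Generalizations" allows multiple * and **
# unpackings inside tuple, list, set and dict displays.  The tests below
# exercise literal, iterable and mapping sources for each container type.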


class Iter(object):
    def __init__(self, it=()):
        self.it = iter(it)
    def __iter__(self):
        return self
    def __next__(self):
        return next(self.it)
    next = __next__


class Map(object):
    def __init__(self, mapping={}):
        self.mapping = mapping
    def __iter__(self):
        return iter(self.mapping)
    def keys(self):
        return self.mapping.keys()
    def __getitem__(self, key):
        return self.mapping[key]


#### tuples


@cython.test_fail_if_path_exists(
    "//TupleNode//TupleNode",
    "//MergedSequenceNode",
)
def unpack_tuple_literal():
    """
    >>> unpack_tuple_literal()
    (1, 2, 4, 5)
    """
    return (*(1, 2, *(4, 5)),)


def unpack_tuple_literal_mult():
    """
    >>> unpack_tuple_literal_mult()
    (1, 2, 4, 5, 4, 5, 1, 2, 4, 5, 4, 5, 1, 2, 4, 5, 4, 5)
    """
    return (*((1, 2, *((4, 5) * 2)) * 3),)


@cython.test_fail_if_path_exists(
    "//TupleNode//TupleNode",
    "//MergedSequenceNode",
)
def unpack_tuple_literal_empty():
    """
    >>> unpack_tuple_literal_empty()
    ()
    """
    return (*(*(), *()), *(), *(*(*(),),))


def unpack_tuple_simple(it):
    """
    >>> unpack_tuple_simple([])
    ()
    >>> unpack_tuple_simple(set())
    ()
    >>> unpack_tuple_simple(Iter())
    ()

    >>> unpack_tuple_simple([1])
    (1,)

    >>> unpack_tuple_simple([2, 1])
    (2, 1)
    >>> unpack_tuple_simple((2, 1))
    (2, 1)
    >>> sorted(unpack_tuple_simple(set([2, 1])))
    [1, 2]
    >>> unpack_tuple_simple(Iter([2, 1]))
    (2, 1)
    """
    return (*it,)


def unpack_tuple_from_iterable(it):
    """
    >>> unpack_tuple_from_iterable([1, 2, 3])
    (1, 2, 1, 2, 3, 1, 1, 2, 3, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 3)
    >>> unpack_tuple_from_iterable((1, 2, 3))
    (1, 2, 1, 2, 3, 1, 1, 2, 3, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 3)
    >>> sorted(unpack_tuple_from_iterable(set([1, 2, 3])))
    [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3]

    >>> unpack_tuple_from_iterable([1, 2])
    (1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2)
    >>> sorted(unpack_tuple_from_iterable(set([1, 2])))
    [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2]
    >>> unpack_tuple_from_iterable(Iter([1, 2]))
    (1, 2, 1, 2, 1, 2, 1)

    >>> unpack_tuple_from_iterable([3])
    (1, 2, 3, 1, 3, 3, 3, 2, 1, 3)
    >>> unpack_tuple_from_iterable(set([3]))
    (1, 2, 3, 1, 3, 3, 3, 2, 1, 3)
    >>> unpack_tuple_from_iterable(Iter([3]))
    (1, 2, 3, 1, 2, 1)

    >>> unpack_tuple_from_iterable([])
    (1, 2, 1, 2, 1)
    >>> unpack_tuple_from_iterable(set([]))
    (1, 2, 1, 2, 1)
    >>> unpack_tuple_from_iterable([])
    (1, 2, 1, 2, 1)
    >>> unpack_tuple_from_iterable(Iter([1, 2, 3]))
    (1, 2, 1, 2, 3, 1, 2, 1)
    """
    return (1, 2, *it, 1, *(*it, *it), *it, 2, 1, *it)


def unpack_tuple_keep_originals(a, b, c):
    """
    >>> a = b = [1, 2]
    >>> c = [3, 4]
    >>> unpack_tuple_keep_originals(a, b, c)
    (1, 2, 1, 2, 2, 3, 4)
    >>> a
    [1, 2]
    >>> b
    [1, 2]
    >>> c
    [3, 4]

    >>> a = b = (1, 2)
    >>> c = (3, 4)
    >>> unpack_tuple_keep_originals(a, b, c)
    (1, 2, 1, 2, 2, 3, 4)
    >>> a
    (1, 2)
    >>> b
    (1, 2)
    >>> c
    (3, 4)
    """
    return (*a, *b, 2, *c)


#### lists


@cython.test_fail_if_path_exists(
    "//ListNode//ListNode",
    "//MergedSequenceNode",
)
def unpack_list_literal():
    """
    >>> unpack_list_literal()
    [1, 2, 4, 5]
    """
    return [*[1, 2, *[4, 5]]]


def unpack_list_literal_mult():
    """
    >>> unpack_list_literal_mult()
    [1, 2, 4, 5, 4, 5, 1, 2, 4, 5, 4, 5, 1, 2, 4, 5, 4, 5]
    """
    return [*([1, 2, *([4, 5] * 2)] * 3)]


@cython.test_fail_if_path_exists(
    "//ListNode//ListNode",
    "//MergedSequenceNode",
)
def unpack_list_literal_empty():
    """
    >>> unpack_list_literal_empty()
    []
    """
    return [*[*[], *[]], *[], *[*[*[]]]]


def unpack_list_simple(it):
    """
    >>> unpack_list_simple([])
    []
    >>> unpack_list_simple(set())
    []
    >>> unpack_list_simple(Iter())
    []

    >>> unpack_list_simple([1])
    [1]

    >>> unpack_list_simple([2, 1])
    [2, 1]
    >>> unpack_list_simple((2, 1))
    [2, 1]
    >>> sorted(unpack_list_simple(set([2, 1])))
    [1, 2]
    >>> unpack_list_simple(Iter([2, 1]))
    [2, 1]
    """
    return [*it]


def unpack_list_from_iterable(it):
    """
    >>> unpack_list_from_iterable([1, 2, 3])
    [1, 2, 1, 2, 3, 1, 1, 2, 3, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 3]
    >>> unpack_list_from_iterable((1, 2, 3))
    [1, 2, 1, 2, 3, 1, 1, 2, 3, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 3]
    >>> sorted(unpack_list_from_iterable(set([1, 2, 3])))
    [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3]

    >>> unpack_list_from_iterable([1, 2])
    [1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2]
    >>> sorted(unpack_list_from_iterable(set([1, 2])))
    [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2]
    >>> unpack_list_from_iterable(Iter([1, 2]))
    [1, 2, 1, 2, 1, 2, 1]

    >>> unpack_list_from_iterable([3])
    [1, 2, 3, 1, 3, 3, 3, 2, 1, 3]
    >>> unpack_list_from_iterable(set([3]))
    [1, 2, 3, 1, 3, 3, 3, 2, 1, 3]
    >>> unpack_list_from_iterable(Iter([3]))
    [1, 2, 3, 1, 2, 1]

    >>> unpack_list_from_iterable([])
    [1, 2, 1, 2, 1]
    >>> unpack_list_from_iterable(set([]))
    [1, 2, 1, 2, 1]
    >>> unpack_list_from_iterable([])
    [1, 2, 1, 2, 1]
    >>> unpack_list_from_iterable(Iter([1, 2, 3]))
    [1, 2, 1, 2, 3, 1, 2, 1]
    """
    return [1, 2, *it, 1, *[*it, *it], *it, 2, 1, *it]


def unpack_list_keep_originals(a, b, c):
    """
    >>> a = b = [1, 2]
    >>> c = [3, 4]
    >>> unpack_list_keep_originals(a, b, c)
    [1, 2, 1, 2, 2, 3, 4]
    >>> a
    [1, 2]
    >>> b
    [1, 2]
    >>> c
    [3, 4]
    """
    return [*a, *b, 2, *c]


###### sets


@cython.test_fail_if_path_exists(
    "//SetNode//SetNode",
    "//MergedSequenceNode",
)
def unpack_set_literal():
    """
    >>> s = unpack_set_literal()
    >>> s == set([1, 2, 4, 5]) or s
    True
    """
    return {*{1, 2, *{4, 5}}}


def unpack_set_simple(it):
    """
    >>> s = unpack_set_simple([])
    >>> s == set([]) or s
    True

    >>> s = unpack_set_simple(set())
    >>> s == set([]) or s
    True

    >>> s = unpack_set_simple(Iter())
    >>> s == set([]) or s
    True

    >>> s = unpack_set_simple([1])
    >>> s == set([1]) or s
    True

    >>> s = unpack_set_simple([2, 1])
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_simple((2, 1))
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_simple(set([2, 1]))
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_simple(Iter([2, 1]))
    >>> s == set([1, 2]) or s
    True
    """
    return {*it}


def unpack_set_from_iterable(it):
    """
    >>> s = unpack_set_from_iterable([1, 2, 3])
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable([1, 2])
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable(set([1, 2]))
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable(Iter([1, 2]))
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable([3])
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable(set([3]))
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable(Iter([3]))
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable([])
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable(set([]))
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable([])
    >>> s == set([1, 2]) or s
    True

    >>> s = unpack_set_from_iterable((1, 2, 3))
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable(set([1, 2, 3]))
    >>> s == set([1, 2, 3]) or s
    True

    >>> s = unpack_set_from_iterable(Iter([1, 2, 3]))
    >>> s == set([1, 2, 3]) or s
    True
    """
    return {1, 2, *it, 1, *{*it, *it}, *it, 2, 1, *it, *it}


def unpack_set_keep_originals(a, b, c):
    """
    >>> a = b = set([1, 2])
    >>> c = set([3, 4])
    >>> s = unpack_set_keep_originals(a, b, c)
    >>> s == set([1, 2, 3, 4]) or s
    True
    >>> a == set([1, 2]) or a
    True
    >>> b == set([1, 2]) or b
    True
    >>> c == set([3, 4]) or c
    True
    """
    return {*a, *b, 2, *c}


#### dicts


@cython.test_fail_if_path_exists(
    "//DictNode//DictNode",
    "//MergedDictNode",
)
def unpack_dict_literal():
    """
    >>> d = unpack_dict_literal()
    >>> d == dict(a=1, b=2, c=4, d=5) or d
    True
    """
    return {**{'a': 1, 'b': 2, **{'c': 4, 'd': 5}}}


@cython.test_fail_if_path_exists(
    "//DictNode//DictNode",
    "//MergedDictNode",
)
def unpack_dict_literal_empty():
    """
    >>> unpack_dict_literal_empty()
    {}
    """
    return {**{**{}, **{}}, **{}, **{**{**{}}}}


def unpack_dict_simple(it):
    """
    >>> d = unpack_dict_simple({})
    >>> d == {} or d
    True

    >>> d = unpack_dict_simple([])
    >>> d == {} or d
    True

    >>> d = unpack_dict_simple(set())
    >>> d == {} or d
    True

    >>> d = unpack_dict_simple(Iter())
    >>> d == {} or d
    True

    >>> d = unpack_dict_simple(Map())
    >>> d == {} or d
    True

    >>> d = unpack_dict_simple(dict(a=1))
    >>> d == dict(a=1) or d
    True

    >>> d = unpack_dict_simple(dict(a=1, b=2))
    >>> d == dict(a=1, b=2) or d
    True

    >>> d = unpack_dict_simple(Map(dict(a=1, b=2)))
    >>> d == dict(a=1, b=2) or d
    True
    """
    return {**it}


def unpack_dict_from_iterable(it):
    """
    >>> d = unpack_dict_from_iterable(dict(a=1, b=2, c=3))
    >>> d == dict(a=1, b=2, c=3) or d
    True

    >>> d = unpack_dict_from_iterable(dict(a=1, b=2))
    >>> d == dict(a=1, b=2) or d
    True

    >>> d = unpack_dict_from_iterable(Map(dict(a=1, b=2)))
    >>> d == dict(a=1, b=2) or d
    True

    >>> d = unpack_dict_from_iterable(dict(a=3))
    >>> d == dict(a=3, b=5) or d
    True

    >>> d = unpack_dict_from_iterable(Map(dict(a=3)))
    >>> d == dict(a=3, b=5) or d
    True

    >>> d = unpack_dict_from_iterable({})
    >>> d == dict(a=4, b=5) or d
    True

    >>> d = unpack_dict_from_iterable(Map())
    >>> d == dict(a=4, b=5) or d
    True

    >>> d = unpack_dict_from_iterable(Iter())
    Traceback (most recent call last):
    TypeError: 'Iter' object is not a mapping

    >>> d = unpack_dict_from_iterable([])
    Traceback (most recent call last):
    TypeError: 'list' object is not a mapping

    >>> d = unpack_dict_from_iterable(dict(b=2, c=3))
    >>> d == dict(a=4, b=2, c=3) or d
    True

    >>> d = unpack_dict_from_iterable(Map(dict(b=2, c=3)))
    >>> d == dict(a=4, b=2, c=3) or d
    True

    >>> d = unpack_dict_from_iterable(dict(a=2, c=3))
    >>> d == dict(a=2, b=5, c=3) or d
    True

    >>> d = unpack_dict_from_iterable(Map(dict(a=2, c=3)))
    >>> d == dict(a=2, b=5, c=3) or d
    True
    """
    return {'a': 2, 'b': 3, **it, 'a': 1, **{**it, **it}, **it, 'a': 4, 'b': 5, **it, **it}


def unpack_dict_keep_originals(a, b, c):
    """
    >>> a = b = {1: 2}
    >>> c = {2: 3, 4: 5}
    >>> d = unpack_dict_keep_originals(a, b, c)
    >>> d == {1: 2, 2: 3, 4: 5} or d
    True
    >>> a
    {1: 2}
    >>> b
    {1: 2}
    >>> c == {2: 3, 4: 5} or c
    True
    """
    return {**a, **b, 2: 4, **c}
Cython-0.26.1/tests/run/pycmp.pyx0000664000175000017500000000102512542002467017470 0ustar  stefanstefan00000000000000def f():
    """
    >>> f()
    """
    cdef int bool, int1, int2
    cdef object obj1, obj2
    int1 = 0
    int2 = 0
    obj1 = 1
    obj2 = 2
    bool = obj1 == obj2
    assert not bool
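    # '<>' is the legacy Python 2 inequality operator; Cython still accepts it.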
    bool = obj1 <> int2
    assert bool
    bool = int1 == obj2
    assert not bool
    bool = obj1 is obj2
    assert not bool
    bool = obj1 is not obj2
    assert bool

def g():
    """
    >>> g()
    """
    cdef int bool
    obj1 = 1
    obj2 = []
    bool = obj1 in obj2
    assert not bool
    bool = obj1 not in obj2
    assert bool
Cython-0.26.1/tests/run/__getattribute_subclasses__.pyx0000664000175000017500000001743213150045407024074 0ustar  stefanstefan00000000000000# mode: run

# __getattribute__ and __getattr__ special methods and subclasses.
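#
# Background: every attribute access goes through __getattribute__ first;
# __getattr__ is only consulted as a fallback when __getattribute__ raises
# AttributeError.  The classes below exercise that ordering across the
# cdef / Python subclass combinations defined here.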

cdef class boring:
    cdef readonly int boring_member
    cdef readonly int getattr_called
    cdef int getattribute_called
    def __init__(self):
        self.boring_member = 10

cdef class getattr_boring(boring):
    """
    getattr does not override members.

    >>> a = getattr_boring()
    >>> a.boring_member
    10
    >>> a.getattr_called
    0
    >>> print(a.resolved_by)
    getattr_boring
    >>> a.getattr_called
    1
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattr_called
    2
    """
    def __getattr__(self,n):
        self.getattr_called += 1
        if n == 'resolved_by':
            return 'getattr_boring'
        elif n == 'getattr_boring':
            return True
        else:
            raise AttributeError


# currently fails, see #1793
#class getattr_boring_py(getattr_boring):
#    __doc__ = getattr_boring.__doc__.replace(
#        'getattr_boring()', 'getattr_boring_py()')


cdef class getattribute_boring(boring):
    """
    getattribute overrides members.

    >>> a = getattribute_boring()
    >>> a.getattribute_called
    1
    >>> a.boring_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    3
    >>> print(a.resolved_by)
    getattribute_boring
    >>> a.getattribute_called
    5
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    7
    """
    def __getattribute__(self,n):
        self.getattribute_called += 1
        if n == 'resolved_by':
            return 'getattribute_boring'
        elif n == 'getattribute_boring':
            return True
        elif n == 'getattribute_called':
            return self.getattribute_called
        else:
            raise AttributeError


class getattribute_boring_py(getattribute_boring):
    __doc__ = getattribute_boring.__doc__.replace(
        'getattribute_boring()', 'getattribute_boring_py()')


cdef class _getattr:
    cdef readonly int getattr_called
    def __getattr__(self,n):
        self.getattr_called += 1
        if n == 'resolved_by':
            return '_getattr'
        elif n == '_getattr':
            return True
        elif n == 'getattr_called':
            # must only get here if __getattribute__ is overridden
            assert 'getattribute' in type(self).__name__
            return self.getattr_called
        else:
            raise AttributeError


class getattr_py(_getattr):
    """
    getattr is inherited.

    >>> a = getattr_py()
    >>> a.getattr_called
    0
    >>> print(a.resolved_by)
    _getattr
    >>> a.getattr_called
    1
    >>> print(a._getattr)
    True
    >>> a.getattr_called
    2
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError

    # currently fails, see #1793
    #>>> a.getattr_called
    #3
    """


cdef class _getattribute:
    cdef int getattribute_called
    def __getattribute__(self,n):
        self.getattribute_called += 1
        if n == 'resolved_by':
            return '_getattribute'
        elif n == '_getattribute':
            return True
        elif n == 'getattribute_called':
            return self.getattribute_called
        else:
            raise AttributeError


class getattribute_py(_getattribute):
    """
    getattribute is inherited.

    >>> a = getattribute_py()
    >>> a.getattribute_called
    1
    >>> print(a.resolved_by)
    _getattribute
    >>> a.getattribute_called
    3
    >>> print(a._getattribute)
    True
    >>> a.getattribute_called
    5
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    7
    """


cdef class boring_getattribute(_getattribute):
    cdef readonly int boring_getattribute_member

cdef class boring_boring_getattribute(boring_getattribute):
    """
    getattribute is inherited.

    >>> a = boring_boring_getattribute()
    >>> a.getattribute_called
    1
    >>> a.boring_getattribute_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    3
    >>> a.boring_boring_getattribute_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    5
    >>> print(a.resolved_by)
    _getattribute
    >>> a.getattribute_called
    7
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> a.getattribute_called
    9
    """
    cdef readonly int boring_boring_getattribute_member


class boring_boring_getattribute_py(boring_boring_getattribute):
    __doc__ = boring_boring_getattribute.__doc__.replace(
        'boring_boring_getattribute()', 'boring_boring_getattribute_py()')


cdef class boring_getattr(_getattr):
    cdef readonly int boring_getattr_member

cdef class boring_boring_getattr(boring_getattr):
    cdef readonly int boring_boring_getattr_member

cdef class getattribute_boring_boring_getattr(boring_boring_getattr):
    """
    __getattribute__ is always tried first, then __getattr__, regardless of where
    in the inheritance hierarchy they came from.

    >>> a = getattribute_boring_boring_getattr()
    >>> (a.getattr_called, a.getattribute_called)
    (1, 2)
    >>> print(a.resolved_by)
    getattribute_boring_boring_getattr
    >>> (a.getattr_called, a.getattribute_called)
    (2, 5)
    >>> a.getattribute_boring_boring_getattr
    True
    >>> (a.getattr_called, a.getattribute_called)
    (3, 8)
    >>> a._getattr
    True
    >>> (a.getattr_called, a.getattribute_called)
    (5, 11)
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> (a.getattr_called, a.getattribute_called)
    (7, 14)
    """
    cdef int getattribute_called
    def __getattribute__(self,n):
        self.getattribute_called += 1
        if n == 'resolved_by':
            return 'getattribute_boring_boring_getattr'
        elif n == 'getattribute_boring_boring_getattr':
            return True
        elif n == 'getattribute_called':
            return self.getattribute_called
        else:
            raise AttributeError


# currently fails, see #1793
#class getattribute_boring_boring_getattr_py(getattribute_boring_boring_getattr):
#    __doc__ = getattribute_boring_boring_getattr.__doc__.replace(
#        'getattribute_boring_boring_getattr()', 'getattribute_boring_boring_getattr_py()')


cdef class getattr_boring_boring_getattribute(boring_boring_getattribute):
    """
    __getattribute__ is always tried first, then __getattr__, regardless of where
    in the inheritance hierarchy they came from.

    >>> a = getattr_boring_boring_getattribute()
    >>> (a.getattr_called, a.getattribute_called)
    (1, 2)
    >>> print(a.resolved_by)
    _getattribute
    >>> (a.getattr_called, a.getattribute_called)
    (2, 5)
    >>> a.getattr_boring_boring_getattribute
    True
    >>> (a.getattr_called, a.getattribute_called)
    (4, 8)
    >>> a._getattribute
    True
    >>> (a.getattr_called, a.getattribute_called)
    (5, 11)
    >>> a.no_such_member
    Traceback (most recent call last):
    AttributeError
    >>> (a.getattr_called, a.getattribute_called)
    (7, 14)
    """
    cdef readonly int getattr_called  # note: the readonly property is never reached from Python because __getattribute__() intercepts the lookup
    def __getattr__(self,n):
        self.getattr_called += 1
        if n == 'resolved_by':
            return 'getattr_boring_boring_getattribute'
        elif n == 'getattr_boring_boring_getattribute':
            return True
        elif n == 'getattr_called':
            return self.getattr_called
        else:
            raise AttributeError

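# Note (added for clarity): inside the cdef class above, ``self.getattr_called``
# is a C-level struct access because ``self`` is typed, so the increment in
# __getattr__ does not re-enter __getattribute__.  From Python code, however,
# the readonly property is shadowed by __getattribute__, which is why the
# doctests read the counters through the special methods.  A hypothetical
# sketch of the distinction:
#
#     cdef class Counter:
#         cdef readonly int n
#         def bump(self):
#             self.n += 1     # direct field access, no __getattribute__ call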

# currently fails, see #1793
#class getattr_boring_boring_getattribute_py(getattr_boring_boring_getattribute):
#    __doc__ = getattr_boring_boring_getattribute.__doc__.replace(
#        'getattr_boring_boring_getattribute()', 'getattr_boring_boring_getattribute_py()')
Cython-0.26.1/tests/run/ooo_base_classes.pyx0000664000175000017500000000042512542002467021646 0ustar  stefanstefan00000000000000cdef class B(A):
  cpdef foo(self):
    """
    >>> B().foo()
    B
    """
    print "B"

cdef class A(object):
  cpdef foo(self):
    """
    >>> A().foo()
    A
    """
    print "A"

cdef class C(A):
  cpdef foo(self):
    """
    >>> C().foo()
    C
    """
    print "C"
Cython-0.26.1/tests/run/ext_instance_type_T232.pyx0000664000175000017500000000032512542002467022601 0ustar  stefanstefan00000000000000# ticket: 232

cdef class MyExt:
    cdef object attr

def set_attr(value):
    """
    >>> set_attr(5)
    """
    MyExt().attr = value

def get_attr():
    """
    >>> get_attr()
    """
    return MyExt().attr
Cython-0.26.1/tests/run/no_gc_clear.pyx0000664000175000017500000000372512542002467020604 0ustar  stefanstefan00000000000000"""
Check that the @cython.no_gc_clear decorator disables generation of the
tp_clear slot so that __dealloc__ will still see the original reference
contents.

Discussed here: http://article.gmane.org/gmane.comp.python.cython.devel/14986
"""

cimport cython
from cpython.ref cimport PyObject, Py_TYPE

# Pull tp_clear for PyTypeObject as I did not find another way to access it
# from Cython code.

cdef extern from *:
    ctypedef struct PyTypeObject:
        void (*tp_clear)(object)

    ctypedef struct __pyx_CyFunctionObject:
        PyObject* func_closure


def is_tp_clear_null(obj):
    return (<PyTypeObject*>Py_TYPE(obj)).tp_clear is NULL


def is_closure_tp_clear_null(func):
    return is_tp_clear_null(
        <object>(<__pyx_CyFunctionObject*>func).func_closure)


@cython.no_gc_clear
cdef class DisableTpClear:
    """
    An extension type decorated with @cython.no_gc_clear, so that no tp_clear
    slot is generated and the 'requires_cleanup' reference is left intact.

    >>> uut = DisableTpClear()
    >>> is_tp_clear_null(uut)
    True
    >>> uut.call_tp_clear()
    >>> type(uut.requires_cleanup) == list
    True
    >>> del uut
    """

    cdef public object requires_cleanup

    def __cinit__(self):
        self.requires_cleanup = [
                "Some object that needs cleaning in __dealloc__"]

    def call_tp_clear(self):
        cdef PyTypeObject *pto = <PyTypeObject*>Py_TYPE(self)
        if pto.tp_clear != NULL:
            pto.tp_clear(self)

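# Sketch (added, not exercised by this test module): the point of
# @cython.no_gc_clear is a __dealloc__ that still needs its object attributes;
# without the decorator, the cyclic GC may call tp_clear first and reset them
# to None.  A hypothetical example:
#
#     @cython.no_gc_clear
#     cdef class NeedsCleanup:
#         cdef public object resource
#         def __dealloc__(self):
#             if self.resource is not None:   # not cleared by the GC here
#                 print("closing", self.resource)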

def test_closure_without_clear(str x):
    """
    >>> c = test_closure_without_clear('abc')
    >>> is_tp_clear_null(c)
    False
    >>> is_closure_tp_clear_null(c)
    True
    >>> c('cba')
    'abcxyzcba'
    """
    def c(str s):
        return x + 'xyz' + s
    return c


def test_closure_with_clear(list x):
    """
    >>> c = test_closure_with_clear(list('abc'))
    >>> is_tp_clear_null(c)
    False
    >>> is_closure_tp_clear_null(c)
    False
    >>> c('cba')
    'abcxyzcba'
    """
    def c(str s):
        return ''.join(x) + 'xyz' + s
    return c
Cython-0.26.1/tests/run/array_cimport.srctree0000664000175000017500000000116012542002467022042 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import ttt"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)


######## tt.pxd ########

from cpython.array cimport array

cdef class Foo:
    cdef array obj

######## tt.pyx ########

cdef class Foo:
    def __init__(self, data):
        self.obj = data

######## ttt.pyx ########

from array import array
from cpython.array cimport array
from tt cimport Foo

cdef array a = array('i', [1,2,3])
cdef Foo x
print a.data.as_ints[0]
x = Foo(a)
print x.obj.data.as_ints[0]
Cython-0.26.1/tests/run/metaclass.pyx0000664000175000017500000001034713023021033020304 0ustar  stefanstefan00000000000000
cimport cython

class Base(type):
    def __new__(cls, name, bases, attrs):
        attrs['metaclass_was_here'] = True
        return type.__new__(cls, name, bases, attrs)

@cython.test_assert_path_exists("//PyClassMetaclassNode", "//Py3ClassNode")
class Foo(object):
    """
    >>> obj = Foo()
    >>> obj.metaclass_was_here
    True
    """
    __metaclass__ = Base


def non_type_metaclass(name, bases, namespace):
    namespace['BASES'] = [b.__name__ for b in bases]
    namespace['NAME'] = name
    return type(name, bases, namespace)

class FunctionAsPy2Metaclass(object):
    """
    >>> obj = FunctionAsPy2Metaclass()
    >>> obj.NAME
    'FunctionAsPy2Metaclass'
    >>> obj.BASES
    ['object']
    >>> obj.x
    1
    """
    __metaclass__ = non_type_metaclass
    x = 1


class ODict(dict):
    def __init__(self):
        dict.__init__(self)
        self._order = []
        dict.__setitem__(self, '_order', self._order)

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        self._order.append(key)

class Py3MetaclassPlusAttr(type):
    def __new__(cls, name, bases, attrs, **kwargs):
        assert isinstance(attrs, ODict), str(type(attrs))
        for key, value in kwargs.items():
            attrs[key] = value
        attrs['metaclass_was_here'] = True
        return type.__new__(cls, name, bases, attrs)

    def __init__(self, cls, attrs, obj, **kwargs):
        pass

    @staticmethod
    def __prepare__(*args, **kwargs):
        return ODict()

@cython.test_fail_if_path_exists("//PyClassMetaclassNode")
@cython.test_assert_path_exists("//Py3ClassNode")
class Py3ClassMCOnly(object, metaclass=Py3MetaclassPlusAttr):
    """
    >>> obj = Py3ClassMCOnly()
    >>> obj.bar
    321
    >>> obj.metaclass_was_here
    True
    >>> obj._order
    ['__module__', '__qualname__', '__doc__', 'bar', 'metaclass_was_here']
    """
    bar = 321

class Py3InheritedMetaclass(Py3ClassMCOnly):
    """
    >>> obj = Py3InheritedMetaclass()
    >>> obj.bar
    345
    >>> obj.metaclass_was_here
    True
    >>> obj._order
    ['__module__', '__qualname__', '__doc__', 'bar', 'metaclass_was_here']
    """
    bar = 345

class Py3Base(type):
    def __new__(cls, name, bases, attrs, **kwargs):
        assert isinstance(attrs, ODict), str(type(attrs))
        for key, value in kwargs.items():
            attrs[key] = value
        return type.__new__(cls, name, bases, attrs)

    def __init__(self, cls, attrs, obj, **kwargs):
        pass

    @staticmethod
    def __prepare__(*args, **kwargs):
        return ODict()

@cython.test_fail_if_path_exists("//PyClassMetaclassNode")
@cython.test_assert_path_exists("//Py3ClassNode")
class Py3Foo(object, metaclass=Py3Base, foo=123):
    """
    >>> obj = Py3Foo()
    >>> obj.foo
    123
    >>> obj.bar
    321
    >>> obj._order
    ['__module__', '__qualname__', '__doc__', 'bar', 'foo']
    """
    bar = 321

@cython.test_assert_path_exists("//PyClassMetaclassNode", "//Py3ClassNode")
class Py3FooInherited(Py3Foo, foo=567):
    """
    >>> obj = Py3FooInherited()
    >>> obj.foo
    567
    >>> obj.bar
    321
    >>> obj._order
    ['__module__', '__qualname__', '__doc__', 'bar', 'foo']
    """
    bar = 321

kwargs = {'foo': 123, 'bar': 456}

@cython.test_assert_path_exists("//PyClassMetaclassNode", "//Py3ClassNode")
class Py3Mixed(metaclass=Py3Base, **kwargs):
    """
    >>> Py3Mixed.foo
    123
    >>> Py3Mixed.bar
    456
    """

kwargs['metaclass'] = Py3Base

@cython.test_assert_path_exists("//PyClassMetaclassNode")
class Py3Kwargs(**kwargs):
    """
    >>> Py3Kwargs.foo
    123
    >>> Py3Kwargs.bar
    456
    """

class Base3(type):
    def __new__(cls, name, bases, attrs, **kwargs):
        kwargs['b'] = 2
        return type.__new__(cls, name, bases, attrs)

    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs

    @staticmethod
    def __prepare__(*args, **kwargs):
        kwargs['a'] = 1
        return {}

kwargs = {'c': 0}

@cython.test_assert_path_exists("//PyClassMetaclassNode", "//Py3ClassNode")
class Foo3(metaclass=Base3, a=0, b=0, **kwargs):
    """
    >>> sorted(Foo3.kwargs.items())
    [('a', 0), ('b', 0), ('c', 0)]
    """

class PyClassWithNew(object):
    """
    >>> PyClassWithNew(389)
    389
    """
    def __new__(self, arg):
        return arg
Cython-0.26.1/tests/run/owned_arg_refs.pyx0000664000175000017500000000223412542002467021327 0ustar  stefanstefan00000000000000
cdef class Owner:
    cdef object x

cdef call_me_with_owner(Owner owner, x):
    owner.x = "def" # overwrite external reference
    return x        # crashes if x is not owned by function or caller

def test_ext_type_attr():
    """
    >>> test_ext_type_attr()
    'abc5'
    """
    owner = Owner()
    owner.x = ''.join("abc%d" % 5) # non-interned object
    return call_me_with_owner(owner, owner.x)


cdef void call_me_without_gil(Owner owner, x) with gil:
    owner.x = "def" # overwrite external reference
    print x         # crashes if x is not owned by function or caller

def test_ext_type_attr_nogil():
    """
    >>> test_ext_type_attr_nogil()
    abc5
    """
    owner = Owner()
    owner.x = ''.join("abc%d" % 5) # non-interned object
    with nogil:
        call_me_without_gil(owner, owner.x)


# the following isn't dangerous as long as index access uses temps

cdef call_me_with_list(list l, x):
    l[:] = [(1,2), (3,4)] # overwrite external reference
    return x              # crashes if x is not owned by function or caller

def test_index():
    """
    >>> test_index()
    [3, 4]
    """
    l = [[1,2],[3,4]]
    return call_me_with_list(l, l[1])
Cython-0.26.1/tests/run/public_enum.pyx0000664000175000017500000000065312542002467020650 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> BAR == 3
True
>>> HONK == 3+2+1
True
>>> X == 4*5 + 1
True
>>> NONPUBLIC         # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'NONPUBLIC' is not defined
>>> NOWPUBLIC == 23 + 42
True
"""

DEF X = 4*5

cdef enum SECRET:
    NONPUBLIC = 23 + 42

cdef public enum FOO:
    BAR = 3
    HONK = 3+2+1
    NOWPUBLIC = NONPUBLIC
    X = X + 1          # FIXME: should this really work?
Cython-0.26.1/tests/run/static_methods.pyx0000664000175000017500000000373413023021033021344 0ustar  stefanstefan00000000000000cdef class A:
    @staticmethod
    def static_def(int x):
        """
        >>> A.static_def(2)
        ('def', 2)
        >>> A().static_def(2)
        ('def', 2)
        """
        return 'def', x

    @staticmethod
    cdef static_cdef(int* x):
        return 'cdef', x[0]

    @staticmethod
    cdef static_cdef2(int* x, int* y):
        return 'cdef2', x[0] + y[0]

    @staticmethod
    cdef static_cdef_untyped(a, b):
        return 'cdef_untyped', a, b

#     @staticmethod
#     cpdef static_cpdef(int x):
#         """
#         >>> A.static_def
#         >>> A.static_cpdef
#
#         >>> A().static_def
#         >>> A().static_cpdef
#
#         >>> A.static_cpdef(2)
#         ('cpdef', 2)
#         >>> A().static_cpdef(2)
#         ('cpdef', 2)
#         """
#         return 'cpdef', x

def call_static_def(int x):
    """
    >>> call_static_def(2)
    ('def', 2)
    """
    return A.static_def(x)

def call_static_cdef(int x):
    """
    >>> call_static_cdef(2)
    ('cdef', 2)
    """
    cdef int *x_ptr = &x
    return A.static_cdef(x_ptr)

def call_static_cdef2(int x, int y):
    """
    >>> call_static_cdef2(2, 3)
    ('cdef2', 5)
    """
    return A.static_cdef2(&x, &y)

def call_static_list_comprehension_GH1540(int x):
    """
    >>> call_static_list_comprehension_GH1540(5)
    [('cdef', 5), ('cdef', 5), ('cdef', 5)]
    """
    return [A.static_cdef(&x) for _ in range(3)]

# BROKEN
#def call_static_cdef_untyped(a, b):
#    """
#    >>> call_static_cdef_untyped(100, None)
#    ('cdef_untyped', 100, None)
#    """
#    return A.static_cdef_untyped(a, b)

# UNIMPLEMENTED
# def call_static_cpdef(int x):
#     """
#     >>> call_static_cpdef(2)
#     ('cpdef', 2)
#     """
#     return A.static_cpdef(x)

cdef class FromPxd:
    @staticmethod
    cdef static_cdef(int* x):
        return 'pxd_cdef', x[0]

def call_static_pxd_cdef(int x):
    """
    >>> call_static_pxd_cdef(2)
    ('pxd_cdef', 2)
    """
    cdef int *x_ptr = &x
    return FromPxd.static_cdef(x_ptr)
Cython-0.26.1/tests/run/function_as_method_T494.pyx0000664000175000017500000000024612542002467022740 0ustar  stefanstefan00000000000000# ticket: 494
# cython: binding=True

__doc__ = """
    >>> A.foo = foo
    >>> A().foo()
    True
"""

class A:
    pass

def foo(self):
    return self is not None
Cython-0.26.1/tests/run/r_vree_1.pyx0000664000175000017500000000131412542002467020043 0ustar  stefanstefan00000000000000import sys
if sys.version_info[0] < 3:
    __doc__ = u"""

    >>> test(0)
    0L
    >>> test(1)
    1L

    >>> sys.maxint + 1 > sys.maxint
    True
    >>> type(sys.maxint * 2 + 1) is long
    True

    >>> test(sys.maxint + 1) == sys.maxint + 1
    True
    >>> test(sys.maxint * 2 + 1) == sys.maxint * 2 + 1
    True

    >>> test(256 ** unsigned_long_size() - 1) > 0
    True
    >>> test(256 ** unsigned_long_size() - 1) > sys.maxint
    True
    """
else:
    __doc__ = u"""
    >>> test(0)
    0
    >>> test(1)
    1
    >>> test(256 ** unsigned_long_size() - 1) > 0
    True
    """

def test(k):
    cdef unsigned long m
    m = k
    return m

def unsigned_long_size():
    return sizeof(unsigned long)
Cython-0.26.1/tests/run/unsignedbehaviour_T184.pyx0000664000175000017500000000102012542002467022574 0ustar  stefanstefan00000000000000# ticket: 184

"""
>>> c_call()
(-10, 10)
>>> py_call()
(-10, 10)
>>> loop()
19
>>> rangelist()
[-3, -2, -1, 0, 1, 2]
"""

cdef c_g(int a, int b):
    return (a, b)

def py_g(a, b):
    return (a, b)

def c_call():
    cdef unsigned int i = 10
    return c_g(-i, i)

def py_call():
    cdef unsigned int i = 10
    return py_g(-i, i)

def loop():
    cdef unsigned int i = 10
    times = 0
    for x in range(-i,i):
        times += 1
    return times

def rangelist():
    cdef unsigned int i = 3
    return list(range(-i, i))
Cython-0.26.1/tests/run/modbody.pyx0000664000175000017500000000031712542002467020000 0ustar  stefanstefan00000000000000
def f():
    """
    >>> f()
    >>> g
    42
    >>> x == 'spam'
    True
    >>> y == 'eggs'
    True
    >>> z == 'spameggs'
    True
    """
    pass

g = 42
x = u"spam"
y = u"eggs"
if g:
    z = x + y
Cython-0.26.1/tests/run/flatin.pyx0000664000175000017500000000156612542002467017627 0ustar  stefanstefan00000000000000def test_in(s):
    """
    >>> test_in('ABC')
    1
    >>> test_in('abc')
    2
    >>> test_in('X')
    3
    >>> test_in('XYZ')
    4
    >>> test_in('ABCXYZ')
    5
    >>> test_in('')
    5
    """
    if s in (u'ABC', u'BCD', u'ABC'[:3], u'ABC'[::-1], u'ABC'[-1]):
        return 1
    elif s.upper() in (u'ABC', u'BCD'):
        return 2
    elif len(s) in (1,2):
        return 3
    elif len(s) in (3,4):
        return 4
    else:
        return 5

def test_not_in(s):
    """
    >>> test_not_in('abc')
    1
    >>> test_not_in('CDE')
    2
    >>> test_not_in('CDEF')
    3
    >>> test_not_in('BCD')
    4
    """
    if s not in (u'ABC', u'BCD', u'CDE', u'CDEF'):
        return 1
    elif s.upper() not in (u'ABC', u'BCD', u'CDEF'):
        return 2
    elif len(s) not in [3]:
        return 3
    elif len(s) not in [1,2]:
        return 4
    else:
        return 5
Cython-0.26.1/tests/run/ctypedef_int_types_chdr_T333.h0000664000175000017500000000054612542002467023373 0ustar  stefanstefan00000000000000typedef signed   char  	    SChar;
typedef unsigned char  	    UChar;
typedef signed   short 	    SShort;
typedef unsigned short 	    UShort;
typedef signed   int   	    SInt;
typedef unsigned int   	    UInt;
typedef signed   long  	    SLong;
typedef unsigned long  	    ULong;
typedef signed   long long  SLongLong;
typedef unsigned long long  ULongLong;
Cython-0.26.1/tests/run/typed_slice.pyx0000664000175000017500000000777412542002467020665 0ustar  stefanstefan00000000000000# mode: run
# tag: list, tuple, slice

def slice_list(list l, int start, int stop):
    """
    >>> slice_list([1,2,3,4], 1, 3)
    [2, 3]
    >>> slice_list([1,2,3,4], 1, 7)
    [2, 3, 4]
    >>> slice_list([], 1, 3)
    []
    >>> slice_list([1], 1, 3)
    []
    >>> slice_list([1,2,3,4], -3, -1)
    [2, 3]
    >>> slice_list([1,2,3,4], -10, -1)
    [1, 2, 3]
    >>> slice_list([], -3, -1)
    []
    >>> slice_list([1], -3, -1)
    []
    """
    return l[start:stop]

def slice_list_start(list l, int start):
    """
    >>> slice_list_start([1,2,3,4], 1)
    [2, 3, 4]
    >>> slice_list_start([], 1)
    []
    >>> slice_list_start([1], 1)
    []
    >>> slice_list_start([1], 2)
    []
    >>> slice_list_start([1,2,3,4], -3)
    [2, 3, 4]
    >>> slice_list_start([1,2,3,4], -10)
    [1, 2, 3, 4]
    >>> slice_list_start([], -3)
    []
    >>> slice_list_start([1], -3)
    [1]
    """
    return l[start:]


def slice_list_stop(list l, int stop):
    """
    >>> slice_list_stop([1,2,3,4], 3)
    [1, 2, 3]
    >>> slice_list_stop([1,2,3,4], 7)
    [1, 2, 3, 4]
    >>> slice_list_stop([], 3)
    []
    >>> slice_list_stop([1], 3)
    [1]
    >>> slice_list_stop([1,2,3,4], -3)
    [1]
    >>> slice_list_stop([1,2,3,4], -10)
    []
    >>> slice_list_stop([], -1)
    []
    >>> slice_list_stop([1], -1)
    []
    >>> slice_list_stop([1, 2], -3)
    []
    """
    return l[:stop]


def slice_list_copy(list l):
    """
    >>> slice_list_copy([])
    []
    >>> slice_list_copy([1,2,3])
    [1, 2, 3]
    """
    return l[:]


def slice_tuple_copy(tuple l):
    """
    >>> slice_tuple_copy(())
    ()
    >>> slice_tuple_copy((1,2,3))
    (1, 2, 3)
    """
    return l[:]


def slice_tuple(tuple t, int start, int stop):
    """
    >>> slice_tuple((1,2,3,4), 1, 3)
    (2, 3)
    >>> slice_tuple((1,2,3,4), 1, 7)
    (2, 3, 4)
    >>> slice_tuple((), 1, 3)
    ()
    >>> slice_tuple((1,), 1, 3)
    ()
    >>> slice_tuple((1,2,3,4), -3, -1)
    (2, 3)
    >>> slice_tuple((1,2,3,4), -10, -1)
    (1, 2, 3)
    >>> slice_tuple((), -3, -1)
    ()
    >>> slice_tuple((1,), -3, -1)
    ()
    """
    return t[start:stop]


def slice_tuple_start(tuple t, int start):
    """
    >>> slice_tuple_start((1,2,3,4), 1)
    (2, 3, 4)
    >>> slice_tuple_start((), 1)
    ()
    >>> slice_tuple_start((1,), 1)
    ()
    >>> slice_tuple_start((1,2,3,4), -3)
    (2, 3, 4)
    >>> slice_tuple_start((1,2,3,4), -10)
    (1, 2, 3, 4)
    >>> slice_tuple_start((), -3)
    ()
    >>> slice_tuple_start((1,), -3)
    (1,)
    """
    return t[start:]

def slice_tuple_stop(tuple t, int stop):
    """
    >>> slice_tuple_stop((1,2,3,4), 3)
    (1, 2, 3)
    >>> slice_tuple_stop((1,2,3,4), 7)
    (1, 2, 3, 4)
    >>> slice_tuple_stop((), 3)
    ()
    >>> slice_tuple_stop((1,), 3)
    (1,)
    >>> slice_tuple_stop((1,2,3,4), -1)
    (1, 2, 3)
    >>> slice_tuple_stop((), -1)
    ()
    """
    return t[:stop]


def slice_list_assign_list(list l):
    """
    >>> l = [1,2,3,4]
    >>> l2 = l[:]
    >>> slice_list_assign_list(l2)
    [1, 1, 2, 3, 4, 4]
    """
    l[1:3] = [1,2,3,4]
    return l


def slice_list_assign_tuple(list l):
    """
    >>> l = [1,2,3,4]
    >>> l2 = l[:]
    >>> slice_list_assign_tuple(l2)
    [1, 1, 2, 3, 4, 4]
    """
    l[1:3] = (1,2,3,4)
    return l


def slice_list_assign(list l, value):
    """
    >>> l = [1,2,3,4]
    >>> l2 = l[:]
    >>> slice_list_assign(l2, (1,2,3,4))
    [1, 1, 2, 3, 4, 4]
    >>> l2 = l[:]
    >>> slice_list_assign(l2, dict(zip(l,l)))
    [1, 1, 2, 3, 4, 4]
    """
    l[1:3] = value
    return l


def slice_charp(py_string_arg):
    """
    >>> print("%s" % slice_charp('abcdefg'))
    bc
    """
    cdef bytes py_string = py_string_arg.encode(u'ASCII')
    cdef char* s = py_string
    return s[1:3].decode(u'ASCII')


def slice_charp_repeat(py_string_arg):
    """
    >>> print("%s" % slice_charp_repeat('abcdefg'))
    cd
    """
    cdef bytes py_string = py_string_arg.encode(u'ASCII')
    cdef char* s = py_string
    cdef bytes slice_val = s[1:6]
    s = slice_val
    return s[1:3].decode(u'ASCII')
Cython-0.26.1/tests/run/lvalue_refs.pyx0000664000175000017500000000112312542002467020646 0ustar  stefanstefan00000000000000# tag: cpp

from libcpp.vector cimport vector

__doc__ = u"""
   >>> test_lvalue_ref_assignment()
"""

ctypedef double*  dp
ctypedef double** dpp

cdef void foo(vector[dpp] &bar, vector[vector[dp]] &baz) nogil:
    bar[0] = &baz[0][0]

def test_lvalue_ref_assignment():
    cdef vector[dpp]        bar
    cdef vector[vector[dp]] baz
    cdef vector[double]     data
    cdef dp                 bongle = &data[0]

    bar.resize(1)
    bar[0] = NULL
    baz.resize(1)
    baz[0].resize(1)
    baz[0][0] = bongle

    foo(bar, baz)

    assert bar[0] == &baz[0][0]
    assert bar[0][0] == bongle
Cython-0.26.1/tests/run/ishimoto2.pyx0000664000175000017500000000026312542002467020260 0ustar  stefanstefan00000000000000
class C:
    """
    >>> C().xxx(5)
    5
    >>> C().xxx()
    'a b'
    >>> C().xxx(42)
    42
    >>> C().xxx()
    'a b'
    """
    def xxx(self, p="a b"):
        return p
Cython-0.26.1/tests/run/longlongindex.pyx0000664000175000017500000000050312542002467021207 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> D = set_longlong(2**40, 2**50, 2, "yelp")
    >>> D[2**40]
    'yelp'
    >>> D[2**50]
    'yelp'
    >>> D[2]
    'yelp'
"""

ctypedef long long foo

def set_longlong(long long ob, foo x, long y, val):
    cdef object tank = {}
    tank[ob] = val
    tank[x] = val
    tank[y] = val
    return tank
Cython-0.26.1/tests/run/multass.pyx0000664000175000017500000000215112542002467020031 0ustar  stefanstefan00000000000000__doc__ = """
    >>> h()
    (1, b'test', 3, 1, b'test', 3)
"""

import sys
if sys.version_info[0] < 3:
    __doc__ = __doc__.replace(u" b'", u" '")

def f():
    """
    >>> f()
    (1, 2, 1, 2)
    """
    cdef object obj1a, obj2a, obj3a, obj1b, obj2b, obj3b
    obj1b, obj2b, obj3b = 1, 2, 3
    obj1a, obj2a = obj1b, obj2b
    return obj1a, obj2a, obj1b, obj2b

def g():
    """
    >>> g()
    (1, 1, 2, 2, 3, 3)
    """
    cdef object obj1a, obj2a, obj3a, obj1b, obj2b, obj3b
    obj1b, obj2b, obj3b = 1, 2, 3
    obj1a, [obj2a, obj3a] = [obj1b, (obj2b, obj3b)]
    return obj1a, obj1b, obj2a, obj2b, obj3a, obj3b

def h():
    cdef object obj1a, obj2a, obj3a, obj1b, obj2b, obj3b
    cdef int int1, int2
    cdef char *ptr1, *ptr2
    int2, ptr2, obj1b = 1, "test", 3
    int1, ptr1, obj1a = int2, ptr2, obj1b
    return int1, ptr1, obj1a, int2, ptr2, obj1b

def j():
    """
    >>> j()
    (2, 1, 4, 2, 6, 3)
    """
    cdef object obj1a, obj2a, obj3a, obj1b, obj2b, obj3b
    obj1b, obj2b, obj3b = 1, 2, 3
    obj1a, obj2a, obj3a = obj1b + 1, obj2b + 2, obj3b + 3
    return obj1a, obj1b, obj2a, obj2b, obj3a, obj3b
Cython-0.26.1/tests/run/overflow_check_ulonglong.pyx0000664000175000017500000000014412542002467023425 0ustar  stefanstefan00000000000000# cython: overflowcheck.fold = True


ctypedef unsigned long long INT

include "overflow_check.pxi"
Cython-0.26.1/tests/run/behnel1.pyx0000664000175000017500000000026112542002467017657 0ustar  stefanstefan00000000000000cdef class Spam:
    cdef eggs(self, a):
        return a

cdef Spam spam():
    return Spam()

def viking(a):
    """
    >>> viking(5)
    5
    """
    return spam().eggs(a)
Cython-0.26.1/tests/run/for_from_pyvar_loop_T601.pyx0000664000175000017500000000234012542002467023136 0ustar  stefanstefan00000000000000# ticket: 601

cdef unsigned long size2():
    return 3

def for_from_plain_ulong():
    """
    >>> for_from_plain_ulong()
    0
    1
    2
    """
    cdef object j = 0
    for j from 0 <= j < size2():
        print j

def for_in_plain_ulong():
    """
    >>> for_in_plain_ulong()
    0
    1
    2
    """
    cdef object j = 0
    for j in range(size2()):
        print j


cdef extern from "for_from_pyvar_loop_T601_extern_def.h":
    ctypedef unsigned long Ulong

cdef Ulong size():
    return 3

def for_from_ctypedef_ulong():
    """
    >>> for_from_ctypedef_ulong()
    0
    1
    2
    """
    cdef object j = 0
    for j from 0 <= j < size():
        print j

def for_in_ctypedef_ulong():
    """
    >>> for_in_ctypedef_ulong()
    0
    1
    2
    """
    cdef object j = 0
    for j in range(size()):
        print j


class ForFromLoopInPyClass(object):
    """
    >>> ForFromLoopInPyClass.i    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    AttributeError: ...ForFromLoopInPyClass... has no attribute ...i...
    >>> ForFromLoopInPyClass.k
    0
    >>> ForFromLoopInPyClass.m
    1
    """
    for i from 0 <= i < 1:
        pass

    for k from 0 <= k < 2:
        pass

    for m from 0 <= m < 3:
        pass
Cython-0.26.1/tests/run/r_primes.pyx0000664000175000017500000000073612542002467020170 0ustar  stefanstefan00000000000000def primes(int kmax):
    """
    >>> primes(20)
    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
    """
    cdef int n, k, i
    cdef int[1000] p
    result = []
    if kmax > 1000:
        kmax = 1000
    k = 0
    n = 2
    while k < kmax:
        i = 0
        while i < k and n % p[i] <> 0:
            i = i + 1
        if i == k:
            p[k] = n
            k = k + 1
            result.append(n)
        n = n + 1
    return result
Cython-0.26.1/tests/run/if_else_expr.pyx0000664000175000017500000000216413023021033020772 0ustar  stefanstefan00000000000000# mode: run
# tag: condexpr

cimport cython

cdef class Foo:
    cdef dict data

    def __repr__(self):
        return '<Foo>'


cpdef test_type_cast(Foo obj, cond):
    """
    # Regression test: obj must be cast to (PyObject *) here
    >>> test_type_cast(Foo(), True)
    [<Foo>]
    >>> test_type_cast(Foo(), False)
    <Foo>
    """
    return [obj] if cond else obj


cdef func(Foo foo, dict data):
    return foo, data


@cython.test_fail_if_path_exists('//PyTypeTestNode')
def test_cpp_pyobject_cast(Foo obj1, Foo obj2, cond):
    """
    >>> test_cpp_pyobject_cast(Foo(), Foo(), True)
    (<Foo>, None)
    """
    return func(obj1 if cond else obj2, obj1.data if cond else obj2.data)


def test_charptr_coercion(x):
    """
    >>> print(test_charptr_coercion(True))
    abc
    >>> print(test_charptr_coercion(False))
    def
    """
    cdef char* s = b'abc' if x else b'def'
    return s.decode('ascii')


def test_syntax():
    """
    >>> test_syntax()
    (0, 0, 0)
    """
    # Py3 allows the 'else' keyword to directly follow a number
    x = 0 if 1else 1
    y = 0 if 1.0else 1
    z = 0 if 1.else 1
    return x, y, z
Cython-0.26.1/tests/run/datetime_pxd.pyx0000664000175000017500000001342212542002467021013 0ustar  stefanstefan00000000000000# coding: utf-8

#cimport cpython.datetime as cy_datetime
#from datetime import time, date, datetime, timedelta, tzinfo


from cpython.datetime cimport import_datetime
from cpython.datetime cimport time_new, date_new, datetime_new, timedelta_new
from cpython.datetime cimport time_tzinfo, datetime_tzinfo
from cpython.datetime cimport time_hour, time_minute, time_second, time_microsecond
from cpython.datetime cimport date_day, date_month, date_year
from cpython.datetime cimport datetime_day, datetime_month, datetime_year
from cpython.datetime cimport datetime_hour, datetime_minute, datetime_second, \
                              datetime_microsecond

import datetime as py_datetime

import_datetime()

ZERO = py_datetime.timedelta(0)

#
# Simple class from datetime docs
#
class FixedOffset(py_datetime.tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self._offset = py_datetime.timedelta(minutes = offset)
        self._name = name

    def utcoffset(self, dt):
        return self._offset

    def tzname(self, dt):
        return self._name

    def dst(self, dt):
        return ZERO
        
def do_date(int year, int month, int day):
    """
    >>> do_date(2012, 12, 31)
    (True, True, True, True)
    """
    v = date_new(year, month, day)
    return type(v) is py_datetime.date, v.year == year, v.month == month, v.day == day

def do_datetime(int year, int month, int day, 
        int hour, int minute, int second, int microsecond):
    """
    >>> do_datetime(2012, 12, 31, 12, 23, 0, 0)
    (True, True, True, True, True, True, True, True, True)
    """
    v = datetime_new(year, month, day, hour, minute, second, microsecond, None)
    return type(v) is py_datetime.datetime, v.year == year, v.month == month, v.day == day, \
           v.hour == hour, v.minute == minute, v.second == second, \
           v.microsecond == microsecond, v.tzinfo is None

def do_time(int hour, int minute, int second, int microsecond):
    """
    >>> do_time(12, 23, 0, 0)
    (True, True, True, True, True, True)
    """
    v = time_new(hour, minute, second, microsecond, None)
    return type(v) is py_datetime.time, \
           v.hour == hour, v.minute == minute, v.second == second, \
           v.microsecond == microsecond, v.tzinfo is None

def do_time_tzinfo(int hour, int minute, int second, int microsecond, object tz):
    """
    >>> tz = FixedOffset(60*3, 'Moscow')    
    >>> do_time_tzinfo(12, 23, 0, 0, tz)
    (True, True, True, True, True, True)
    """
    v = time_new(hour, minute, second, microsecond, tz)
    return type(v) is py_datetime.time, \
           v.hour == hour, v.minute == minute, v.second == second, \
           v.microsecond == microsecond, v.tzinfo is tz


def do_datetime_tzinfo(int year, int month, int day, 
        int hour, int minute, int second, int microsecond, object tz):
    """
    >>> tz = FixedOffset(60*3, 'Moscow')    
    >>> do_datetime_tzinfo(2012, 12, 31, 12, 23, 0, 0, tz)
    (True, True, True, True, True, True, True, True, True)
    """
    v = datetime_new(year, month, day, hour, minute, second, microsecond, tz)
    return type(v) is py_datetime.datetime, v.year == year, v.month == month, v.day == day, \
           v.hour == hour, v.minute == minute, v.second == second, \
           v.microsecond == microsecond, v.tzinfo is tz
           
def do_time_tzinfo2(int hour, int minute, int second, int microsecond, object tz):
    """
    >>> tz = FixedOffset(60*3, 'Moscow')    
    >>> do_time_tzinfo2(12, 23, 0, 0, tz)
    (True, True, True, True, True, True, True, True)
    """
    v = time_new(hour, minute, second, microsecond, None)
    v1 = time_new(
            time_hour(v), 
            time_minute(v), 
            time_second(v), 
            time_microsecond(v), 
            tz)
    r1 = (v1.tzinfo == tz)
    r2 = (tz == time_tzinfo(v1))
    v2 = time_new(
            time_hour(v1), 
            time_minute(v1), 
            time_second(v1), 
            time_microsecond(v1), 
            None)
    r3 = (v2.tzinfo == None)
    r4 = (None == time_tzinfo(v2))
    v3 = time_new(
            time_hour(v2), 
            time_minute(v2), 
            time_second(v2), 
            time_microsecond(v2), 
            tz)
    r5 = (v3.tzinfo == tz)
    r6 = (tz == time_tzinfo(v3))
    r7 = (v2 == v)
    r8 = (v3 == v1)
    return r1, r2, r3, r4, r5, r6, r7, r8


def do_datetime_tzinfo2(int year, int month, int day,
                              int hour, int minute, int second, int microsecond, object tz):
    """
    >>> tz = FixedOffset(60*3, 'Moscow')    
    >>> do_datetime_tzinfo2(2012, 12, 31, 12, 23, 0, 0, tz)
    (True, True, True, True, True, True, True, True)
    """
    v = datetime_new(year, month, day, hour, minute, second, microsecond, None)
    v1 = datetime_new(
            datetime_year(v), 
            datetime_month(v), 
            datetime_day(v), 
            datetime_hour(v), 
            datetime_minute(v), 
            datetime_second(v), 
            datetime_microsecond(v), 
            tz)
    r1 = (v1.tzinfo == tz)
    r2 = (tz == datetime_tzinfo(v1))
    v2 = datetime_new(
            datetime_year(v1), 
            datetime_month(v1), 
            datetime_day(v1), 
            datetime_hour(v1), 
            datetime_minute(v1), 
            datetime_second(v1), 
            datetime_microsecond(v1), 
            None)
    r3 = (v2.tzinfo == None)
    r4 = (None == datetime_tzinfo(v2))
    v3 = datetime_new(
            datetime_year(v2), 
            datetime_month(v2), 
            datetime_day(v2), 
            datetime_hour(v2), 
            datetime_minute(v2), 
            datetime_second(v2), 
            datetime_microsecond(v2), 
            tz)
    r5 = (v3.tzinfo == tz)
    r6 = (tz == datetime_tzinfo(v3))
    r7 = (v2 == v)
    r8 = (v3 == v1)
    return r1, r2, r3, r4, r5, r6, r7, r8
Cython-0.26.1/tests/run/uninitialized.py0000664000175000017500000000713012542002467021023 0ustar  stefanstefan00000000000000# mode: run
# tag: control-flow, uninitialized

def conditional(cond):
    """
    >>> conditional(True)
    []
    >>> conditional(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    if cond:
        a = []
    return a

def inside_loop(iter):
    """
    >>> inside_loop([1,2,3])
    3
    >>> inside_loop([])
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'i' referenced before assignment
    """
    for i in iter:
        pass
    return i

def try_except(cond):
    """
    >>> try_except(True)
    []
    >>> try_except(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    try:
        if cond:
            a = []
        raise ValueError
    except ValueError:
        return a

def try_finally(cond):
    """
    >>> try_finally(True)
    []
    >>> try_finally(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    try:
        if cond:
            a = []
        raise ValueError
    finally:
        return a

def deleted(cond):
    """
    >>> deleted(False)
    {}
    >>> deleted(True)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    a = {}
    if cond:
        del a
    return a

def test_nested(cond):
    """
    >>> test_nested(True)
    >>> test_nested(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    if cond:
        def a():
            pass
    return a()

def test_outer(cond):
    """
    >>> test_outer(True)
    {}
    >>> test_outer(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    if cond:
        a = {}
    def inner():
        return a
    return a

def test_inner(cond):
    """
    >>> test_inner(True)
    {}
    >>> test_inner(False)
    Traceback (most recent call last):
    ...
    NameError: free variable 'a' referenced before assignment in enclosing scope
    """
    if cond:
        a = {}
    def inner():
        return a
    return inner()

def test_class(cond):
    """
    >>> test_class(True)
    1
    >>> test_class(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'A' referenced before assignment
    """
    if cond:
        class A:
            x = 1
    return A.x


def test_try_except_regression(c):
    """
    >>> test_try_except_regression(True)
    (123,)
    >>> test_try_except_regression(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    if c:
        a = (123,)
    try:
        return a
    except:
        return a


def test_try_finally_regression(c):
    """
    >>> test_try_finally_regression(True)
    (123,)
    >>> test_try_finally_regression(False)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'a' referenced before assignment
    """
    if c:
        a = (123,)
    try:
        return a
    finally:
        return a


def test_expression_calculation_order_bug(a):
    """
    >>> test_expression_calculation_order_bug(False)
    []
    >>> test_expression_calculation_order_bug(True)
    Traceback (most recent call last):
    ...
    UnboundLocalError: local variable 'b' referenced before assignment
    """
    if not a:
        b = []
    return (a or b) and (b or a)
Cython-0.26.1/tests/run/parallel.pyx0000664000175000017500000000414212542002467020137 0ustar  stefanstefan00000000000000# tag: run
# tag: openmp

cimport cython.parallel
from cython.parallel import prange, threadid
cimport openmp
from libc.stdlib cimport malloc, free

openmp.omp_set_nested(1)

def test_parallel():
    """
    >>> test_parallel()
    """
    cdef int maxthreads = openmp.omp_get_max_threads()
    cdef int *buf = <int *> malloc(sizeof(int) * maxthreads)

    if buf == NULL:
        raise MemoryError

    with nogil, cython.parallel.parallel():
        buf[threadid()] = threadid()

    for i in range(maxthreads):
        assert buf[i] == i

    free(buf)

cdef int get_num_threads() with gil:
    print "get_num_threads called"
    return 3

def test_num_threads():
    """
    >>> test_num_threads()
    1
    get_num_threads called
    3
    get_num_threads called
    3
    """
    cdef int dyn = openmp.omp_get_dynamic()
    cdef int num_threads
    cdef int *p = &num_threads

    openmp.omp_set_dynamic(0)

    with nogil, cython.parallel.parallel(num_threads=1):
        p[0] = openmp.omp_get_num_threads()

    print num_threads

    with nogil, cython.parallel.parallel(num_threads=get_num_threads()):
        p[0] = openmp.omp_get_num_threads()

    print num_threads

    cdef int i
    num_threads = 0xbad
    for i in prange(1, nogil=True, num_threads=get_num_threads()):
        p[0] = openmp.omp_get_num_threads()
        break

    openmp.omp_set_dynamic(dyn)

    return num_threads

'''
def test_parallel_catch():
    """
    >>> test_parallel_catch()
    True
    """
    cdef int i, j, num_threads
    exceptions = []

    for i in prange(100, nogil=True, num_threads=4):
        num_threads = openmp.omp_get_num_threads()

        with gil:
            try:
                for j in prange(100, nogil=True):
                    if i + j > 60:
                        with gil:
                            raise Exception("try and catch me if you can!")
            except Exception, e:
                exceptions.append(e)
                break

    print len(exceptions) == num_threads
    assert len(exceptions) == num_threads, (len(exceptions), num_threads)
'''


OPENMP_PARALLEL = True
include "sequential_parallel.pyx"
Cython-0.26.1/tests/run/method_module_name_T422.pyx0000664000175000017500000000066512542002467022711 0ustar  stefanstefan00000000000000# ticket: 422

"""
>>> Foo.incr.__module__ is not None
True
>>> Foo.incr.__module__ == Foo.__module__ == bar.__module__
True
>>> Simpleton.incr.__module__ == Simpleton.__module__ == bar.__module__
True

"""
class Foo(object):
   def incr(self,x):
       return x+1

def bar():
    pass


class Simpleton:
   def __str__(self):
       return "A simpleton"

   def incr(self,x):
       """Increment x by one.
       """
       return x+1

Cython-0.26.1/tests/run/cpp_stl.pyx0000664000175000017500000000272513023021033017775 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

cdef extern from "vector" namespace "std":

    cdef cppclass vector[T]:

        T at(int)
        void push_back(T t)
        void assign(int, T)
        void clear()
        int size()

        cppclass iterator:
            T operator*()
            iterator operator++()
            bint operator==(iterator)
            bint operator!=(iterator)

        iterator end()
        iterator begin()

from cython.operator cimport dereference as deref, preincrement as inc

def test_vector(L):
    """
    >>> test_vector([1,10,100])
    1
    10
    100
    """
    v = new vector[int]()
    for a in L:
        v.push_back(a)
    cdef int i
    for i in range(len(L)):
        print v.at(i)
    del v

ctypedef int my_int
def test_vector_typedef(L):
    """
    >>> test_vector_typedef([1, 2, 3])
    [1, 2, 3]
    """
    cdef vector[my_int] v = L
    cdef vector[int] vv = v
    return vv

def test_vector_iterator(L):
    """
    >>> test_vector_iterator([11, 37, 389, 5077])
    11
    37
    389
    5077
    """
    v = new vector[int]()
    for a in L:
        v.push_back(a)
    cdef vector[int].iterator iter = v.begin()
    while iter != v.end():
        print deref(iter)
        inc(iter)
    del v

cdef class VectorWrapper:
    """
    >>> VectorWrapper(1, .5, .25, .125)
    [1.0, 0.5, 0.25, 0.125]
    """
    cdef vector[double] vector
    def __init__(self, *args):
        self.vector = args
    def __repr__(self):
        return repr(self.vector)
Cython-0.26.1/tests/run/ctypedef_int_types_T333.pyx0000664000175000017500000003637012542002467022770 0ustar  stefanstefan00000000000000# ticket: 333
#cython: autotestdict=True

# -------------------------------------------------------------------

cdef extern from "ctypedef_int_types_chdr_T333.h":
     ctypedef int SChar     ## "signed char"
     ctypedef int UChar     ## "unsigned char"
     ctypedef int SShort    ## "signed short"
     ctypedef int UShort    ## "unsigned short"
     ctypedef int SInt      ## "signed int"
     ctypedef int UInt      ## "unsigned int"
     ctypedef int SLong     ## "signed long"
     ctypedef int ULong     ## "unsigned long"
     ctypedef int SLongLong ## "signed PY_LONG_LONG"
     ctypedef int ULongLong ## "unsigned PY_LONG_LONG"

# -------------------------------------------------------------------

SCHAR_MAX = <SChar>((<UChar>-1)>>1)
SCHAR_MIN = (-SCHAR_MAX-1)

def test_schar(SChar x):
   u"""
   >>> test_schar(-129) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SChar
   >>> test_schar(-128)
   -128
   >>> test_schar(0)
   0
   >>> test_schar(127)
   127
   >>> test_schar(128) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SChar
   """
   return x

def test_add_schar(x, y):
   u"""
   >>> test_add_schar(SCHAR_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SChar
   >>> test_add_schar(SCHAR_MIN, 0) == SCHAR_MIN
   True
   >>> test_add_schar(SCHAR_MIN, 1) == SCHAR_MIN+1
   True
   >>> test_add_schar(SCHAR_MAX, -1) == SCHAR_MAX-1
   True
   >>> test_add_schar(SCHAR_MAX, 0) == SCHAR_MAX
   True
   >>> test_add_schar(SCHAR_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SChar
   """
   cdef SChar r = x + y
   return r

UCHAR_MAX = <UChar>((<UChar>-1))

def test_uchar(UChar x):
   u"""
   >>> test_uchar(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to UChar
   >>> test_uchar(0)
   0
   >>> test_uchar(1)
   1
   >>> test_uchar(UCHAR_MAX) == UCHAR_MAX
   True
   >>> test_uchar(UCHAR_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to UChar
   """
   return x

def test_add_uchar(x, y):
   u"""
   >>> test_add_uchar(UCHAR_MAX, 0) == UCHAR_MAX
   True
   >>> test_add_uchar(UCHAR_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to UChar
   """
   cdef UChar r = x + y
   return r

# -------------------------------------------------------------------

SSHORT_MAX = <SShort>((<UShort>-1)>>1)
SSHORT_MIN = (-SSHORT_MAX-1)

def test_sshort(SShort x):
   u"""
   >>> test_sshort(SSHORT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SShort
   >>> test_sshort(SSHORT_MIN) == SSHORT_MIN
   True
   >>> test_sshort(-1)
   -1
   >>> test_sshort(0)
   0
   >>> test_sshort(1)
   1
   >>> test_sshort(SSHORT_MAX) == SSHORT_MAX
   True
   >>> test_sshort(SSHORT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_sshort(x, y):
   u"""
   >>> test_add_sshort(SSHORT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SShort
   >>> test_add_sshort(SSHORT_MIN, 0) == SSHORT_MIN
   True
   >>> test_add_sshort(SSHORT_MIN, 1) == SSHORT_MIN+1
   True
   >>> test_add_sshort(SSHORT_MAX, -1) == SSHORT_MAX-1
   True
   >>> test_add_sshort(SSHORT_MAX, 0) == SSHORT_MAX
   True
   >>> test_add_sshort(SSHORT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to SShort
   """
   cdef SShort r = x + y
   return r

USHORT_MAX = <UShort>((<UShort>-1))

def test_ushort(UShort x):
   u"""
   >>> test_ushort(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to UShort
   >>> test_ushort(0)
   0
   >>> test_ushort(1)
   1
   >>> test_ushort(USHORT_MAX) == USHORT_MAX
   True
   >>> test_ushort(USHORT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to UShort
   """
   return x

def test_add_ushort(x, y):
   u"""
   >>> test_add_ushort(USHORT_MAX, 0) == USHORT_MAX
   True
   >>> test_add_ushort(USHORT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: value too large to convert to UShort
   """
   cdef UShort r = x + y
   return r

# -------------------------------------------------------------------

SINT_MAX = <SInt>((<UInt>-1)>>1)
SINT_MIN = (-SINT_MAX-1)

def test_sint(SInt x):
   u"""
   >>> test_sint(SINT_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_sint(SINT_MIN) == SINT_MIN
   True
   >>> test_sint(-1)
   -1
   >>> test_sint(0)
   0
   >>> test_sint(1)
   1
   >>> test_sint(SINT_MAX) == SINT_MAX
   True
   >>> test_sint(SINT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_sint(x, y):
   u"""
   >>> test_add_sint(SINT_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_sint(SINT_MIN, 0) == SINT_MIN
   True
   >>> test_add_sint(SINT_MIN, 1) == SINT_MIN+1
   True
   >>> test_add_sint(SINT_MAX, -1) == SINT_MAX-1
   True
   >>> test_add_sint(SINT_MAX, 0) == SINT_MAX
   True
   >>> test_add_sint(SINT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef SInt r = x + y
   return r

UINT_MAX = <UInt>(-1)

def test_uint(UInt x):
   u"""
   >>> test_uint(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to UInt
   >>> print(test_uint(0))
   0
   >>> print(test_uint(1))
   1
   >>> test_uint(UINT_MAX) == UINT_MAX
   True
   >>> test_uint(UINT_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_uint(x, y):
   u"""
   >>> test_add_uint(UINT_MAX, 0) == UINT_MAX
   True
   >>> test_add_uint(UINT_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef UInt r = x + y
   return r

# -------------------------------------------------------------------

SLONG_MAX = <SLong>((<ULong>-1)>>1)
SLONG_MIN = (-SLONG_MAX-1)

def test_slong(long x):
   u"""
   >>> test_slong(SLONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_slong(SLONG_MIN) == SLONG_MIN
   True
   >>> test_slong(-1)
   -1
   >>> test_slong(0)
   0
   >>> test_slong(1)
   1
   >>> test_slong(SLONG_MAX) == SLONG_MAX
   True
   >>> test_slong(SLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_slong(x, y):
   u"""
   >>> test_add_slong(SLONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_slong(SLONG_MIN, 0) == SLONG_MIN
   True
   >>> test_add_slong(SLONG_MIN, 1) == SLONG_MIN+1
   True
   >>> test_add_slong(SLONG_MAX, -1) == SLONG_MAX-1
   True
   >>> test_add_slong(SLONG_MAX, 0) == SLONG_MAX
   True
   >>> test_add_slong(SLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef SLong r = x + y
   return r

ULONG_MAX = <ULong>(-1)

def test_ulong(ULong x):
   u"""
   >>> test_ulong(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ULong
   >>> print(test_ulong(0))
   0
   >>> print(test_ulong(1))
   1
   >>> test_ulong(ULONG_MAX) == ULONG_MAX
   True
   >>> test_ulong(ULONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_ulong(x, y):
   u"""
   >>> test_add_ulong(ULONG_MAX, 0) == ULONG_MAX
   True
   >>> test_add_ulong(ULONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef ULong r = x + y
   return r

# -------------------------------------------------------------------

SLONGLONG_MAX = <SLongLong>((<ULongLong>-1)>>1)
SLONGLONG_MIN = (-SLONGLONG_MAX-1)

def test_slonglong(long long x):
   u"""
   >>> test_slonglong(SLONGLONG_MIN-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_slonglong(SLONGLONG_MIN) == SLONGLONG_MIN
   True
   >>> print(test_slonglong(-1))
   -1
   >>> print(test_slonglong(0))
   0
   >>> print(test_slonglong(1))
   1
   >>> test_slonglong(SLONGLONG_MAX) == SLONGLONG_MAX
   True
   >>> test_slonglong(SLONGLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_slonglong(x, y):
   u"""
   >>> test_add_slonglong(SLONGLONG_MIN, -1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_add_slonglong(SLONGLONG_MIN, 0) == SLONGLONG_MIN
   True
   >>> test_add_slonglong(SLONGLONG_MIN, 1) == SLONGLONG_MIN+1
   True
   >>> test_add_slonglong(SLONGLONG_MAX, -1) == SLONGLONG_MAX-1
   True
   >>> test_add_slonglong(SLONGLONG_MAX, 0) == SLONGLONG_MAX
   True
   >>> test_add_slonglong(SLONGLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef SLongLong r = x + y
   return r

ULONGLONG_MAX = <ULongLong>(-1)

def test_ulonglong(ULongLong x):
   u"""
   >>> test_ulonglong(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ULongLong
   >>> print(test_ulonglong(0))
   0
   >>> print(test_ulonglong(1))
   1
   >>> test_ulonglong(ULONGLONG_MAX) == ULONGLONG_MAX
   True
   >>> test_ulonglong(ULONGLONG_MAX+1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   return x

def test_add_ulonglong(x, y):
   u"""
   >>> test_add_ulonglong(ULONGLONG_MAX, 0) == ULONGLONG_MAX
   True
   >>> test_add_ulonglong(ULONGLONG_MAX, 1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   """
   cdef ULongLong r = x + y
   return r

# -------------------------------------------------------------------

cdef class MyClass:
    """
    >>> a = MyClass()

    >>> vals = (SCHAR_MIN,     UCHAR_MAX,
    ...         SSHORT_MIN,    USHORT_MAX,
    ...         SINT_MIN,      UINT_MAX,
    ...         SLONG_MIN,     ULONG_MAX,
    ...         SLONGLONG_MIN, ULONGLONG_MAX)
    >>> a.setvalues(*vals)
    >>> a.getvalues() == vals
    True

    >>> vals = (SCHAR_MAX,     UCHAR_MAX,
    ...         SSHORT_MAX,    USHORT_MAX,
    ...         SINT_MAX,      UINT_MAX,
    ...         SLONG_MAX,     ULONG_MAX,
    ...         SLONGLONG_MAX, ULONGLONG_MAX)
    >>> a.setvalues(*vals)
    >>> a.getvalues() == vals
    True

    >>> vals = (0,) * 10
    >>> a.setvalues(*vals)
    >>> a.getvalues() == vals
    True


    """
    cdef:
       SChar     attr_schar
       UChar     attr_uchar
       SShort    attr_sshort
       UShort    attr_ushort
       SInt      attr_sint
       UInt      attr_uint
       SLong     attr_slong
       ULong     attr_ulong
       SLongLong attr_slonglong
       ULongLong attr_ulonglong

    cpdef setvalues(self,
                    SChar     arg_schar     ,
                    UChar     arg_uchar     ,
                    SShort    arg_sshort    ,
                    UShort    arg_ushort    ,
                    SInt      arg_sint      ,
                    UInt      arg_uint      ,
                    SLong     arg_slong     ,
                    ULong     arg_ulong     ,
                    SLongLong arg_slonglong ,
                    ULongLong arg_ulonglong ):
        self.attr_schar     = arg_schar
        self.attr_uchar     = arg_uchar
        self.attr_sshort    = arg_sshort
        self.attr_ushort    = arg_ushort
        self.attr_sint      = arg_sint
        self.attr_uint      = arg_uint
        self.attr_slong     = arg_slong
        self.attr_ulong     = arg_ulong
        self.attr_slonglong = arg_slonglong
        self.attr_ulonglong = arg_ulonglong

    cpdef getvalues(self):
        return (self.attr_schar     ,
                self.attr_uchar     ,
                self.attr_sshort    ,
                self.attr_ushort    ,
                self.attr_sint      ,
                self.attr_uint      ,
                self.attr_slong     ,
                self.attr_ulong     ,
                self.attr_slonglong ,
                self.attr_ulonglong )


# -------------------------------------------------------------------

cdef extern from *:
    ctypedef signed   MySInt1 "signed short"
    ctypedef unsigned MyUInt1 "unsigned short"

def test_MySInt1(MySInt1 x):
   u"""
   >>> test_MySInt1(-1)
   -1
   >>> test_MySInt1(0)
   0
   >>> test_MySInt1(1)
   1
   """
   return x

def test_MyUInt1(MyUInt1 x):
   u"""
   >>> test_MyUInt1(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: ...
   >>> test_MyUInt1(0)
   0
   >>> test_MyUInt1(1)
   1
   """
   return x

cdef extern from *:
    ctypedef signed   MySInt2 "signed short"
    ctypedef unsigned MyUInt2 "unsigned short"

def test_MySInt2(MySInt2 x):
   u"""
   >>> test_MySInt2(-1)
   -1
   >>> test_MySInt2(0)
   0
   >>> test_MySInt2(1)
   1
   """
   return x

def test_MyUInt2(MyUInt2 x):
   u"""
   >>> test_MyUInt2(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ...
   >>> test_MyUInt2(0)
   0
   >>> test_MyUInt2(1)
   1
   """
   return x

# -------------------------------------------------------------------

cimport ctypedef_int_types_defs_T333 as defs

def test_DefSInt(defs.SInt x):
   u"""
   >>> test_DefSInt(-1)
   -1
   >>> test_DefSInt(0)
   0
   >>> test_DefSInt(1)
   1
   """
   return x

def test_DefUChar(defs.UChar x):
   u"""
   >>> test_DefUChar(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ...
   >>> test_DefUChar(0)
   0
   >>> test_DefUChar(1)
   1
   """
   return x

def test_ExtSInt(defs.ExtSInt x):
   u"""
   >>> test_ExtSInt(-1)
   -1
   >>> test_ExtSInt(0)
   0
   >>> test_ExtSInt(1)
   1
   """
   return x

def test_ExtUInt(defs.ExtUInt x):
   u"""
   >>> test_ExtUInt(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ...
   >>> test_ExtUInt(0)
   0
   >>> test_ExtUInt(1)
   1
   """
   return x


ctypedef defs.SShort LocSInt
ctypedef defs.UShort LocUInt

def test_LocSInt(LocSInt x):
   u"""
   >>> test_LocSInt(-1)
   -1
   >>> test_LocSInt(0)
   0
   >>> test_LocSInt(1)
   1
   """
   return x

def test_LocUInt(LocUInt x):
   u"""
   >>> test_LocUInt(-1) #doctest: +ELLIPSIS
   Traceback (most recent call last):
       ...
   OverflowError: can't convert negative value to ...
   >>> test_LocUInt(0)
   0
   >>> test_LocUInt(1)
   1
   """
   return x

# -------------------------------------------------------------------
Cython-0.26.1/tests/run/int128.pyx0000664000175000017500000000707512542002467017400 0ustar  stefanstefan00000000000000
cdef extern from *:
    ctypedef long long int128_t "__int128_t"
    ctypedef unsigned long long uint128_t "__uint128_t"
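# __int128_t / __uint128_t are GCC/Clang built-in 128-bit integer types; the
# "long long" ctypedefs above only tell Cython to treat them as signed/unsigned
# integers, while the generated C code uses the real 128-bit type names.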


def bigint(x):
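    # Strip the trailing 'L' that Python 2 appends to long reprs, so the doctest
    # output matches Python 3.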
    print(str(x).rstrip('L'))


def unsigned_conversion(x):
    """
    >>> bigint(unsigned_conversion(0))
    0
    >>> bigint(unsigned_conversion(2))
    2

    >>> unsigned_conversion(-2)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: can't convert negative value to ...uint128_t
    >>> unsigned_conversion(-2**120)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: can't convert negative value to ...uint128_t
    >>> unsigned_conversion(-2**127)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: can't convert negative value to ...uint128_t
    >>> unsigned_conversion(-2**128)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: can't convert negative value to ...uint128_t

    >>> bigint(unsigned_conversion(2**20))
    1048576
    >>> bigint(unsigned_conversion(2**30-1))
    1073741823
    >>> bigint(unsigned_conversion(2**30))
    1073741824
    >>> bigint(unsigned_conversion(2**30+1))
    1073741825

    >>> bigint(2**60)
    1152921504606846976
    >>> bigint(unsigned_conversion(2**60-1))
    1152921504606846975
    >>> bigint(unsigned_conversion(2**60))
    1152921504606846976
    >>> bigint(unsigned_conversion(2**60+1))
    1152921504606846977
    >>> bigint(2**64)
    18446744073709551616
    >>> bigint(unsigned_conversion(2**64))
    18446744073709551616

    >>> bigint(2**120)
    1329227995784915872903807060280344576
    >>> bigint(unsigned_conversion(2**120))
    1329227995784915872903807060280344576
    >>> bigint(2**128-1)
    340282366920938463463374607431768211455
    >>> bigint(unsigned_conversion(2**128-1))
    340282366920938463463374607431768211455
    >>> bigint(unsigned_conversion(2**128))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: ... too big to convert
    """
    cdef uint128_t n = x
    return n


def signed_conversion(x):
    """
    >>> bigint(signed_conversion(0))
    0
    >>> bigint(signed_conversion(2))
    2
    >>> bigint(signed_conversion(-2))
    -2

    >>> bigint(signed_conversion(2**20))
    1048576
    >>> bigint(signed_conversion(2**32))
    4294967296
    >>> bigint(2**64)
    18446744073709551616
    >>> bigint(signed_conversion(2**64))
    18446744073709551616
    >>> bigint(signed_conversion(-2**64))
    -18446744073709551616

    >>> bigint(2**118)
    332306998946228968225951765070086144
    >>> bigint(signed_conversion(2**118))
    332306998946228968225951765070086144
    >>> bigint(signed_conversion(-2**118))
    -332306998946228968225951765070086144

    >>> bigint(2**120)
    1329227995784915872903807060280344576
    >>> bigint(signed_conversion(2**120))
    1329227995784915872903807060280344576
    >>> bigint(signed_conversion(-2**120))
    -1329227995784915872903807060280344576

    >>> bigint(2**127-1)
    170141183460469231731687303715884105727
    >>> bigint(signed_conversion(2**127-2))
    170141183460469231731687303715884105726
    >>> bigint(signed_conversion(2**127-1))
    170141183460469231731687303715884105727
    >>> bigint(signed_conversion(2**127))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: ... too big to convert
    >>> bigint(signed_conversion(-2**127))
    -170141183460469231731687303715884105728
    >>> bigint(signed_conversion(-2**127-1))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    OverflowError: ... too big to convert
    """
    cdef int128_t n = x
    return n
Cython-0.26.1/tests/run/baas3.pyx0000664000175000017500000000025112542002467017331 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> m = MyClass()
    >>> m is foo(m)
    True
"""

cdef class MyClass:
    pass

def foo(MyClass c):
    cdef MyClass res
    res = c
    return res
Cython-0.26.1/tests/run/special_method_docstrings.pyx0000664000175000017500000000210012542002467023552 0ustar  stefanstefan00000000000000cdef class A:
    """
    >>> A.__init__.__doc__
    'A.__init__ docstring'
    >>> A.__len__.__doc__
    'A.__len__ docstring'
    >>> A.__add__.__doc__
    'A.__add__ docstring'
    >>> A.__getattr__.__doc__
    'A.__getattr__ docstring'
    """
    def __init__(self):
        "A.__init__ docstring"
    def __len__(self):
        "A.__len__ docstring"
    def __add__(self, other):
        "A.__add__ docstring"
    def __getattr__(self, name):
        "A.__getattr__ docstring"

cdef class B(A):
    """
    >>> B.__init__.__doc__
    'A.__init__ docstring'
    >>> B.__len__.__doc__
    'B.__len__ docstring'
    >>> B.__add__.__doc__
    'A.__add__ docstring'
    >>> B.__getattr__.__doc__
    'A.__getattr__ docstring'
    """
    def __len__(self):
        "B.__len__ docstring"

class C(A):
    """
    >>> C.__init__.__doc__
    'A.__init__ docstring'
    >>> C.__len__.__doc__
    'C.__len__ docstring'
    >>> C.__add__.__doc__
    'A.__add__ docstring'
    >>> C.__getattr__.__doc__
    'A.__getattr__ docstring'
    """
    def __len__(self):
        "C.__len__ docstring"
Cython-0.26.1/tests/run/kwargs_passthrough.pyx0000664000175000017500000001074712542002467022300 0ustar  stefanstefan00000000000000cimport cython
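# The test_*_path_exists decorators below assert on the compiled node tree:
# forwarding **kwargs unchanged must not require a MergedDictNode (an
# intermediate dict copy), while variants that inject extra keywords do.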


@cython.test_fail_if_path_exists('//MergedDictNode')
def wrap_passthrough(f):
    """
    >>> def f(a=1): return a
    >>> wrapped = wrap_passthrough(f)
    >>> wrapped(1)
    CALLED
    1
    >>> wrapped(a=2)
    CALLED
    2
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        return f(*args, **kwargs)
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def unused(*args, **kwargs):
    """
    >>> unused()
    ()
    >>> unused(1, 2)
    (1, 2)
    """
    return args


@cython.test_fail_if_path_exists('//MergedDictNode')
def used_in_closure(**kwargs):
    """
    >>> used_in_closure()
    >>> d = {}
    >>> used_in_closure(**d)
    >>> d  # must not be modified
    {}
    """
    def func():
        kwargs['test'] = 1
    return func()


@cython.test_fail_if_path_exists('//MergedDictNode')
def modify_in_closure(**kwargs):
    """
    >>> func = modify_in_closure()
    >>> func()

    >>> d = {}
    >>> func = modify_in_closure(**d)
    >>> func()
    >>> d  # must not be modified
    {}
    """
    def func():
        kwargs['test'] = 1
    return func


@cython.test_assert_path_exists('//MergedDictNode')
def wrap_passthrough_more(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test
    >>> wrapped = wrap_passthrough_more(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (2, 1)
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        return f(*args, test=1, **kwargs)
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def wrap_passthrough2(f):
    """
    >>> def f(a=1): return a
    >>> wrapped = wrap_passthrough2(f)
    >>> wrapped(1)
    CALLED
    1
    >>> wrapped(a=2)
    CALLED
    2
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        f(*args, **kwargs)
        return f(*args, **kwargs)
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def wrap_modify(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test

    >>> wrapped = wrap_modify(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (2, 1)
    >>> wrapped(a=2, test=3)
    CALLED
    (2, 1)
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        kwargs['test'] = 1
        return f(*args, **kwargs)
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def wrap_modify_mix(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test

    >>> wrapped = wrap_modify_mix(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (2, 1)
    >>> wrapped(a=2, test=3)
    CALLED
    (2, 1)
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        f(*args, **kwargs)
        kwargs['test'] = 1
        return f(*args, **kwargs)
    return wrapper


@cython.test_assert_path_exists('//MergedDictNode')
def wrap_modify_func(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test

    >>> wrapped = wrap_modify_func(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (2, 1)
    >>> wrapped(a=2, test=3)
    CALLED
    (2, 1)
    """
    def modify(kw):
        kw['test'] = 1
        return kw

    def wrapper(*args, **kwargs):
        print("CALLED")
        return f(*args, **modify(kwargs))
    return wrapper


@cython.test_assert_path_exists('//MergedDictNode')
def wrap_modify_func_mix(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test

    >>> wrapped = wrap_modify_func_mix(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (2, 1)
    >>> wrapped(a=2, test=3)
    CALLED
    (2, 1)
    """
    def modify(kw):
        kw['test'] = 1
        return kw

    def wrapper(*args, **kwargs):
        print("CALLED")
        f(*args, **kwargs)
        return f(*args, **modify(kwargs))
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def wrap_reassign(f):
    """
    >>> def f(a=1, test=2):
    ...     return a, test

    >>> wrapped = wrap_reassign(f)
    >>> wrapped(1)
    CALLED
    (1, 1)
    >>> wrapped(a=2)
    CALLED
    (1, 1)
    >>> wrapped(a=2, test=3)
    CALLED
    (1, 1)
    """
    def wrapper(*args, **kwargs):
        print("CALLED")
        kwargs = {'test': 1}
        return f(*args, **kwargs)
    return wrapper


@cython.test_fail_if_path_exists('//MergedDictNode')
def kwargs_metaclass(**kwargs):
    """
    >>> K = kwargs_metaclass()
    >>> K = kwargs_metaclass(metaclass=type)
    """
    class K(**kwargs):
        pass
    return K
Cython-0.26.1/tests/run/extclasspass.pyx0000664000175000017500000000014112542002467021053 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> e = Eggs()
    >>> type(e).__name__
    'Eggs'
"""

cdef class Eggs: pass
Cython-0.26.1/tests/run/king1.pyx0000664000175000017500000000051512542002467017354 0ustar  stefanstefan00000000000000DEF USTUFF = u"Spam"
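# DEF defines a compile-time constant; the IF/ELSE blocks below are resolved by
# the Cython compiler, so only the selected print statement ends up in the
# generated C code.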

def uf():
    """
    >>> uf()
    It works!
    """
    IF USTUFF == u"Spam":
        print "It works!"
    ELSE:
        print "Doesn't work"

DEF BSTUFF = b"Spam"

def bf():
    """
    >>> bf()
    It works!
    """
    IF BSTUFF == b"Spam":
        print "It works!"
    ELSE:
        print "Doesn't work"
Cython-0.26.1/tests/run/builtin_methods_return_values.pyx0000664000175000017500000000342712542002467024517 0ustar  stefanstefan00000000000000# mode: run
# tag: list, set, builtins
# ticket: 688

_set = set

class TestObj(object):
    pass

def _setattr(obj):
    """
    >>> t = TestObj()
    >>> _setattr(t) is None
    True
    >>> t.test is None
    True
    """
    setattr(obj, 'test', None)
    return setattr(obj, 'test', None)

def _delattr(obj):
    """
    >>> t = TestObj()
    >>> t.test1 = t.test2 = True
    >>> _delattr(t) is None
    True
    >>> hasattr(t, 'test1')
    False
    >>> hasattr(t, 'test2')
    False
    """
    delattr(obj, 'test1')
    return delattr(obj, 'test2')

def list_sort(list l):
    """
    >>> list_sort([1,2,3]) is None
    True
    """
    l.sort()
    return l.sort()

def list_reverse(list l):
    """
    >>> list_reverse([1,2,3]) is None
    True
    """
    l.reverse()
    return l.reverse()

def list_insert(list l):
    """
    >>> list_insert([1,2,3]) is None
    True
    """
    l.insert(1, 2)
    return l.insert(1, 2)

def list_append(list l):
    """
    >>> list_append([1,2,3]) is None
    True
    """
    l.append(1)
    return l.append(2)

def set_clear(set s):
    """
    >>> set_clear(_set([1,2,3])) is None
    True
    >>> set_clear(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'clear'
    """
    s.clear()
    return s.clear()

def set_discard(set s):
    """
    >>> set_discard(_set([1,2,3])) is None
    True
    """
    s.discard(1)
    return s.discard(2)

def set_add(set s):
    """
    >>> set_add(_set([1,2,3])) is None
    True
    """
    s.add(1)
    return s.add(2)

def dict_clear(dict d):
    """
    >>> dict_clear({1:2,3:4}) is None
    True
    >>> dict_clear(None)
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'clear'
    """
    d.clear()
    return d.clear()
Cython-0.26.1/tests/run/unused_args.pyx0000664000175000017500000000153312542002467020663 0ustar  stefanstefan00000000000000cdef c_unused_simple(a, b, c):
    """
    >>> c_unused_simple(1, 2, 3)
    3
    """
    return a + b

cdef c_unused_optional(a, b, c=1, d=2):
    """
    >>> c_unused_optional(1, 2)
    4
    >>> c_unused_optional(1, 2, 3, 4)
    6
    """
    return b + d

cpdef cp_unused_simple(a, b, c):
    """
    >>> cp_unused_simple(1, 2, 3)
    3
    """
    return a + b

cpdef cp_unused_optional(a, b, c=1, d=2):
    """
    >>> cp_unused_optional(1, 2)
    4
    >>> cp_unused_optional(1, 2, 3, 4)
    6
    """
    return b + d


cdef class Unused:
    """
    >>> o = Unused()
    """

    cpdef cp_unused_simple(self, a, b, c):
        return c

    cpdef cp_unused_optional(self, a, b, c=1, d=2):
        return b + d

def def_unused(a, b, c):
    """
    >>> def_unused(1, 2, 3)
    """

def def_unused_metho(o):
    """
    >>> def_unused_metho(0)
    """
Cython-0.26.1/tests/run/packedstruct_T290.pyx0000664000175000017500000000035012542002467021552 0ustar  stefanstefan00000000000000# ticket: 290

"""
>>> f()
(9, 9)
"""

cdef packed struct MyCdefStruct:
    char a
    double b

ctypedef packed struct MyCTypeDefStruct:
    char a
    double b

def f():
    return (sizeof(MyCdefStruct), sizeof(MyCTypeDefStruct))
Cython-0.26.1/tests/run/inhcmethcall.pyx0000664000175000017500000000045212542002467020776 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> p = Norwegian()
>>> p.describe()
Norwegian
Parrot
"""

cdef class Parrot:

  cdef void _describe(self):
    print u"Parrot"

  def describe(self):
    self._describe()

cdef class Norwegian(Parrot):

  cdef void _describe(self):
    print u"Norwegian"
    Parrot._describe(self)
Cython-0.26.1/tests/run/classpass.pyx0000664000175000017500000000026512542002467020341 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> s = Spam()
    >>> s.__class__.__name__
    'Spam'

    >>> s = SpamT()
    >>> type(s).__name__
    'SpamT'
"""

class Spam: pass

class SpamT(object): pass
Cython-0.26.1/tests/run/overflow_check_longlong.pyx0000664000175000017500000000013412542002467023237 0ustar  stefanstefan00000000000000# cython: overflowcheck.fold = False


ctypedef long long INT
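# overflow_check.pxi contains the shared overflow-check tests; including it here
# compiles them against the 'long long' INT typedef defined above.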

include "overflow_check.pxi"
Cython-0.26.1/tests/run/closure_tests_3.pyx0000664000175000017500000003366012542002467021472 0ustar  stefanstefan00000000000000# mode: run
# tag: closures
# preparse: id
# preparse: def_to_cdef
#
# closure_tests_3.pyx
#
# Battery of tests for closures in Cython. Based on the collection of
# compiler tests from P423/B629 at Indiana University, Spring 1999 and
# Fall 2000. Special thanks to R. Kent Dybvig, Dan Friedman, Kevin
# Millikin, and everyone else who helped to generate the original
# tests. Converted into a collection of Python/Cython tests by Craig
# Citro.
#
# Note: This set of tests is split (somewhat randomly) into several
# files, simply because putting all the tests in a single file causes
# gcc and g++ to buckle under the load.
#


def g1649():
    """
    >>> g1649()
    6
    """
    def g1648():
      def g1647(x_1211):
        return x_1211
      return g1647
    f_1212 = g1648()
    if (f_1212(True)):
      f_1212(3)
      f_1212(4)
    else:
      f_1212(5)
    return f_1212(6)


def g1653():
    """
    >>> g1653()
    5
    """
    def g1652():
      def g1651(x_1213):
        return (x_1213+1)
      return g1651
    f_1214 = g1652()
    def g1650():
      f_1215 = 3
      return (f_1215)+(1)
    return f_1214(g1650())


def g1662():
    """
    >>> g1662()
    51
    """
    x_1223 = 15
    def g1661():
      def g1660(h_1219, v_1218):
        return (h_1219)*(v_1218)
      return g1660
    f_1222 = g1661()
    def g1659():
      def g1658(x_1217):
        return (x_1217)+(5)
      return g1658
    k_1221 = g1659()
    def g1657():
      def g1656(x_1216):
        return (x_1216+1)
      return g1656
    g_1220 = g1657()
    def g1655():
      def g1654():
        g_1224 = 3
        return f_1222(g_1224, x_1223)
      return g_1220(g1654())
    return k_1221(g1655())


def g1665():
    """
    >>> g1665()
    5
    """
    x_1225 = 4
    def g1664():
      def g1663():
        return x_1225
      return g1663
    f_1226 = g1664()
    x_1225 = 5
    return f_1226()


def g1670():
    """
    >>> g1670()
    5
    """
    def g1669():
      def g1668():
        def g1667():
          def g1666():
            return 4
          return g1666
        y_1227 = g1667()
        return y_1227()
      return (g1668()+1)
    x_1228 = g1669()
    return x_1228


def g1674():
    """
    >>> g1674()
    1
    """
    def g1673():
      def g1671(n_1230):
        def g1672():
          return n_1230 == 0
        if (g1672()):
          return 1
        else:
          return one_1229((n_1230-1))
      return g1671
    one_1229 = g1673()
    return one_1229(13)


def g1681():
    """
    >>> g1681()
    True
    """
    def g1680():
      def g1678(x_1234):
        def g1679():
          return x_1234 == 0
        if (g1679()):
          return True
        else:
          return odd_1231((x_1234-1))
      return g1678
    even_1232 = g1680()
    def g1677():
      def g1675(x_1233):
        def g1676():
          return x_1233 == 0
        if (g1676()):
          return False
        else:
          return even_1232((x_1233-1))
      return g1675
    odd_1231 = g1677()
    return odd_1231(13)


def g1688():
    """
    >>> g1688()
    True
    """
    t_1236 = True
    f_1235 = False
    def g1687():
      def g1685(x_1240):
        def g1686():
          return x_1240 == 0
        if (g1686()):
          return t_1236
        else:
          return odd_1237((x_1240-1))
      return g1685
    even_1238 = g1687()
    def g1684():
      def g1682(x_1239):
        def g1683():
          return x_1239 == 0
        if (g1683()):
          return f_1235
        else:
          return even_1238((x_1239-1))
      return g1682
    odd_1237 = g1684()
    return odd_1237(13)


def g1698():
    """
    >>> g1698()
    True
    """
    def g1697():
      def g1696(x_1241):
        return x_1241
      return g1696
    even_1242 = g1697()
    def g1695():
      def g1694():
        def g1692(x_1246):
          def g1693():
            return x_1246 == 0
          if (g1693()):
            return True
          else:
            return odd_1243((x_1246-1))
        return g1692
      even_1244 = g1694()
      def g1691():
        def g1689(x_1245):
          def g1690():
            return x_1245 == 0
          if (g1690()):
            return False
          else:
            return even_1244((x_1245-1))
        return g1689
      odd_1243 = g1691()
      return odd_1243(13)
    return even_1242(g1695())


def g1702():
    """
    >>> g1702()
    120
    """
    def g1701():
      def g1699(n_1248):
        def g1700():
          return n_1248 == 0
        if (g1700()):
          return 1
        else:
          return (n_1248)*(fact_1247((n_1248-1)))
      return g1699
    fact_1247 = g1701()
    return fact_1247(5)


def g1716():
    """
    >>> g1716()
    10
    """
    x_1249 = 5
    def g1715():
      def g1713(u_1263, v_1262, w_1261):
        def g1714():
          return u_1263 == 0
        if (g1714()):
          return b_1251(v_1262, w_1261)
        else:
          return a_1252((u_1263)-(1), v_1262, w_1261)
      return g1713
    a_1252 = g1715()
    def g1712():
      def g1705(q_1255, r_1254):
        p_1256 = (q_1255)*(r_1254)
        def g1711():
          def g1709(n_1260):
            def g1710():
              return n_1260 == 0
            if (g1710()):
              return c_1250(p_1256)
            else:
              return o_1257((n_1260)-(1))
          return g1709
        e_1258 = g1711()
        def g1708():
          def g1706(n_1259):
            def g1707():
              return n_1259 == 0
            if (g1707()):
              return c_1250(x_1249)
            else:
              return e_1258((n_1259)-(1))
          return g1706
        o_1257 = g1708()
        return e_1258((q_1255)*(r_1254))
      return g1705
    b_1251 = g1712()
    def g1704():
      def g1703(x_1253):
        return (5)*(x_1253)
      return g1703
    c_1250 = g1704()
    return a_1252(3, 2, 1)


def g1729():
    """
    >>> g1729()
    537516
    """
    def g1728():
      def g1727(x_1269):
        return (x_1269+1)
      return g1727
    f_1276 = g1728()
    def g1726():
      def g1725(x_1268):
        return (x_1268-1)
      return g1725
    g_1275 = g1726()
    def g1724():
      def g1723(x_1267):
        return (x_1267+1)
      return g1723
    t_1274 = g1724()
    def g1722():
      def g1721(x_1266):
        return (x_1266+1)
      return g1721
    j_1273 = g1722()
    def g1720():
      def g1719(x_1265):
        return (x_1265+1)
      return g1719
    i_1272 = g1720()
    def g1718():
      def g1717(x_1264):
        return (x_1264+1)
      return g1717
    h_1271 = g1718()
    x_1270 = 80
    a_1279 = f_1276(x_1270)
    b_1278 = g_1275(x_1270)
    c_1277 = h_1271(i_1272(j_1273(t_1274(x_1270))))
    return (a_1279)*((b_1278)*((c_1277)+(0)))


def g1733():
    """
    >>> g1733()
    120
    """
    def g1732():
      def g1730(fact_1281, n_1280):
        def g1731():
          return n_1280 == 0
        if (g1731()):
          return 1
        else:
          return (fact_1281(fact_1281, (n_1280-1)))*(n_1280)
      return g1730
    fact_1282 = g1732()
    return fact_1282(fact_1282, 5)


def g1737():
    """
    >>> g1737()
    10000
    """
    def g1736():
      def g1735(x_1283):
        return (x_1283)+(1000)
      return g1735
    f_1284 = g1736()
    def g1734():
      return f_1284(-2) == 0
    if (g1734()):
      return f_1284(6000)
    else:
      return f_1284(f_1284(8000))


def g1741():
    """
    >>> g1741()
    10000
    """
    def g1740():
      def g1739(x_1285):
        return (x_1285)+(1000)
      return g1739
    f_1286 = g1740()
    def g1738():
      return f_1286(-1) == 0
    if (g1738()):
      return f_1286(6000)
    else:
      return f_1286(f_1286(8000))


def g1747():
    """
    >>> g1747()
    8000
    """
    def g1746():
      def g1745(x_1288, y_1287):
        return (x_1288)+(1000)
      return g1745
    f_1289 = g1746()
    def g1744():
      def g1743():
        def g1742():
          return 0
        return f_1289(3000, g1742())
      if (g1743()):
        return f_1289(f_1289(4000, 0), 0)
      else:
        return 8000
    return (g1744())+(2000)


def g1754():
    """
    >>> g1754()
    24
    """
    def g1753():
      def g1752():
        def g1751():
          def g1748(x_1290):
            def g1749(y_1291):
              def g1750(z_1292):
                return (x_1290)+((y_1291)+((z_1292)+(y_1291)))
              return g1750
            return g1749
          return g1748
        return g1751()(5)
      return g1752()(6)
    return g1753()(7)


def g1765():
    """
    >>> g1765()
    35
    """
    def g1764():
      def g1763():
        def g1762():
          def g1761():
            def g1760():
              def g1755(x_1293):
                def g1756(y_1294):
                  def g1757(z_1295):
                    def g1758(w_1296):
                      def g1759(u_1297):
                        return (x_1293)+((y_1294)+((z_1295)+((w_1296)+(u_1297))))
                      return g1759
                    return g1758
                  return g1757
                return g1756
              return g1755
            return g1760()(5)
          return g1761()(6)
        return g1762()(7)
      return g1763()(8)
    return g1764()(9)


def g1769():
    """
    >>> g1769()
    True
    """
    def g1768():
      def g1767(x_1298):
        return x_1298
      return g1767
    f_1299 = g1768()
    def g1766():
      return hasattr(f_1299, '__call__')
    if (g1766()):
      return True
    else:
      return False


def g1779():
    """
    >>> g1779()
    6
    """
    def g1778():
      def g1773(sum_1301, ls_1300):
        def g1777():
          return (ls_1300 == [])
        if (g1777()):
          return 0
        else:
          def g1776():
            return (ls_1300[0])
          def g1775():
            def g1774():
              return (ls_1300[1])
            return sum_1301(sum_1301, g1774())
          return (g1776())+(g1775())
      return g1773
    sum_1302 = g1778()
    def g1772():
      def g1771():
        def g1770():
          return [3,[]]
        return [2,g1770()]
      return [1,g1771()]
    return sum_1302(sum_1302, g1772())


def g1785():
    """
    >>> g1785()
    1500
    """
    def g1784():
      def g1783():
        def g1780(a_1303):
          def g1781():
            def g1782():
              if (True):
                return 200
            (a_1303)+(g1782())
            return 1500
          return g1781
        return g1780
      return g1783()(1000)
    return g1784()()


def g1791():
    """
    >>> g1791()
    102
    """
    def g1790():
      def g1789():
        def g1786(b_1304):
          def g1787(a_1305):
            def g1788():
              if (1):
                return 2
            a_1305 = g1788()
            return (a_1305)+(b_1304)
          return g1787
        return g1786
      return g1789()(100)
    return g1790()(200)


def g1800():
    """
    >>> g1800()
    2600
    """
    def g1799():
      def g1798():
        def g1797():
          def g1792(a_1306):
            def g1793(b_1307):
              def g1794():
                if (b_1307):
                  return 200
              a_1306 = g1794()
              def g1795(c_1308):
                def g1796():
                  if (300):
                    return 400
                c_1308 = g1796()
                return (a_1306)+((b_1307)+(c_1308))
              return g1795
            return g1793
          return g1792
        return g1797()(1000)
      return g1798()(2000)
    return g1799()(3000)


def g1807():
    """
    >>> g1807()
    3628800
    """
    def g1806():
      def g1804(x_1310):
        def g1805():
          return x_1310 == 0
        if (g1805()):
          return 1
        else:
          return (x_1310)*(f_1309((x_1310)-(1)))
      return g1804
    f_1309 = g1806()
    def g1803():
      def g1801(a_1311):
        def g1802(b_1312):
          return a_1311(b_1312)
        return g1802
      return g1801
    g_1313 = g1803()
    return g_1313(f_1309)(10)


def g1828():
    """
    >>> g1828()
    [52, [44, [17, [44, [52, 17]]]]]
    """
    def g1827():
      def g1826():
        return (a_1316)+(b_1315)
      return g1826
    f_1318 = g1827()
    def g1825():
      def g1822(y_1320):
        def g1824():
          def g1823(y_1321):
            return y_1321
          return g1823
        g_1317 = g1824()
        return (y_1320)+(y_1320)
      return g1822
    g_1317 = g1825()
    a_1316 = 17
    b_1315 = 35
    def g1821():
      def g1820():
        def g1819():
          return a_1316
        return g1819
      def g1818():
        def g1817(v_1319):
          a_1316 = v_1319
        return g1817
      return [g1820(),g1818()]
    h_1314 = g1821()
    x1_1324 = f_1318()
    x2_1323 = g_1317(22)
    def g1816():
      def g1815():
        return (h_1314[0])
      return g1815()()
    x3_1322 = g1816()
    x4_1325 = g_1317(22)
    def g1814():
      return (h_1314[1])
    g1814()(3)
    x5_1327 = f_1318()
    def g1813():
      def g1812():
        return (h_1314[0])
      return g1812()()
    x6_1326 = g1813()
    def g1811():
      def g1810():
        def g1809():
          def g1808():
            return [x5_1327,x6_1326]
          return [x4_1325,g1808()]
        return [x3_1322,g1809()]
      return [x2_1323,g1810()]
    return [x1_1324,g1811()]


def g1843():
    """
    >>> g1843()
    [52, [17, [35, [17, 35]]]]
    """
    def g1842():
      def g1841():
        return (a_1330)+(b_1329)
      return g1841
    f_1331 = g1842()
    a_1330 = 17
    b_1329 = 35
    def g1840():
      def g1839():
        def g1838():
          return a_1330
        return g1838
      def g1837():
        def g1836():
          return b_1329
        return g1836
      return [g1839(),g1837()]
    h_1328 = g1840()
    def g1835():
      def g1834():
        def g1833():
          def g1832():
            def g1831():
              return (h_1328[0])
            return g1831()()
          def g1830():
            def g1829():
              return (h_1328[1])
            return g1829()()
          return [g1832(),g1830()]
        return [b_1329,g1833()]
      return [a_1330,g1834()]
    return [f_1331(),g1835()]

Cython-0.26.1/tests/run/line_profile_test.srctree0000664000175000017500000000501113023021033022655 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON test_profile.py

######## setup.py ###########

from distutils.extension import Extension
from distutils.core import setup
from Cython.Build import cythonize
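
# Defining CYTHON_TRACE=1 enables the line-tracing code that the
# "linetrace=True" directive in collatz.pyx makes Cython generate; both are
# required for line_profiler to record per-line timings.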

extensions = [
    Extension("collatz", ["collatz.pyx"], define_macros=[('CYTHON_TRACE', '1')])
]

setup(
    ext_modules = cythonize(extensions)
)

######## test_profile.py ###########

try:
    import line_profiler
except ImportError:
    print("No line profiler, skipping test.")
    import sys
    sys.exit(0)


def assert_stats(profile, name):
    profile.print_stats()
    stats = profile.get_stats()
    assert len(stats.timings) > 0, "No profile stats."
    for key, timings in stats.timings.items():
        if key[-1] == name:
            assert len(timings) > 0
            break
    else:
        raise ValueError("No stats for %s." % name)


from collatz import collatz
func = collatz
profile = line_profiler.LineProfiler(func)
profile.runcall(func, 19)
assert_stats(profile, func.__name__)

from collatz import cp_collatz
func = cp_collatz
profile = line_profiler.LineProfiler(func)
profile.runcall(func, 19)
assert_stats(profile, func.__name__)

from collatz import PyClass
obj = PyClass()
func = obj.py_pymethod
profile = line_profiler.LineProfiler(func)
profile.runcall(func)
assert_stats(profile, func.__name__)

from collatz import CClass
obj = CClass()
func = obj.c_pymethod
profile = line_profiler.LineProfiler(func)
profile.runcall(func)
assert_stats(profile, func.__name__)

func = obj.cp_pymethod
profile = line_profiler.LineProfiler(func)
profile.runcall(func, 19)
assert_stats(profile, func.__name__)


######## collatz.pyx ###########
# cython: linetrace=True

cimport cython

@cython.binding(True)
def collatz(n):
    while n > 1:
        if n % 2 == 0:
            n //= 2
        else:
            n = 3*n+1


@cython.binding(True)
cpdef cp_collatz(n):
    while n > 1:
        if n % 2 == 0:
            n //= 2
        else:
            n = 3*n+1


@cython.binding(True)
class PyClass(object):
    def py_pymethod(self):
        x = 1
        for i in range(10):
            a = x + 2
        return a * 3


@cython.binding(True)
cdef class CClass:
    def c_pymethod(self, c=2):
        for i in range(10):
            a = c + 1
        y = self.cmethod(c + a)
        return y * 4

    cpdef cp_pymethod(self, r):
        for i in range(10):
            a = r + 1
        z = self.c_pymethod(a) + self.cmethod(r)
        return z * 2

    cdef cmethod(self, s):
        for i in range(10):
            p = s + 3
        return p * 5
Cython-0.26.1/tests/run/funcexceptchained.pyx0000664000175000017500000000467212542002467022033 0ustar  stefanstefan00000000000000# mode: run
# tag: exceptions

import sys
IS_PY3 = sys.version_info[0] >= 3


__doc__ = u"""
>>> if not IS_PY3: sys.exc_clear()

>>> def test_py(outer_exc):
...   try:
...     raise AttributeError
...   except AttributeError:
...     print(sys.exc_info()[0] is AttributeError or sys.exc_info()[0])
...     try: raise KeyError
...     except:
...       print(sys.exc_info()[0] is KeyError or sys.exc_info()[0])
...       if IS_PY3:
...         print(isinstance(sys.exc_info()[1].__context__, AttributeError)
...               or sys.exc_info()[1].__context__)
...       else:
...         print(True)
...     print((IS_PY3 and sys.exc_info()[0] is AttributeError) or
...           (not IS_PY3 and sys.exc_info()[0] is KeyError) or
...           sys.exc_info()[0])
...   print((IS_PY3 and sys.exc_info()[0] is outer_exc) or
...         (not IS_PY3 and sys.exc_info()[0] is KeyError) or
...         sys.exc_info()[0])

>>> print(sys.exc_info()[0]) # 0
None

>>> test_py(None)
True
True
True
True
True
>>> print(sys.exc_info()[0]) # test_py()
None

>>> test_c(None)
True
True
True
True
True
>>> print(sys.exc_info()[0]) # test_c()
None

>>> def test_py2():
...   try:
...     raise Exception
...   except Exception:
...     test_py(Exception)
...     print(sys.exc_info()[0] is Exception or sys.exc_info()[0])
...   print((IS_PY3 and sys.exc_info()[0] is None) or
...         (not IS_PY3 and sys.exc_info()[0] is Exception) or
...         sys.exc_info()[0])

>>> test_py2()
True
True
True
True
True
True
True
>>> print(sys.exc_info()[0]) # test_py2()
None

>>> test_c2()
True
True
True
True
True
True
True
>>> print(sys.exc_info()[0]) # test_c2()
None
"""


def test_c(outer_exc):
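    # Compiled counterpart of test_py in __doc__ above: the exception state seen
    # by sys.exc_info() must be saved/restored around nested except blocks, and
    # on Python 3 the KeyError must chain the AttributeError as __context__.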
    try:
        raise AttributeError
    except AttributeError:
        print(sys.exc_info()[0] is AttributeError or sys.exc_info()[0])
        try: raise KeyError
        except:
            print(sys.exc_info()[0] is KeyError or sys.exc_info()[0])
            if IS_PY3:
                print(isinstance(sys.exc_info()[1].__context__, AttributeError)
                      or sys.exc_info()[1].__context__)
            else:
                print(True)
        print(sys.exc_info()[0] is AttributeError or sys.exc_info()[0])
    print(sys.exc_info()[0] is outer_exc or sys.exc_info()[0])


def test_c2():
    try:
        raise Exception
    except Exception:
        test_c(Exception)
        print(sys.exc_info()[0] is Exception or sys.exc_info()[0])
    print(sys.exc_info()[0] is None or sys.exc_info()[0])
Cython-0.26.1/tests/run/lambda_module_T603.pyx0000664000175000017500000000067012542002467021646 0ustar  stefanstefan00000000000000# mode: run
# tag: lambda
# ticket: 603

# Module scope lambda functions

__doc__ = """
>>> pow2(16)
256
>>> with_closure(0)
0
>>> typed_lambda(1)(2)
3
>>> typed_lambda(1.5)(1.5)
2
>>> cdef_const_lambda()
123
>>> const_lambda()
321
"""

pow2 = lambda x: x * x
with_closure = lambda x:(lambda: x)()
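# Lambda parameters may carry C types; the float arguments in the doctest are
# truncated to C int, which is why typed_lambda(1.5)(1.5) returns 2.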
typed_lambda = lambda int x : (lambda int y: x + y)

cdef int xxx = 123
cdef_const_lambda = lambda: xxx

yyy = 321
const_lambda = lambda: yyy
Cython-0.26.1/tests/run/pyclass_scope_T671.py0000664000175000017500000000313012542002467021537 0ustar  stefanstefan00000000000000# mode: run
# ticket: 671

A = 1234

class SimpleAssignment(object):
    """
    >>> SimpleAssignment.A
    1234
    """
    A = A

class SimpleRewrite(object):
    """
    >>> SimpleRewrite.A
    4321
    """
    A = 4321
    A = A

def simple_inner(a):
    """
    >>> simple_inner(4321).A
    1234
    """
    A = a
    class X(object):
        A = A
    return X

def conditional(a, cond):
    """
    >>> conditional(4321, False).A
    1234
    >>> conditional(4321, True).A
    4321
    """
    class X(object):
        if cond:
            A = a
        A = A
    return X

def name_error():
    """
    >>> name_error() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    NameError: ...B...
    """
    class X(object):
        B = B

def conditional_name_error(cond):
    """
    >>> conditional_name_error(True).B
    4321
    >>> conditional_name_error(False).B #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    NameError: ...B...
    """
    class X(object):
        if cond:
            B = 4321
        B = B
    return X

C = 1111
del C

def name_error_deleted():
    """
    >>> name_error_deleted() #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    NameError: ...C...
    """
    class X(object):
        C = C

_set = set

def name_lookup_order():
    """
    >>> Scope = name_lookup_order()
    >>> Scope().set(2)
    42
    >>> Scope.test1 == _set()
    True
    >>> Scope.test2 == _set()
    True

    """
    class Scope(object):
        test1 = set()
        test2 = set()

        def set(self, x):
            return 42

    return Scope
Cython-0.26.1/tests/run/cascaded_typed_assignments_T466.pyx0000664000175000017500000000461512542002467024442 0ustar  stefanstefan00000000000000# mode: run
# ticket: 466
# extension to T409

cimport cython

def simple_parallel_typed():
    """
    >>> simple_parallel_typed()
    (1, 2, [1, 2], [1, 2])
    """
    cdef int a,c
    a, c = d = e = [1,2]
    return a, c, d, e

def simple_parallel_int_mix():
    """
    >>> simple_parallel_int_mix()
    (1, 2, 1, 2, 1, 2, [1, 2], [1, 2])
    """
    cdef int ai,bi
    cdef long al,bl
    cdef object ao, bo
    ai, bi = al, bl = ao, bo = c = d = [1,2]
    return ao, bo, ai, bi, al, bl, c, d

def simple_parallel_int_mix_recursive():
    """
    >>> simple_parallel_int_mix_recursive()
    (1, 2, 3, 1, [2, 3], 1, 2, 3, 1, 2, 3, [1, [2, 3]], [1, [2, 3]])
    """
    cdef int ai, bi, ci
    cdef long al, bl, cl
    cdef object ao, bo, co
    cdef object xo, yo
    ai, [bi, ci] = al, [bl, cl] = xo, yo = ao, [bo, co] = c = d = [1, [2, 3]]
    return ao, bo, co, xo, yo, ai, bi, ci, al, bl, cl, c, d

cdef int called = 0

cdef char* get_string():
    global called
    called += 1
    return "abcdefg"

def non_simple_rhs():
    """
    >>> non_simple_rhs()
    1
    """
    cdef char *a, *b
    cdef int orig_called = called
    a = b = get_string()
    assert a is b
    return called - orig_called

from libc.stdlib cimport malloc, free

def non_simple_rhs_malloc():
    """
    >>> non_simple_rhs_malloc()
    """
    cdef char *a, *b, **c

    c = &b
    c[0] = a = <char *>malloc(2)
    a[0] = c'X'
    b[1] = c'\0'

    # copy from different pointers to make sure they all point to the
    # same memory
    cdef char[2] x
    x[0] = b[0]
    x[1] = a[1]

    # clean up
    free(a)
    if b is not a: # shouldn't happen
        free(b)

    # check copied values
    assert x[0] == c'X'
    assert x[1] == c'\0'

@cython.test_assert_path_exists(
    '//CascadedAssignmentNode',
    '//CascadedAssignmentNode//CoerceToTempNode',
    '//CascadedAssignmentNode//CoerceToTempNode[@type.is_ptr]')
def assign_carray():
    """
    assign_carray()
    (1, 2, 3)
    """
    cdef int *b, *c
    cdef int[3] a
    a[0] = 1
    a[1] = 2
    a[2] = 3

    b = c = a+1
    assert b[0] == 2
    assert c[1] == 3
    return a[0], b[0], c[1]


def pyobject_from_cvalue(table, key):
    """
    >>> table = {'X':0, 'Y':1}
    >>> pyobject_from_cvalue(table, 'Z')
    2
    >>> pyobject_from_cvalue(table, 'X')
    0
    """
    cdef int num
    num = table.get(key, -1)
    if num < 0:
        num = table[key] = len(table)
    return num
Cython-0.26.1/tests/run/posix_time.pyx0000664000175000017500000000136712542002467020531 0ustar  stefanstefan00000000000000# tag: posix

from posix.time cimport *

def test_itimer(sec, usec):
    """
    >>> test_itimer(10, 2)
    (10, 2)
    """
    cdef itimerval t, gtime

    t.it_interval.tv_sec = sec
    t.it_interval.tv_usec = usec
    t.it_value.tv_sec = sec
    t.it_value.tv_usec = usec
    ret = setitimer(ITIMER_REAL, &t, NULL)
    assert ret == 0
    ret = getitimer(ITIMER_REAL, &gtime)
    assert ret == 0
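    # Disarm the timer again so it cannot fire later in the test process.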
    t.it_interval.tv_sec = 0
    t.it_interval.tv_usec = 0
    t.it_value.tv_sec = 0
    t.it_value.tv_usec = 0
    ret = setitimer(ITIMER_REAL, &t, NULL)
    return gtime.it_interval.tv_sec, gtime.it_interval.tv_usec

def test_gettimeofday():
    """
    >>> test_gettimeofday()
    """
    cdef timeval t
    ret = gettimeofday(&t, NULL)
    assert ret == 0
Cython-0.26.1/tests/run/empty_declarators.pyx0000664000175000017500000000022512542002467022062 0ustar  stefanstefan00000000000000cpdef zed(short, long, complex, x):
    """
    >>> zed(short=1, long=2, complex=3, x=4)
    (1, 2, 3, 4)
    """
    return short, long, complex, x
Cython-0.26.1/tests/run/bad_c_struct_T252.pyx0000664000175000017500000000151412542002467021513 0ustar  stefanstefan00000000000000# ticket: 252

cdef cf(default=None):
    return default

cpdef cpf(default=100):
    """
    >>> cpf()
    100
    >>> cpf(1)
    1
    >>> cpf(default=2)
    2
    """
    default = cf(default)
    return default

def pf(default=100):
    """
    >>> pf()
    100
    >>> pf(1)
    1
    >>> pf(default=2)
    2
    """
    return default


cdef struct foo:
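    # 'void' and 'default' are C keywords but valid Python identifiers; this test
    # checks that Cython can still use them as field/attribute names (it must not
    # emit them verbatim in the generated C code).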
    int void
    int default

def test_struct():
    """
    >>> test_struct()
    (1, 2)
    """
    cdef foo foo_struct
    foo_struct.void = 1
    foo_struct.default = 2
    return foo_struct.void, foo_struct.default


cdef class Foo:
    cdef int void
    cdef int default

def test_class():
    """
    >>> test_class()
    (1, 2)
    """
    cdef Foo foo_instance = Foo()
    foo_instance.void = 1
    foo_instance.default = 2
    return foo_instance.void, foo_instance.default
Cython-0.26.1/tests/run/pinard8.pyx0000664000175000017500000000056212542002467017712 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> f = Fiche()
    >>> f[0] = 1
    >>> f.geti()
    1

    >>> f[1] = None
    >>> f.geti()
    0

    >>> f[0] = 1
    >>> f.geti()
    1
"""

cdef class Fiche:
    cdef int i

    def __setitem__(self, element, valeur):
        self.i = 0
        if valeur is None:
            return
        self.i = 1

    def geti(self):
        return self.i
Cython-0.26.1/tests/run/slice2.pyx0000664000175000017500000000321312542002467017522 0ustar  stefanstefan00000000000000# mode: run
# tag: slicing

def test_full(seq):
    """
    >>> l = [1,2,3,4]
    >>> test_full(l)
    [1, 2, 3, 4]
    >>> l == test_full(l)
    True
    >>> l is test_full(l)
    False
    >>> try: test_full(42)
    ... except TypeError: pass
    """
    obj = seq[:]
    return obj

def test_start(seq, start):
    """
    >>> test_start([1,2,3,4], 2)
    [3, 4]
    >>> test_start([1,2,3,4], 3)
    [4]
    >>> test_start([1,2,3,4], 4)
    []
    >>> test_start([1,2,3,4], 8)
    []
    >>> test_start([1,2,3,4], -3)
    [2, 3, 4]
    >>> test_start([1,2,3,4], -4)
    [1, 2, 3, 4]
    >>> test_start([1,2,3,4], -8)
    [1, 2, 3, 4]
    >>> test_start([1,2,3,4], 0)
    [1, 2, 3, 4]
    >>> try: test_start(42, 2, 3)
    ... except TypeError: pass
    """
    obj = seq[start:]
    return obj

def test_stop(seq, stop):
    """
    >>> test_stop([1,2,3,4], 3)
    [1, 2, 3]
    >>> test_stop([1,2,3,4], -1)
    [1, 2, 3]
    >>> test_stop([1,2,3,4], -3)
    [1]
    >>> test_stop([1,2,3,4], -4)
    []
    >>> test_stop([1,2,3,4], -8)
    []
    >>> test_stop([1,2,3,4], 0)
    []
    >>> try: test_stop(42, 3)
    ... except TypeError: pass
    """
    obj = seq[:stop]
    return obj

def test_start_and_stop(seq, start, stop):
    """
    >>> l = [1,2,3,4]
    >>> test_start_and_stop(l, 2, 3)
    [3]
    >>> test_start_and_stop(l, -3, -1)
    [2, 3]
    >>> try: test_start_and_stop(42, 2, 3)
    ... except TypeError: pass
    """
    obj = seq[start:stop]
    return obj

class A(object):
    pass

def slice_of_temporary_smoketest():
    """
    >>> slice_of_temporary_smoketest()
    [3, 2]
    """
    x = A()
    x.a = [1, 2]
    x.a[:] = [3,2]
    return x.a
Cython-0.26.1/tests/run/strmethods.pyx0000664000175000017500000000701612542002467020542 0ustar  stefanstefan00000000000000cimport cython

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def str_startswith(str s, sub, start=None, stop=None):
    """
    >>> str_startswith('a', 'a')
    True
    >>> str_startswith('ab', 'a')
    True
    >>> str_startswith('a', 'b')
    False
    >>> str_startswith('ab', 'b')
    False
    >>> str_startswith('a', ('a', 'b'))
    True
    >>> str_startswith('a', 'a', 1)
    False
    >>> str_startswith('a', 'a', 0, 0)
    False
    """

    if start is None:
      return s.startswith(sub)
    elif stop is None:
      return s.startswith(sub, start)
    else:
      return s.startswith(sub, start, stop)

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
def str_endswith(str s, sub, start=None, stop=None):
    """
    >>> str_endswith('a', 'a')
    True
    >>> str_endswith('ba', 'a')
    True
    >>> str_endswith('a', 'b')
    False
    >>> str_endswith('ba', 'b')
    False
    >>> str_endswith('a', ('a', 'b'))
    True
    >>> str_endswith('a', 'a', 1)
    False
    >>> str_endswith('a', 'a', 0, 0)
    False
    """

    if start is None:
      return s.endswith(sub)
    elif stop is None:
      return s.endswith(sub, start)
    else:
      return s.endswith(sub, start, stop)


@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//NoneCheckNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def str_join(str s, args):
    """
    >>> print(str_join('a', list('bbb')))
    babab
    """
    result = s.join(args)
    assert cython.typeof(result) == 'basestring object', cython.typeof(result)
    return result


@cython.test_fail_if_path_exists(
    "//SimpleCallNode//NoneCheckNode",
)
@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def literal_join(args):
    """
    >>> print(literal_join(list('abcdefg')))
    a|b|c|d|e|f|g
    """
    result = '|'.join(args)
    assert cython.typeof(result) == 'basestring object', cython.typeof(result)
    return result


# unicode.__mod__(format, values)

format1 = 'abc%sdef'
format2 = 'abc%sdef%sghi'

def mod_format(str s, values):
    """
    >>> mod_format(format1, 'sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format(format2, ('XYZ', 'ABC')) == 'abcXYZdefABCghi'  or  mod_format(format2, ('XYZ', 'ABC'))
    True
    >>> mod_format(None, 'sa')
    Traceback (most recent call last):
    TypeError: unsupported operand type(s) for %: 'NoneType' and 'str'
    >>> class RMod(object):
    ...     def __rmod__(self, other):
    ...         return 123
    >>> mod_format(None, RMod())
    123
    """
    assert cython.typeof(s % values) == 'basestring object', cython.typeof(s % values)
    return s % values


def mod_format_literal(values):
    """
    >>> mod_format_literal('sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format_literal(('sa',)) == 'abcsadef'  or  mod_format(format1, ('sa',))
    True
    >>> mod_format_literal(['sa']) == "abc['sa']def"  or  mod_format(format1, ['sa'])
    True
    """
    assert cython.typeof('abc%sdef' % values) == 'basestring object', cython.typeof('abc%sdef' % values)
    return 'abc%sdef' % values


def mod_format_tuple(*values):
    """
    >>> mod_format_tuple('sa') == 'abcsadef'  or  mod_format(format1, 'sa')
    True
    >>> mod_format_tuple()
    Traceback (most recent call last):
    TypeError: not enough arguments for format string
    """
    assert cython.typeof('abc%sdef' % values) == 'basestring object', cython.typeof('abc%sdef' % values)
    return 'abc%sdef' % values
Cython-0.26.1/tests/run/extkwonlyargs.pyx0000664000175000017500000000561412542002467021271 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> ext = Ext()
    >>> b,c,d,e,f,g,h,k = ext.b,ext.c,ext.d,ext.e,ext.f,ext.g,ext.h,ext.k

"""

cdef class Ext:
    def b(self, a, b, c):
        pass

    def c(self, a, b, c=1):
        pass

    def d(self, a, b, *, c = 88):
        pass

    def e(self, a, b, c = 88, **kwds):
        pass

    def f(self, a, b, *, c, d = 42):
        pass

    def g(self, a, b, *, c, d = 42, e = 17, f, **kwds):
        pass

    def h(self, a, b, *args, c, d = 42, e = 17, f, **kwds):
        pass

    def k(self, a, b, c=1, *args, d = 42, e = 17, f, **kwds):
        pass
"""# c
    >>> c(1,2)
    >>> c(1,2,3)
    >>> c(1,2,3,4)
    Traceback (most recent call last):
    TypeError: c() takes at most 3 positional arguments (4 given)

# b
    >>> b(1,2,3)
    >>> b(1,2,3,4)
    Traceback (most recent call last):
    TypeError: b() takes exactly 3 positional arguments (4 given)

# e
    >>> e(1,2)
    >>> e(1,2, c=1)
    >>> e(1,2, d=1)
    >>> e(1,2, c=1, d=2, e=3)
    >>> e(1,2,3)
    >>> e(1,2,3,4)
    Traceback (most recent call last):
    TypeError: e() takes at most 3 positional arguments (4 given)

# d
    >>> d(1,2)
    >>> d(1,2, c=1)

    >>> d(1,2,3)
    Traceback (most recent call last):
    TypeError: d() takes exactly 2 positional arguments (3 given)
    >>> d(1,2, d=1)
    Traceback (most recent call last):
    TypeError: d() got an unexpected keyword argument 'd'

# g
    >>> g(1,2, c=1, f=2)
    >>> g(1,2, c=1, e=0, f=2, d=11)
    >>> g(1,2, c=1, f=2, e=0, x=25)

    >>> g(1,2,3)
    Traceback (most recent call last):
    TypeError: g() takes exactly 2 positional arguments (3 given)
    >>> g(1,2)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument c
    >>> g(1,2, c=1)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument f

# f
    >>> f(1,2, c=1)
    >>> f(1,2, c=1, d=2)

    >>> f(1,2,3)
    Traceback (most recent call last):
    TypeError: f() takes exactly 2 positional arguments (3 given)
    >>> f(1,2)
    Traceback (most recent call last):
    TypeError: f() needs keyword-only argument c
    >>> f(1,2, c=1, e=2)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'e'

# h
    >>> h(1,2, c=1, f=2)
    >>> h(1,2, c=1, f=2, e=3)
    >>> h(1,2,3,4,5,6, c=1, f=2)
    >>> h(1,2,3,4,5,6, c=1, f=2, e=3, x=25, y=11)

    >>> h(1,2,3)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    >>> h(1,2, d=1)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c

# k
    >>> k(1,2, c=1, f=2)
    >>> k(1,2, c=1, f=2, e=3)
    >>> k(1,2,3,4,5,6, d=1, f=2)
    >>> k(1,2,3,4,5,6, d=1, f=2, e=3, x=25, y=11)

    >>> k(1,2,3)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    >>> k(1,2, d=1)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
"""
Cython-0.26.1/tests/run/carray_coercion.pyx0000664000175000017500000002566113143605603021515 0ustar  stefanstefan00000000000000# mode: run

import sys
IS_PY3 = sys.version_info[0] >= 3
IS_32BIT_PY2 = not IS_PY3 and sys.maxint < 2**32


from libc cimport stdint
from libc.stdint cimport int16_t as my_int16_t


def unlongify(v):
    # on 32bit Py2.x platforms, 'unsigned int' coerces to a Python long => fix doctest output here.
    s = repr(v)
    if IS_32BIT_PY2:
        assert s.count('L') == s.count(',') + 1, s
        s = s.replace('L', '')
    return s


def from_int_array():
    """
    >>> from_int_array()
    [1, 2, 3]
    """
    cdef int[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    return v


cpdef tuple tuple_from_int_array():
    """
    >>> tuple_from_int_array()
    (1, 2, 3)
    """
    cdef int[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    assert isinstance(<tuple>v, tuple)
    return v


cdef extern from "stdint.h":
    ctypedef unsigned long uint32_t


def from_typedef_int_array():
    """
    >>> unlongify(from_typedef_int_array())
    '[1, 2, 3]'
    """
    cdef uint32_t[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    return v


cpdef tuple tuple_from_typedef_int_array():
    """
    >>> unlongify(tuple_from_typedef_int_array())
    '(1, 2, 3)'
    """
    cdef uint32_t[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    return v


def from_cimported_int_array():
    """
    >>> from_cimported_int_array()
    [1, 2, 3]
    """
    cdef stdint.int32_t[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    return v


def from_cimported_as_int_array():
    """
    >>> from_cimported_as_int_array()
    [1, 2, 3]
    """
    cdef my_int16_t[3] v
    v[0] = 1
    v[1] = 2
    v[2] = 3
    return v


def from_int_array_array():
    """
    >>> from_int_array_array()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[2][3] v
    v[0][0] = 11
    v[0][1] = 12
    v[0][2] = 13
    v[1][0] = 21
    v[1][1] = 22
    v[1][2] = 23
    return v


def assign_int_array_array():
    """
    >>> assign_int_array_array()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[2][3] v = [[11, 12, 13], [21, 22, 23]]
    return v


def assign_int_array_array_from_tuples():
    """
    >>> assign_int_array_array_from_tuples()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[2][3] v = ([11, 12, 13], [21, 22, 23])
    return v


''' FIXME: this currently crashes:
def assign_int_array_array_from_tuples():
    """
    >>> assign_int_array_array_from_tuples()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[2][3] v = ((11, 12, 13), (21, 22, 23))
    return v
'''


def build_from_list_of_arrays():
    """
    >>> build_from_list_of_arrays()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[3] x = [11, 12, 13]
    cdef int[3] y = [21, 22, 23]
    cdef int[2][3] v = [x, y]
    return v


def build_from_tuple_of_arrays():
    """
    >>> build_from_tuple_of_arrays()
    [[11, 12, 13], [21, 22, 23]]
    """
    cdef int[3] x = [11, 12, 13]
    cdef int[3] y = [21, 22, 23]
    cdef int[2][3] v = (x, y)
    return v


ctypedef struct MyStructType:
    int x
    double y


cdef struct MyStruct:
    int x
    double y


def from_struct_array():
    """
    >>> a, b = from_struct_array()
    >>> a['x'], a['y']
    (1, 2.0)
    >>> b['x'], b['y']
    (3, 4.0)
    """
    cdef MyStructType[2] v
    cdef MyStruct[2] w
    v[0] = MyStructType(1, 2)
    v[1] = MyStructType(3, 4)
    assert isinstance(<tuple>v, tuple)
    assert isinstance(<list>v, list)

    w[0] = MyStruct(1, 2)
    w[1] = MyStruct(3, 4)
    assert <tuple>(w) == <tuple>v
    assert <list>w == <list>(v)

    return v


def to_int_array(x):
    """
    >>> to_int_array([1, 2, 3])
    (1, 2, 3)
    >>> to_int_array([1, 2])
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2
    >>> to_int_array([1, 2, 3, 4])
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    """
    cdef int[3] v = x
    return v[0], v[1], v[2]


def to_int_array_array(x):
    """
    >>> to_int_array_array([[1, 2, 3], [4, 5, 6]])
    (1, 2, 3, 4, 5, 6)
    >>> to_int_array_array(iter([[1, 2, 3], [4, 5, 6]]))
    (1, 2, 3, 4, 5, 6)

    >>> to_int_array_array([[1, 2, 3]])
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 2, got 1
    >>> to_int_array_array(iter([[1, 2, 3]]))
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 2, got 1

    >>> to_int_array_array([[1, 2, 3], [4, 5]])
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2
    >>> to_int_array_array(iter([[1, 2, 3], [4, 5]]))
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2

    >>> to_int_array_array([[1, 2, 3, 4], [5, 6, 7]])
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    >>> to_int_array_array(iter([[1, 2, 3, 4], [5, 6, 7]]))
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    """
    cdef int[2][3] v = x
    return v[0][0], v[0][1], v[0][2], v[1][0], v[1][1], v[1][2]


'''
# FIXME: this isn't currently allowed
cdef enum:
    SIZE_A = 2
    SIZE_B = 3

def to_int_array_array_enumsize(x):
    """
    >>> to_int_array_array([[1, 2, 3], [4, 5, 6]])
    (1, 2, 3, 4, 5, 6)
    >>> to_int_array_array(iter([[1, 2, 3], [4, 5, 6]]))
    (1, 2, 3, 4, 5, 6)
    >>> to_int_array([1, 2])
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2
    >>> to_int_array([1, 2, 3, 4])
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    """
    cdef int[SIZE_A][SIZE_B] v = x
    return v[0][0], v[0][1], v[0][2], v[1][0], v[1][1], v[1][2]
'''


'''
# FIXME: this isn't currently supported
def array_as_argument(int[2] x):
    """
    >>> array_as_argument([1, 2])
    (1, 2)
    """
    return x[0], x[1]
'''


def to_int_array_slice(x):
    """
    >>> to_int_array_slice([1, 2, 3])
    (1, 2, 3)
    >>> to_int_array_slice([1, 2])
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2
    >>> to_int_array_slice([1, 2, 3, 4])
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    """
    cdef int[3] v
    v[:] = x[:3]
    assert v[0] == x[0]
    assert v[1] == x[1]
    assert v[2] == x[2]
    v[:3] = [0, 0, 0]
    assert v[0] == 0
    assert v[1] == 0
    assert v[2] == 0
    v[:] = x
    return v[0], v[1], v[2]


def iterable_to_int_array(x):
    """
    >>> iterable_to_int_array(iter([1, 2, 3]))
    (1, 2, 3)
    >>> iterable_to_int_array(iter([1, 2]))
    Traceback (most recent call last):
    IndexError: not enough values found during array assignment, expected 3, got 2
    >>> iterable_to_int_array(iter([1, 2, 3, 4]))
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 3
    """
    cdef int[3] v
    v[:] = x
    return v[0], v[1], v[2]


def to_struct_array(x):
    """
    >>> a, b = to_struct_array(({'x': 1, 'y': 2}, {'x': 3, 'y': 4}))
    >>> a['x'], a['y']
    (1, 2.0)
    >>> b['x'], b['y']
    (3, 4.0)
    """
    cdef MyStructType[2] v
    v[:] = x

    cdef MyStruct[2] w
    w[:] = x

    assert w[0].x == v[0].x
    assert w[0].y == v[0].y
    assert w[1].x == v[1].x
    assert w[1].y == v[1].y

    return v[0], w[1]


def to_struct_array_array(x):
    """
    >>> (a1, a2, a3), (b1, b2, b3) = to_struct_array_array([
    ...     ({'x': 11, 'y': 12}, {'x': 13, 'y': 14}, {'x': 15, 'y': 16}),
    ...     ({'x': 21, 'y': 22}, {'x': 23, 'y': 24}, {'x': 25, 'y': 26}),
    ... ])
    >>> a1['x'], a1['y']
    (11, 12.0)
    >>> b3['x'], b3['y']
    (25, 26.0)
    """
    cdef MyStructType[2][3] v = x
    return v[0], v[1]


cdef struct StructWithArray:
    int a
    MyStruct[2] b


def to_struct_with_array(x):
    """
    >>> x, y = to_struct_with_array([
    ...     {'a': 11, 'b': [{'x': 12, 'y': 13}, {'x': 14, 'y': 15}]},
    ...     {'a': 21, 'b': [{'x': 22, 'y': 23}, {'x': 24, 'y': 25}]},
    ... ])
    >>> x['a'], y['a']
    (11, 21)
    >>> sorted(sorted(v.items()) for v in x['b'])
    [[('x', 12), ('y', 13.0)], [('x', 14), ('y', 15.0)]]
    >>> sorted(sorted(v.items()) for v in y['b'])
    [[('x', 22), ('y', 23.0)], [('x', 24), ('y', 25.0)]]

    >>> x, y = to_struct_with_array(iter([
    ...     {'a': 11, 'b': iter([{'x': 12, 'y': 13}, {'x': 14, 'y': 15}])},
    ...     {'a': 21, 'b': iter([{'x': 22, 'y': 23}, {'x': 24, 'y': 25}])},
    ... ]))
    >>> x['a'], y['a']
    (11, 21)
    >>> sorted(sorted(v.items()) for v in x['b'])
    [[('x', 12), ('y', 13.0)], [('x', 14), ('y', 15.0)]]
    >>> sorted(sorted(v.items()) for v in y['b'])
    [[('x', 22), ('y', 23.0)], [('x', 24), ('y', 25.0)]]
    """
    cdef StructWithArray[2] v
    v = x
    return v


def to_struct_with_array_slice(x):
    """
    >>> x, y = to_struct_with_array_slice([
    ...     {'a': 11, 'b': [{'x': 12, 'y': 13}, {'x': 14, 'y': 15}]},
    ...     {'a': 21, 'b': [{'x': 22, 'y': 23}, {'x': 24, 'y': 25}]},
    ... ])
    >>> x['a'], y['a']
    (11, 21)
    >>> sorted(sorted(v.items()) for v in x['b'])
    [[('x', 12), ('y', 13.0)], [('x', 14), ('y', 15.0)]]
    >>> sorted(sorted(v.items()) for v in y['b'])
    [[('x', 22), ('y', 23.0)], [('x', 24), ('y', 25.0)]]

    >>> x, y = to_struct_with_array_slice(iter([
    ...     {'a': 11, 'b': iter([{'x': 12, 'y': 13}, {'x': 14, 'y': 15}])},
    ...     {'a': 21, 'b': iter([{'x': 22, 'y': 23}, {'x': 24, 'y': 25}])},
    ... ]))
    >>> x['a'], y['a']
    (11, 21)
    >>> sorted(sorted(v.items()) for v in x['b'])
    [[('x', 12), ('y', 13.0)], [('x', 14), ('y', 15.0)]]
    >>> sorted(sorted(v.items()) for v in y['b'])
    [[('x', 22), ('y', 23.0)], [('x', 24), ('y', 25.0)]]
    """
    cdef StructWithArray[2] v
    v[:] = x
    return v


'''
# FIXME: this isn't currently allowed
def to_struct_with_array_slice_end(x):
    """
    >>> to_struct_with_array_slice_end([
    ...     {'a': 11, 'b': [{'x': 12, 'y': 13}, {'x': 14, 'y': 15}]},
    ... ])
    [{'a': 11, 'b': [{'y': 13.0, 'x': 12}, {'y': 15.0, 'x': 14}]}]
    >>> to_struct_with_array_slice_end(iter([
    ...     {'a': 11, 'b': iter([{'x': 12, 'y': 13}, {'x': 14, 'y': 15}])},
    ... ]))
    [{'a': 11, 'b': [{'y': 13.0, 'x': 12}, {'y': 15.0, 'x': 14}]}]
    >>> to_struct_with_array_slice_end(iter([
    ...     {'a': 11, 'b': iter([{'x': 12, 'y': 13}, {'x': 14, 'y': 15}])},
    ...     {'a': 21, 'b': iter([{'x': 22, 'y': 23}, {'x': 24, 'y': 25}])},
    ... ]))
    Traceback (most recent call last):
    IndexError: too many values found during array assignment, expected 1
    """
    cdef StructWithArray[2] v
    v[:1] = x
    return v


def to_int_array_slice_start_end(x):
    """
    >>> to_int_array_slice_start_end([1, 2, 3])
    (1, 2, 3, 2, 3)
    """
    cdef int[5] v
    v[2:] = x
    v[:3] = x
    return v[0], v[1], v[2], v[3], v[4]
'''
Cython-0.26.1/tests/run/special_methods_T561_py2.pyx0000664000175000017500000001007712542002467023023 0ustar  stefanstefan00000000000000# ticket: 561
# tag: py2
# This file tests the behavior of special methods under Python 2
# after #561.  (Only methods whose behavior differs between Python 2 and 3
# are tested here; see special_methods_T561.pyx for the rest of the tests.)

__doc__ = u"""
    >>> vs0 = VerySpecial(0)
    VS __init__ 0
    >>> vs1 = VerySpecial(1)
    VS __init__ 1
    >>> # Python 3 does not use __cmp__.
    >>> vs0_cmp = vs0.__cmp__
    >>> vs0_cmp(vs1)
    VS __cmp__ 0 1
    0
    >>> # Python 3 does not use __div__ or __idiv__.
    >>> vs0_div = vs0.__div__
    >>> vs0_div(vs1)
    VS __div__ 0 1
    >>> vs0_idiv = vs0.__idiv__
    >>> vs0_idiv(vs1)
    VS __idiv__ 0 /= 1
    >>> vs0_rdiv = vs0.__rdiv__
    >>> vs0_rdiv(vs1)
    VS __div__ 1 0
    >>> # Python 3 does not use __oct__ or __hex__.
    >>> vs0_oct = vs0.__oct__
    >>> vs0_oct()
    VS __oct__ 0
    >>> vs0_hex = vs0.__hex__
    >>> vs0_hex()
    VS __hex__ 0
    >>> # Python 3 does not use __nonzero__; if you define a __nonzero__
    >>> # method, Cython for Python 3 would give you a __bool__ method
    >>> # instead.
    >>> vs0_nonzero = vs0.__nonzero__
    >>> vs0_nonzero()
    VS __nonzero__ 0
    False
    >>> # If you define __next__, you get both __next__ and next (this behavior
    >>> # is unchanged by T561, but only happens in Python 2)
    >>> vs0_next = vs0.__next__
    >>> vs0_next()
    VS next/__next__ 0
    >>> vs0_next2 = vs0.next
    >>> vs0_next2()
    VS next/__next__ 0
    >>> # Cython supports getslice only for Python 2.
    >>> vs0_getslice = vs0.__getslice__
    >>> vs0_getslice(13, 42)
    VS __getslice__ 0 13 42
    >>> # Cython supports setslice and delslice only for Python 2.
    >>> # If you define either setslice or delslice, you get wrapper objects
    >>> # for both methods.  (This behavior is unchanged by #561.)
    >>> ss_setslice = SetSlice().__setslice__
    >>> ss_setslice(13, 42, 'foo')
    SetSlice setslice 13 42 'foo'
    >>> ss_delslice = SetSlice().__delslice__
    >>> ss_delslice(13, 42)
    Traceback (most recent call last):
    ...
    NotImplementedError: 2-element slice deletion not supported by special_methods_T561_py2.SetSlice
    >>> ds_setslice = DelSlice().__setslice__
    >>> ds_setslice(13, 42, 'foo')
    Traceback (most recent call last):
    ...
    NotImplementedError: 2-element slice assignment not supported by special_methods_T561_py2.DelSlice
    >>> ds_delslice = DelSlice().__delslice__
    >>> ds_delslice(13, 42)
    DelSlice delslice 13 42
    >>> sds_setslice = SetDelSlice().__setslice__
    >>> sds_setslice(13, 42, 'foo')
    SetDelSlice setslice 13 42 'foo'
    >>> sds_delslice = SetDelSlice().__delslice__
    >>> sds_delslice(13, 42)
    SetDelSlice delslice 13 42
    >>> # Python 3 does not use __long__.
    >>> Ll = Long().__long__
    >>> Ll()
    Long __long__
"""

cdef class VerySpecial:
    cdef readonly int value

    def __init__(self, v):
        self.value = v
        print "VS __init__ %d" % self.value

    def __getslice__(self, a, b):
        print "VS __getslice__ %d %d %d" % (self.value, a, b)

    def __next__(self):
        print "VS next/__next__ %d" % self.value

    def __nonzero__(self):
        print "VS __nonzero__ %d" % self.value

    def __oct__(self):
        print "VS __oct__ %d" % self.value

    def __hex__(self):
        print "VS __hex__ %d" % self.value

    def __cmp__(self, other):
        print "VS __cmp__ %d %d" % (self.value, other.value)

    def __div__(self, other):
        print "VS __div__ %d %d" % (self.value, other.value)

    def __idiv__(self, other):
        print "VS __idiv__ %d /= %d" % (self.value, other.value)

cdef class SetSlice:
    def __setslice__(self, a, b, value):
        print "SetSlice setslice %d %d %r" % (a, b, value)

cdef class DelSlice:
    def __delslice__(self, a, b):
        print "DelSlice delslice %d %d" % (a, b)

cdef class SetDelSlice:
    def __setslice__(self, a, b, value):
        print "SetDelSlice setslice %d %d %r" % (a, b, value)

    def __delslice__(self, a, b):
        print "SetDelSlice delslice %d %d" % (a, b)

cdef class Long:
    def __long__(self):
        print "Long __long__"
Cython-0.26.1/tests/run/unbound_builtin_methods.pyx0000664000175000017500000000134212542002467023265 0ustar  stefanstefan00000000000000
def list_insert(list l):
    """
    >>> list_insert([1,2,3])
    [1, 4, 2, 3]
    """
    list.insert(l, 1, 4)
    return l


def list_insert_literal():
    """
    >>> list_insert_literal()
    (None, [1, 4, 2, 3])
    """
    l = [1,2,3]
    r = list.insert(l, 1, 4)
    return r, l


def list_insert_assigned():
    """
    >>> list_insert_assigned()
    (None, [1, 4, 2, 3])
    """
    insert = list.insert
    l = [1,2,3]
    r = insert(l, 1, 4)
    return r, l


def list_pop():
    """
    >>> list_pop()
    (2, [1, 3])
    """
    l = [1,2,3]
    r = list.pop(l, 1)
    return r, l


def list_pop_assigned():
    """
    >>> list_pop_assigned()
    [1, 3]
    """
    pop = list.pop
    l = [1,2,3]
    pop(l, 1)
    return l
Cython-0.26.1/tests/run/struct_conversion_extern.pyx0000664000175000017500000000105312542002467023517 0ustar  stefanstefan00000000000000"""
Note: this tests whether the necessary conversion utility code is included in
the module environment, even though it may already have been created before.
"""

cdef extern from "struct_conversion_extern_header.h":
    cdef struct my_date_t:
        int year
        int month
        int day


def test_extern_struct():
    """
    >>> test_extern_struct()
    [('day', 24), ('month', 6), ('year', 2000)]
    """
    cdef my_date_t day = my_date_t(year=2000, month=6, day=24)
    cdef object d = day
    assert type(d) is dict
    assert d == day
    return sorted(day.items())
Cython-0.26.1/tests/run/extern_builtins_T258.pyx0000664000175000017500000000163112542002467022303 0ustar  stefanstefan00000000000000# ticket: 258

cdef extern from "Python.h":

    ctypedef class __builtin__.list  [object PyListObject]:
        cdef Py_ssize_t allocated

    ctypedef class __builtin__.dict  [object PyDictObject]:
        pass

    cdef Py_ssize_t Py_SIZE(object o)

cdef list L = [1,2,4]
cdef dict d = {'A': 'a'}


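# Declaring the builtin list as an extern class exposes PyListObject's C
# fields, so the typed argument below can read `allocated` directly.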
def test_list(list L):
    """
    >>> test_list(list(range(10)))
    True
    >>> class list_subclass(list): pass
    >>> test_list(list_subclass([1,2,3]))
    True
    """
    return Py_SIZE(L) <= L.allocated

def test_tuple(tuple t):
    """
    Arguments typed as actual builtin types (like 'tuple' here) do not accept subclass instances, so optimizations can be safely performed.

    >>> test_tuple((1,2))
    2
    >>> class tuple_subclass(tuple): pass
    >>> test_tuple(tuple_subclass((1,2)))
    Traceback (most recent call last):
    ...
    TypeError: Argument 't' has incorrect type (expected tuple, got tuple_subclass)
    """
    return len(t)

Cython-0.26.1/tests/run/setjmp.pyx0000664000175000017500000000211212542002467017640 0ustar  stefanstefan00000000000000from libc.setjmp cimport *

cdef void check_nonzero(jmp_buf ctx, int x) nogil:
    if x == 0:
        longjmp(ctx, 1)

def nonzero(int x):
    """
    >>> nonzero(-1)
    True
    >>> nonzero(0)
    False
    >>> nonzero(1)
    True
    >>> nonzero(2)
    True

    """
    cdef jmp_buf ctx
    if setjmp(ctx) == 0:
        check_nonzero(ctx, x)
        return True
    else:
        return False


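# Error reporting via setjmp/longjmp: the nogil helpers below store a message
# in error_msg and longjmp() back to the setjmp() point in execute_c_call(),
# which then raises the message as a Python RuntimeError.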
from libc.string cimport strcpy
cdef char[256] error_msg
cdef jmp_buf error_ctx
cdef void error(char msg[]) nogil:
    strcpy(error_msg,msg)
    longjmp(error_ctx, 1)

cdef void c_call(int x) nogil:
    if x<=0:
        error(b"expected a positive value")

def execute_c_call(int x):
    """
    >>> execute_c_call(+2)
    >>> execute_c_call(+1)
    >>> execute_c_call(+0)
    Traceback (most recent call last):
      ...
    RuntimeError: expected a positive value
    >>> execute_c_call(-1)
    Traceback (most recent call last):
      ...
    RuntimeError: expected a positive value
    """
    if not setjmp(error_ctx):
        c_call(x)
    else:
        raise RuntimeError(error_msg.decode())
Cython-0.26.1/tests/run/range_optimisation_T203.pyx0000664000175000017500000000515312542002467022751 0ustar  stefanstefan00000000000000# ticket: 203

cdef int get_bound(int m):
    print u"get_bound(%s)"%m
    return m

def for_from_range(a, b):
    """
    >>> for_from_range(5, 10)
    range(5)
    at 0
    at 1
    at 2
    at 3
    at 4
    range(5, 10)
    at 5
    at 6
    at 7
    at 8
    at 9
    range(5, 10, 2)
    at 5
    at 7
    at 9
    9
    >>> for_from_range(-5, -10)
    range(-5)
    range(-5, -10)
    range(-5, -10, 2)
    100
    """
    cdef int i = 100
    print u"range(%s)" % a
    for i in range(a):
        print u"at", i
    print u"range(%s, %s)" % (a, b)
    for i in range(a, b):
        print u"at", i
    print u"range(%s, %s, %s)" % (a, b, 2)
    for i in range(a, b, 2):
        print u"at", i
    return i

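# The next two tests check that the loop bound and step are evaluated only
# once, so reassigning them inside the loop body does not change the number
# of iterations.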
def for_from_bound_reassignment(int bound, int fake_bound):
    """
    >>> for_from_bound_reassignment(5, 1)
    at 0
    at 1
    at 2
    at 3
    at 4
    5
    """
    cdef int i = 100
    for i from 0 <= i < bound:
        print u"at", i
        bound = fake_bound
    return i

def for_from_step_reassignment(int bound, int step, int fake_step):
    """
    >>> for_from_step_reassignment(15, 5, 2)
    at 0
    at 5
    at 10
    15
    """
    cdef int i = 100
    for i from 0 <= i < bound by step:
        print u"at", i
        step = fake_step
    return i

def for_from_target_reassignment(int bound, int factor):
    """
    >>> for_from_target_reassignment(10, 2)
    at 0
    at 1
    at 3
    at 7
    15
    """
    cdef int i = 100
    for i from 0 <= i < bound:
        print u"at", i
        i *= factor
    return i

def for_from_py_target_reassignment(int bound, int factor):
    """
    >>> for_from_py_target_reassignment(10, 2)
    at 0
    at 1
    at 3
    at 7
    15
    """
    cdef object i
    for i from 0 <= i < bound:
        print u"at", i
        i *= factor
    return i

def for_from_py_global_target_reassignment(int bound, int factor):
    """
    >>> for_from_py_global_target_reassignment(10, 2)
    at 0
    at 1
    at 3
    at 7
    15
    """
    global g_var
    for g_var from 0 <= g_var < bound:
        print u"at", g_var
        g_var *= factor
    return g_var

def for_in_target_reassignment(int bound, int factor):
    """
    >>> for_in_target_reassignment(10, 2)
    at 0
    at 1
    at 2
    at 3
    at 4
    at 5
    at 6
    at 7
    at 8
    at 9
    18
    """
    cdef int i = 100
    for i in range(bound):
        print u"at", i
        i *= factor
    return i

def test_func(int n):
    """
    >>> test_func(5)
    get_bound(5)
    at 0
    at 1
    at 2
    at 3
    at 4
    5
    """
    cdef int i = 100
    for i from 0 <= i < get_bound(n):
        print u"at", i
    return i
Cython-0.26.1/tests/run/reraise_3args.pyx0000664000175000017500000000064112542002467021074 0ustar  stefanstefan00000000000000
import sys


class MyError(Exception):
    def __init__(self, name, var):
        self.name = name
        self.var = var


def reraise_explicitly():
    """
    >>> try: reraise_explicitly()
    ... except MyError: print("RAISED!")
    ... else: print("NOT RAISED!")
    RAISED!
    """
    try:
        raise MyError('Oh no!', 42)
    except MyError:
        tmp = sys.exc_info()

    raise tmp[0], tmp[1], tmp[2]
Cython-0.26.1/tests/run/pstats_profile_test.pyx0000664000175000017500000001224113143605603022436 0ustar  stefanstefan00000000000000# tag: pstats
# cython: profile = True

__doc__ = u"""
    >>> import os, tempfile, cProfile as profile, pstats
    >>> statsfile = tempfile.mkstemp()[1]
    >>> profile.runctx("test_profile(100)", locals(), globals(), statsfile)
    >>> s = pstats.Stats(statsfile)
    >>> short_stats = dict([(k[2], v[1]) for k,v in s.stats.items()])
    >>> short_stats['f_def']
    100
    >>> short_stats['f_cdef']
    100
    >>> short_stats['f_cpdef']
    200
    >>> short_stats['f_cpdef (wrapper)']
    100
    >>> short_stats['f_inline']
    100
    >>> short_stats['f_inline_prof']
    100
    >>> short_stats['f_noprof']
    Traceback (most recent call last):
    ...
    KeyError: 'f_noprof'
    >>> short_stats['f_raise']
    100

    >>> short_stats['withgil_prof']
    100
    >>> short_stats['withgil_noprof']
    Traceback (most recent call last):
    ...
    KeyError: 'withgil_noprof'

    >>> short_stats['nogil_prof']
    Traceback (most recent call last):
    ...
    KeyError: 'nogil_prof'
    >>> short_stats['nogil_noprof']
    Traceback (most recent call last):
    ...
    KeyError: 'nogil_noprof'

    >>> short_stats['f_raise']
    100

    >>> short_stats['m_def']
    200
    >>> short_stats['m_cdef']
    100
    >>> short_stats['m_cpdef']
    200
    >>> short_stats['m_cpdef (wrapper)']
    100

    >>> try:
    ...    os.unlink(statsfile)
    ... except:
    ...    pass

    >>> sorted(callees(s, 'test_profile'))  #doctest: +NORMALIZE_WHITESPACE
    ['f_cdef', 'f_cpdef', 'f_cpdef (wrapper)', 'f_def',
     'f_inline', 'f_inline_prof',
     'f_raise',
     'm_cdef', 'm_cpdef', 'm_cpdef (wrapper)', 'm_def',
     'withgil_prof']

    >>> profile.runctx("test_generators()", locals(), globals(), statsfile)
    >>> s = pstats.Stats(statsfile)
    >>> short_stats = dict([(k[2], v[1]) for k,v in s.stats.items()])
    >>> short_stats['generator']
    3

    >>> short_stats['generator_exception']
    2

    >>> short_stats['genexpr']
    11

    >>> sorted(callees(s, 'test_generators'))
    ['call_generator', 'call_generator_exception', 'generator_expr']

    >>> list(callees(s, 'call_generator'))
    ['generator']

    >>> list(callees(s, 'generator'))
    []

    >>> list(callees(s, 'generator_exception'))
    []

    >>> list(callees(s, 'generator_expr'))
    ['genexpr']

    >>> list(callees(s, 'genexpr'))
    []

    >>> def python_generator():
    ...   yield 1
    ...   yield 2
    >>> def call_python_generator():
    ...   list(python_generator())

    >>> profile.runctx("call_python_generator()", locals(), globals(), statsfile)
    >>> python_stats = pstats.Stats(statsfile)
    >>> python_stats_dict = dict([(k[2], v[1]) for k,v in python_stats.stats.items()])

    >>> profile.runctx("call_generator()", locals(), globals(), statsfile)
    >>> cython_stats = pstats.Stats(statsfile)
    >>> cython_stats_dict = dict([(k[2], v[1]) for k,v in cython_stats.stats.items()])

    >>> python_stats_dict['python_generator'] == cython_stats_dict['generator']
    True

    >>> try:
    ...    os.unlink(statsfile)
    ... except:
    ...    pass
"""

cimport cython

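# Helper for the doctests above: yields the names of the callees that the
# profile recorded for `target_caller`, restricted to functions defined in
# .pyx files.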
def callees(pstats, target_caller):
    pstats.calc_callees()
    for (_, _, caller), callees in pstats.all_callees.items():
        if caller == target_caller:
            for (file, line, callee) in callees.keys():
                if 'pyx' in file:
                    yield callee

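# Calls each flavour of function N times so the doctests can check which of
# them show up in the profile: plain def, cdef, cpdef (directly and through
# its Python wrapper), inline, explicitly (un)profiled, nogil/with-gil
# functions and the three method kinds.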
def test_profile(long N):
    cdef long i, n = 0
    cdef A a = A()
    for i from 0 <= i < N:
        n += f_def(i)
        n += f_cdef(i)
        n += f_cpdef(i)
        n += (<object>f_cpdef)(i)
        n += f_inline(i)
        n += f_inline_prof(i)
        n += f_noprof(i)
        n += nogil_noprof(i)
        n += nogil_prof(i)
        n += withgil_noprof(i)
        n += withgil_prof(i)
        n += a.m_def(i)
        n += (<object>a).m_def(i)
        n += a.m_cpdef(i)
        n += (<object>a).m_cpdef(i)
        n += a.m_cdef(i)
        try:
            n += f_raise(i+2)
        except RuntimeError:
            pass
    return n

def f_def(long a):
    return a

cdef long f_cdef(long a):
    return a

cpdef long f_cpdef(long a):
    return a

cdef inline long f_inline(long a):
    return a

@cython.profile(True)
cdef inline long f_inline_prof(long a):
    return a

@cython.profile(False)
cdef int f_noprof(long a):
    return a

cdef long f_raise(long) except -2:
    raise RuntimeError

@cython.profile(False)
cdef int withgil_noprof(long a) with gil:
    return (<object>a)
@cython.profile(True)
cdef int withgil_prof(long a) with gil:
    return (<object>a)

@cython.profile(False)
cdef int nogil_noprof(long a) nogil:
    return a
@cython.profile(True)
cdef int nogil_prof(long a) nogil:
    return a

cdef class A(object):
    def m_def(self, long a):
        return a
    cpdef m_cpdef(self, long a):
        return a
    cdef m_cdef(self, long a):
        return a

def test_generators():
    call_generator()
    call_generator_exception()
    generator_expr()

def call_generator():
    list(generator())

def generator():
    yield 1
    yield 2

def call_generator_exception():
    try:
        list(generator_exception())
    except ValueError:
        pass

def generator_exception():
    yield 1
    raise ValueError(2)

def generator_expr():
    e = (x for x in range(10))
    return sum(e)
Cython-0.26.1/tests/run/relativeimport_T542.srctree0000664000175000017500000000251112542002467022754 0ustar  stefanstefan00000000000000# mode: run
# tag: import

"""
PYTHON setup.py build_ext -i
PYTHON test_relative_import.py
"""

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*/*.pyx"),
)


######## test_relative_import.py ########

from relimport.testmod import test_relative, test_absolute
a, bmod, afunc, bfunc = test_relative()

try:
    test_absolute()
except ImportError:
    pass
else:
    assert False, "absolute import succeeded"

import relimport.a
import relimport.bmod
import relimport.testmod

assert relimport.a == a
assert relimport.bmod == bmod
assert afunc() == 'a', afunc
assert bfunc() == 'b', bfunc


######## relimport/__init__.py ########

######## relimport/a.pyx ########

def afunc(): return 'a'


######## relimport/bmod.pyx ########

def bfunc(): return 'b'


######## relimport/testmod.pyx ########
# cython: language_level=3

from relimport import a as global_a, bmod as global_bmod

from . import *

assert a is global_a, a
assert bmod is global_bmod, bmod

def test_relative():
    from . import a, bmod
    from . import (a, bmod)
    from . import (a, bmod,)
    from .a import afunc
    from .bmod import bfunc

    assert afunc() == 'a', afunc()
    assert bfunc() == 'b', bfunc()

    return a, bmod, afunc, bfunc

def test_absolute():
    import bmod
Cython-0.26.1/tests/run/final_in_pxd.srctree0000664000175000017500000000313712542002467021627 0ustar  stefanstefan00000000000000PYTHON -c "import a; assert a.__file__.rstrip('co').endswith('.py'), a.__file__; a.test()"
PYTHON setup.py build_ext --inplace
PYTHON -c "import a; assert not a.__file__.rstrip('co').endswith('.py'), a.__file__; a.test()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize

from distutils.core import setup

setup(
    ext_modules = cythonize("a.py"),
    )

######## a.pxd ########

cimport cython

cdef class ExtType:
    @cython.final
    cdef int final_func(self)

@cython.final
cdef class FinalExtType:
    cdef int func(self)

@cython.final
cdef class FinalExtSubType(ExtType):
    cdef int func(self)

cdef class NonFinalSubType(ExtType):
    cdef int func(self)


######## a.py ########

import cython

class ExtType(object):
    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    def final_func(self):
        return 1

class FinalExtType(object):
    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    def func(self):
        return 2

class FinalExtSubType(ExtType):
    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    def func(self):
        return 3

class NonFinalSubType(ExtType):
    @cython.test_assert_path_exists("//CFuncDefNode[@entry.is_final_cmethod=True]")
    @cython.final
    def func(self):
        return 4


def test():
    assert ExtType().final_func() == 1
    assert FinalExtSubType().final_func() == 1
    assert NonFinalSubType().final_func() == 1

    assert FinalExtType().func() == 2
    assert FinalExtSubType().func() == 3
    assert NonFinalSubType().func() == 4
Cython-0.26.1/tests/run/r_jiba1.pyx0000664000175000017500000000066512542002467017660 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> test()
    This parrot is resting.
    Lovely plumage!
"""


cdef class Parrot:

    cdef void describe(self):
        print u"This parrot is resting."

    def describe_python(self):
        self.describe()

cdef class Norwegian(Parrot):

    cdef void describe(self):
        print u"Lovely plumage!"

def test():
    cdef Parrot p1, p2
    p1 = Parrot()
    p2 = Norwegian()
    p1.describe()
    p2.describe()
Cython-0.26.1/tests/run/char_constants_T99.pyx0000664000175000017500000000057612542002467022030 0ustar  stefanstefan00000000000000# ticket: 99

cdef char c = 'c'
cdef char* s = 'abcdef'

def global_c_and_s():
    """
    >>> global_c_and_s()
    99
    abcdef
    """
    pys = s
    print c
    print (pys.decode(u'ASCII'))

def local_c_and_s():
    """
    >>> local_c_and_s()
    98
    bcdefg
    """
    cdef char c = 'b'
    cdef char* s = 'bcdefg'
    pys = s
    print c
    print (pys.decode(u'ASCII'))
Cython-0.26.1/tests/run/cdef_class_field.pyx0000664000175000017500000000035712542002467021600 0ustar  stefanstefan00000000000000# mode: run
# tag: exttype
# ticket: 677

"""
>>> str(Foo(4))
'4'
>>> x
3
"""

x = 3
cdef int y

cdef class Foo:
    cdef int x
    cdef int y
    def __init__(self, x):
        self.x = x
    def __str__(self):
        return str(self.x)
Cython-0.26.1/tests/run/ct_DEF.pyx0000664000175000017500000000731413023021033017414 0ustar  stefanstefan00000000000000
cimport cython

__doc__ = u"""
    >>> s()
    b'spam'
"""

_unicode = unicode

import sys
IS_PY3 = sys.version_info[0] >= 3

if not IS_PY3:
    __doc__ = __doc__.replace(u" b'", u" '")


def print_large_number(n):
    print(str(n).rstrip('L'))


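# DEF constants are evaluated at compile time and substituted into the code,
# e.g. `cdef int i = FIVE` compiles as if it were written `cdef int i = 5`;
# the functions below check the types and values that this substitution
# produces at runtime.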
DEF TUPLE = (1, 2, u"buckle my shoe")
DEF TRUE_FALSE = (True, False)
DEF NONE = None

DEF CHAR = c'x'
DEF INT0 = -1
DEF INT1 = 42
DEF INT2 = 0x42
DEF INT3 = -0x42
DEF LONG = 666L
DEF LARGE_NUM32 = (1 << 32) - 1
DEF LARGE_NUM64 = (1 << 64) - 1
DEF FLOAT = 12.5
DEF BYTES = b"spam"
DEF UNICODE = u"spam-u"
DEF TWO = TUPLE[1]
DEF FIVE = TWO + 3
DEF TRUE  = TRUE_FALSE[0]
DEF FALSE = TRUE_FALSE[1]
DEF INT_TUPLE1 = TUPLE[:2]
DEF INT_TUPLE2 = TUPLE[1:4:2]
DEF ELLIPSIS = ...
DEF EXPRESSION = int(float(2*2)) + int(str(2)) + int(max(1,2,3)) + sum([TWO, FIVE])
DEF UNICODE_EXPRESSION = unicode(BYTES.decode('utf8')).encode('ascii').decode('latin1')


def c():
    """
    >>> c()
    120
    """
    cdef char c = CHAR
    return c

def i0():
    """
    >>> i0() == -1
    True
    """
    cdef int i = INT0
    return i

def i1():
    """
    >>> i1() == 42
    True
    """
    cdef int i = INT1
    return i

def i2():
    """
    >>> i2() == 0x42
    True
    """
    cdef int i = INT2
    return i

def i3():
    """
    >>> i3() == -0x42
    True
    """
    cdef int i = INT3
    return i

def l():
    """
    >>> l()
    666
    """
    cdef long l = LONG
    return l

def large_nums():
    """
    >>> ul32, ul64, l64, n64 = large_nums()
    >>> print_large_number(ul32)
    4294967295
    >>> print_large_number(ul64)
    18446744073709551615
    >>> print_large_number(l64)
    4294967295
    >>> print_large_number(n64)
    -4294967295
    """
    cdef unsigned long ul32 = LARGE_NUM32
    cdef unsigned long long ul64 = LARGE_NUM64
    cdef long long l64 = LARGE_NUM32
    cdef long long n64 = -LARGE_NUM32
    return ul32, ul64, l64, n64

def f():
    """
    >>> f()
    12.5
    """
    cdef float f = FLOAT
    return f

def s():
    """
    see module docstring above
    """
    cdef char* s = BYTES
    return s

def type_of_bytes():
    """
    >>> t, s = type_of_bytes()
    >>> assert t is bytes, t
    >>> assert type(s) is bytes, type(s)
    """
    t = type(BYTES)
    s = BYTES
    return t, s

def type_of_unicode():
    """
    >>> t, s = type_of_unicode()
    >>> assert t is _unicode, t
    >>> assert type(s) is _unicode, type(s)
    """
    t = type(UNICODE)
    s = UNICODE
    return t, s

@cython.test_assert_path_exists('//TupleNode')
def constant_tuple():
    """
    >>> constant_tuple()[:-1]
    (1, 2)
    >>> print(constant_tuple()[-1])
    buckle my shoe
    """
    cdef object t = TUPLE
    return t

@cython.test_assert_path_exists('//IntNode')
def tuple_indexing():
    """
    >>> tuple_indexing()
    2
    """
    cdef int two = INT_TUPLE1[-1]
    return two

def two():
    """
    >>> two()
    2
    """
    cdef int two = TWO
    return two

def five():
    """
    >>> five()
    5
    """
    cdef int five = FIVE
    return five

@cython.test_assert_path_exists('//BoolNode')
def true():
    """
    >>> true()
    True
    """
    cdef bint true = TRUE
    return true

@cython.test_assert_path_exists('//BoolNode')
def false():
    """
    >>> false()
    False
    """
    cdef bint false = FALSE
    return false

def ellipsis():
    """
    >>> ellipsis()
    Ellipsis
    """
    return ELLIPSIS

@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//AddNode')
def expression():
    """
    >>> expression()
    16
    """
    cdef int i = EXPRESSION
    return i


def unicode_expression():
    """
    >>> print(unicode_expression())
    spam
    """
    s = UNICODE_EXPRESSION
    return s


def none():
    """
    >>> none()
    """
    return NONE
Cython-0.26.1/tests/run/cpp_template_subclasses_helper.h0000664000175000017500000000137512542002467024222 0ustar  stefanstefan00000000000000using namespace std;

class Base {
public:
    virtual const char* name() { return "Base"; }
    virtual ~Base() {}
};

template <class A1>
class A : public Base {
public:
    virtual const char* name() { return "A"; }
    A1 funcA(A1 x) { return x; }
};

template <class B1, class B2>
class B : public A<B2> {
public:
    virtual const char* name() { return "B"; }
    pair<B1, B2> funcB(B1 x, B2 y) { return pair<B1, B2>(x, y); }
};

template <class C1>
class C : public B<long, C1> {
public:
    virtual const char* name() { return "C"; }
    C1 funcC(C1 x) { return x; }
};

template <class D1>
class D : public C<pair<D1, D1> > {
    virtual const char* name() { return "D"; }
};

class E : public D<double> {
    virtual const char* name() { return "E"; }
};
Cython-0.26.1/tests/run/python_bool_type.pyx0000664000175000017500000001173113023021033021723 0ustar  stefanstefan00000000000000
# tests copied from test/test_bool.py in Py2.7

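# Small cdef replacements for the unittest assert helpers, so the test code
# copied from CPython can run unchanged inside plain functions.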
cdef assertEqual(a,b):
    assert a == b, '%r != %r' % (a,b)

cdef assertIs(a,b):
    assert a is b, '%r is not %r' % (a,b)

cdef assertIsNot(a,b):
    assert a is not b, '%r is %r' % (a,b)

cdef assertNotIsInstance(a,b):
    assert not isinstance(a,b), 'isinstance(%r, %s)' % (a,b)


def test_int():
    """
    >>> test_int()
    """
    assertEqual(int(False), 0)
    assertIsNot(int(False), False)
    assertEqual(int(True), 1)
    assertIsNot(int(True), True)

def test_float():
    """
    >>> test_float()
    """
    assertEqual(float(False), 0.0)
    assertIsNot(float(False), False)
    assertEqual(float(True), 1.0)
    assertIsNot(float(True), True)

def test_repr():
    """
    >>> test_repr()
    """
    assertEqual(repr(False), 'False')
    assertEqual(repr(True), 'True')
    assertEqual(eval(repr(False)), False)
    assertEqual(eval(repr(True)), True)

def test_str():
    """
    >>> test_str()
    """
    assertEqual(str(False), 'False')
    assertEqual(str(True), 'True')

def test_math():
    """
    >>> test_math()
    """
    assertEqual(+False, 0)
    assertIsNot(+False, False)
    assertEqual(-False, 0)
    assertIsNot(-False, False)
    assertEqual(abs(False), 0)
    assertIsNot(abs(False), False)
    assertEqual(+True, 1)
    assertIsNot(+True, True)
    assertEqual(-True, -1)
    assertEqual(abs(True), 1)
    assertIsNot(abs(True), True)
    assertEqual(~False, -1)
    assertEqual(~True, -2)

    assertEqual(False+2, 2)
    assertEqual(True+2, 3)
    assertEqual(2+False, 2)
    assertEqual(2+True, 3)

    assertEqual(False+False, 0)
    assertIsNot(False+False, False)
    assertEqual(False+True, 1)
    assertIsNot(False+True, True)
    assertEqual(True+False, 1)
    assertIsNot(True+False, True)
    assertEqual(True+True, 2)

    assertEqual(True-True, 0)
    assertIsNot(True-True, False)
    assertEqual(False-False, 0)
    assertIsNot(False-False, False)
    assertEqual(True-False, 1)
    assertIsNot(True-False, True)
    assertEqual(False-True, -1)

    assertEqual(True*1, 1)
    assertEqual(False*1, 0)
    assertIsNot(False*1, False)

    assertEqual(True/1, 1)
    assertIsNot(True/1, True)
    assertEqual(False/1, 0)
    assertIsNot(False/1, False)

    for b in False, True:
        for i in 0, 1, 2:
            assertEqual(b**i, int(b)**i)
            assertIsNot(b**i, bool(int(b)**i))

    for a in False, True:
        for b in False, True:
            assertIs(a&b, bool(int(a)&int(b)))
            assertIs(a|b, bool(int(a)|int(b)))
            assertIs(a^b, bool(int(a)^int(b)))
            assertEqual(a&int(b), int(a)&int(b))
            assertIsNot(a&int(b), bool(int(a)&int(b)))
            assertEqual(a|int(b), int(a)|int(b))
            assertIsNot(a|int(b), bool(int(a)|int(b)))
            assertEqual(a^int(b), int(a)^int(b))
            assertIsNot(a^int(b), bool(int(a)^int(b)))
            assertEqual(int(a)&b, int(a)&int(b))
            assertIsNot(int(a)&b, bool(int(a)&int(b)))
            assertEqual(int(a)|b, int(a)|int(b))
            assertIsNot(int(a)|b, bool(int(a)|int(b)))
            assertEqual(int(a)^b, int(a)^int(b))
            assertIsNot(int(a)^b, bool(int(a)^int(b)))

    assertIs(1==1, True)
    assertIs(1==0, False)
    assertIs(0<1, True)
    assertIs(1<0, False)
    assertIs(0<=0, True)
    assertIs(1<=0, False)
    assertIs(1>0, True)
    assertIs(1>1, False)
    assertIs(1>=1, True)
    assertIs(0>=1, False)
    assertIs(0!=1, True)
    assertIs(0!=0, False)

    y = x = [1]
    assertIs(x is y, True)
    assertIs(x is not y, False)

    assertIs(1 in x, True)
    assertIs(0 in x, False)
    assertIs(1 not in x, False)
    assertIs(0 not in x, True)

    y = x = {1: 2}
    assertIs(x is y, True)
    assertIs(x is not y, False)

    assertIs(1 in x, True)
    assertIs(0 in x, False)
    assertIs(1 not in x, False)
    assertIs(0 not in x, True)

    assertIs(not True, False)
    assertIs(not False, True)

def test_convert():
    """
    >>> test_convert()
    """
    assertIs(bool(10), True)
    assertIs(bool(1), True)
    assertIs(bool(-1), True)
    assertIs(bool(0), False)
    assertIs(bool("hello"), True)
    assertIs(bool(""), False)
    assertIs(bool(), False)

def test_isinstance():
    """
    >>> test_isinstance()
    """
    assertIs(isinstance(True, bool), True)
    assertIs(isinstance(False, bool), True)
    assertIs(isinstance(True, int), True)
    assertIs(isinstance(False, int), True)
    assertIs(isinstance(1, bool), False)
    assertIs(isinstance(0, bool), False)

def test_issubclass():
    """
    >>> test_issubclass()
    """
    assertIs(issubclass(bool, int), True)
    assertIs(issubclass(int, bool), False)

def test_boolean():
    """
    >>> test_boolean()
    """
    assertEqual(True & 1, 1)
    assertNotIsInstance(True & 1, bool)
    assertIs(True & True, True)

    assertEqual(True | 1, 1)
    assertNotIsInstance(True | 1, bool)
    assertIs(True | True, True)

    assertEqual(True ^ 1, 0)
    assertNotIsInstance(True ^ 1, bool)
    assertIs(True ^ True, False)
Cython-0.26.1/tests/run/import_star.pyx0000664000175000017500000000153212574327400020710 0ustar  stefanstefan00000000000000# mode: run

cdef object executable, version_info
cdef long hexversion

ctypedef struct MyStruct:
    int x, y, z

# conversion code for this struct will be generated but not used
# (there used to be a problem getting Cython conversion code generated here)
cdef MyStruct _no_such_name_ = MyStruct(1, 2, 3)


from sys import *


def test_cdefed_objects():
    """
    >>> ex, vi = test_cdefed_objects()
    >>> assert ex is not None
    >>> assert vi is not None
    """
    return executable, version_info


def test_cdefed_cvalues():
    """
    >>> hexver = test_cdefed_cvalues()
    >>> assert hexver is not None
    >>> assert hexver > 0x02020000
    """
    return hexversion


def test_non_cdefed_names():
    """
    >>> mod, pth = test_non_cdefed_names()
    >>> assert mod is not None
    >>> assert pth is not None
    """
    return modules, path
Cython-0.26.1/tests/run/language_level.srctree0000664000175000017500000000144212542002467022144 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import directive2; import directive3"
PYTHON -c "import infile2; import infile3"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
    ext_modules = (cythonize("infile*.py") +
                   cythonize("directive2.py", compiler_directives={'language_level': 2}) +
                   cythonize("directive3.py", compiler_directives={'language_level': 3})
                   )
)

######## directive3.py ########

import sys
print("SUCCESS", file=sys.stdout)

######## directive2.py ########

print "SUCCESS"

######## infile3.py ########

# cython: language_level=3

import sys
print("SUCCESS", file=sys.stdout)

######## infile2.py ########

# cython: language_level=2

print "SUCCESS"
Cython-0.26.1/tests/run/relative_cimport.srctree0000664000175000017500000000235312574327400022546 0ustar  stefanstefan00000000000000# mode: run
# tag: cimport

PYTHON setup.py build_ext --inplace
PYTHON -c "from pkg.b import test; assert test() == (1, 2)"
PYTHON -c "from pkg.sub.c import test; assert test() == (1, 2)"

######## setup.py ########

from distutils.core import setup
from Cython.Build import cythonize
from Cython.Distutils.extension import Extension

setup(
    ext_modules=cythonize('**/*.pyx'),
)


######## pkg/__init__.py ########

######## pkg/sub/__init__.py ########

######## pkg/a.pyx ########

from .sub.reimport cimport myint

cdef myint i = 5
assert i == 5

cdef class test_pxd:
    pass


######## pkg/a.pxd ########

cdef class test_pxd:
    cdef public int x
    cdef public int y


######## pkg/b.pyx ########

from . cimport a
from .a cimport test_pxd
cimport a as implicitly_relative_a

assert a.test_pxd is test_pxd
assert implicitly_relative_a.test_pxd is test_pxd

def test():
    cdef test_pxd obj = test_pxd()
    obj.x = 1
    obj.y = 2
    return (obj.x, obj.y)


######## pkg/sub/c.pyx ########

from ..a cimport test_pxd


def test():
    cdef test_pxd obj = test_pxd()
    obj.x = 1
    obj.y = 2
    return (obj.x, obj.y)


######## pkg/sub/tdef.pxd ########

ctypedef int myint


######## pkg/sub/reimport.pxd ########

from .tdef cimport myint
Cython-0.26.1/tests/run/extpropertyref.pyx0000664000175000017500000000167712542002467021457 0ustar  stefanstefan00000000000000# cython: autotestdict=True

cdef class Spam:

    property eggs:

        def __get__(self):
            """
            This is the docstring for Spam.eggs.__get__

            >>> True
            True
            """
            return 42

def tomato():
    """
    >>> tomato()
    42

    >>> lines = __test__.keys()
    >>> len(lines)
    3
    >>> 'Spam.eggs.__get__ (line 7)' in lines or lines
    True
    >>> 'tomato (line 16)' in lines or lines
    True
    """
    cdef Spam spam
    cdef object lettuce
    spam = Spam()
    lettuce = spam.eggs
    return lettuce

cdef class Bacon(object):
    cdef object number_of_slices
    cdef public object is_a_vegetable

def breakfast():
    """
    >>> breakfast()
    """
    cdef Bacon myslices = Bacon()
    myslices.is_a_vegetable = True
    assert myslices.is_a_vegetable, myslices.is_a_vegetable
    del myslices.is_a_vegetable
    assert myslices.is_a_vegetable is None, myslices.is_a_vegetable
Cython-0.26.1/tests/run/or.pyx0000664000175000017500000000202112542002467016755 0ustar  stefanstefan00000000000000a,b = 'a *','b *' # use non-interned strings

def or2_assign(a,b):
    """
    >>> or2_assign(2,3) == (2 or 3)
    True
    >>> or2_assign('a', 'b') == ('a' or 'b')
    True
    >>> or2_assign(a, b) == (a or b)
    True
    """
    c = a or b
    return c

def or2(a,b):
    """
    >>> or2(2,3) == (2 or 3)
    True
    >>> or2(0,2) == (0 or 2)
    True
    >>> or2('a', 'b') == ('a' or 'b')
    True
    >>> or2(a, b) == (a or b)
    True
    >>> or2('', 'b') == ('' or 'b')
    True
    >>> or2([], [1]) == ([] or [1])
    True
    >>> or2([], [a]) == ([] or [a])
    True
    """
    return a or b

def or3(a,b,c):
    """
    >>> or3(0,1,2) == (0 or 1 or 2)
    True
    >>> or3([],(),[1]) == ([] or () or [1])
    True
    """
    d = a or b or c
    return d

def or2_no_result(a,b):
    """
    >>> or2_no_result(2,3)
    >>> or2_no_result(0,2)
    >>> or2_no_result('a','b')
    >>> or2_no_result(a,b)
    >>> a or b
    'a *'
    """
    a or b

def or2_literal():
    """
    >>> or2_literal()
    5
    """
    return False or 5
Cython-0.26.1/tests/run/reduce_pickle.pyx0000664000175000017500000001524613150045407021145 0ustar  stefanstefan00000000000000import cython
import sys

if sys.version_info[0] < 3:
    __doc__ = """
    >>> import cPickle
    >>> a = A(5); a
    A(5)
    >>> cPickle.loads(cPickle.dumps(a))
    A(5)

    >>> b = B(0, 1); b
    B(x=0, y=1)
    >>> cPickle.loads(cPickle.dumps(b))
    B(x=0, y=1)
    """

cdef class A:
    """
    >>> a = A(3); a
    A(3)
    >>> import pickle
    >>> pickle.loads(pickle.dumps(a))
    A(3)
    """

    cdef int value

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "A(%s)" % self.value

    def __reduce__(self):
        return A, (self.value,)

cdef class B:
    """
    >>> b = B(x=37, y=389); b
    B(x=37, y=389)
    >>> import pickle
    >>> pickle.loads(pickle.dumps(b))
    B(x=37, y=389)
    """

    cdef int x, y

    def __cinit__(self):
        self.x = self.y = -1

    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

    def __repr__(self):
        return "%s(x=%s, y=%s)" % (self.__class__.__name__, self.x, self.y)

    def __reduce__(self):
        return makeObj, (type(self), {'x': self.x, 'y': self.y})

def makeObj(obj_type, kwds):
    return obj_type(**kwds)


cdef class C(B):
    """
    >>> import pickle
    >>> pickle.loads(pickle.dumps(C(x=37, y=389)))
    C(x=37, y=389)
    """
    pass


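# The classes below exercise the __reduce__ support that Cython generates
# automatically for cdef classes whose attributes can be converted to Python
# objects; @cython.auto_pickle only forces or disables that behaviour.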
@cython.auto_pickle(True)  # Not needed, just to test the directive.
cdef class DefaultReduce(object):
    """
    >>> a = DefaultReduce(11, 'abc'); a
    DefaultReduce(i=11, s='abc')
    >>> import pickle
    >>> pickle.loads(pickle.dumps(a))
    DefaultReduce(i=11, s='abc')
    >>> pickle.loads(pickle.dumps(DefaultReduce(i=11, s=None)))
    DefaultReduce(i=11, s=None)
    """

    cdef readonly int i
    cdef readonly str s

    def __init__(self, i=0, s=None):
        self.i = i
        self.s = s

    def __repr__(self):
        return "DefaultReduce(i=%s, s=%r)" % (self.i, self.s)


cdef class DefaultReduceSubclass(DefaultReduce):
    """
    >>> a = DefaultReduceSubclass(i=11, s='abc', x=1.5); a
    DefaultReduceSubclass(i=11, s='abc', x=1.5)
    >>> import pickle
    >>> pickle.loads(pickle.dumps(a))
    DefaultReduceSubclass(i=11, s='abc', x=1.5)
    """

    cdef double x

    def __init__(self, **kwargs):
        self.x = kwargs.pop('x', 0)
        super(DefaultReduceSubclass, self).__init__(**kwargs)

    def __repr__(self):
        return "DefaultReduceSubclass(i=%s, s=%r, x=%s)" % (self.i, self.s, self.x)


cdef class result(DefaultReduceSubclass):
    """
    >>> a = result(i=11, s='abc', x=1.5); a
    result(i=11, s='abc', x=1.5)
    >>> import pickle
    >>> pickle.loads(pickle.dumps(a))
    result(i=11, s='abc', x=1.5)
    """

    def __repr__(self):
        return "result(i=%s, s=%r, x=%s)" % (self.i, self.s, self.x)


class DefaultReducePySubclass(DefaultReduce):
    """
    >>> a = DefaultReducePySubclass(i=11, s='abc', x=1.5); a
    DefaultReducePySubclass(i=11, s='abc', x=1.5)
    >>> import pickle
    >>> pickle.loads(pickle.dumps(a))
    DefaultReducePySubclass(i=11, s='abc', x=1.5)

    >>> a.self_reference = a
    >>> a2 = pickle.loads(pickle.dumps(a))
    >>> a2.self_reference is a2
    True
    """
    def __init__(self, **kwargs):
        self.x = kwargs.pop('x', 0)
        super(DefaultReducePySubclass, self).__init__(**kwargs)

    def __repr__(self):
        return "DefaultReducePySubclass(i=%s, s=%r, x=%s)" % (self.i, self.s, self.x)


cdef class NoReduceDueToIntPtr(object):
    """
    >>> import pickle
    >>> pickle.dumps(NoReduceDueToIntPtr())
    Traceback (most recent call last):
    ...
    TypeError: self.int_ptr cannot be converted to a Python object for pickling
    """
    cdef int* int_ptr

cdef class NoReduceDueToNontrivialCInit(object):
    """
    >>> import pickle
    >>> pickle.dumps(NoReduceDueToNontrivialCInit(None))
    Traceback (most recent call last):
    ...
    TypeError: no default __reduce__ due to non-trivial __cinit__
    """
    def __cinit__(self, arg):
        pass


cdef class NoMembers(object):
    """
    >>> import pickle
    >>> pickle.loads(pickle.dumps(NoMembers()))
    NoMembers()
    """
    def __repr__(self):
        return "NoMembers()"


cdef class NoPyMembers(object):
    """
    >>> import pickle
    >>> pickle.loads(pickle.dumps(NoPyMembers(2, 1.75)))
    NoPyMembers(ii=[2, 4, 8], x=1.75)
    """
    cdef int[3] ii
    cdef double x

    def __init__(self, i, x):
        self.ii[0] = i
        self.ii[1] = i * i
        self.ii[2] = i * i * i
        self.x = x

    def __repr__(self):
        return "%s(ii=%s, x=%s)" % (type(self).__name__, self.ii, self.x)

class NoPyMembersPySubclass(NoPyMembers):
    """
    >>> import pickle
    >>> pickle.loads(pickle.dumps(NoPyMembersPySubclass(2, 1.75, 'xyz')))
    NoPyMembersPySubclass(ii=[2, 4, 8], x=1.75, s='xyz')
    """
    def __init__(self, i, x, s):
        super(NoPyMembersPySubclass, self).__init__(i, x)
        self.s = s
    def __repr__(self):
        return (super(NoPyMembersPySubclass, self).__repr__()
                [:-1] + ', s=%r)' % self.s)


cdef struct MyStruct:
    int i
    double x

cdef class StructMemberDefault(object):
    """
    >>> import pickle
    >>> s = StructMemberDefault(1, 1.5); s
    StructMemberDefault(i=1, x=1.5)
    >>> pickle.dumps(s)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...my_struct...
    """

    cdef MyStruct my_struct

    def __init__(self, i, x):
        self.my_struct.i = i
        self.my_struct.x = x

    def __repr__(self):
        return "%s(i=%s, x=%s)" % (
            type(self).__name__, self.my_struct.i, self.my_struct.x)

@cython.auto_pickle(True)  # Forced due to the (inherited) struct attribute.
cdef class StructMemberForcedPickle(StructMemberDefault):
    """
    >>> import pickle
    >>> s = StructMemberForcedPickle(1, 1.5); s
    StructMemberForcedPickle(i=1, x=1.5)
    >>> pickle.loads(pickle.dumps(s))
    StructMemberForcedPickle(i=1, x=1.5)
    """


cdef _unset = object()

# Test cyclic references.
cdef class Wrapper(object):
  """
  >>> import pickle
  >>> w = Wrapper(); w
  Wrapper(...)
  >>> w2 = pickle.loads(pickle.dumps(w)); w2
  Wrapper(...)
  >>> w2.ref is w2
  True

  >>> pickle.loads(pickle.dumps(Wrapper(DefaultReduce(1, 'xyz'))))
  Wrapper(DefaultReduce(i=1, s='xyz'))
  >>> L = [None]
  >>> L[0] = L
  >>> w = Wrapper(L)
  >>> pickle.loads(pickle.dumps(Wrapper(L)))
  Wrapper([[...]])

  >>> L[0] = w   # Don't print this one out...
  >>> w2 = pickle.loads(pickle.dumps(w))
  >>> w2.ref[0] is w2
  True
  """
  cdef public object ref
  def __init__(self, ref=_unset):
      if ref is _unset:
          self.ref = self
      else:
          self.ref = ref
  def __repr__(self):
      if self.ref is self:
          return "Wrapper(...)"
      else:
          return "Wrapper(%r)" % self.ref
Cython-0.26.1/tests/run/cfunc_call_tuple_args_T408.pyx0000664000175000017500000000056012542002467023400 0ustar  stefanstefan00000000000000# ticket: 408

__doc__ = """
>>> call_with_tuple(1, 1.2, 'test', [1,2,3])
(1, 1.2, 'test', [1, 2, 3])

>>> call_with_list(1, 1.2, None, None)
(1, 1.2, None, None)
"""

cdef c_function(int a, float b, c, list d):
    return a,b,c,d

def call_with_tuple(*args):
    return c_function(*args)

def call_with_list(*args):
    args = list(args)
    return c_function(*args)
Cython-0.26.1/tests/run/cpp_type_inference.pyx0000664000175000017500000000176013023021033022170 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

cdef extern from "shapes.h" namespace "shapes":
    cdef cppclass Shape:
        float area()

    cdef cppclass Circle(Shape):
        int radius
        Circle(int)

    cdef cppclass Square(Shape):
        Square(int)

from cython cimport typeof

from cython.operator cimport dereference as d
from cython.operator cimport preincrement as incr
from libcpp.vector cimport vector

def test_reversed_vector_iteration(L):
    """
    >>> test_reversed_vector_iteration([1,2,3])
    int: 3
    int: 2
    int: 1
    int
    """
    cdef vector[int] v = L

    it = v.rbegin()
    while it != v.rend():
        a = d(it)
        incr(it)
        print('%s: %s' % (typeof(a), a))
    print(typeof(a))

def test_derived_types(int size, bint round):
    """
    >>> test_derived_types(5, True)
    Shape *
    >>> test_derived_types(5, False)
    Shape *
    """
    if round:
        ptr = new Circle(size)
    else:
        ptr = new Square(size)
    print typeof(ptr)
    del ptr
Cython-0.26.1/tests/run/cpp_template_subclasses.pyx0000664000175000017500000000567013143605603023255 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

from cython.operator import dereference as deref
from libcpp.pair cimport pair
from libcpp.vector cimport vector

cdef extern from "cpp_template_subclasses_helper.h":
    cdef cppclass Base:
        char* name()

    cdef cppclass A[A1](Base):
        A1 funcA(A1)

    cdef cppclass B[B1, B2](A[B2]):
        pair[B1, B2] funcB(B1, B2)

    cdef cppclass C[C1](B[long, C1]):
        C1 funcC(C1)

    cdef cppclass D[D1](C[pair[D1, D1]]):
        pass

    cdef cppclass E(D[double]):
        pass

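# Each test below allocates one instance and assigns it through every
# base-class pointer up the template hierarchy, checking virtual dispatch
# and the inherited template methods at each level.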
def testA(x):
    """
    >>> testA(10)
    10.0
    """
    cdef Base *base
    cdef A[double] *a = NULL
    try:
        a = new A[double]()
        base = a
        assert base.name() == b"A", base.name()
        return a.funcA(x)
    finally:
        del a

def testB(x, y):
    """
    >>> testB(1, 2)
    >>> testB(1, 1.5)
    """
    cdef Base *base
    cdef A[double] *a
    cdef B[long, double] *b = NULL
    try:
        base = a = b = new B[long, double]()
        assert base.name() == b"B", base.name()
        assert a.funcA(y) == y
        assert b.funcB(x, y) == (x, y)
    finally:
        del b

def testC(x, y):
    """
    >>> testC(37, [1, 37])
    >>> testC(25, [1, 5, 25])
    >>> testC(105, [1, 3, 5, 7, 15, 21, 35, 105])
    """
    cdef Base *base
    cdef A[vector[long]] *a
    cdef B[long, vector[long]] *b
    cdef C[vector[long]] *c = NULL
    try:
        base = a = b = c = new C[vector[long]]()
        assert base.name() == b"C", base.name()
        assert a.funcA(y) == y
        assert b.funcB(x, y) == (x, y)
        assert c.funcC(y) == y
    finally:
        del c

def testD(x, y):
    """
    >>> testD(1, 1.0)
    >>> testD(2, 0.5)
    >>> testD(4, 0.25)
    """
    cdef Base *base
    cdef A[pair[double, double]] *a
    cdef B[long, pair[double, double]] *b
    cdef C[pair[double, double]] *c
    cdef D[double] *d = NULL
    try:
        base = a = b = c = d = new D[double]()
        assert base.name() == b"D", base.name()
        assert a.funcA((y, y)) == (y, y)
        assert b.funcB(x, (y, y + 1)) == (x, (y, y + 1))
        assert c.funcC((y, y)) == (y, y)
    finally:
        del d

def testE(x, y):
    """
    >>> testD(1, 1.0)
    >>> testD(2, 0.5)
    >>> testD(4, 0.25)
    """
    cdef Base *base
    cdef A[pair[double, double]] *a
    cdef B[long, pair[double, double]] *b
    cdef C[pair[double, double]] *c
    cdef D[double] *d
    cdef E *e = NULL
    try:
        base = a = b = c = d = e = new E()
        assert base.name() == b"E", base.name()
        assert a.funcA((y, y)) == (y, y)
        assert b.funcB(x, (y, y + 1)) == (x, (y, y + 1))
        assert c.funcC((y, y)) == (y, y)
    finally:
        del e


cdef public pair[int, double] public_return_pair(a, b) except *:
  return pair[int, double](a, b)

def test_GH1599(a, b):
  """
  >>> test_GH1599(1, 2)
  (1, 2.0)
  """
  return public_return_pair(a, b)
Cython-0.26.1/tests/run/unicode_indexing.pyx0000664000175000017500000001753512542002467021670 0ustar  stefanstefan00000000000000
cimport cython

cdef unicode _ustring = u'azerty123456'
ustring = _ustring


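# The tree-assertion decorators below inspect the generated code: indexing a
# typed unicode string should compile to a direct character lookup, with any
# coercion to a Python object kept outside of the IndexNode.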
@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index(unicode ustring, Py_ssize_t i):
    """
    >>> index(ustring, 0) == 'a'
    True
    >>> index(ustring, 2) == 'e'
    True
    >>> index(ustring, -1) == '6'
    True
    >>> index(ustring, -len(ustring)) == 'a'
    True

    >>> index(ustring, len(ustring))
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return ustring[i]


@cython.test_assert_path_exists("//IndexNode")
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def index_pyindex(unicode ustring, i):
    """
    >>> index_pyindex(ustring, 0) == 'a'
    True
    >>> index_pyindex(ustring, 2) == 'e'
    True
    >>> index_pyindex(ustring, -1) == '6'
    True
    >>> index_pyindex(ustring, -len(ustring)) == 'a'
    True

    >>> index_pyindex(ustring, len(ustring))
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return ustring[i]



@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_literal(Py_ssize_t i):
    """
    >>> index_literal(0) == 'a'
    True
    >>> index_literal(2) == 'e'
    True
    >>> index_literal(-1) == '6'
    True
    >>> index_literal(-len('azerty123456')) == 'a'
    True

    >>> index_literal(len(ustring))
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return u'azerty123456'[i]


@cython.test_assert_path_exists("//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_literal_pyunicode_cast(int i):
    """
    >>> index_literal_pyunicode_cast(0) == '1'
    True
    >>> index_literal_pyunicode_cast(-5) == '1'
    True
    >>> index_literal_pyunicode_cast(2) == '3'
    True
    >>> index_literal_pyunicode_cast(4) == '5'
    True
    >>> index_literal_pyunicode_cast(6)
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return <Py_UNICODE>(u"12345"[i])


@cython.test_assert_path_exists("//IndexNode",
                                "//SingleAssignmentNode")
@cython.test_fail_if_path_exists("//SingleAssignmentNode//CoerceToPyTypeNode")
def index_literal_pyunicode_coerce(int i):
    """
    >>> index_literal_pyunicode_coerce(0) == '1'
    True
    >>> index_literal_pyunicode_coerce(-5) == '1'
    True
    >>> index_literal_pyunicode_coerce(2) == '3'
    True
    >>> index_literal_pyunicode_coerce(4) == '5'
    True
    >>> index_literal_pyunicode_coerce(6)
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    cdef Py_UNICODE result = u"12345"[i]
    return result


@cython.test_assert_path_exists("//SingleAssignmentNode")
@cython.test_fail_if_path_exists("//SingleAssignmentNode//CoerceFromPyTypeNode")
@cython.boundscheck(False)
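# With @cython.boundscheck(False) no out-of-range check is generated, so the
# doctests below only use indices that are known to be valid.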
def index_literal_pyunicode_coerce_no_check(int i):
    """
    >>> index_literal_pyunicode_coerce_no_check(0) == '1'
    True
    >>> index_literal_pyunicode_coerce_no_check(-5) == '1'
    True
    >>> index_literal_pyunicode_coerce_no_check(2) == '3'
    True
    >>> index_literal_pyunicode_coerce_no_check(4) == '5'
    True
    """
    cdef Py_UNICODE result = u"12345"[i]
    return result


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
@cython.boundscheck(False)
def index_no_boundscheck(unicode ustring, Py_ssize_t i):
    """
    >>> index_no_boundscheck(ustring, 0) == 'a'
    True
    >>> index_no_boundscheck(ustring, 2) == 'e'
    True
    >>> index_no_boundscheck(ustring, -1) == '6'
    True
    >>> index_no_boundscheck(ustring, len(ustring)-1) == '6'
    True
    >>> index_no_boundscheck(ustring, -len(ustring)) == 'a'
    True
    """
    return ustring[i]


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
@cython.boundscheck(False)
def unsigned_index_no_boundscheck(unicode ustring, unsigned int i):
    """
    >>> unsigned_index_no_boundscheck(ustring, 0) == 'a'
    True
    >>> unsigned_index_no_boundscheck(ustring, 2) == 'e'
    True
    >>> unsigned_index_no_boundscheck(ustring, len(ustring)-1) == '6'
    True
    """
    return ustring[i]

@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//PrimaryCmpNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_compare(unicode ustring, Py_ssize_t i):
    """
    >>> index_compare(ustring, 0)
    True
    >>> index_compare(ustring, 1)
    False
    >>> index_compare(ustring, -1)
    False
    >>> index_compare(ustring, -len(ustring))
    True

    >>> index_compare(ustring, len(ustring))
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return ustring[i] == u'a'


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//PrimaryCmpNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_compare_string(unicode ustring, Py_ssize_t i, unicode other):
    """
    >>> index_compare_string(ustring, 0, ustring[0])
    True
    >>> index_compare_string(ustring, 0, ustring[:4])
    False
    >>> index_compare_string(ustring, 1, ustring[0])
    False
    >>> index_compare_string(ustring, 1, ustring[1])
    True
    >>> index_compare_string(ustring, -1, ustring[0])
    False
    >>> index_compare_string(ustring, -1, ustring[-1])
    True
    >>> index_compare_string(ustring, -len(ustring), ustring[-len(ustring)])
    True

    >>> index_compare_string(ustring, len(ustring), ustring)
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return ustring[i] == other


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//MulNode",
                                "//MulNode/CoerceToPyTypeNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_multiply(unicode ustring, Py_ssize_t i, int mul):
    """
    >>> ustring[0] * 5 == 'aaaaa'
    True
    >>> index_multiply(ustring, 0, 5) == 'aaaaa'
    True
    """
    return ustring[i] * mul


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//AddNode",
                                "//AddNode/CoerceToPyTypeNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_add(unicode ustring, Py_ssize_t i, Py_ssize_t j):
    """
    >>> ustring[0] + ustring[-1] == 'a6'
    True
    >>> index_add(ustring, 0, -1) == 'a6'
    True
    """
    return ustring[i] + ustring[j]


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//CoerceToPyTypeNode//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_concat_loop(unicode ustring):
    """
    >>> index_concat_loop(ustring) == ustring
    True
    """
    cdef int i
    cdef unicode s = u''
    for i in range(len(ustring)):
        s += ustring[i]
    return s


@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//CoerceToPyTypeNode//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_join_loop(unicode ustring):
    """
    >>> index_join_loop(ustring) == ustring
    True
    """
    cdef int i
    return u''.join([ ustring[i] for i in range(len(ustring)) ])
Cython-0.26.1/tests/run/nogil.pyx0000664000175000017500000000220513143605603017450 0ustar  stefanstefan00000000000000# mode: run

try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO


def test(int x):
    """
    >>> test(5)
    47
    >>> test(11)
    53
    """
    with nogil:
        f(x)
        x = g(x)
    return x

cdef void f(int x) nogil:
    cdef int y
    y = x + 42
    g(y)

cdef int g(int x) nogil:
    cdef int y
    y = x + 42
    return y

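# A "with gil" function re-acquires the GIL on entry and may therefore raise;
# declaring both functions with "except 0" marks 0 as the error return value,
# so the exception propagates out of the nogil section back into Python code.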
cdef int with_gil_func() except 0 with gil:
    raise Exception("error!")

cdef int nogil_func() nogil except 0:
    with_gil_func()

def test_nogil_exception_propagation():
    """
    >>> test_nogil_exception_propagation()
    Traceback (most recent call last):
       ...
    Exception: error!
    """
    with nogil:
        nogil_func()


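# An exception raised inside a nogil function that has no exception
# declaration cannot propagate; Cython reports it as "unraisable" on stderr,
# which the test below captures and inspects.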
cdef int write_unraisable() nogil:
    with gil:
        raise ValueError()


def test_unraisable():
    """
    >>> print(test_unraisable())  # doctest: +ELLIPSIS
    ValueError
    Exception...ignored...
    """
    import sys
    old_stderr = sys.stderr
    stderr = sys.stderr = StringIO()
    try:
        write_unraisable()
    finally:
        sys.stderr = old_stderr
    return stderr.getvalue().strip()
Cython-0.26.1/tests/run/r_toofewargs.pyx0000664000175000017500000000033312542002467021042 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> s = Spam()
    Traceback (most recent call last):
    TypeError: __init__() takes exactly 3 positional arguments (0 given)
"""

cdef class Spam:

    def __init__(self, a, b, int c):
        pass
Cython-0.26.1/tests/run/decorators.pyx0000664000175000017500000000141012542002467020503 0ustar  stefanstefan00000000000000__doc__ = u"""
  >>> f(1,2)
  4
  >>> f.HERE
  1

  >>> g(1,2)
  5
  >>> g.HERE
  5

  >>> h(1,2)
  6
  >>> h.HERE
  1
  >>> i(4)
  3
  >>> i.HERE
  1
"""

class wrap:
    def __init__(self, func):
        self.func = func
        self.HERE = 1
    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

def decorate(func):
    try:
        func.HERE += 1
    except AttributeError:
        func = wrap(func)
    return func

def decorate2(a,b):
    return decorate
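# decorate() wraps a plain function once (HERE == 1) and only increments HERE
# on already-wrapped callables; decorate2(1, 2) simply returns decorate, so
# "@decorate2(1,2)" behaves like a single "@decorate".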

@decorate
def f(a,b):
    return a+b+1

@decorate
@decorate
@decorate
@decorate
@decorate
def g(a,b):
    return a+b+2

@decorate2(1,2)
def h(a,b):
    return a+b+3

class A:
    def decorate(self, func):
        return decorate(func)


a = A()
@a.decorate
def i(x):
    return x - 1
Cython-0.26.1/tests/run/builtin_ord.pyx0000664000175000017500000000351512542002467020660 0ustar  stefanstefan00000000000000
cimport cython

import sys

uspace = u' '
ustring_with_a = u'abcdefg'
ustring_without_a = u'bcdefg'


@cython.test_assert_path_exists(
    # ord() should receive and return a C value
    '//ReturnStatNode//CoerceToPyTypeNode//SimpleCallNode')
@cython.test_fail_if_path_exists(
    '//ReturnStatNode//SimpleCallNode//CoerceToPyTypeNode')
def ord_Py_UNICODE(unicode s):
    """
    >>> ord_Py_UNICODE(uspace)
    32
    """
    cdef Py_UNICODE u
    u = s[0]
    return ord(u)


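# ord() applied to a string literal is folded into an integer constant at
# compile time, which is why the decorators below require IntNode constants
# and forbid any SimpleCallNode in the compiled tree.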
@cython.test_assert_path_exists('//TupleNode//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def ord_const():
    """
    >>> ord(b' ')
    32
    >>> ord(' ')
    32
    >>> ord_const()
    (32, 32, 32, 255, 255, 4660, 0)
    """
    return ord(u' '), ord(b' '), ord(' '), ord('\xff'), ord(b'\xff'), ord(u'\u1234'), ord('\0')


@cython.test_assert_path_exists('//PrimaryCmpNode//IntNode')
#@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_for_loop_ord(unicode s):
    """
    >>> unicode_for_loop_ord(ustring_with_a)
    True
    >>> unicode_for_loop_ord(ustring_without_a)
    False
    """
    for c in s:
        if ord(c) == ord(u'a'):
            return True
    return False


def compare_to_char(s):
    """
    >>> compare_to_char(uspace)
    False
    >>> compare_to_char(b'a')
    False
    >>> compare_to_char(b'x')
    True
    >>> compare_to_char('x')
    True
    """
    cdef char c = b'x'
    return ord(s) == c


def ord_object(s):
    """
    >>> try: ord_object('abc')
    ... except ValueError: assert sys.version_info[0] >= 3
    ... except TypeError: assert sys.version_info[0] < 3
    >>> ord_object('a')
    97
    >>> ord_object(b'a')
    97
    """
    return ord(s)


def non_builtin_ord(s):
    """
    >>> non_builtin_ord('x')
    (123, 123)
    """
    def _ord(s):
        return 123

    ord = _ord
    return ord(s), _ord(s)
Cython-0.26.1/tests/run/unused.pyx0000664000175000017500000000131712542002467017647 0ustar  stefanstefan00000000000000cdef c_unused_simple(a, b, c):
    """
    >>> c_unused_simple(1, 2, 3)
    3
    """
    return a + b

cdef c_unused_optional(a, b, c=1, d=2):
    """
    >>> c_unused_optional(1, 2)
    4
    >>> c_unused_optional(1, 2, 3, 4)
    6
    """
    return b + d

cpdef cp_unused_simple(a, b, c):
    """
    >>> cp_unused_simple(1, 2, 3)
    3
    """
    return a + b

cpdef cp_unused_optional(a, b, c=1, d=2):
    """
    >>> cp_unused_optional(1, 2)
    4
    >>> cp_unused_optional(1, 2, 3, 4)
    6
    """
    return b + d


cdef class Unused:
    """
    >>> o = Unused()
    """

    cpdef cp_unused_simple(self, a, b, c):
        return c

    cpdef cp_unused_optional(self, a, b, c=1, d=2):
        return b + d
Cython-0.26.1/tests/run/complex_coercion_sideeffects_T693.pyx0000664000175000017500000000052412542002467024764 0ustar  stefanstefan00000000000000# mode: run
# ticket: 693
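# The C "double complex" return value must be coerced to a Python complex
# exactly once; the doctest checks that the side effect ("hello") is printed
# a single time and that the coerced value is preserved.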

cdef double complex func(double complex x):
    print "hello"
    return x

def test_coercion():
    """
    >>> c = test_coercion()
    hello
    >>> c.real == 0.5
    True
    >>> c.imag == 1.5
    True
    """
    cdef object x = func(0.5 + 1.5j)
    return x
Cython-0.26.1/tests/run/new_as_nonkeyword.pyx0000664000175000017500000000075612542002467022105 0ustar  stefanstefan00000000000000cdef extern from *:
    int new(int new)

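# In Cython, "new" is not a reserved word (it only has special meaning for
# C++ allocation in cppclass code), so it can be used as a function, argument
# and variable name.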
def new(x):
    """
    >>> new(3)
    3
    """
    cdef int new = x
    return new

def x(new):
    """
    >>> x(10)
    110
    >>> x(1)
    1
    """
    if new*new != new:
        return new + new**2
    return new

class A:
    def new(self, n):
        """
        >>> a = A()
        >>> a.new(3)
        6
        >>> a.new(5)
        120
        """
        if n <= 1:
            return 1
        else:
            return n * self.new(n-1)
Cython-0.26.1/tests/run/cpp_operator_exc_handling_helper.hpp0000664000175000017500000001266013023021033025036 0ustar  stefanstefan00000000000000#pragma once
#include <stdexcept>

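// Helper class for the C++ operator exception-handling tests: every operator
// throws a std:: exception whenever a value of 4 is involved, so the matching
// .pyx test can check that each C++ exception is translated into the expected
// Python exception.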
class wrapped_int {
public:
  long long val;
  wrapped_int() { val = 0; }
  wrapped_int(long long val) { this->val = val; }
  wrapped_int(long long v1, long long v2) {
    if (v2 == 4) {
      throw std::domain_error("4 isn't good for initialization!");
    }
    this->val = v1;
  }
  wrapped_int operator+(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("tried to add 4");
    }
    return wrapped_int(this->val + other.val);
  }
  wrapped_int operator+() {
    if (this->val == 4) {
      throw std::domain_error("'4' not in valid domain.");
    }
    return *this;
  }
  wrapped_int operator-(wrapped_int &other) {
    if (other.val == 4) {
      throw std::overflow_error("Value '4' is no good.");
    }
    return *this;
  }
  wrapped_int operator-() {
    if (this->val == 4) {
      throw std::range_error("Can't take the negative of 4.");
    }
    return wrapped_int(-this->val);
  }
  wrapped_int operator*(wrapped_int &other) {
    if (other.val == 4) {
      throw std::out_of_range("Multiplying by 4 isn't going to work.");
    }
    return wrapped_int(this->val * other.val);
  }
  wrapped_int operator/(wrapped_int &other) {
    if (other.val == 4) {
      throw std::out_of_range("Multiplying by 4 isn't going to work.");
    }
    return wrapped_int(this->val / other.val);
  }
  wrapped_int operator%(wrapped_int &other) {
    if (other.val == 4) {
      throw std::out_of_range("Multiplying by 4 isn't going to work.");
    }
    return wrapped_int(this->val % other.val);
  }
  long long operator^(wrapped_int &other) {
    if (other.val == 4) {
      throw std::out_of_range("Multiplying by 4 isn't going to work.");
    }
    return this->val ^ other.val;
  }
  long long operator&(wrapped_int &other) {
    if (other.val == 4) {
      throw std::underflow_error("Can't do this with 4!");
    }
    return this->val & other.val;
  }
  long long operator|(wrapped_int &other) {
    if (other.val == 4) {
      throw std::underflow_error("Can't do this with 4!");
    }
    return this->val | other.val;
  }
  wrapped_int operator~() {
    if (this->val == 4) {
      throw std::range_error("4 is really just no good for this!");
    }
    return *this;
  }
  long long operator&() {
    if (this->val == 4) {
      throw std::out_of_range("4 cannot be located!");
    }
    return this->val;
  }
  long long operator==(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("4 isn't logical and can't be equal to anything!");
    }
    return this->val == other.val;
  }
  long long operator!=(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("4 isn't logical and can't be not equal to anything either!");
    }
    return this->val != other.val;
  }
  long long operator<(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("Can't compare with 4!");
    }
    return this->val < other.val;
  }
  long long operator<=(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("Can't compare with 4!");
    }
    return this->val <= other.val;
  }
  long long operator>(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("Can't compare with 4!");
    }
    return this->val > other.val;
  }
  long long operator>=(wrapped_int &other) {
    if (other.val == 4) {
      throw std::invalid_argument("Can't compare with 4!");
    }
    return this->val >= other.val;
  }
  wrapped_int operator<<(long long &shift) {
    if (shift == 4) {
      throw std::overflow_error("Shifting by 4 is just bad.");
    }
    return wrapped_int(this->val << shift);
  }
  wrapped_int operator>>(long long &shift) {
    if (shift == 4) {
      throw std::underflow_error("Shifting by 4 is just bad.");
    }
    return wrapped_int(this->val >> shift);
  }
  wrapped_int &operator++() {
    if (this->val == 4) {
      throw std::out_of_range("Can't increment 4!");
    }
    this->val += 1;
    return *this;
  }
  wrapped_int &operator--() {
    if (this->val == 4) {
      throw std::out_of_range("Can't decrement 4!");
    }
    this->val -= 1;
    return *this;
  }
  wrapped_int operator++(int) {
    if (this->val == 4) {
      throw std::out_of_range("Can't increment 4!");
    }
    wrapped_int t = *this;
    this->val += 1;
    return t;
  }
  wrapped_int operator--(int) {
    if (this->val == 4) {
      throw std::out_of_range("Can't decrement 4!");
    }
    wrapped_int t = *this;
    this->val -= 1;
    return t;
  }
  wrapped_int operator!() {
    if (this->val == 4) {
      throw std::out_of_range("Can't negate 4!");
    }
    return wrapped_int(!this->val);
  }
  operator bool() {
    if (this->val == 4) {
      throw std::invalid_argument("4 can't be cast to a boolean value!");
    }
    return (this->val != 0);
  }
  wrapped_int &operator[](long long &idx) {
    if (idx == 4) {
      throw std::invalid_argument("Index of 4 not allowed.");
    }
    return *this;
  }
  long long &operator()() {
    if (this->val == 4) {
      throw std::range_error("Can't call 4!");
    }
    return this->val;
  }
  wrapped_int &operator=(const wrapped_int &other) {
    if ((other.val == 4) && (this->val == 4)) {
      throw std::overflow_error("Can't assign 4 to 4!");
    }
    this->val = other.val;
    return *this;
  }
  wrapped_int &operator=(const long long &v) {
    if ((v == 4) && (this->val == 4)) {
      throw std::overflow_error("Can't assign 4 to 4!");
    }
    this->val = v;
    return *this;
  }
};
Cython-0.26.1/tests/run/for_in_iter.py0000664000175000017500000000542512542002467020457 0ustar  stefanstefan00000000000000# mode: run
# tag: forin

import sys
import cython

try:
    from builtins import next
except ImportError:
    def next(it):
        return it.next()

def for_in_pyiter_pass(it):
    """
    >>> it = Iterable(5)
    >>> for_in_pyiter_pass(it)
    >>> next(it)
    Traceback (most recent call last):
    StopIteration
    """
    for item in it:
        pass

def for_in_pyiter(it):
    """
    >>> for_in_pyiter(Iterable(5))
    [0, 1, 2, 3, 4]
    """
    l = []
    for item in it:
        l.append(item)
    return l

def for_in_list():
    """
    >>> for_in_pyiter([1,2,3,4,5])
    [1, 2, 3, 4, 5]
    """

@cython.test_assert_path_exists('//TupleNode//IntNode')
@cython.test_fail_if_path_exists('//ListNode//IntNode')
def for_in_literal_list():
    """
    >>> for_in_literal_list()
    [1, 2, 3, 4]
    """
    l = []
    for i in [1,2,3,4]:
        l.append(i)
    return l

@cython.test_assert_path_exists('//TupleNode//IntNode')
@cython.test_fail_if_path_exists('//ListNode//IntNode')
def for_in_literal_mult_list():
    """
    >>> for_in_literal_mult_list()
    [1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]
    """
    l = []
    for i in [1,2,3,4] * 3:
        l.append(i)
    return l

class Iterable(object):
    """
    >>> for_in_pyiter(Iterable(5))
    [0, 1, 2, 3, 4]
    """
    def __init__(self, N):
        self.N = N
        self.i = 0
    def __iter__(self):
        return self
    def __next__(self):
        if self.i < self.N:
            i = self.i
            self.i += 1
            return i
        raise StopIteration
    next = __next__

if sys.version_info[0] >= 3:
    class NextReplacingIterable(object):
        def __init__(self):
            self.i = 0
        def __iter__(self):
            return self

        def __next__(self):
            if self.i > 5:
                raise StopIteration
            self.i += 1
            self.__next__ = self.next2
            return 1
        def next2(self):
            self.__next__ = self.next3
            return 2
        def next3(self):
            del self.__next__
            raise StopIteration
else:
    class NextReplacingIterable(object):
        def __init__(self):
            self.i = 0
        def __iter__(self):
            return self

        def next(self):
            if self.i > 5:
                raise StopIteration
            self.i += 1
            self.next = self.next2
            return 1
        def next2(self):
            self.next = self.next3
            return 2
        def next3(self):
            del self.next
            raise StopIteration

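# Special methods are looked up on the type, not on the instance, so rebinding
# next/__next__ on the instance must not change what the for-loop calls: the
# iterator keeps yielding 1 until StopIteration.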
def for_in_next_replacing_iter():
    """
    >>> for_in_pyiter(NextReplacingIterable())
    [1, 1, 1, 1, 1, 1]
    """

def for_in_gen(N):
    """
    >>> for_in_pyiter(for_in_gen(10))
    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    """
    for i in range(N):
        yield i
Cython-0.26.1/tests/run/switch.pyx0000664000175000017500000001570612542002467017654 0ustar  stefanstefan00000000000000# mode: run

cimport cython
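# Cython turns if/elif chains that compare a single C integer against literal
# values (including "x in (...)" tests) into a C switch statement.  The
# decorators below assert whether that SwitchStatNode transformation happened.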


@cython.test_fail_if_path_exists('//SwitchStatNode')
@cython.test_assert_path_exists('//IfStatNode')
def switch_simple_py(x):
    """
    >>> switch_simple_py(1)
    1
    >>> switch_simple_py(2)
    2
    >>> switch_simple_py(3)
    3
    >>> switch_simple_py(4)
    8
    >>> switch_simple_py(5)
    0
    """
    if x == 1:
        return 1
    elif 2 == x:
        return 2
    elif x in [3]:
        return 3
    elif x in (4,):
        return 8
    else:
        return 0
    return -1


@cython.test_fail_if_path_exists('//SwitchStatNode')
@cython.test_assert_path_exists('//IfStatNode')
def switch_py(x):
    """
    >>> switch_py(1)
    1
    >>> switch_py(2)
    2
    >>> switch_py(3)
    3
    >>> switch_py(4)
    4
    >>> switch_py(5)
    4
    >>> switch_py(6)
    0
    >>> switch_py(8)
    4
    >>> switch_py(10)
    10
    >>> switch_py(12)
    12
    >>> switch_py(13)
    0
    """
    if x == 1:
        return 1
    elif 2 == x:
        return 2
    elif x in [3]:
        return 3
    elif x in [4,5,7,8]:
        return 4
    elif x in (10,11):
        return 10
    elif x in (12,):
        return 12
    else:
        return 0
    return -1


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_simple_c(int x):
    """
    >>> switch_simple_c(1)
    1
    >>> switch_simple_c(2)
    2
    >>> switch_simple_c(3)
    3
    >>> switch_simple_c(4)
    8
    >>> switch_simple_c(5)
    0
    """
    if x == 1:
        return 1
    elif 2 == x:
        return 2
    elif x in [3]:
        return 3
    elif x in (4,):
        return 8
    else:
        return 0
    return -1


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_c(int x):
    """
    >>> switch_c(1)
    1
    >>> switch_c(2)
    2
    >>> switch_c(3)
    3
    >>> switch_c(4)
    4
    >>> switch_c(5)
    4
    >>> switch_c(6)
    0
    >>> switch_c(8)
    4
    >>> switch_c(10)
    10
    >>> switch_c(12)
    12
    >>> switch_c(13)
    0
    """
    if x == 1:
        return 1
    elif 2 == x:
        return 2
    elif x in [3]:
        return 3
    elif x in [4,5,7,8]:
        return 4
    elif x in (10,11):
        return 10
    elif x in (12,):
        return 12
    else:
        return 0
    return -1


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_or(int x):
    """
    >>> switch_or(0)
    0
    >>> switch_or(1)
    1
    >>> switch_or(2)
    1
    >>> switch_or(3)
    1
    >>> switch_or(4)
    0
    """
    if x == 1 or x == 2 or x == 3:
        return 1
    else:
        return 0
    return -1


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_in(int X):
    """
    >>> switch_in(0)
    0
    >>> switch_in(1)
    1
    >>> switch_in(2)
    0
    >>> switch_in(7)
    1
    >>> switch_in(8)
    0
    """
    if X in (1,3,5,7):
        return 1
    return 0


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_short(int x):
    """
    >>> switch_short(0)
    0
    >>> switch_short(1)
    1
    >>> switch_short(2)
    2
    >>> switch_short(3)
    0
    """
    if x == 1:
        return 1
    elif 2 == x:
        return 2
    else:
        return 0
    return -1


@cython.test_fail_if_path_exists('//SwitchStatNode')
@cython.test_assert_path_exists('//IfStatNode')
def switch_off(int x):
    """
    >>> switch_off(0)
    0
    >>> switch_off(1)
    1
    >>> switch_off(2)
    0
    """
    if x == 1:
        return 1
    else:
        return 0
    return -1



@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def switch_pass(int x):
    """
    >>> switch_pass(1)
    1
    """
    if x == 1:
        pass
    elif x == 2:
        pass
    else:
        pass
    return x


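# DEF creates a compile-time constant, so membership in this tuple is known at
# compile time and can still be optimised into a switch statement.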
DEF t = (1,2,3,4,5,6)

@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def compile_time_tuple_constant(int x):
    """
    >>> compile_time_tuple_constant(1)
    True
    >>> compile_time_tuple_constant(0)
    False
    >>> compile_time_tuple_constant(7)
    False
    """
    if x in t:
        return True
    else:
        return False


cdef enum X:
    a = 1
    b
    c
    d
    e = 10
    f = 100

@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def enum_switch(X x):
    """
    >>> enum_switch(1)
    0
    >>> enum_switch(10)
    1
    >>> enum_switch(100)
    2
    """
    if x in [a, b, c, d]:
        return 0
    elif x == e:
        return 1
    else:
        return 2


@cython.test_assert_path_exists('//IfStatNode')
@cython.test_assert_path_exists('//IfStatNode//SwitchStatNode')
def enum_duplicates(X x):
    """
    >>> enum_duplicates(1)
    0
    >>> enum_duplicates(2)  # b
    0
    >>> enum_duplicates(10)
    1
    >>> enum_duplicates(100)
    3
    """
    if x in [a, b, c, d]:   # switch is ok here!
        return 0
    elif x == e:
        return 1
    elif x == b:  # duplicate => no switch here!
        return 2
    else:
        return 3


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//IfStatNode')
def int_enum_switch_mix(int x):
    """
    >>> int_enum_switch_mix(1)
    0
    >>> int_enum_switch_mix(10)
    1
    >>> int_enum_switch_mix(ord('X'))
    2
    >>> int_enum_switch_mix(99)
    3
    >>> int_enum_switch_mix(100)
    4
    """
    if x in [a, b, c, d]:
        return 0
    elif x == e:
        return 1
    elif x == 'X':  # ASCII(88)
        return 2
    elif x == 99:
        return 3
    else:
        return 4


@cython.test_fail_if_path_exists('//SwitchStatNode')
@cython.test_assert_path_exists('//IfStatNode')
def int_enum_duplicates_mix(int x):
    """
    >>> int_enum_duplicates_mix(88)
    0
    >>> int_enum_duplicates_mix(ord('X'))
    0
    >>> int_enum_duplicates_mix(99)
    2
    >>> int_enum_duplicates_mix(100)
    3
    """
    if x == 88:
        return 0
    elif x == 'X':  # ASCII(88) => redundant
        return 1
    elif x == 99:
        return 2
    else:
        return 3


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//BoolBinopNode', '//PrimaryCmpNode')
def int_in_bool_binop(int x):
    """
    >>> int_in_bool_binop(0)
    False
    >>> int_in_bool_binop(1)
    True
    >>> int_in_bool_binop(2)
    True
    >>> int_in_bool_binop(3)
    False
    """
    return x == 1 or x == 2


@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//BoolBinopNode', '//PrimaryCmpNode')
def int_in_bool_binop_3(int x):
    """
    >>> int_in_bool_binop_3(0)
    False
    >>> int_in_bool_binop_3(1)
    True
    >>> int_in_bool_binop_3(2)
    True
    >>> int_in_bool_binop_3(3)
    False
    >>> int_in_bool_binop_3(4)
    True
    >>> int_in_bool_binop_3(5)
    False
    """
    return x == 1 or x == 2 or x == 4
Cython-0.26.1/tests/run/ipow_crash_T562.pyx0000664000175000017500000000056012542002467021221 0ustar  stefanstefan00000000000000# ticket: 562

class IPOW:
    """
    >>> IPOW().__ipow__('a')
    a
    >>> x = IPOW()
    >>> x **= 'z'
    z
    """
    def __ipow__(self, other):
        print ("%s" % other)

cdef class CrashIPOW:
    """
    >>> CrashIPOW().__ipow__('a')
    a
    >>> x = CrashIPOW()
    >>> x **= 'z'
    z
    """
    def __ipow__(self, other):
        print ("%s" % other)
Cython-0.26.1/tests/run/future_unicode_literals.pyx0000664000175000017500000000117513143605603023264 0ustar  stefanstefan00000000000000from __future__ import unicode_literals

import sys
if sys.version_info[0] >= 3:
    __doc__ = u"""
    >>> u == 'test'
    True
    >>> isinstance(u, str)
    True
    >>> isinstance(b, bytes)
    True
    >>> raw ==  'abc\\\\xf8\\\\t\\u00f8\\U000000f8'  # unescaped by Python (required by doctest)
    True
"""
else:
    __doc__ = u"""
    >>> u == u'test'
    True
    >>> isinstance(u, unicode)
    True
    >>> isinstance(b, str)
    True
    >>> raw == u'abc\\\\xf8\\\\t\\u00f8\\U000000f8'  # unescaped by Python (required by doctest)
    True
"""

u = "test"

cdef char* s = "bytes test"
b = s

raw = r'abc\xf8\t\u00f8\U000000f8'
Cython-0.26.1/tests/run/bishop2.pyx0000664000175000017500000000022512542002467017707 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> f = foo()
    >>> 'a' in f
    True
    >>> 1 in f
    True
"""

cdef class foo:

  def __contains__(self, key):
    return 1
Cython-0.26.1/tests/run/exceptionrefcount.pyx0000664000175000017500000000272112542002467022110 0ustar  stefanstefan00000000000000__doc__ = u"""
>>> class SampleException(Exception): pass

>>> def assert_refcount(rc1, rc2, func):
...     # test ref-counts, but allow a bit of freedom
...     assert rc2 <= rc1 + 4, "%s, before: %d, after %d" % (
...         func.__name__, rc1, rc2)

>>> def run_test(repeat, test_func):
...     initial_refcount = get_refcount(SampleException)
...     for i in range(repeat):
...         try: raise SampleException
...         except:
...             refcount1 = get_refcount(SampleException)
...             test_func()
...             refcount2 = get_refcount(SampleException)
...
...             assert_refcount(refcount1, refcount2, test_func)
...             assert_refcount(initial_refcount, refcount2, test_func)
...         refcount3 = get_refcount(SampleException)
...         assert_refcount(refcount1, refcount3, test_func)
...         assert_refcount(initial_refcount, refcount3, test_func)

>>> run_test(50, test_no_exception_else)
>>> run_test(50, test_no_exception)
>>> run_test(50, test_exception)
>>> run_test(50, test_finally)
"""

from cpython.ref cimport PyObject
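# Casting to PyObject* exposes the underlying C struct, so the reference count
# can be read directly from the ob_refcnt field without affecting it.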

def get_refcount(obj):
    return (<PyObject*>obj).ob_refcnt

def test_no_exception():
    try:
        a = 1+1
    except:
        pass

def test_no_exception_else():
    try:
        a = 1+1
    except:
        pass
    else:
        b = 1+1

def test_exception():
    try:
        raise TypeError
    except:
        pass

def test_finally():
    try:
        a = 1+1
    finally:
        b = 1+1
Cython-0.26.1/tests/run/return.pyx0000664000175000017500000000044312542002467017662 0ustar  stefanstefan00000000000000def f(a):
    """
    >>> f('test')
    """
    return
    return a
    return 42

cdef void g():
    return

cdef int h(a):
    cdef int i
    i = a
    return i

def test_g():
    """
    >>> test_g()
    """
    g()

def test_h(i):
    """
    >>> test_h(5)
    5
    """
    return h(i)
Cython-0.26.1/tests/run/str_default_auto_encoding.pyx0000664000175000017500000000046312542002467023557 0ustar  stefanstefan00000000000000# cython: c_string_type = str
# cython: c_string_encoding = default

import sys
if sys.version_info[0] >= 3:
    __doc__ = r"""
        >>> as_objects("ab\xff") == "ab\xff"
        True
        >>> slice_as_objects("ab\xffd", 1, 4) == "b\xff"
        True
        """

include "str_ascii_auto_encoding.pyx"
Cython-0.26.1/tests/run/extended_unpacking_T409.pyx0000664000175000017500000000061212542002467022720 0ustar  stefanstefan00000000000000# ticket: 409

def simple():
    """
    >>> simple()
    ([1, 2], [1, 2])
    """
    d = e = [1,2]
    return d, e

def simple_parallel():
    """
    >>> simple_parallel()
    (1, 2, [1, 2], [1, 2])
    """
    a, c = d = e = [1,2]
    return a, c, d, e

def extended():
    """
    >>> extended()
    (1, [], 2, [1, 2], [1, 2])
    """
    a, *b, c = d = e = [1,2]
    return a, b, c, d, e
Cython-0.26.1/tests/run/strescapes.pyx0000664000175000017500000000235612542002467020524 0ustar  stefanstefan00000000000000__doc__ = u"""

>>> py_strings = [
... b'\\x1234',
... b'\\x0A12\\x0C34',
... b'\\x0A57',
... b'\\x0A',
... b'\\'',
... b"\\'",
... b"\\"",
... b'\\"',
... b'abc\\x12def',
... u'\\u1234',
... u'\\U00001234',
... b'\\u1234',
... b'\\U00001234',
... b'\\n\\r\\t',
... b':>',
... b'??>',
... b'\\0\\0\\0',
... ]

>>> for i, (py_string, (c_string, length)) in enumerate(zip(py_strings, c_strings)):
...     assert py_string == c_string, "%d: %r != %r" % (i, py_string, c_string)
...     assert len(py_string) == length, (
...         "%d: wrong length of %r, got %d, expected %d" % (
...             i, py_string, len(py_string), length))
...     assert len(c_string) == length, (
...         "%d: wrong length of %r, got %d, expected %d" % (
...             i, c_string, len(c_string), length))

"""

import sys
if sys.version_info[0] < 3:
    __doc__ = __doc__.replace(u" b'", u" '").replace(u' b"', u' "')
else:
    __doc__ = __doc__.replace(u" u'", u" '").replace(u' u"', u' "')

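# Expected byte values: \x consumes exactly two hex digits (b'\x1234' is three
# bytes), while \u and \U escapes are not recognised in byte literals and stay
# as literal characters.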
c_strings = [
(b'\x1234', 3),
(b'\x0A12\x0C34', 6),
(b'\x0A57', 3),
(b'\x0A', 1),
(b'\'', 1),
(b"\'", 1),
(b"\"", 1),
(b'\"', 1),
(b'abc\x12def', 7),
(u'\u1234', 1),
(u'\U00001234', 1),
(b'\u1234', 6),
(b'\U00001234', 10),
(b'\n\r\t', 3),
(b':>', 2),
(b'??>', 3),
(b'\0\0\0', 3),
]
Cython-0.26.1/tests/run/py2_super.pyx0000664000175000017500000000342712542002467020300 0ustar  stefanstefan00000000000000# mode: run
# tag: py3k_super

class A(object):
    def method(self):
        return 1

    @classmethod
    def class_method(cls):
        return 2

    @staticmethod
    def static_method():
        return 3

    def generator_test(self):
        return [1, 2, 3]


class B(A):
    """
    >>> obj = B()
    >>> obj.method()
    1
    >>> B.class_method()
    2
    >>> B.static_method(obj)
    3
    >>> list(obj.generator_test())
    [1, 2, 3]
    """
    def method(self):
        return super(B, self).method()

    @classmethod
    def class_method(cls):
        return super(B, cls).class_method()

    @staticmethod
    def static_method(instance):
        return super(B, instance).static_method()

    def generator_test(self):
        for i in super(B, self).generator_test():
            yield i


cdef class CClassBase(object):
    def method(self):
        return 'def'
    cpdef method_cp(self):
        return 'cpdef'

#     cdef method_c(self):
#         return 'cdef'
#     def call_method_c(self):
#         return self.method_c()

cdef class CClassSub(CClassBase):
    """
    >>> CClassSub().method()
    'def'
    >>> CClassSub().method_cp()
    'cpdef'
    """
#     >>> CClassSub().call_method_c()
#     'cdef'

    def method(self):
        return super(CClassSub, self).method()
    cpdef method_cp(self):
        return super(CClassSub, self).method_cp()

#     cdef method_c(self):
#         return super(CClassSub, self).method_c()

cdef class Base(object):
    """
    >>> Base().method()
    'Base'
    >>> Base.method(Base())
    'Base'
    """
    cpdef method(self):
        return "Base"

cdef class Sub(Base):
    """
    >>> Sub().method()
    'Sub'
    >>> Sub.method(Sub())
    'Sub'
    >>> Base.method(Sub())
    'Base'
    """
    cpdef method(self):
        return "Sub"
Cython-0.26.1/tests/run/non_const_as_const_arg.pyx0000664000175000017500000000036313023021033023047 0ustar  stefanstefan00000000000000cdef double f(const double a, const double b, const double c):
    return a + b - c

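# Passing ordinary (non-const) C variables where the parameters are declared
# "const double" must compile and behave like a normal call.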
def test_non_const_as_const_arg():
    """
    >>> test_non_const_as_const_arg()
    1.0
    """
    cdef double a = 1., b = 1., c = 1.
    return f(a, b, c)
Cython-0.26.1/tests/run/argument_unpacking_closure_T736.py0000664000175000017500000000102612542002467024311 0ustar  stefanstefan00000000000000# mode: run
# ticket: 736
# tag: default arguments, closure

def default_args_for_closure(a=1, b=2):
    """
    >>> default_args_for_closure()()
    (1, 2)
    >>> default_args_for_closure(1, 2)()
    (1, 2)
    >>> default_args_for_closure(2)()
    (2, 2)
    >>> default_args_for_closure(8,9)()
    (8, 9)
    >>> default_args_for_closure(7, b=6)()
    (7, 6)
    >>> default_args_for_closure(a=5, b=4)()
    (5, 4)
    >>> default_args_for_closure(b=5, a=6)()
    (6, 5)
    """
    def func():
        return a,b
    return func
Cython-0.26.1/tests/run/generator_expressions.pyx0000664000175000017500000000252212542002467022773 0ustar  stefanstefan00000000000000# mode: run
# tag: generators, lambda
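# Generator expressions get their own scope, so the loop variable must not
# leak into the enclosing function; each test asserts that x stays 'abc'.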

def genexpr():
    """
    >>> genexpr()
    [0, 2, 4, 6, 8]
    """
    x = 'abc'
    result = list( x*2 for x in range(5) )
    assert x == 'abc' # don't leak
    return result

def genexpr_if():
    """
    >>> genexpr_if()
    [0, 4, 8]
    """
    x = 'abc'
    result = list( x*2 for x in range(5) if x % 2 == 0 )
    assert x == 'abc' # don't leak
    return result

def genexpr_if_false():
    """
    >>> genexpr_if_false()
    []
    """
    x = 'abc'
    result = list( x*2 for x in range(5) if False )
    assert x == 'abc' # don't leak
    return result

def genexpr_with_lambda():
    """
    >>> genexpr_with_lambda()
    [0, 4, 8]
    """
    x = 'abc'
    result = list( x*2 for x in range(5) if (lambda x:x % 2)(x) == 0 )
    assert x == 'abc' # don't leak
    return result

def genexpr_of_lambdas(int N):
    """
    >>> [ (f(), g()) for f,g in genexpr_of_lambdas(5) ]
    [(0, 0), (1, 2), (2, 4), (3, 6), (4, 8)]
    """
    return ( ((lambda : x), (lambda : x*2)) for x in range(N) )


def genexpr_with_bool_binop(values):
    """
    >>> values = [(1, 2, 3), (None, 4, None), (5, None, 6)]
    >>> genexpr_with_bool_binop(values)
    [(1, 2, 3), ('X', 4, 'X'), (5, 'X', 6)]
    """
    # copied from CPython's test_itertools.py
    return [tuple((e is None and 'X' or e) for e in t) for t in values]
Cython-0.26.1/tests/run/append.pyx0000664000175000017500000000312212574327400017611 0ustar  stefanstefan00000000000000class A:
    def append(self, x):
        print u"appending", x
        return x

class B(list):
    def append(self, *args):
        for arg in args:
            list.append(self, arg)

cdef class C:
    """
    >>> c = C(100)
    appending 100
    """
    def __init__(self, value):
        self.append(value)
    cdef append(self, value):
        print u"appending", value
        return value

def test_append(L):
    """
    >>> test_append([])
    None
    None
    None
    got error
    [1, 2, (3, 4)]
    >>> _ = test_append(A())
    appending 1
    1
    appending 2
    2
    appending (3, 4)
    (3, 4)
    got error
    >>> test_append(B())
    None
    None
    None
    None
    [1, 2, (3, 4), 5, 6]
    """
    print L.append(1)
    print L.append(2)
    print L.append((3,4))
    try:
        print L.append(5,6)
    except TypeError:
        print u"got error"
    return L


def test_append_typed(list L not None):
    """
    >>> test_append_typed([])
    None
    None
    [1, 2, (3, 4)]
    """
    print L.append(1)
    L.append(2)
    print L.append((3,4))
    return L


def append_unused_retval(L):
    """
    >>> append_unused_retval([])
    got error
    [1, 2, (3, 4)]
    >>> _ = append_unused_retval(A())
    appending 1
    appending 2
    appending (3, 4)
    got error
    >>> append_unused_retval(B())
    [1, 2, (3, 4), 5, 6]
    """
    L.append(1)
    L.append(2)
    L.append((3,4))
    try:
        L.append(5,6)
    except TypeError:
        print u"got error"
    return L


def method_name():
    """
    >>> method_name()
    'append'
    """
    return [].append.__name__
Cython-0.26.1/tests/run/cdef_locals_decorator_T477.pyx0000664000175000017500000000071712542002467023374 0ustar  stefanstefan00000000000000# ticket: 477

import cython
@cython.locals(x=double)
cdef func(x):
    return x**2

@cython.locals(x=double)
cdef func_defval(x=0):
    return x**2

def test():
    """
    >>> isinstance(test(), float)
    True
    """
    return func(2)

def test_defval(x=None):
    """
    >>> test_defval()
    0.0
    >>> test_defval(1)
    1.0
    >>> test_defval(2.0)
    4.0
    """
    if x is None:
        return func_defval()
    else:
        return func_defval(x)
Cython-0.26.1/tests/run/complex_numbers_T305_long_double.pyx0000664000175000017500000000034213143605603024626 0ustar  stefanstefan00000000000000# ticket: 305

cimport cython

def test_object_conversion(o):
    """
    >>> test_object_conversion(2)
    (2+0j)
    >>> test_object_conversion(2j - 0.5)
    (-0.5+2j)
    """
    cdef long double complex a = o
    return a
Cython-0.26.1/tests/run/curiously_recurring_template_pattern_GH1458.pyx0000664000175000017500000000114313023021033026770 0ustar  stefanstefan00000000000000# tag: cpp

cdef extern from "curiously_recurring_template_pattern_GH1458_suport.h":

    cdef cppclass Base[T, Derived]:
        Base(T)
        Derived half()
        T calculate()

    cdef cppclass Square[T](Base[T, Square[T]]):
        Square(T)

    cdef cppclass Cube[T](Base[T, Cube[T]]):
        Cube(T)


def test_derived(int x):
    """
    >>> test_derived(5)
    (6.25, 8)
    """
    try:
        square_double = new Square[double](x)
        cube_int = new Cube[int](x)
        return square_double.half().calculate(), cube_int.half().calculate()
    finally:
        del square_double, cube_int
Cython-0.26.1/tests/run/cdef_setitem_T284.pyx0000664000175000017500000000165212542002467021522 0ustar  stefanstefan00000000000000# ticket: 284

def no_cdef():
    """
    >>> no_cdef()
    """
    cdef object lst = list(range(11))
    ob = 10L
    lst[ob] = -10
    cdef object dd = {}
    dd[ob] = -10

def with_cdef():
    """
    >>> with_cdef()
    """
    cdef list lst = list(range(11))
    ob = 10L
    lst[ob] = -10
    cdef dict dd = {}
    dd[ob] = -10

def with_external_list(list L):
    """
    >>> with_external_list([1,2,3])
    [1, -10, 3]
    >>> with_external_list(None)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    ob = 1L
    L[ob] = -10
    return L

def test_list(list L, object i, object a):
    """
    >>> test_list(list(range(11)), -2, None)
    [0, 1, 2, 3, 4, 5, 6, 7, 8, None, 10]
    >>> test_list(list(range(11)), "invalid index", None) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: list ... must be ...integer...
    """
    L[i] = a
    return L
Cython-0.26.1/tests/run/define_macro.pyx0000664000175000017500000000030013023021033020727 0ustar  stefanstefan00000000000000#distutils: define_macros = DEFINE_NO_VALUE  DEFINE_WITH_VALUE=0

cdef extern from "define_macro_helper.h" nogil:
    int VAL;

def test():
    """
    >>> test()
    1
    """
    return VAL
Cython-0.26.1/tests/run/kwonlyargscall.pyx0000664000175000017500000001224412542002467021401 0ustar  stefanstefan00000000000000# the calls:

def call0ab(f):
    """
    >>> call0ab(b)
    Traceback (most recent call last):
    TypeError: b() takes exactly 3 positional arguments (2 given)
    >>> call0ab(c)
    1 2 1
    >>> call0ab(d)
    1 2 88
    """
    f(a=1,b=2)

def call0abc(f):
    """
    >>> call0abc(b)
    1 2 3
    >>> call0abc(c)
    1 2 3
    >>> call0abc(d)
    1 2 3
    >>> call0abc(e)
    1 2 3 []
    >>> call0abc(f)
    1 2 3 42
    >>> call0abc(m)
    1 2 3
    """
    f(a=1,b=2,c=3)

def call2(f):
    """
    >>> call2(c)
    1 2 1
    >>> call2(d)
    1 2 88
    >>> call2(e)
    1 2 88 []
    >>> call2(f)
    Traceback (most recent call last):
    TypeError: f() needs keyword-only argument c
    >>> call2(g)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument c
    >>> call2(m)
    Traceback (most recent call last):
    TypeError: m() needs keyword-only argument c
    """
    f(1,2)

def call3(f):
    """
    >>> call3(b)
    1 2 3
    >>> call3(c)
    1 2 3
    >>> call3(d)
    Traceback (most recent call last):
    TypeError: d() takes exactly 2 positional arguments (3 given)
    >>> call3(e)
    1 2 3 []
    >>> call3(f)
    Traceback (most recent call last):
    TypeError: f() takes exactly 2 positional arguments (3 given)
    >>> call3(g)
    Traceback (most recent call last):
    TypeError: g() takes exactly 2 positional arguments (3 given)
    >>> call3(h)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    >>> call3(k)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    >>> call3(m)
    Traceback (most recent call last):
    TypeError: m() takes at most 2 positional arguments (3 given)
    """
    f(1,2,3)

def call4(f):
    """
    >>> call4(b)
    Traceback (most recent call last):
    TypeError: b() takes exactly 3 positional arguments (4 given)
    >>> call4(c)
    Traceback (most recent call last):
    TypeError: c() takes at most 3 positional arguments (4 given)
    >>> call4(e)
    Traceback (most recent call last):
    TypeError: e() takes at most 3 positional arguments (4 given)
    """
    f(1,2,3,4)

def call2c(f):
    """
    >>> call2c(d)
    1 2 1
    >>> call2c(e)
    1 2 1 []
    >>> call2c(f)
    1 2 1 42
    >>> call2c(g)
    Traceback (most recent call last):
    TypeError: g() needs keyword-only argument f
    >>> call2c(m)
    1 2 1
    """
    f(1,2, c=1)

def call2d(f):
    """
    >>> call2d(d)
    Traceback (most recent call last):
    TypeError: d() got an unexpected keyword argument 'd'
    >>> call2d(e)
    1 2 88 [('d', 1)]
    >>> call2d(k)
    Traceback (most recent call last):
    TypeError: k() needs keyword-only argument f
    """
    f(1,2, d=1)

def call3d(f):
    """
    >>> call3d(h)
    Traceback (most recent call last):
    TypeError: h() needs keyword-only argument c
    """
    f(1,2,3, d=1)

def call2cd(f):
    """
    >>> call2cd(f)
    1 2 1 2
    >>> call2cd(m)
    Traceback (most recent call last):
    TypeError: m() got an unexpected keyword argument 'd'
    """
    f(1,2, c=1, d=2)

def call2ce(f):
    """
    >>> call2ce(f)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'e'
    """
    f(1,2, c=1, e=2)

def call2cde(f):
    """
    >>> call2cde(e)
    1 2 1 [('d', 2), ('e', 3)]
    """
    f(1,2, c=1, d=2, e=3)

def call2cf(f):
    """
    >>> call2cf(g)
    1 2 1 42 17 2 []
    >>> call2cf(h)
    1 2 1 42 17 2 () []
    >>> call2cf(k)
    1 2 1 42 17 2 () []
    """
    f(1,2, c=1, f=2)

def call6cf(f):
    """
    >>> call6cf(h)
    1 2 1 42 17 2 (3, 4, 5, 6) []
    """
    f(1,2,3,4,5,6, c=1, f=2)

def call6df(f):
    """
    >>> call6df(k)
    1 2 3 1 17 2 (4, 5, 6) []
    """
    f(1,2,3,4,5,6, d=1, f=2)

def call2cfe(f):
    """
    >>> call2cfe(h)
    1 2 1 42 3 2 () []
    >>> call2cfe(k)
    1 2 1 42 3 2 () []
    """
    f(1,2, c=1, f=2, e=3)

def call2cefd(f):
    """
    >>> call2cefd(g)
    1 2 1 11 0 2 []
    """
    f(1,2, c=1, e=0, f=2, d=11)

def call2cfex(f):
    """
    >>> call2cfex(g)
    1 2 1 42 0 2 [('x', 25)]
    """
    f(1,2, c=1, f=2, e=0, x=25)

def call6argscfexy(f):
    args = (1,2,3,4,5,6)
    f(*args, c=1, f=2, e=3, x=25, y=11)

def call6cfexy(f):
    """
    >>> call6cfexy(h)
    1 2 1 42 3 2 (3, 4, 5, 6) [('x', 25), ('y', 11)]
    """
    f(1,2,3,4,5,6, c=1, f=2, e=3, x=25, y=11)

def call6dfexy(f):
    """
    >>> call6dfexy(k)
    1 2 3 1 3 2 (4, 5, 6) [('x', 25), ('y', 11)]
    """
    f(1,2,3,4,5,6, d=1, f=2, e=3, x=25, y=11)

# the called functions:
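# Signature variants exercised by the calls above: plain positional arguments,
# defaults, a bare "*" introducing keyword-only arguments (with and without
# defaults), "*args" before keyword-only arguments, and "**kwds" catch-alls.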

def b(a, b, c):
    print a,b,c

def c(a, b, c=1):
    print a,b,c

def d(a, b, *, c = 88):
    print a,b,c

def e(a, b, c = 88, **kwds):
    kwlist = list(kwds.items())
    kwlist.sort()
    print a,b,c, kwlist

def f(a, b, *, c, d = 42):
    print a,b,c,d

def g(a, b, *, c, d = 42, e = 17, f, **kwds):
    kwlist = list(kwds.items())
    kwlist.sort()
    print a,b,c,d,e,f, kwlist

def h(a, b, *args, c, d = 42, e = 17, f, **kwds):
    kwlist = list(kwds.items())
    kwlist.sort()
    print a,b,c,d,e,f, args, kwlist

def k(a, b, c=1, *args, d = 42, e = 17, f, **kwds):
    kwlist = list(kwds.items())
    kwlist.sort()
    print a,b,c,d,e,f, args, kwlist

def m(a, b=1, *, c):
    print a,b,c
Cython-0.26.1/tests/run/builtin_py3.pyx0000664000175000017500000000133112542002467020601 0ustar  stefanstefan00000000000000# tag: py3

__doc__ = u"""
>>> test_xrange()
0
1
2
>>> test_range()
0
1
2

>>> test_long() == 12
True
>>> test_int() == 12
True
"""

# the builtins 'xrange' and 'long' are not available in Py3, but they
# can safely be replaced by 'range' and 'int' on that platform

import sys

IS_PY3 = sys.version_info[0] >= 3

def test_xrange():
    r = xrange(3)
    assert type(r) is xrange
    for i in r:
        print i

def test_range():
    r = range(3)
    assert (type(r) is range) if IS_PY3 else (type(r) is list)
    for i in r:
        print i

def test_long():
    long_val = long(12)
    assert type(long_val) is long
    return long_val

def test_int():
    int_val = int(12)
    assert type(int_val) is int
    return int_val
Cython-0.26.1/tests/run/mod__name__.pyx0000664000175000017500000000034112542002467020554 0ustar  stefanstefan00000000000000
module_name = __name__

def in_module():
    """
    >>> print(in_module())
    mod__name__
    """
    return module_name

def in_function():
    """
    >>> print(in_function())
    mod__name__
    """
    return __name__
Cython-0.26.1/tests/run/arrayassign.pyx0000664000175000017500000002247612542002467020700 0ustar  stefanstefan00000000000000# mode: run

cimport cython
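# Assigning a Python sequence (or another C array / pointer) to a fixed-size
# C array copies it element by element; slice assignments of unknown length
# are checked at runtime and raise ValueError on a size mismatch.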

def test_literal_list():
    """
    >>> test_literal_list()
    (1, 2, 3, 4, 5)
    """
    cdef int a[5]
    a = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])

def test_literal_list_multiplied():
    """
    >>> test_literal_list_multiplied()
    (1, 2, 1, 2, 1, 2)
    """
    cdef int a[6]
    a = [1,2] * 3
    return (a[0], a[1], a[2], a[3], a[4], a[5])

def test_literal_list_slice_all():
    """
    >>> test_literal_list_slice_all()
    (1, 2, 3, 4, 5)
    """
    cdef int a[5] # = [5,4,3,2,1]
    a[:] = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])

def test_literal_list_slice_start():
    """
    >>> test_literal_list_slice_start()
    (1, 2, 3, 4, 5)
    """
    cdef int a[7] # = [7,6,5,4,3,2,1]
    a[2:] = [1,2,3,4,5]
    return (a[2], a[3], a[4], a[5], a[6])

def test_literal_list_slice_end():
    """
    >>> test_literal_list_slice_end()
    (1, 2, 3, 4, 5)
    """
    cdef int a[7] # = [7,6,5,4,3,2,1]
    a[:5] = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])

def test_literal_list_slice_start_end():
    """
    >>> test_literal_list_slice_start_end()
    (1, 2, 3, 4, 5)
    """
    cdef int a[9] # = [9,8,7,6,5,4,3,2,1]
    a[2:7] = [1,2,3,4,5]
    return (a[2], a[3], a[4], a[5], a[6])

def test_literal_list_slice_start_param(s):
    """
    >>> test_literal_list_slice_start_param(4)
    (1, 2, 3, 4, 5)
    >>> test_literal_list_slice_start_param(3)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 6
    >>> test_literal_list_slice_start_param(5)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 4
    """
    cdef int a[9] # = [9,8,7,6,5,4,3,2,1]
    a[s:] = [1,2,3,4,5]
    return (a[4], a[5], a[6], a[7], a[8])
#    return a[s:]

def test_literal_list_slice_end_param(e):
    """
    >>> test_literal_list_slice_end_param(5)
    (1, 2, 3, 4, 5)
    >>> test_literal_list_slice_end_param(4)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 4
    >>> test_literal_list_slice_end_param(6)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 6
    """
    cdef int a[9] # = [9,8,7,6,5,4,3,2,1]
    a[:e] = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])
#    return a[:e]

def test_literal_list_slice_start_end_param(s,e):
    """
    >>> test_literal_list_slice_start_end_param(2,7)
    (1, 2, 3, 4, 5)
    >>> test_literal_list_slice_start_end_param(3,7)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 4
    >>> test_literal_list_slice_start_end_param(1,7)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 6
    >>> test_literal_list_slice_start_end_param(2,6)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 4
    >>> test_literal_list_slice_start_end_param(2,8)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 6
    >>> test_literal_list_slice_start_end_param(3,6)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 3
    >>> test_literal_list_slice_start_end_param(1,8)
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 5, got 7
    """
    cdef int a[9] # = [9,8,7,6,5,4,3,2,1]
    a[s:e] = [1,2,3,4,5]
    return (a[2], a[3], a[4], a[5], a[6])
#    return a[s:e]

def test_ptr_literal_list_slice_all():
    """
    >>> test_ptr_literal_list_slice_all()
    (1, 2, 3, 4, 5)
    """
    cdef int *a = [6,5,4,3,2]
    a[:] = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])

def test_ptr_literal_list_slice_start():
    """
    >>> test_ptr_literal_list_slice_start()
    (1, 2, 3, 4, 5)
    """
    cdef int *a = [6,5,4,3,2,1]
    a[1:] = [1,2,3,4,5]
    return (a[1], a[2], a[3], a[4], a[5])

def test_ptr_literal_list_slice_end():
    """
    >>> test_ptr_literal_list_slice_end()
    (1, 2, 3, 4, 5)
    """
    cdef int *a = [6,5,4,3,2,1]
    a[:5] = [1,2,3,4,5]
    return (a[0], a[1], a[2], a[3], a[4])


@cython.test_assert_path_exists(
    '//ReturnStatNode//CoerceToPyTypeNode'
)
def test_starred_from_array():
    """
    >>> test_starred_from_array()
    (1, [2, 3, 4], 5)
    """
    cdef int[5] a
    a[0] = 1
    a[1] = 2
    a[2] = 3
    a[3] = 4
    a[4] = 5
    x, *y, z = a
    return x, y, z


@cython.test_fail_if_path_exists(
    '//ParallelAssignmentNode//CoerceToPyTypeNode',
    '//ParallelAssignmentNode//CoerceFromPyTypeNode',
)
@cython.test_assert_path_exists(
    '//ParallelAssignmentNode',
    '//ReturnStatNode//CoerceToPyTypeNode'
)
def test_multiple_from_array():
    """
    >>> test_multiple_from_array()
    (1, 2, 3)
    """
    cdef int[3] a
    a[0] = 1
    a[1] = 2
    a[2] = 3
    x, y, z = a
    return x, y, z


@cython.test_fail_if_path_exists(
    '//ParallelAssignmentNode//CoerceToPyTypeNode'
)
@cython.test_assert_path_exists(
    '//ParallelAssignmentNode',
    '//ReturnStatNode//CoerceToPyTypeNode'
)
def test_multiple_from_array_full_slice():
    """
    >>> test_multiple_from_array_full_slice()
    (1, 2, 3)
    """
    cdef int[3] a
    a[0] = 1
    a[1] = 2
    a[2] = 3
    x, y, z = a[:]
    return x, y, z


@cython.test_fail_if_path_exists(
    '//ParallelAssignmentNode//CoerceToPyTypeNode'
)
@cython.test_assert_path_exists(
    '//ParallelAssignmentNode',
    '//ReturnStatNode//CoerceToPyTypeNode'
)
def test_multiple_from_slice():
    """
    >>> test_multiple_from_slice()
    (5, 4, 3)
    """
    cdef int *a = [6,5,4,3,2,1]
    x, y, z = a[1:4]
    return x, y, z


def test_slice_from_multiple():
    """
    >>> test_slice_from_multiple()
    (6, -1, -2, -3, 2, 1)
    """
    cdef int *a = [6,5,4,3,2,1]
    a[1:4] = -1, -2, -3
    return a[0], a[1], a[2], a[3], a[4], a[5]

def test_literal_tuple():
    """
    >>> test_literal_tuple()
    (1, 2, 3, 4, 5)
    """
    cdef int a[5]
    a = (1,2,3,4,5)
    return (a[0], a[1], a[2], a[3], a[4])

def test_list(list l):
    """
    >>> test_list([1, 2, 3, 4, 5])
    (1, 2, 3, 4, 5)
    """
    cdef int a[5]
    a[:] = l
    return (a[0], a[1], a[2], a[3], a[4])


def assign_all_from_pointer():
    """
    >>> assign_all_from_pointer()
    (1, 2, 3, 4, 5)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a = v
    return (a[0], a[1], a[2], a[3], a[4])


def assign_full_from_pointer():
    """
    >>> assign_full_from_pointer()
    (1, 2, 3, 4, 5)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a[:] = v
    return (a[0], a[1], a[2], a[3], a[4])


def assign_slice_end_from_pointer():
    """
    >>> assign_slice_end_from_pointer()
    (1, 2, 3, 4, 123)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a[4] = 123
    a[:4] = v
    return (a[0], a[1], a[2], a[3], a[4])


def assign_slice_start_from_pointer():
    """
    >>> assign_slice_start_from_pointer()
    (123, 234, 1, 2, 3)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a[0] = 123
    a[1] = 234
    a[2:] = v
    return (a[0], a[1], a[2], a[3], a[4])


def assign_slice_start_end_from_pointer():
    """
    >>> assign_slice_start_end_from_pointer()
    (123, 234, 1, 2, 345)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a[0] = 123
    a[1] = 234
    a[4] = 345
    a[2:4] = v
    return (a[0], a[1], a[2], a[3], a[4])


'''
# FIXME: make this work:
def assign_slice_start_end_from_sliced_pointer():
    """
    >>> assign_slice_start_end_from_sliced_pointer()
    (123, 234, 3, 4, 345)
    """
    cdef int *v = [1, 2, 3, 4, 5]
    cdef int[5] a
    a[0] = 123
    a[1] = 234
    a[4] = 345
    a[2:4] = v[2:4]
    return (a[0], a[1], a[2], a[3], a[4])


def assign_from_longer_array_slice():
    """
    >>> assign_from_longer_array_slice()
    [3, 4, 5]
    """
    cdef int[5] a
    cdef int[3] b
    a[0] = 1
    a[1] = 2
    a[2] = 3
    a[3] = 4
    a[4] = 5
    b[0] = 11
    b[1] = 12
    b[2] = 13
    b = a[2:]
    return b
'''


def assign_slice_from_shorter_array():
    """
    >>> assign_slice_from_shorter_array()
    [1, 11, 12, 13, 5]
    """
    cdef int[5] a
    cdef int[3] b
    a[0] = 1
    a[1] = 2
    a[2] = 3
    a[3] = 4
    a[4] = 5
    b[0] = 11
    b[1] = 12
    b[2] = 13
    a[1:4] = b
    return a


cdef enum:
    SIZE = 2

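# Array type whose length is a named C constant rather than a plain literal.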
ctypedef int[SIZE] int_array_dyn


def assign_ptr_to_unknown_csize():
    """
    >>> assign_ptr_to_unknown_csize()
    [1, 2]
    """
    cdef int* v = [1, 2, 3, 4, 5]
    cdef int_array_dyn d
    d = v
    return d


def assign_to_wrong_csize():
    """
    >>> assign_to_wrong_csize()
    Traceback (most recent call last):
    ValueError: Assignment to slice of wrong length, expected 3, got 2
    """
    cdef int_array_dyn d
    cdef int v[3]
    v[0] = 1
    v[1] = 2
    v[2] = 3
    d = v
    return d


def assign_full_array_slice_to_array():
    """
    >>> assign_full_array_slice_to_array()
    [1, 2, 3]
    """
    cdef int[3] x, y
    x[0] = 1
    x[1] = 2
    x[2] = 3
    y = x[:]
    return y


cdef class ArrayOwner:
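    # A readonly C array attribute; __init__ below fills it element-wise from a tuple.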
    cdef readonly int[3] array

    def __init__(self, a, b, c):
        self.array = (a, b, c)


def assign_from_array_attribute():
    """
    >>> assign_from_array_attribute()
    [1, 2, 3]
    """
    cdef int[3] v
    a = ArrayOwner(1, 2, 3)
    v = a.array[:]
    return v
Cython-0.26.1/tests/run/complex_numbers_c99_T398.pyx
# ticket: 398

cdef extern from "complex_numbers_c99_T398.h": pass
include "complex_numbers_T305.pyx"
Cython-0.26.1/tests/run/closure_inside_cdef_T554.pyx
# mode: run
# tag: closures
# ticket: 554

def call_f(x):
    """
    >>> call_f(2)
    4
    """
    return f(x)


cdef f(x):                # def  here => works fine
   def g(y): return y*x   # cdef here => compile error
   return g(x)            # faults@ INCREF(.*cur_scope->.*v_x


def closure_in_void():
    """
    >>> genex = closure_in_void()
    >>> list(genex)
    ['a', 'b', 'c']
    """
    l = []
    add_gen(l)
    return l[0]


cdef void add_gen(l):
    x = "abc"
    l.append((c for c in x))


def closure_in_int():
    """
    >>> genex = closure_in_int()
    >>> list(genex)
    ['a', 'b', 'c']
    """
    l = []
    add_gen_int(l)
    return l[0]


cdef int add_gen_int(l):
    x = "abc"
    l.append((c for c in x))
Cython-0.26.1/tests/run/cythonscope.pyx
cimport cython

from cython cimport _testscope as tester
from cython cimport TestClass, _testclass_new as TestClass_New
from cython cimport test_call, test_dep
from cython.view cimport _testscope as viewtester

from cpython cimport PyObject

cdef extern from *:
    # TestClass stuff
    cdef struct __pyx_TestClass_obj:
        int value

    # Type pointer
    cdef PyObject *TestClassType "__pyx_TestClass_type"

    # This is a cdef function
    cdef __pyx_TestClass_New(int)

    # These are methods and therefore have no prototypes
    cdef __pyx_TestClass_cdef_method(TestClass self, int value)
    cdef __pyx_TestClass_cpdef_method(TestClass self, int value, int skip_dispatch)
    cdef __pyx_TestClass_def_method(object self, object value)

    cdef __pyx_TestClass_cdef_cname(TestClass self, int value)
    cdef __pyx_TestClass_cpdef_cname(TestClass self, int value, int skip_dispatch)
    cdef __pyx_TestClass_def_cname(object self, object value)

    cdef __pyx_test_dep(object)
    cdef __pyx_test_call_other_cy_util(object)


def test_cdef_cython_utility():
    """
    >>> test_cdef_cython_utility()
    hello from cython scope, value=4
    hello from cython.view scope, value=4
    hello from cython scope, value=3
    hello from cython.view scope, value=3
    """
    print cython._testscope(4)
    print cython.view._testscope(4)
    print tester(3)
    print viewtester(3)

def test_cdef_class_cython_utility():
    """
    >>> test_cdef_class_cython_utility()
    7
    14
    TestClass(20)
    TestClass(50)
    """
    cdef __pyx_TestClass_obj *objstruct

    obj =  TestClass_New(7)
    objstruct = <__pyx_TestClass_obj *> obj
    print objstruct.value

    obj =  __pyx_TestClass_New(14)
    objstruct = <__pyx_TestClass_obj *> obj
    print objstruct.value

    print (<object>TestClassType)(20)
    print TestClass(50)

def test_extclass_c_methods():
    """
    >>> test_extclass_c_methods()
    Hello from cdef_method 1
    Hello from cpdef_method 2
    Hello from def_method 3
    Hello from cdef_cname_method 4
    Hello from cpdef_cname_method 5
    Hello from def_cname_method 6
    Hello from cdef_method 1
    Hello from cpdef_method 2
    Hello from def_method 3
    Hello from cdef_cname_method 4
    Hello from cpdef_cname_method 5
    Hello from def_cname_method 6
    """
    cdef TestClass obj1 = TestClass(11)
    cdef TestClass obj2 = TestClass_New(22)

    __pyx_TestClass_cdef_method(obj1, 1)
    __pyx_TestClass_cpdef_method(obj1, 2, True)
    __pyx_TestClass_def_method(obj1, 3)

    __pyx_TestClass_cdef_cname(obj1, 4)
    __pyx_TestClass_cpdef_cname(obj1, 5, True)
    __pyx_TestClass_def_cname(obj1, 6)

    __pyx_TestClass_cdef_method(obj2, 1)
    __pyx_TestClass_cpdef_method(obj2, 2, True)
    __pyx_TestClass_def_method(obj2, 3)

    __pyx_TestClass_cdef_cname(obj2, 4)
    __pyx_TestClass_cpdef_cname(obj2, 5, True)
    __pyx_TestClass_def_cname(obj2, 6)

def test_extclass_cython_methods():
    """
    >>> test_extclass_cython_methods()
    Hello from cdef_method 1
    Hello from cpdef_method 2
    Hello from def_method 3
    Hello from cdef_cname_method 4
    Hello from cpdef_cname_method 5
    Hello from def_cname_method 6
    Hello from cdef_method 1
    Hello from cpdef_method 2
    Hello from def_method 3
    Hello from cdef_cname_method 4
    Hello from cpdef_cname_method 5
    Hello from def_cname_method 6
    """
    cdef TestClass obj1 = TestClass(11)
    cdef TestClass obj2 = TestClass_New(22)

    obj1.cdef_method(1)
    obj1.cpdef_method(2)
    obj1.def_method(3)
    obj1.cdef_cname_method(4)
    obj1.cpdef_cname_method(5)
    obj1.def_cname_method(6)

    obj2.cdef_method(1)
    obj2.cpdef_method(2)
    obj2.def_method(3)
    obj2.cdef_cname_method(4)
    obj2.cpdef_cname_method(5)
    obj2.def_cname_method(6)

def test_cython_utility_dep():
    """
    >>> test_cython_utility_dep()
    test_dep first
    test_call
    test_dep second
    test_dep third
    test_call
    test_dep fourth
    """
    test_dep('first')
    test_call('second')
    __pyx_test_dep('third')
    __pyx_test_call_other_cy_util('fourth')

def viewobjs():
    """
    >>> viewobjs()
    <strided and direct or indirect>
    <strided and direct>
    <strided and indirect>
    <contiguous and direct>
    <contiguous and indirect>
    """
    print cython.view.generic
    print cython.view.strided
    print cython.view.indirect
    #print cython.view.generic_contiguous
    print cython.view.contiguous
    print cython.view.indirect_contiguous
Cython-0.26.1/tests/run/letnode_T766.pyx
# mode: run
# ticket: 766
# tag: letnode

def test_letnode_range(int n):
    """
    >>> [i() for i in test_letnode_range(5)]
    [0, 1, 2, 3, 4]
    """
    ret = []
    for i in range(n):
        def bar(x=i):
            return x
        ret.append(bar)
    return ret

def test_letnode_enumerate(a):
    """
    >>> [i() for i in test_letnode_enumerate("abc")]
    [0, 1, 2]
    """
    cdef int n
    ret = []
    for n, i in enumerate(a):
        def bar(x=n):
            return x
        ret.append(bar)
    return ret
Cython-0.26.1/tests/run/asyncio_generators.srctree
# mode: run
# tag: asyncio, pep492

"""
PYTHON setup.py build_ext -i
PYTHON test_from_import.py
PYTHON test_import.py
PYTHON test_async_def.py
PYTHON test_async_def_future.py
PYTHON test_all.py
"""

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)


######## test_from_import.py ########

import from_asyncio_import
import asyncio

def runloop(task):
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(task())
    assert 3 == result, result

runloop(from_asyncio_import.wait3)


######## test_import.py ########

import import_asyncio
import asyncio

def runloop(task):
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(task())
    assert 3 == result, result

runloop(import_asyncio.wait3)


######## test_async_def.py ########

import sys

ASYNCIO_SUPPORTS_COROUTINE = sys.version_info[:2] >= (3, 5)

if ASYNCIO_SUPPORTS_COROUTINE:
    import async_def
    import asyncio

    def runloop(task):
        loop = asyncio.get_event_loop()
        result = loop.run_until_complete(task())
        assert 3 == result, result

    runloop(async_def.wait3)


######## test_async_def_future.py ########

import sys

ASYNCIO_SUPPORTS_COROUTINE = sys.version_info[:2] >= (3, 5)

if ASYNCIO_SUPPORTS_COROUTINE:
    from async_def_future import await_future
    import asyncio

    def runloop():
        loop = asyncio.get_event_loop()
        task, events, expected = await_future(loop)
        result = loop.run_until_complete(task())
        assert events == expected, 'expected %s, got %s' % (expected, events)

    runloop()


######## test_all.py ########

import sys
import asyncio

ASYNCIO_SUPPORTS_COROUTINE = sys.version_info[:2] >= (3, 5)

def runloop(task):
    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(task())
    assert 3 == result, result

import import_asyncio
runloop(import_asyncio.wait3)       # 1a)
import from_asyncio_import
runloop(from_asyncio_import.wait3)  # 1b)

import async_def
if ASYNCIO_SUPPORTS_COROUTINE:
    runloop(async_def.wait3)        # 1c)

runloop(from_asyncio_import.wait3)  # 2a)
runloop(import_asyncio.wait3)       # 2b)
if ASYNCIO_SUPPORTS_COROUTINE:
    runloop(async_def.wait3)        # 2c)

runloop(from_asyncio_import.wait3)  # 3a)
runloop(import_asyncio.wait3)       # 3b)
if ASYNCIO_SUPPORTS_COROUTINE:
    runloop(async_def.wait3)        # 3c)

try:
    from collections.abc import Generator
except ImportError:
    try:
        from collections import Generator
    except ImportError:
        assert sys.version_info < (3,5), "Python 3.5+ should have collections.abc.Generator"
        Generator = object  # easy win :)

assert isinstance(from_asyncio_import.wait3(), Generator)
assert isinstance(import_asyncio.wait3(), Generator)
assert isinstance((lambda:(yield))(), Generator)

try:
    from collections.abc import Awaitable
except ImportError:
    try:
        from collections import Awaitable
    except ImportError:
        assert sys.version_info < (3,5), "Python 3.5+ should have collections.abc.Awaitable"
        Awaitable = object  # easy win :)

assert isinstance(async_def.wait3(), Awaitable)

try:
    from collections.abc import Coroutine
except ImportError:
    try:
        from collections import Coroutine
    except ImportError:
        assert sys.version_info < (3,5), "Python 3.5+ should have collections.abc.Coroutine"
        Coroutine = object  # easy win :)

assert isinstance(async_def.wait3(), Coroutine)


######## import_asyncio.pyx ########
# cython: binding=True

try:
    from types import coroutine as types_coroutine
except ImportError:
    types_coroutine = lambda f:f

import asyncio

@asyncio.coroutine
@types_coroutine
def wait3():
    counter = 0
    for i in range(3):
        print(counter)
        yield from asyncio.sleep(0.01)
        counter += 1
    return counter


######## from_asyncio_import.pyx ########
# cython: binding=True

try:
    from types import coroutine as types_coroutine
except ImportError:
    types_coroutine = lambda f:f

from asyncio import coroutine, sleep

@coroutine
@types_coroutine
def wait3():
    counter = 0
    for i in range(3):
        print(counter)
        yield from sleep(0.01)
        counter += 1
    return counter


######## async_def.pyx ########
# cython: binding=True

import asyncio

async def wait3():
    counter = 0
    for i in range(3):
        print(counter)
        await asyncio.sleep(0.01)
        counter += 1
    return counter


######## async_def_future.pyx ########
# cython: binding=True

import asyncio

def await_future(loop):
    events = []
    async def worker():
        fut = asyncio.Future()

        def setval():
            events.append('setval')
            fut.set_result(123)

        events.append('setup')
        loop.call_later(0.2, setval)
        events.append(await fut)

    async def test():
        await worker()

    expected = ['setup', 'setval', 123]
    return test, events, expected
Cython-0.26.1/tests/run/charptr_from_temp.pyx
# mode: run

from cpython.version cimport PY_MAJOR_VERSION

cdef bint IS_PY2 = PY_MAJOR_VERSION == 2


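# Return the char* as a native 'str' on both Python 2 and 3 so the doctests
# below can compare the result against the same literal.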
cdef cfunc1(char* s):
    if IS_PY2:
        return s
    else:
        return s.decode('ASCII')


cdef cfunc3(int x, char* s, object y):
    return cfunc1(s)


def test_one_arg_indexing(s):
    """
    >>> test_one_arg_indexing(b'xyz')
    'y'
    """
    cfunc1(s[0]) if IS_PY2 else cfunc1(s[:1])
    z = cfunc1(s[2]) if IS_PY2 else cfunc1(s[2:])
    assert z == 'z', repr(z)
    return cfunc1(s[1]) if IS_PY2 else cfunc1(s[1:2])


def test_more_args_indexing(s):
    """
    >>> test_more_args_indexing(b'xyz')
    'y'
    """
    cfunc3(1, s[0 if IS_PY2 else slice(0,1)], 6.5)
    z = cfunc3(2, s[2 if IS_PY2 else slice(2,None)], 'abc' * 2)
    assert z == 'z', repr(z)
    return cfunc3(3, s[1 if IS_PY2 else slice(1,2)], 1)


def test_one_arg_slicing(s):
    """
    >>> test_one_arg_slicing(b'xyz')
    'y'
    """
    cfunc1(s[:2])
    z = cfunc1(s[2:])
    assert z == 'z', repr(z)
    return cfunc1(s[1:2])


def test_more_args_slicing(s):
    """
    >>> test_more_args_slicing(b'xyz')
    'y'
    """
    cfunc3(1, s[:2], 'abc')
    z = cfunc3(123, s[2:], 5)
    assert z == 'z', repr(z)
    return cfunc3(2, s[1:2], 1.4)


def test_one_arg_adding(s):
    """
    >>> test_one_arg_adding(b'xyz')
    'abxyzqr'
    """
    return cfunc1(b"a" + b"b" + s + b"q" + b"r")


def test_more_args_adding(s):
    """
    >>> test_more_args_adding(b'xyz')
    'abxyzqr'
    """
    return cfunc3(1, b"a" + b"b" + s + b"q" + b"r", 'xyz%d' % 3)


cdef char* ret_charptr(char* s):
    return s


def test_charptr_and_charptr_func(char* s):
    """
    >>> test_charptr_and_charptr_func(b'abc') == b'abc'
    True
    """
    return s and ret_charptr(s)


def test_charptr_and_ucharptr(char* s):
    """
    >>> test_charptr_and_ucharptr(b'abc') == b'abc'
    True
    """
    return s and <unsigned char*>s
Cython-0.26.1/tests/run/funcexceptreturn.pyx
__doc__ = u"""
>>> import sys
>>> if not IS_PY3: sys.exc_clear()

>>> print(sys.exc_info()[0]) # 0
None
>>> exc = test_c()
>>> isinstance(exc, TestException) or exc
True
>>> print(sys.exc_info()[0]) # test_c()
None
"""

import sys

IS_PY3 = sys.version_info[0] >= 3

class TestException(Exception):
    pass

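# Returning the exception object from inside the except clause must not leave
# the exception set afterwards; the module doctest above checks sys.exc_info().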
def test_c():
    try:
        raise TestException
    except TestException, e:
        return e
Cython-0.26.1/tests/run/typedfieldbug_T303.pyx
# mode: run
# ticket: 303

__doc__ = """
>>> try: readonly()
... except (TypeError, AttributeError): pass
"""


cdef extern from "external_defs.h":
    ctypedef float DoubleTypedef
    ctypedef float LongDoubleTypedef

cdef public DoubleTypedef global_tdef
cdef public double global_double

cdef class MyClass:
    cdef readonly:
        double actual_double
        DoubleTypedef float_isreally_double
        LongDoubleTypedef float_isreally_longdouble

    def __init__(self):
        self.actual_double = 42.0
        self.float_isreally_double = 42.0
        self.float_isreally_longdouble = 42.0

def global_vars(x):
    """
    >>> global_vars(12.0)
    12.0 12.0
    """
    global global_tdef, global_double
    global_tdef = x
    global_double = x
    print global_tdef, global_double

def f():
    """
    >>> f()
    42.0
    42.0
    """
    cdef object c = MyClass()
    print c.actual_double
    print c.float_isreally_double

def longdouble_access():
    """
    >>> longdouble_access()
    42.0
    """
    cdef object c = MyClass()
    print c.float_isreally_longdouble


def readonly():
    cdef object c = MyClass()
    c.actual_double = 3
Cython-0.26.1/tests/run/bytearraymethods.pyx
import sys
IS_PY3 = sys.version_info[0] >= 3

cimport cython

b_a = bytearray(b'a')
b_b = bytearray(b'b')


'''   # disabled for now, enable when we consider it worth the code overhead

@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytearray_startswith(bytearray s, sub, start=None, stop=None):
    """
    >>> bytearray_startswith(b_a, b_a)
    True
    >>> bytearray_startswith(b_a+b_b, b_a)
    True
    >>> bytearray_startswith(b_a, b_b)
    False
    >>> bytearray_startswith(b_a+b_b, b_b)
    False
    >>> bytearray_startswith(b_a, (b_a, b_b))
    True
    >>> bytearray_startswith(b_a, b_a, 1)
    False
    >>> bytearray_startswith(b_a, b_a, 0, 0)
    False
    """

    if start is None:
      return s.startswith(sub)
    elif stop is None:
      return s.startswith(sub, start)
    else:
      return s.startswith(sub, start, stop)


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytearray_endswith(bytearray s, sub, start=None, stop=None):
    """
    >>> bytearray_endswith(b_a, b_a)
    True
    >>> bytearray_endswith(b_b+b_a, b_a)
    True
    >>> bytearray_endswith(b_a, b_b)
    False
    >>> bytearray_endswith(b_b+b_a, b_b)
    False
    >>> bytearray_endswith(b_a, (b_a, b_b))
    True
    >>> bytearray_endswith(b_a, b_a, 1)
    False
    >>> bytearray_endswith(b_a, b_a, 0, 0)
    False
    """

    if start is None:
      return s.endswith(sub)
    elif stop is None:
      return s.endswith(sub, start)
    else:
      return s.endswith(sub, start, stop)
'''


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytearray_decode(bytearray s, start=None, stop=None):
    """
    >>> s = b_a+b_b+b_a+b_a+b_b
    >>> print(bytearray_decode(s))
    abaab

    >>> print(bytearray_decode(s, 2))
    aab
    >>> print(bytearray_decode(s, -3))
    aab

    >>> print(bytearray_decode(s, None, 4))
    abaa
    >>> print(bytearray_decode(s, None, 400))
    abaab
    >>> print(bytearray_decode(s, None, -2))
    aba
    >>> print(bytearray_decode(s, None, -4))
    a
    >>> print(bytearray_decode(s, None, -5))
    <BLANKLINE>
    >>> print(bytearray_decode(s, None, -200))
    <BLANKLINE>

    >>> print(bytearray_decode(s, 2, 5))
    aab
    >>> print(bytearray_decode(s, 2, 500))
    aab
    >>> print(bytearray_decode(s, 2, -1))
    aa
    >>> print(bytearray_decode(s, 2, -3))
    <BLANKLINE>
    >>> print(bytearray_decode(s, 2, -300))
    <BLANKLINE>
    >>> print(bytearray_decode(s, -3, -1))
    aa
    >>> print(bytearray_decode(s, -300, 300))
    abaab
    >>> print(bytearray_decode(s, -300, -4))
    a
    >>> print(bytearray_decode(s, -300, -5))
    <BLANKLINE>
    >>> print(bytearray_decode(s, -300, -6))
    <BLANKLINE>
    >>> print(bytearray_decode(s, -300, -500))
    <BLANKLINE>

    >>> s[:'test']                       # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytearray_decode(s, 'test'))   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytearray_decode(s, None, 'test'))    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...
    >>> print(bytearray_decode(s, 'test', 'test'))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError:...

    >>> print(bytearray_decode(None))
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'decode'
    >>> print(bytearray_decode(None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytearray_decode(None, None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytearray_decode(None, 0, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    if start is None:
        if stop is None:
            return s.decode('utf8')
        else:
            return s[:stop].decode('utf8')
    elif stop is None:
        return s[start:].decode('utf8')
    else:
        return s[start:stop].decode('utf8')


@cython.test_assert_path_exists(
    "//PythonCapiCallNode")
@cython.test_fail_if_path_exists(
    "//SimpleCallNode")
def bytearray_decode_unbound_method(bytearray s, start=None, stop=None):
    """
    >>> s = b_a+b_b+b_a+b_a+b_b
    >>> print(bytearray_decode_unbound_method(s))
    abaab
    >>> print(bytearray_decode_unbound_method(s, 1))
    baab
    >>> print(bytearray_decode_unbound_method(s, None, 3))
    aba
    >>> print(bytearray_decode_unbound_method(s, 1, 4))
    baa

    >>> print(bytearray_decode_unbound_method(None))
    Traceback (most recent call last):
    TypeError: descriptor 'decode' requires a 'bytearray' object but received a 'NoneType'
    >>> print(bytearray_decode_unbound_method(None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytearray_decode_unbound_method(None, None, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    >>> print(bytearray_decode_unbound_method(None, 0, 1))
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not subscriptable
    """
    if start is None:
        if stop is None:
            return bytearray.decode(s, 'utf8')
        else:
            return bytearray.decode(s[:stop], 'utf8')
    elif stop is None:
        return bytearray.decode(s[start:], 'utf8')
    else:
        return bytearray.decode(s[start:stop], 'utf8')

@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists('//PythonCapiCallNode')
def bytearray_append(bytearray b, signed char c, int i, object o):
    """
    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), ord('z'))
    >>> print(b.decode('ascii'))
    abcX@xyz

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), 0)
    >>> print(b.decode('ascii')[:-1])
    abcX@xy
    >>> b[-1]
    0

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), ord('z') if IS_PY3 else b'z')
    >>> print(b.decode('ascii'))
    abcX@xyz

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), ord('\\xc3') if IS_PY3 else b'\\xc3')
    >>> print(b[:-1].decode('ascii'))
    abcX@xy
    >>> print('%x' % b[-1])
    c3

    >>> b = bytearray(b'abc')
    >>> try:
    ...     b = bytearray_append(b, ord('x'), ord('y'), b'zz')
    ... except (TypeError, ValueError): pass  # (Py3, Py2)
    ... else: print("FAIL")
    >>> print(b.decode('ascii'))
    abcX@xy

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, -1, ord('y'), ord('z'))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    >>> print(b.decode('ascii'))
    abcX@

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), -1, ord('z'))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    >>> print(b.decode('ascii'))
    abcX@x

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), 256, ord('z'))  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    >>> print(b.decode('ascii'))
    abcX@x

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), -1)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    >>> print(b.decode('ascii'))
    abcX@xy

    >>> b = bytearray(b'abc')
    >>> b = bytearray_append(b, ord('x'), ord('y'), 256)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    >>> print(b.decode('ascii'))
    abcX@xy
    """
    assert b.append('X') is None
    b.append(64)
    b.append(c)
    b.append(i)
    b.append(o)
    return b
Cython-0.26.1/tests/run/r_argdefault.pyx
def swallow(name = None, airspeed = None, coconuts = None):
    """
    >>> swallow(name = "Brian")
    This swallow is called Brian
    >>> swallow(airspeed = 42)
    This swallow is flying at 42 furlongs per fortnight
    >>> swallow(coconuts = 3)
    This swallow is carrying 3 coconuts
    """
    if name is not None:
        print u"This swallow is called", name
    if airspeed is not None:
        print u"This swallow is flying at", airspeed, u"furlongs per fortnight"
    if coconuts is not None:
        print u"This swallow is carrying", coconuts, u"coconuts"
Cython-0.26.1/tests/run/for_in_break_continue_T533.pyx
# ticket: 533

def for_in():
    """
    >>> for_in()
    CONTINUE -1
    CONTINUE 4
    BREAK 6
    6
    """
    i = -1
    for L in [[], range(5), range(10)]:
        for i in L:
            if i > 5:
                break
        else:
            print "CONTINUE", i
            continue
        print "BREAK", i
        break
    return i

def for_from():
    """
    >>> for_from()
    CONTINUE 0
    CONTINUE 5
    BREAK 6
    6
    """
    i = -1
    for L in [[], range(5), range(10)]:
        for i from 0 <= i < len(L):
            if i > 5:
                break
        else:
            print "CONTINUE", i
            continue
        print "BREAK", i
        break
    return i
Cython-0.26.1/tests/run/test_dictviews.pyx
import unittest
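# Exercises the Python 2 dict view methods (viewkeys / viewitems / viewvalues)
# in compiled code.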

class DictSetTest(unittest.TestCase):

    def test_constructors_not_callable(self):
        kt = type({}.viewkeys())
        self.assertRaises(TypeError, kt, {})
        self.assertRaises(TypeError, kt)
        it = type({}.viewitems())
        self.assertRaises(TypeError, it, {})
        self.assertRaises(TypeError, it)
        vt = type({}.viewvalues())
        self.assertRaises(TypeError, vt, {})
        self.assertRaises(TypeError, vt)

    def test_dict_keys(self):
        d = {1: 10, "a": "ABC"}
        keys = d.viewkeys()
        self.assertEqual(len(keys), 2)
        self.assertEqual(set(keys), set([1, "a"]))
        self.assertEqual(keys, set([1, "a"]))
        self.assertNotEqual(keys, set([1, "a", "b"]))
        self.assertNotEqual(keys, set([1, "b"]))
        self.assertNotEqual(keys, set([1]))
        self.assertNotEqual(keys, 42)
        self.assertIn(1, keys)
        self.assertIn("a", keys)
        self.assertNotIn(10, keys)
        self.assertNotIn("Z", keys)
        self.assertEqual(d.viewkeys(), d.viewkeys())
        e = {1: 11, "a": "def"}
        self.assertEqual(d.viewkeys(), e.viewkeys())
        del e["a"]
        self.assertNotEqual(d.viewkeys(), e.viewkeys())

    def test_dict_items(self):
        d = {1: 10, "a": "ABC"}
        items = d.viewitems()
        self.assertEqual(len(items), 2)
        self.assertEqual(set(items), set([(1, 10), ("a", "ABC")]))
        self.assertEqual(items, set([(1, 10), ("a", "ABC")]))
        self.assertNotEqual(items, set([(1, 10), ("a", "ABC"), "junk"]))
        self.assertNotEqual(items, set([(1, 10), ("a", "def")]))
        self.assertNotEqual(items, set([(1, 10)]))
        self.assertNotEqual(items, 42)
        self.assertIn((1, 10), items)
        self.assertIn(("a", "ABC"), items)
        self.assertNotIn((1, 11), items)
        self.assertNotIn(1, items)
        self.assertNotIn((), items)
        self.assertNotIn((1,), items)
        self.assertNotIn((1, 2, 3), items)
        self.assertEqual(d.viewitems(), d.viewitems())
        e = dict(d.copy())
        self.assertEqual(d.viewitems(), e.viewitems())
        e["a"] = "def"
        self.assertNotEqual(d.viewitems(), e.viewitems())

    def test_dict_mixed_keys_items(self):
        d = {(1, 1): 11, (2, 2): 22}
        e = {1: 1, 2: 2}
        self.assertEqual(d.viewkeys(), e.viewitems())
        self.assertNotEqual(d.viewitems(), e.viewkeys())

    def test_dict_values(self):
        d = {1: 10, "a": "ABC"}
        values = d.viewvalues()
        self.assertEqual(set(values), set([10, "ABC"]))
        self.assertEqual(len(values), 2)

    def test_dict_repr(self):
        d = {1: 10, "a": "ABC"}
        self.assertTrue(isinstance(repr(d), str))
        r = repr(d.viewitems())
        self.assertTrue(isinstance(r, str))
        self.assertTrue(r == "dict_items([('a', 'ABC'), (1, 10)])" or
                        r == "dict_items([(1, 10), ('a', 'ABC')])")
        r = repr(d.viewkeys())
        self.assertTrue(isinstance(r, str))
        self.assertTrue(r == "dict_keys(['a', 1])" or
                        r == "dict_keys([1, 'a'])")
        r = repr(d.viewvalues())
        self.assertTrue(isinstance(r, str))
        self.assertTrue(r == "dict_values(['ABC', 10])" or
                        r == "dict_values([10, 'ABC'])")

    def test_keys_set_operations(self):
        d1 = {'a': 1, 'b': 2}
        d2 = {'b': 3, 'c': 2}
        d3 = {'d': 4, 'e': 5}
        self.assertEqual(d1.viewkeys() & d1.viewkeys(), {'a', 'b'})
        self.assertEqual(d1.viewkeys() & d2.viewkeys(), {'b'})
        self.assertEqual(d1.viewkeys() & d3.viewkeys(), set())
        self.assertEqual(d1.viewkeys() & set(d1.viewkeys()), {'a', 'b'})
        self.assertEqual(d1.viewkeys() & set(d2.viewkeys()), {'b'})
        self.assertEqual(d1.viewkeys() & set(d3.viewkeys()), set())

        self.assertEqual(d1.viewkeys() | d1.viewkeys(), {'a', 'b'})
        self.assertEqual(d1.viewkeys() | d2.viewkeys(), {'a', 'b', 'c'})
        self.assertEqual(d1.viewkeys() | d3.viewkeys(), {'a', 'b', 'd', 'e'})
        self.assertEqual(d1.viewkeys() | set(d1.viewkeys()), {'a', 'b'})
        self.assertEqual(d1.viewkeys() | set(d2.viewkeys()), {'a', 'b', 'c'})
        self.assertEqual(d1.viewkeys() | set(d3.viewkeys()),
                         {'a', 'b', 'd', 'e'})

        self.assertEqual(d1.viewkeys() ^ d1.viewkeys(), set())
        self.assertEqual(d1.viewkeys() ^ d2.viewkeys(), {'a', 'c'})
        self.assertEqual(d1.viewkeys() ^ d3.viewkeys(), {'a', 'b', 'd', 'e'})
        self.assertEqual(d1.viewkeys() ^ set(d1.viewkeys()), set())
        self.assertEqual(d1.viewkeys() ^ set(d2.viewkeys()), {'a', 'c'})
        self.assertEqual(d1.viewkeys() ^ set(d3.viewkeys()),
                         {'a', 'b', 'd', 'e'})

    def test_items_set_operations(self):
        d1 = {'a': 1, 'b': 2}
        d2 = {'a': 2, 'b': 2}
        d3 = {'d': 4, 'e': 5}
        self.assertEqual(
            d1.viewitems() & d1.viewitems(), {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() & d2.viewitems(), {('b', 2)})
        self.assertEqual(d1.viewitems() & d3.viewitems(), set())
        self.assertEqual(d1.viewitems() & set(d1.viewitems()),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() & set(d2.viewitems()), {('b', 2)})
        self.assertEqual(d1.viewitems() & set(d3.viewitems()), set())

        self.assertEqual(d1.viewitems() | d1.viewitems(),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() | d2.viewitems(),
                         {('a', 1), ('a', 2), ('b', 2)})
        self.assertEqual(d1.viewitems() | d3.viewitems(),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})
        self.assertEqual(d1.viewitems() | set(d1.viewitems()),
                         {('a', 1), ('b', 2)})
        self.assertEqual(d1.viewitems() | set(d2.viewitems()),
                         {('a', 1), ('a', 2), ('b', 2)})
        self.assertEqual(d1.viewitems() | set(d3.viewitems()),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})

        self.assertEqual(d1.viewitems() ^ d1.viewitems(), set())
        self.assertEqual(d1.viewitems() ^ d2.viewitems(),
                         {('a', 1), ('a', 2)})
        self.assertEqual(d1.viewitems() ^ d3.viewitems(),
                         {('a', 1), ('b', 2), ('d', 4), ('e', 5)})




def test_main():
    try:
        from test import test_support as support
    except ImportError:
        from test import support
    support.run_unittest(DictSetTest)

if __name__ == "__main__":
    test_main()
Cython-0.26.1/tests/run/testinclude.pxi
# this will be included

D = 2
Cython-0.26.1/tests/run/for_decrement.pyx
"""
>>> range_loop_indices()
** Calculating step **
(9, 9, 8, 1, 2)
>>> from_loop_indices()
** Calculating step **
(10, 10, 0)
"""

cdef int get_step():
    """
    This should only be called once, when used in range().
    """
    print u"** Calculating step **"
    return 2

def range_loop_indices():
    """
    Optimized integer for loops using range() should follow Python behavior,
    and leave the index variable with the last value of the range.
    """
    cdef int i, j, k=0, l=10, m=10
    for i in range(10): pass
    for j in range(2,10): pass
    for k in range(0,10,get_step()): pass
    for l in range(10,0,-1): pass
    for m in range(10,0,-2): pass
    return i, j, k, l, m

def from_loop_indices():
    """
    for-from-loops should follow C behavior, and leave the index variable
    incremented one step after the last iteration.
    """
    cdef int i, j, k
    for i from 0 <= i < 5+5 by get_step(): pass
    for j from 0 <= j < 10: pass
    for k from 10 > k > 0: pass
    return i, j, k
Cython-0.26.1/tests/run/cpp_iterators_simple.h
class DoublePointerIter {
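// begin()/end() return raw double*, which is all Cython's C++ for-in loop
// support needs to iterate over the half-open range [start_, start_ + len_).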
public:
    DoublePointerIter(double* start, int len) : start_(start), len_(len) { }
    double* begin() { return start_; }
    double* end() { return start_ + len_; }
private:
    double* start_;
    int len_;
};

Cython-0.26.1/tests/run/cpp_const_method.pyx
# mode: run
# tag: cpp, werror
# cython: experimental_cpp_class_def=True

from libcpp.vector cimport vector

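# A small C++ template class defined inline in Cython; get() is declared const
# so it can be called through the const objects, references and pointers below.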
cdef cppclass Wrapper[T]:
    T value
    __init__(T &value):
        this.value = value
    void set(T &value):
        this.value = value
    T get() const:
        return this.value


def test_const_get(int x):
    """
    >>> test_const_get(10)
    10
    """
    cdef const Wrapper[int] *wrapper = new Wrapper[int](x)
    try:
        return const_get(wrapper[0])
    finally:
        del wrapper

cdef int const_get(const Wrapper[int] wrapper):
    return wrapper.get()

def test_const_ref_get(int x):
    """
    >>> test_const_ref_get(100)
    100
    """
    cdef const Wrapper[int] *wrapper = new Wrapper[int](x)
    try:
        return const_ref_get(wrapper[0])
    finally:
        del wrapper

cdef int const_ref_get(const Wrapper[int] &wrapper):
    return wrapper.get()

def test_const_pointer_get(int x):
    """
    >>> test_const_pointer_get(1000)
    1000
    """
    cdef Wrapper[int] *wrapper = new Wrapper[int](x)
    cdef const Wrapper[int] *const_wrapper = wrapper
    try:
        return const_wrapper.get()
    finally:
        del wrapper


# TODO: parse vector[Wrapper[int]*]
ctypedef Wrapper[int] wrapInt

def test_vector_members(py_a, py_b):
    """
    >>> test_vector_members([1, 2, 3], [4,5, 6])
    ([1, 2, 3], 4)
    """
    cdef Wrapper[int] *value
    cdef const Wrapper[int] *const_value
    cdef vector[const Wrapper[int]*] a
    cdef vector[wrapInt*] b
    for x in py_a:
        a.push_back(new Wrapper[int](x))
    for x in py_b:
        b.push_back(new Wrapper[int](x))
    try:
        return vector_members(a, b)
    finally:
        for const_value in a:
            del const_value
        for value in b:
            del value

cdef vector_members(vector[const Wrapper[int]*] a, const vector[wrapInt*] b):
    # TODO: Cython-level error.
    # b[0].set(100)
    
    # TODO: const_iterator
    return [x.get() for x in a], b[0].get()
Cython-0.26.1/tests/run/tp_new_T454.pyx
# ticket: 454

cimport cython

cdef class TypeWithFactory:
    @cython.test_assert_path_exists('//PythonCapiCallNode')
    @cython.test_fail_if_path_exists('//SimpleCallNode/AttributeNode')
    @classmethod
    def new(cls):
        return cls.__new__(cls)

def make_new_factory():
    """
    >>> isinstance(make_new_factory(), TypeWithFactory)
    True
    """
    return TypeWithFactory.new()
Cython-0.26.1/tests/run/isinstance.pyx
cimport cython
from cpython.bool cimport bool

cdef class A:
    pass


a_as_obj = A


@cython.test_assert_path_exists('//SimpleCallNode//SimpleCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode//PythonCapiCallNode',
                                 '//PythonCapiCallNode//SimpleCallNode')
def test_non_optimised():
    """
    >>> test_non_optimised()
    True
    """
    # Non-optimized
    cdef object foo = A
    assert isinstance(A(), foo)
    return True


@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//PythonCapiCallNode//SimpleCallNode',
                                '//PythonCapiFunctionNode[@cname = "PyType_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyInt_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyFloat_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyBytes_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyUnicode_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyTuple_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyList_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyDict_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PySet_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PySlice_Check"]',
                                '//PythonCapiFunctionNode[@cname = "PyComplex_Check"]')
@cython.test_fail_if_path_exists('//SimpleCallNode//SimpleCallNode',
                                 '//SimpleCallNode//PythonCapiCallNode')
def test_optimised():
    """
    >>> test_optimised()
    True
    """
    # Optimized tests.
    cdef object new_type = type('a',(),{})
    assert isinstance(type('a',(),{}), type)
    assert isinstance(new_type, type)

    cdef object boolval = True
    assert isinstance(boolval, bool)
    assert isinstance(True, bool)

    cdef object intval = int()
    assert isinstance(intval, int)
    assert isinstance(int(), int)

    cdef object longval = long()
    assert isinstance(longval, long)
    assert isinstance(long(), long)

    cdef object floatval = float()
    assert isinstance(floatval, float)
    assert isinstance(float(), float)

    cdef object bytesval = bytes()
    assert isinstance(bytesval, bytes)
    assert isinstance(bytes(), bytes)

    cdef object strval = str()
    assert isinstance(strval, str)
    assert isinstance(str(), str)

    cdef object unicodeval = unicode()
    assert isinstance(unicodeval, unicode)
    assert isinstance(unicode(), unicode)

    cdef object tupleval = tuple()
    assert isinstance(tupleval, tuple)
    assert isinstance(tuple(), tuple)

    cdef object listval = list()
    assert isinstance(listval, list)
    assert isinstance(list(), list)

    cdef object dictval = dict()
    assert isinstance(dictval, dict)
    assert isinstance(dict(), dict)

    cdef object setval = set()
    assert isinstance(setval, set)
    assert isinstance(set(), set)

    cdef object sliceval = slice(0)
    assert isinstance(sliceval, slice)
    assert isinstance(slice(0), slice)

    cdef object complexval = complex()
    assert isinstance(complexval, complex)
    assert isinstance(complex(), complex)

    assert not isinstance(u"foo", int)
    assert isinstance(A, type)
    assert isinstance(A(), A)
    cdef type typed_type = A
    assert isinstance(A(), typed_type)
    cdef object untyped_type = A
    assert isinstance(A(), untyped_type)
    return True


@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode//SimpleCallNode',
                                 '//SimpleCallNode//PythonCapiCallNode',
                                 '//TupleNode//NameNode')
def test_optimised_tuple():
    """
    >>> test_optimised_tuple()
    True
    """
    assert isinstance(int(),   (int, long, float, bytes, str, unicode, tuple, list, dict, set, slice, type, A))
    assert isinstance(list(),  (int, long, float, bytes, str, unicode, tuple, list, dict, set, slice, type, A))
    assert isinstance(A(),  (int, long, float, bytes, str, unicode, tuple, list, dict, set, slice, type, A))
    assert isinstance(A(),  (int, long, float, bytes, str, unicode, tuple, list, dict, set, slice, type, A, a_as_obj))
    assert isinstance(A(),  (int, long, float, bytes, str, unicode, tuple, list, dict, set, slice, type, a_as_obj, A))
    assert isinstance(A(),  (int, long, float, bytes, str, unicode, a_as_obj, tuple, list, dict, set, slice, type, A))
    assert isinstance(0, (int, long))
    assert not isinstance(u"xyz", (int, long))
    return True


def test_custom():
    """
    >>> test_custom()
    True
    """
    assert isinstance(A(), A)
    return True

cdef class B:
    pass

cdef class C:
    pass


@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode//SimpleCallNode',
                                 '//SimpleCallNode//PythonCapiCallNode',
                                 '//TupleNode//NameNode')
def test_custom_tuple(obj):
    """
    >>> test_custom_tuple(A())
    True
    >>> test_custom_tuple(B())
    True
    >>> test_custom_tuple(C())
    False
    """
    return isinstance(obj, (A,B))


def test_nested(x):
    """
    >>> test_nested(1)
    True
    >>> test_nested(1.5)
    True
    >>> test_nested("a")
    False
    """
    cdef object a = (x, None)
    if isinstance(a[0], (int, float)):
        return True
    return False
Cython-0.26.1/tests/run/int_float_builtins_as_casts_T400_long_double.pyx
# ticket: 400

cimport cython


@cython.test_fail_if_path_exists("//SingleAssignmentNode//TypecastNode")
@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
    "//PythonCapiCallNode/PythonCapiFunctionNode/@cname = '__Pyx_truncl'",
)
def long_double_to_float_int(long double x):
    """
    >>> long_double_to_float_int(4.1)
    4.0
    >>> long_double_to_float_int(-4.1)
    -4.0
    >>> long_double_to_float_int(4)
    4.0
    """
    cdef float r = int(x)
    return r
Cython-0.26.1/tests/run/ref2global.py
# mode: run
# tag: global, nameerror

try:
    from heapq import *   # just to confuse the compiler
except ImportError:
    pass


def f(a):
    """
    Py<=3.3 gives 'global name ...', Py3.4+ only 'name ...'

    >>> f(1)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    NameError: ...name 'definitely_unknown_name' is not defined
    """
    a = f
    a = definitely_unknown_name
Cython-0.26.1/tests/run/cpp_templates_helper.h
template <class T, class S, class U>
class Wrap {
    T value;
public:
    typedef S AltType;

    Wrap(T v) : value(v) { }
    void set(T v) { value = v; }
    T get(void) { return value; }
    bool operator==(Wrap other) { return value == other.value; }

    S get_alt_type(void) { return (S) value; }
    void set_alt_type(S v) { value = (T) v; }

    U create(void) { return (U) value; }
    bool accept(U v) { return v == (U) value; }
};

template <class T1, class T2>
class Pair {
    T1 _first;
    T2 _second;
public:
    Pair() { }
    Pair(T1 u, T2 v) { _first = u; _second = v; }
    T1 first(void) { return _first; }
    T2 second(void) { return _second; }
    bool operator==(Pair other) { return _first == other._first && _second == other._second; }
    bool operator!=(Pair other) { return _first != other._first || _second != other._second; }
};

template <class T1, class T2>
class SuperClass {
public:
    SuperClass() {}
};

template <class T1, class T2>
class SubClass : public SuperClass<T1, T2> {
};

template <class T>
class Div {
public:
    static T half(T value) { return value / 2; }
};
Cython-0.26.1/tests/run/annotation_typing.pyx
# cython: annotation_typing=True

from cython cimport typeof


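# With annotation_typing enabled, plain Python types, string annotations and
# {'ctype': ...} / {'type': ...} dict annotations are all picked up for typing.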
def pytypes_def(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4) -> list:
    """
    >>> pytypes_def([1])
    ('list object', 'int', 'long', 'float')
    [1, 2, 3, 4.0]
    >>> pytypes_def([1], 3)
    ('list object', 'int', 'long', 'float')
    [1, 3, 3, 4.0]
    >>> pytypes_def(123)
    Traceback (most recent call last):
    TypeError: Argument 'a' has incorrect type (expected list, got int)
    """
    print(typeof(a), typeof(b), typeof(c), typeof(d))
    a.append(b)
    a.append(c)
    a.append(d)
    return a


cpdef pytypes_cpdef(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4):
    """
    >>> pytypes_cpdef([1])
    ('list object', 'int', 'long', 'float')
    [1, 2, 3, 4.0]
    >>> pytypes_cpdef([1], 3)
    ('list object', 'int', 'long', 'float')
    [1, 3, 3, 4.0]
    >>> pytypes_cpdef(123)
    Traceback (most recent call last):
    TypeError: Argument 'a' has incorrect type (expected list, got int)
    """
    print(typeof(a), typeof(b), typeof(c), typeof(d))
    a.append(b)
    a.append(c)
    a.append(d)
    return a


cdef c_pytypes_cdef(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4):
    print(typeof(a), typeof(b), typeof(c), typeof(d))
    a.append(b)
    a.append(c)
    a.append(d)
    return a


def pytypes_cdef(a, b=2, c=3, d=4):
    """
    >>> pytypes_cdef([1])
    ('list object', 'int', 'long', 'float')
    [1, 2, 3, 4.0]
    >>> pytypes_cdef([1], 3)
    ('list object', 'int', 'long', 'float')
    [1, 3, 3, 4.0]
    >>> pytypes_cdef(123)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...
    """
    return c_pytypes_cdef(a, b, c, d)


def return_tuple_for_carray() -> tuple:
    """
    >>> return_tuple_for_carray()
    (1, 2, 3)
    """
    cdef int[3] x
    x = [1, 2, 3]
    return x
Cython-0.26.1/tests/run/cpp_namespaces_helper.h
namespace outer {

    int x = 10;

    int outer_value = 10;

    namespace inner {

        int x = 100;

        int inner_value = 100;

    }

}

namespace A {

    typedef int A_t;

    struct S {
        A_t k;
        double x;
    };

    A_t A_func(A_t first, A_t second) {
        return first + second;
    }

}
Cython-0.26.1/tests/run/notinop.pyx
cimport cython
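# The path assertions below check that 'in' / 'not in' tests against small
# literal containers are compiled into C switch statements where possible.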

def f(a,b):
    """
    >>> f(1,[1,2,3])
    False
    >>> f(5,[1,2,3])
    True
    >>> f(2,(1,2,3))
    False
    """
    result = a not in b
    return result

def g(a,b):
    """
    >>> g(1,[1,2,3])
    0
    >>> g(5,[1,2,3])
    1
    >>> g(2,(1,2,3))
    0
    """
    cdef int result
    result = a not in b
    return result

def h(b):
    """
    >>> h([1,2,3,4])
    False
    >>> h([1,3,4])
    True
    """
    result = 2 not in b
    return result

def j(b):
    """
    >>> j([1,2,3,4])
    0
    >>> j([1,3,4])
    1
    """
    cdef int result
    result = 2 not in b
    return result

@cython.test_fail_if_path_exists("//SwitchStatNode")
def k(a):
    """
    >>> k(1)
    0
    >>> k(5)
    1
    """
    cdef int result = a not in [1,2,3,4]
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def m_list(int a):
    """
    >>> m_list(2)
    0
    >>> m_list(5)
    1
    """
    cdef int result = a not in [1,2,3,4]
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def m_tuple(int a):
    """
    >>> m_tuple(2)
    0
    >>> m_tuple(5)
    1
    """
    cdef int result = a not in (1,2,3,4)
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def m_set(int a):
    """
    >>> m_set(2)
    0
    >>> m_set(5)
    1
    """
    cdef int result = a not in {1,2,3,4}
    return result

cdef bytes bytes_string = b'abcdefg'

@cython.test_assert_path_exists("//PrimaryCmpNode")
@cython.test_fail_if_path_exists("//SwitchStatNode", "//BoolBinopNode", "//BoolBinopNode")
def m_bytes(char a):
    """
    >>> m_bytes(ord('f'))
    0
    >>> m_bytes(ord('X'))
    1
    """
    cdef int result = a not in bytes_string
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def m_bytes_literal(char a):
    """
    >>> m_bytes_literal(ord('f'))
    0
    >>> m_bytes_literal(ord('X'))
    1
    """
    cdef int result = a not in b'abcdefg'
    return result

cdef unicode unicode_string = u'abcdefg\u1234\uF8D2'
py_unicode_string = unicode_string

cdef unicode klingon_character = u'\uF8D2'
py_klingon_character = klingon_character

@cython.test_assert_path_exists("//PrimaryCmpNode")
@cython.test_fail_if_path_exists("//SwitchStatNode", "//BoolBinopNode", "//BoolBinopNode")
def m_unicode(Py_UNICODE a, unicode unicode_string):
    """
    >>> m_unicode(ord('f'), py_unicode_string)
    0
    >>> m_unicode(ord('X'), py_unicode_string)
    1
    >>> m_unicode(ord(py_klingon_character), py_unicode_string)
    0
    >>> 'f' in None    # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...iterable...
    >>> m_unicode(ord('f'), None)
    Traceback (most recent call last):
    TypeError: argument of type 'NoneType' is not iterable
    """
    cdef int result = a not in unicode_string
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def m_unicode_literal(Py_UNICODE a):
    """
    >>> m_unicode_literal(ord('f'))
    0
    >>> m_unicode_literal(ord('X'))
    1
    >>> m_unicode_literal(ord(py_klingon_character))
    0
    """
    cdef int result = a not in u'abcdefg\u1234\uF8D2'
    return result

@cython.test_assert_path_exists("//SwitchStatNode", "//BoolBinopNode")
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def m_tuple_in_or_notin(int a):
    """
    >>> m_tuple_in_or_notin(2)
    0
    >>> m_tuple_in_or_notin(3)
    1
    >>> m_tuple_in_or_notin(5)
    1
    """
    cdef int result = a not in (1,2,3,4) or a in (3,4)
    return result

@cython.test_assert_path_exists("//SwitchStatNode", "//BoolBinopNode")
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def m_tuple_notin_or_notin(int a):
    """
    >>> m_tuple_notin_or_notin(2)
    1
    >>> m_tuple_notin_or_notin(6)
    1
    >>> m_tuple_notin_or_notin(4)
    0
    """
    cdef int result = a not in (1,2,3,4) or a not in (4,5)
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def m_tuple_notin_and_notin(int a):
    """
    >>> m_tuple_notin_and_notin(2)
    0
    >>> m_tuple_notin_and_notin(6)
    0
    >>> m_tuple_notin_and_notin(5)
    1
    """
    cdef int result = a not in (1,2,3,4) and a not in (6,7)
    return result

@cython.test_assert_path_exists("//SwitchStatNode", "//BoolBinopNode")
@cython.test_fail_if_path_exists("//PrimaryCmpNode")
def m_tuple_notin_and_notin_overlap(int a):
    """
    >>> m_tuple_notin_and_notin_overlap(2)
    0
    >>> m_tuple_notin_and_notin_overlap(4)
    0
    >>> m_tuple_notin_and_notin_overlap(5)
    1
    """
    cdef int result = a not in (1,2,3,4) and a not in (3,4)
    return result

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def conditional_int(int a):
    """
    >>> conditional_int(1)
    2
    >>> conditional_int(0)
    1
    >>> conditional_int(5)
    1
    """
    return 1 if a not in (1,2,3,4) else 2

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def conditional_object(int a):
    """
    >>> conditional_object(1)
    '2'
    >>> conditional_object(0)
    1
    >>> conditional_object(5)
    1
    """
    return 1 if a not in (1,2,3,4) else '2'

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def conditional_bytes(char a):
    """
    >>> conditional_bytes(ord('a'))
    '2'
    >>> conditional_bytes(ord('X'))
    1
    >>> conditional_bytes(0)
    1
    """
    return 1 if a not in b'abc' else '2'

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def conditional_unicode(Py_UNICODE a):
    """
    >>> conditional_unicode(ord('a'))
    '2'
    >>> conditional_unicode(ord('X'))
    1
    >>> conditional_unicode(0)
    1
    """
    return 1 if a not in u'abc' else '2'

@cython.test_assert_path_exists("//SwitchStatNode")
@cython.test_fail_if_path_exists("//BoolBinopNode", "//BoolBinopNode", "//PrimaryCmpNode")
def conditional_none(int a):
    """
    >>> conditional_none(1)
    1
    >>> conditional_none(0)
    >>> conditional_none(5)
    """
    return None if a not in {1,2,3,4} else 1

def n(a):
    """
    >>> n('d *')
    0
    >>> n('xxx')
    1
    """
    cdef int result = a.lower() not in [u'a *',u'b *',u'c *',u'd *']
    return result

def p(a):
    """
    >>> p('a')
    0
    >>> p(1)
    1
    """
    cdef dict d = {u'a': 1, u'b': 2}
    cdef int result = a not in d
    return result

def q(a):
    """
    >>> q(1)
    Traceback (most recent call last):
    TypeError: 'NoneType' object is not iterable
    """
    cdef dict d = None
    cdef int result = a not in d # should fail with a TypeError
    return result
Cython-0.26.1/tests/run/pyobjcast_T313.pyx
# ticket: 313
# Ensure casting still works to void*

"""
>>> o = f()
>>> print(o[0])
teststring
>>> print(o[1])
teststring
"""

cdef extern from *:
    ctypedef void PyObject

def f():
    cdef void* p1
    cdef PyObject* p2
    cdef object a = u"teststring"
    p1 = a
    p2 = a
    return (p1, p2)
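
# Editor's illustrative sketch, not part of the original test: a pointer
# obtained from a Python object can be cast back to an object reference with
# <object>. The function name and doctest are assumptions for illustration.
def roundtrip():
    """
    >>> roundtrip() == u"teststring"
    True
    """
    cdef object a = u"teststring"
    cdef void* p = <void*>a
    return <object>p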
Cython-0.26.1/tests/run/onelinesuite.py0000664000175000017500000000073412542002467020661 0ustar  stefanstefan00000000000000# mode: run
# tag: syntax

"""
>>> y  # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'y' is not defined
>>> z  # doctest: +ELLIPSIS
Traceback (most recent call last):
NameError: ...name 'z' is not defined
>>> f()
17
"""

x = False

if x: y = 42; z = 88
def f(): return 17


def suite_in_func(x):
    """
    >>> suite_in_func(True)
    (42, 88)
    >>> suite_in_func(False)
    (0, 0)
    """
    y = z = 0
    if x: y = 42; z = 88
    return y, z
Cython-0.26.1/tests/run/ssize_t_T399.pyx0000664000175000017500000000234212542002467020553 0ustar  stefanstefan00000000000000# ticket: 399

__doc__ = u"""
>>> test(-2)
-2
>>> test(-1)
-1
>>> test(0)
0
>>> test(1)
1
>>> test(2)
2

>>> test(SSIZE_T_MAX) == SSIZE_T_MAX
True
>>> test(SSIZE_T_MIN) == SSIZE_T_MIN
True

>>> test(SSIZE_T_MAX+1) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...
>>> test(SSIZE_T_MIN-1) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...

>>> test(1<<128) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...
>>> test(-(1<<128)) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...

>>> a = A(1,2)
>>> a.a == 1
True
>>> a.b == 2
True
>>> a.foo(5)
5
>>> a.foo(1 << 180) #doctest: +ELLIPSIS
Traceback (most recent call last):
    ...
OverflowError: ...
"""

cdef extern from *:
    ctypedef long ssize_t # XXX This should generate a warning !!!
    ssize_t PY_SSIZE_T_MAX
    ssize_t PY_SSIZE_T_MIN

SSIZE_T_MAX = PY_SSIZE_T_MAX
SSIZE_T_MIN = PY_SSIZE_T_MIN

def test(ssize_t i):
    return i

cdef class A:
    cdef public ssize_t a
    cdef readonly ssize_t b

    def __init__(self, ssize_t a, object b):
        self.a = a
        self.b = b

    cpdef ssize_t foo(self, ssize_t x):
        cdef object o = x
        return o
Cython-0.26.1/tests/run/cpp_custom_string.srctree0000664000175000017500000000302113023021033022710 0ustar  stefanstefan00000000000000# mode: run
# tag: cpp, werror

PYTHON setup.py build_ext --inplace
PYTHON -c "import a; a.test_convert()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize
from Cython.Compiler import PyrexTypes
PyrexTypes.cpp_string_conversions += ("MyString", "MyString2")

from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## my_string.cpp ########

#include <string>

class MyString {
  public:
    MyString() { }
    MyString(const char* data, size_t size) : value_(data, size) { }
    const char* data() const { return value_.data(); }
    const size_t size() const { return value_.size(); }
  private:
    std::string value_;
};

class MyString2 : public MyString {
  public:
    MyString2() : MyString() { }
    MyString2(const char* data, size_t size) : MyString(data, size) { }
};

typedef MyString MyTypedefString;

######## a.pyx ########

# distutils: language = c++

cdef extern from "my_string.cpp":
  cdef cppclass MyString:
    pass

  cdef cppclass MyString2:
    pass

  ctypedef MyString2 MyTypedefString  # really a MyString

def do_convert(MyString value):
    return value

def do_convert2(MyString2 value):
    return value

def do_convert_typedef(MyTypedefString value):
    return value

def test_convert():
    assert do_convert(b"abc") == b"abc"
    assert do_convert(b"ab\0c") == b"ab\0c"

    assert do_convert2(b"abc") == b"abc"
    assert do_convert2(b"ab\0c") == b"ab\0c"

    assert do_convert_typedef(b"abc") == b"abc"
    assert do_convert_typedef(b"ab\0c") == b"ab\0c"
Cython-0.26.1/tests/run/r_starargsonly.pyx0000664000175000017500000000030712542002467021413 0ustar  stefanstefan00000000000000def spam(*args):
    """
    >>> spam()
    Args: ()
    >>> spam(42)
    Args: (42,)
    >>> spam("one", 2, "buckle my shoe")
    Args: ('one', 2, 'buckle my shoe')
    """
    print u"Args:", args
Cython-0.26.1/tests/run/ctuple.pyx0000664000175000017500000001110313023021033017613 0ustar  stefanstefan00000000000000import cython

def simple_convert(*o):
    """
    >>> simple_convert(1, 2)
    (1, 2.0)

    >>> simple_convert(1)
    Traceback (most recent call last):
    ...
    TypeError: Expected a tuple of size 2, got tuple
    >>> simple_convert(1, 2, 3)
    Traceback (most recent call last):
    ...
    TypeError: Expected a tuple of size 2, got tuple
    """
    cdef (int, double) xy = o
    return xy

def indexing((int, double) xy):
    """
    >>> indexing((1, 2))
    (2, 3.0)
    """
    x = xy[0]
    y = xy[1]
    xy[0] = x + 1
    xy[1] = y + 1
    return xy

def unpacking((int, double) xy):
    """
    >>> unpacking((1, 2))
    (1, 2.0)
    """
    x, y = xy
    return x, y

cdef (int, double) side_effect((int, double) xy):
    print "called with", xy
    return xy

def unpacking_with_side_effect((int, double) xy):
    """
    >>> unpacking_with_side_effect((1, 2))
    called with (1, 2.0)
    (1, 2.0)
    """
    x, y = side_effect(xy)
    return x, y

def packing_tuple(int x, double y):
    """
    >>> packing_tuple(1, 2)
    (1, 2.0)
    """
    cdef (int, double) xy = (x, y)
    assert xy == (x, y), xy
    xy = (x, y) * 1
    assert xy == (x, y), xy
    xy = 1 * (x, y)
    return xy

def packing_list(int x, double y):
    """
    >>> packing_list(1, 2)
    (1, 2.0)
    """
    cdef (int, double) xy = [x, y]
    assert xy == (x, y), xy
    xy = [x, y] * 1
    assert xy == (x, y), xy
    xy = 1 * [x, y]
    return xy

def coerce_packing_tuple(int x, int y):
    cdef (int, double) xy = (x, y)
    """
    >>> coerce_packing_tuple(1, 2)
    (1, 2.0)
    """
    return xy

def c_types(int a, double b):
    """
    >>> c_types(1, 2)
    (1, 2.0)
    """
    cdef int* a_ptr
    cdef double* b_ptr
    cdef (int*, double*) ab = (&a, &b)
    a_ptr, b_ptr = ab
    return a_ptr[0], b_ptr[0]


cdef union Union:
    int x
    double y


def union_in_ctuple_literal():
    """
    >>> union_in_ctuple_literal()
    (1, 2.0)
    """
    cdef (Union,) a = ({"x": 1},)
    cdef (Union,) b = ({"y": 2},)
    return a[0].x, b[0].y


def union_in_ctuple_dynamic(*values):
    """
    >>> union_in_ctuple_dynamic(1, {'x': 1})
    1
    >>> union_in_ctuple_dynamic(2, {'y': 2})
    2.0
    >>> union_in_ctuple_dynamic(1, {'x': 1, 'y': 2})
    Traceback (most recent call last):
    ValueError: More than one union attribute passed: 'x' and 'y'
    """
    cdef (int, Union) a = values
    return a[1].x if a[0] == 1 else a[1].y


cdef (int, int*) cdef_ctuple_return_type(int x, int* x_ptr):
    return x, x_ptr

def call_cdef_ctuple_return_type(int x):
    """
    >>> call_cdef_ctuple_return_type(2)
    (2, 2)
    """
    cdef (int, int*) res = cdef_ctuple_return_type(x, &x)
    return res[0], res[1][0]


cpdef (int, double) cpdef_ctuple_return_type(int x, double y):
    """
    >>> cpdef_ctuple_return_type(1, 2)
    (1, 2.0)
    """
    return x, y


@cython.infer_types(True)
def test_type_inference():
    """
    >>> test_type_inference()
    """
    cdef int x = 1
    cdef double y = 2
    cdef object o = 3
    xy = (x, y)
    assert cython.typeof(xy) == "(int, double)", cython.typeof(xy)
    xo = (x, o)
    assert cython.typeof(xo) == "tuple object", cython.typeof(xo)


@cython.locals(a=(int,int), b=(cython.long,cython.float))
def test_pure_python_declaration(x, y):
    """
    >>> test_pure_python_declaration(1, 2)
    (int, int)
    (long, float)
    ((1, 2), (1, 2.0))
    >>> test_pure_python_declaration(1.0, 2.0)
    (int, int)
    (long, float)
    ((1, 2), (1, 2.0))
    >>> test_pure_python_declaration('x', 'y')
    Traceback (most recent call last):
    TypeError: an integer is required
    """
    a = (x, y)
    b = (x, y)
    print(cython.typeof(a))
    print(cython.typeof(b))
    return (a, b)


def test_equality((int, int) ab, (int, int) cd, (int, int) ef):
    """
    >>> test_equality((1, 2), (3, 4), (5, 6))
    True
    >>> test_equality((1, 2), (3, 4), (3, 4))
    True
    >>> test_equality((3, 4), (3, 4), (3, 4))
    False
    """
    return ab < cd <= ef

def test_equality_different_types((double, int) ab, (int, int) cd, (long, int) ef):
    """
    >>> test_equality((1, 2), (3, 4), (5, 6))
    True
    >>> test_equality((1, 2), (3, 4), (3, 4))
    True
    >>> test_equality((3, 4), (3, 4), (3, 4))
    False
    """
    return ab < cd <= ef

def test_binop((int, int) ab, (double, double) cd):
    """
    >>> test_binop((1, 2), (3, 4))
    (1, 2, 3.0, 4.0)
    """
    return ab + cd

def test_mul((int, int) ab, int c):
    """
    >>> test_mul((1, 2), 3)
    (1, 2, 1, 2, 1, 2)
    """
    return ab * c

def test_unop((int, int) ab):
    """
    >>> test_unop((1, 2))
    True
    """
    return not ab
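
# Editor's illustrative sketch, not part of the original test file: ctuples
# unpack and repack like Python tuples, so their elements can be reordered in
# a single assignment. Name and doctest values are illustrative assumptions.
def ctuple_swap((int, double) xy):
    """
    >>> ctuple_swap((1, 2))
    (2.0, 1)
    """
    x, y = xy
    return y, x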
Cython-0.26.1/tests/run/slice_charptr.pyx0000664000175000017500000000072212542002467021165 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> do_slice(b'abcdef', 2, 3)
    (b'c', b'cdef', b'ab', b'abcdef', b'cdef', b'ab', b'abcdef')
    >>> do_slice(b'abcdef', 0, 5)
    (b'abcde', b'abcdef', b'', b'abcdef', b'abcdef', b'', b'abcdef')
"""

import sys

if sys.version_info[0] < 3:
    __doc__ = __doc__.replace(u"(b'", u"('").replace(u" b'", u" '")

def do_slice(s, int i, int j):
    cdef char* ss = s
    return ss[i:j], ss[i:], ss[:i], ss[:], ss[i:None], ss[None:i], ss[None:None]

Cython-0.26.1/tests/run/typeofexttype.pyx0000664000175000017500000000020112542002467021264 0ustar  stefanstefan00000000000000# mode: run
# tag: exttype


cdef class Spam:
    pass


def test():
    """
    >>> test()
    """
    cdef type t
    t = Spam
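
# Editor's illustrative sketch, not part of the original test file: a 'type'
# typed variable can also hold the dynamic type of an instance. The function
# name is an assumption added for illustration.
def test_instance_type():
    """
    >>> test_instance_type()
    True
    """
    cdef type t = type(Spam())
    return t is Spam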
Cython-0.26.1/tests/run/print_function.pyx0000664000175000017500000000213412542002467021403 0ustar  stefanstefan00000000000000
# Py2.6 and later only!
from __future__ import print_function

def print_to_stdout(a, b):
    """
    >>> print_to_stdout(1, 'test')
    
    1
    1 test
    1 test
    1 test 42 spam
    """
    print()
    print(a)
    print(a, end=' ')
    print(b)
    print(a, b)
    print(a, b, end=' ')
    print(42, u"spam")

def print_assign(a, b):
    """
    >>> print_assign(1, 'test')
    
    1
    1 test
    1 test
    1 test 42 spam
    """
    x = print
    x()
    x(a)
    x(a, end=' ')
    x(b)
    x(a, b)
    x(a, b, end=' ')
    x(42, u"spam")


try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

def print_to_stringio(stream, a, b):
    """
    >>> stream = StringIO()
    >>> print_to_stringio(stream, 1, 'test')
    >>> print(stream.getvalue())
    
    1
    1 test
    1 test
    1 test 42 spam
    <BLANKLINE>
    """
    print(file=stream)
    print(a, file=stream)
    print(a, end=' ', file=stream)
    print(b, file=stream)
    print(a, b, file=stream)
    print(a, b, end=' ', file=stream)
    print(42, u"spam", file=stream)
Cython-0.26.1/tests/run/int_literals.pyx0000664000175000017500000001036113023021033021015 0ustar  stefanstefan00000000000000# mode: run
# tag: syntax

from __future__ import absolute_import

cimport cython
from cython cimport typeof

import sys


def valid_underscore_literals():
    """
    >>> valid_underscore_literals()
    """
    # Copied from CPython's test_grammar.py
    assert 0_0_0 == 0
    assert 4_2 == 42
    assert 1_0000_0000 == 100000000
    assert 0b1001_0100 == 0b10010100
    assert 0xffff_ffff == 0xffffffff
    assert 0o5_7_7 == 0o577
    assert 1_00_00.5 == 10000.5
    assert 1e1_0 == 1e10
    assert .1_4 == .14
    assert 1_0 == 1_0L == 1_0LL == 1_0UL == 1_0ULL
    assert typeof(1_0ULL) == "unsigned long long"


@cython.test_assert_path_exists(
    '//IntNode[@longness = "LL"]',
    '//IntNode[@longness = "L"]',
    )
@cython.test_fail_if_path_exists('//IntNode[@longness = ""]')
def c_longs():
    """
    >>> c_longs() == (1, 1, -1, 18446744073709551615)  or  c_longs()
    True
    """
    cdef long a = 1L
    cdef unsigned long ua = 1UL
    cdef long long aa = 0xFFFFFFFFFFFFFFFFLL
    cdef unsigned long long uaa = 0xFFFFFFFFFFFFFFFFULL
    return a, ua, int(aa), uaa

@cython.test_assert_path_exists(
    '//IntNode[@longness = "LL"]',
    '//IntNode[@longness = "L"]',
    )
@cython.test_fail_if_path_exists('//IntNode[@longness = ""]')
def negative_c_longs():
    """
    >>> negative_c_longs() == (-1, -9223285636854775809)  or  negative_c_longs()
    True
    """
    cdef long a = -1L
    cdef long long aa = -9223285636854775809LL
    return a, aa

def py_longs():
    """
    >>> py_longs() == (
    ...     1, 1, 100000000000000000000000000000000, -100000000000000000000000000000000
    ...     )  or  py_longs()
    True
    """
    return 1, 1L, 100000000000000000000000000000000, -100000000000000000000000000000000

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_calculated_long():
    """
    >>> py_huge_calculated_long() == (
    ...     1606938044258990275541962092341162602522202993782792835301376
    ...     )  or  py_huge_calculated_long()
    True
    """
    return 1 << 200

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_computation_small_result():
    """
    >>> py_huge_computation_small_result()
    2
    """
    return (1 << 200) >> 199

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
#@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_computation_small_result_neg():
    """
    >>> py_huge_computation_small_result_neg() == (
    ...    -2535301200456458802993406410752, -2535301200456458802993406410752
    ...    )  or  py_huge_computation_small_result_neg()
    True
    """
    return -(2 ** 101), (-2) ** 101

def large_literal():
    """
    >>> type(large_literal()) is int
    True
    """
    if sys.version_info[0] >= 3 or sys.maxint > 0xFFFFFFFFFFFF:
        return 0xFFFFFFFFFFFF
    else:
        return 0xFFFFFFF

def c_long_types():
    """
    >>> c_long_types()
    long
    long
    long long
    unsigned long
    unsigned long
    unsigned long long
    """
    print typeof(1)
    print typeof(1L)
    print typeof(1LL)
    print typeof(1U)
    print typeof(1UL)
    print typeof(1ULL)

# different ways to write an integer in Python

def c_oct():
    """
    >>> c_oct()
    (1, -17, 63)
    """
    cdef int a = 0o01
    cdef int b = -0o21
    cdef int c = 0o77
    return a,b,c

def c_oct_py2_legacy():
    """
    >>> c_oct_py2_legacy()
    (1, -17, 63)
    """
    cdef int a = 001
    cdef int b = -021
    cdef int c = 077
    return a,b,c

def py_oct():
    """
    >>> py_oct()
    (1, -17, 63)
    """
    return 0o01, -0o21, 0o77

def py_oct_py2_legacy():
    """
    >>> py_oct_py2_legacy()
    (1, -17, 63)
    """
    return 001, -021, 077

def c_hex():
    """
    >>> c_hex()
    (1, -33, 255)
    """
    cdef int a = 0x01
    cdef int b = -0x21
    cdef int c = 0xFF
    return a,b,c

def py_hex():
    """
    >>> py_hex()
    (1, -33, 255)
    """
    return 0x01, -0x21, 0xFF

def c_bin():
    """
    >>> c_bin()
    (1, -2, 15)
    """
    cdef int a = 0b01
    cdef int b = -0b10
    cdef int c = 0b1111
    return a,b,c

def py_bin():
    """
    >>> py_bin()
    (1, -2, 15)
    """
    return 0b01, -0b10, 0b1111
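
# Editor's illustrative sketch, not part of the original test file: underscore
# separators combine with the hex and binary notations shown above. Function
# name and values are assumptions added for illustration.
def py_underscored_bases():
    """
    >>> py_underscored_bases() == (4294967295, 255)  or  py_underscored_bases()
    True
    """
    return 0xFFFF_FFFF, 0b1111_1111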
Cython-0.26.1/tests/run/filenames.pxi0000664000175000017500000000002012542002467020255 0ustar  stefanstefan00000000000000spam = u"ftang"
Cython-0.26.1/tests/run/cpp_exceptions_nogil_helper.h0000664000175000017500000000010012542002467023512 0ustar  stefanstefan00000000000000void foo(int i) {
  if (i==0)
    return;
  else
    throw i;
}
Cython-0.26.1/tests/run/generator_expressions_nested.pyx0000664000175000017500000000314212542002467024334 0ustar  stefanstefan00000000000000# mode: run
# tag: genexpr
# cython: language_level=3

"""
Adapted from CPython's test_grammar.py
"""

def genexpr_simple():
    """
    >>> sum([ x**2 for x in range(10) ])
    285
    >>> sum(genexpr_simple())
    285
    """
    return (x**2 for x in range(10))

def genexpr_conditional():
    """
    >>> sum([ x*x for x in range(10) if x%2 ])
    165
    >>> sum(genexpr_conditional())
    165
    """
    return (x*x for x in range(10) if x%2)

def genexpr_nested2():
    """
    >>> sum([x for x in range(10)])
    45
    >>> sum(genexpr_nested2())
    45
    """
    return (x for x in (y for y in range(10)))

def genexpr_nested3():
    """
    >>> sum([x for x in range(10)])
    45
    >>> sum(genexpr_nested3())
    45
    """
    return (x for x in (y for y in (z for z in range(10))))

def genexpr_nested_listcomp():
    """
    >>> sum([x for x in range(10)])
    45
    >>> sum(genexpr_nested_listcomp())
    45
    """
    return (x for x in [y for y in (z for z in range(10))])

def genexpr_nested_conditional():
    """
    >>> sum([ x for x in [y for y in [z for z in range(10) if True]] if True ])
    45
    >>> sum(genexpr_nested_conditional())
    45
    """
    return (x for x in (y for y in (z for z in range(10) if True)) if True)

def genexpr_nested2_conditional_empty():
    """
    >>> sum(genexpr_nested2_conditional_empty())
    0
    """
    return (y for y in (z for z in range(10) if True) if False)

def genexpr_nested3_conditional_empty():
    """
    >>> sum(genexpr_nested3_conditional_empty())
    0
    """
    return (x for x in (y for y in (z for z in range(10) if True) if False) if True)
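
# Editor's illustrative sketch, not part of the original test file: a nested
# list comprehension with an always-false condition also yields an empty
# generator. Name and values are illustrative assumptions.
def genexpr_nested_listcomp_empty():
    """
    >>> sum(genexpr_nested_listcomp_empty())
    0
    """
    return (x for x in [y for y in range(10) if False])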
Cython-0.26.1/tests/run/ref2local.pyx0000664000175000017500000000011612542002467020211 0ustar  stefanstefan00000000000000def f():
    """
    >>> f()
    42
    """
    a = 42
    b = a
    return b
Cython-0.26.1/tests/run/extinstantiate.pyx0000664000175000017500000000017212542002467021406 0ustar  stefanstefan00000000000000__doc__ = u"""
    >>> type(f()).__name__
    'Spam'
"""

cdef class Spam:
    pass

def f():
    s = Spam()
    return s
Cython-0.26.1/tests/run/test_grammar.py0000664000175000017500000012146513023021023020630 0ustar  stefanstefan00000000000000### COPIED FROM CPython 3.5 - ADDED PART FOLLOWS ###
# cython: language_level=3

import contextlib
from tempfile import NamedTemporaryFile
from Cython.Compiler.Main import compile as cython_compile


def _compile(code):
    with NamedTemporaryFile(suffix='.py') as f:
        f.write(code.encode('utf8'))
        f.flush()

        try:
            from StringIO import StringIO
        except ImportError:
            from io import StringIO

        old_stderr = sys.stderr
        try:
            sys.stderr = StringIO()
            result = cython_compile(f.name, language_level=3)
        finally:
            sys.stderr = old_stderr
    return result


def check_syntax_error(test, code):
    result = _compile(code)
    assert not result.c_file


def compile(code, name, what):
    assert what == 'exec'
    result = _compile(code)
    if not result.c_file:
        raise SyntaxError('unexpected EOF')  # see usage of compile() below


def exec(code):
    result = _compile(code)
    if not result.c_file:
        raise SyntaxError('unexpected EOF')  # see usage of compile() below


import unittest

if not hasattr(unittest, 'skipUnless'):
    def skipUnless(condition, message):
        def decorator(func):
            if condition:
                return func

            def test_method(self):
                print(message)
            return test_method
        return decorator

    unittest.skipUnless = skipUnless


### END OF CYTHON ADDED PART - COPIED PART FOLLOWS ###

# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.

#from test.support import check_syntax_error
import inspect
import unittest
import sys
# testing import *
from sys import *


class TokenTests(unittest.TestCase):

    def test_backslash(self):
        # Backslash means line continuation:
        x = 1 \
        + 1
        self.assertEqual(x, 2, 'backslash for line continuation')

        # Backslash does not mean continuation in comments :\
        x = 0
        self.assertEqual(x, 0, 'backslash ending comment')

    def test_plain_integers(self):
        self.assertEqual(type(000), type(0))
        self.assertEqual(0xff, 255)
        self.assertEqual(0o377, 255)
        self.assertEqual(2147483647, 0o17777777777)
        self.assertEqual(0b1001, 9)
        # "0x" is not a valid literal
        self.assertRaises(SyntaxError, eval, "0x")
        from sys import maxsize
        if maxsize == 2147483647:
            self.assertEqual(-2147483647-1, -0o20000000000)
            # XXX -2147483648
            self.assertTrue(0o37777777777 > 0)
            self.assertTrue(0xffffffff > 0)
            self.assertTrue(0b1111111111111111111111111111111 > 0)
            for s in ('2147483648', '0o40000000000', '0x100000000',
                      '0b10000000000000000000000000000000'):
                try:
                    x = eval(s)
                except OverflowError:
                    self.fail("OverflowError on huge integer literal %r" % s)
        elif maxsize == 9223372036854775807:
            self.assertEqual(-9223372036854775807-1, -0o1000000000000000000000)
            self.assertTrue(0o1777777777777777777777 > 0)
            self.assertTrue(0xffffffffffffffff > 0)
            self.assertTrue(0b11111111111111111111111111111111111111111111111111111111111111 > 0)
            for s in '9223372036854775808', '0o2000000000000000000000', \
                     '0x10000000000000000', \
                     '0b100000000000000000000000000000000000000000000000000000000000000':
                try:
                    x = eval(s)
                except OverflowError:
                    self.fail("OverflowError on huge integer literal %r" % s)
        else:
            self.fail('Weird maxsize value %r' % maxsize)

    def test_long_integers(self):
        x = 0
        x = 0xffffffffffffffff
        x = 0Xffffffffffffffff
        x = 0o77777777777777777
        x = 0O77777777777777777
        x = 123456789012345678901234567890
        x = 0b100000000000000000000000000000000000000000000000000000000000000000000
        x = 0B111111111111111111111111111111111111111111111111111111111111111111111

    def test_floats(self):
        x = 3.14
        x = 314.
        x = 0.314
        # XXX x = 000.314
        x = .314
        x = 3e14
        x = 3E14
        x = 3e-14
        x = 3e+14
        x = 3.e14
        x = .3e14
        x = 3.1e4

    def test_float_exponent_tokenization(self):
        # See issue 21642.
        self.assertEqual(1 if 1else 0, 1)
        self.assertEqual(1 if 0else 0, 0)
        self.assertRaises(SyntaxError, eval, "0 if 1Else 0")

    def test_string_literals(self):
        x = ''; y = ""; self.assertTrue(len(x) == 0 and x == y)
        x = '\''; y = "'"; self.assertTrue(len(x) == 1 and x == y and ord(x) == 39)
        x = '"'; y = "\""; self.assertTrue(len(x) == 1 and x == y and ord(x) == 34)
        x = "doesn't \"shrink\" does it"
        y = 'doesn\'t "shrink" does it'
        self.assertTrue(len(x) == 24 and x == y)
        x = "does \"shrink\" doesn't it"
        y = 'does "shrink" doesn\'t it'
        self.assertTrue(len(x) == 24 and x == y)
        x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
        y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
        self.assertEqual(x, y)
        y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
'''
        self.assertEqual(x, y)
        y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
"
        self.assertEqual(x, y)
        y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
'
        self.assertEqual(x, y)

    def test_ellipsis(self):
        x = ...
        self.assertTrue(x is Ellipsis)
        self.assertRaises(SyntaxError, eval, ".. .")

    def test_eof_error(self):
        samples = ("def foo(", "\ndef foo(", "def foo(\n")
        for s in samples:
            with self.assertRaises(SyntaxError) as cm:
                compile(s, "", "exec")
            self.assertIn("unexpected EOF", str(cm.exception))

class GrammarTests(unittest.TestCase):

    # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
    # XXX can't test in a script -- this rule is only used when interactive

    # file_input: (NEWLINE | stmt)* ENDMARKER
    # Being tested as this very moment this very module

    # expr_input: testlist NEWLINE
    # XXX Hard to test -- used only in calls to input()

    def test_eval_input(self):
        # testlist ENDMARKER
        x = eval('1, 0 or 1')

    def test_funcdef(self):
        ### [decorators] 'def' NAME parameters ['->' test] ':' suite
        ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
        ### decorators: decorator+
        ### parameters: '(' [typedargslist] ')'
        ### typedargslist: ((tfpdef ['=' test] ',')*
        ###                ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
        ###                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
        ### tfpdef: NAME [':' test]
        ### varargslist: ((vfpdef ['=' test] ',')*
        ###              ('*' [vfpdef] (',' vfpdef ['=' test])*  [',' '**' vfpdef] | '**' vfpdef)
        ###              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
        ### vfpdef: NAME
        def f1(): pass
        f1()
        f1(*())
        f1(*(), **{})
        def f2(one_argument): pass
        def f3(two, arguments): pass
        self.assertEqual(f2.__code__.co_varnames, ('one_argument',))
        self.assertEqual(f3.__code__.co_varnames, ('two', 'arguments'))
        def a1(one_arg,): pass
        def a2(two, args,): pass
        def v0(*rest): pass
        def v1(a, *rest): pass
        def v2(a, b, *rest): pass

        f1()
        f2(1)
        f2(1,)
        f3(1, 2)
        f3(1, 2,)
        v0()
        v0(1)
        v0(1,)
        v0(1,2)
        v0(1,2,3,4,5,6,7,8,9,0)
        v1(1)
        v1(1,)
        v1(1,2)
        v1(1,2,3)
        v1(1,2,3,4,5,6,7,8,9,0)
        v2(1,2)
        v2(1,2,3)
        v2(1,2,3,4)
        v2(1,2,3,4,5,6,7,8,9,0)

        def d01(a=1): pass
        d01()
        d01(1)
        d01(*(1,))
        d01(*[] or [2])
        d01(*() or (), *{} and (), **() or {})
        d01(**{'a':2})
        d01(**{'a':2} or {})
        def d11(a, b=1): pass
        d11(1)
        d11(1, 2)
        d11(1, **{'b':2})
        def d21(a, b, c=1): pass
        d21(1, 2)
        d21(1, 2, 3)
        d21(*(1, 2, 3))
        d21(1, *(2, 3))
        d21(1, 2, *(3,))
        d21(1, 2, **{'c':3})
        def d02(a=1, b=2): pass
        d02()
        d02(1)
        d02(1, 2)
        d02(*(1, 2))
        d02(1, *(2,))
        d02(1, **{'b':2})
        d02(**{'a': 1, 'b': 2})
        def d12(a, b=1, c=2): pass
        d12(1)
        d12(1, 2)
        d12(1, 2, 3)
        def d22(a, b, c=1, d=2): pass
        d22(1, 2)
        d22(1, 2, 3)
        d22(1, 2, 3, 4)
        def d01v(a=1, *rest): pass
        d01v()
        d01v(1)
        d01v(1, 2)
        d01v(*(1, 2, 3, 4))
        d01v(*(1,))
        d01v(**{'a':2})
        def d11v(a, b=1, *rest): pass
        d11v(1)
        d11v(1, 2)
        d11v(1, 2, 3)
        def d21v(a, b, c=1, *rest): pass
        d21v(1, 2)
        d21v(1, 2, 3)
        d21v(1, 2, 3, 4)
        d21v(*(1, 2, 3, 4))
        d21v(1, 2, **{'c': 3})
        def d02v(a=1, b=2, *rest): pass
        d02v()
        d02v(1)
        d02v(1, 2)
        d02v(1, 2, 3)
        d02v(1, *(2, 3, 4))
        d02v(**{'a': 1, 'b': 2})
        def d12v(a, b=1, c=2, *rest): pass
        d12v(1)
        d12v(1, 2)
        d12v(1, 2, 3)
        d12v(1, 2, 3, 4)
        d12v(*(1, 2, 3, 4))
        d12v(1, 2, *(3, 4, 5))
        d12v(1, *(2,), **{'c': 3})
        def d22v(a, b, c=1, d=2, *rest): pass
        d22v(1, 2)
        d22v(1, 2, 3)
        d22v(1, 2, 3, 4)
        d22v(1, 2, 3, 4, 5)
        d22v(*(1, 2, 3, 4))
        d22v(1, 2, *(3, 4, 5))
        d22v(1, *(2, 3), **{'d': 4})

        # keyword argument type tests
        try:
            str('x', **{b'foo':1 })
        except TypeError:
            pass
        else:
            self.fail('Bytes should not work as keyword argument names')
        # keyword only argument tests
        def pos0key1(*, key): return key
        pos0key1(key=100)
        def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2
        pos2key2(1, 2, k1=100)
        pos2key2(1, 2, k1=100, k2=200)
        pos2key2(1, 2, k2=100, k1=200)
        def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg
        pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200)
        pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100)

        self.assertRaises(SyntaxError, eval, "def f(*): pass")
        self.assertRaises(SyntaxError, eval, "def f(*,): pass")
        self.assertRaises(SyntaxError, eval, "def f(*, **kwds): pass")

        # keyword arguments after *arglist
        def f(*args, **kwargs):
            return args, kwargs
        self.assertEqual(f(1, x=2, *[3, 4], y=5), ((1, 3, 4),
                                                    {'x':2, 'y':5}))
        self.assertEqual(f(1, *(2,3), 4), ((1, 2, 3, 4), {}))
        self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)")
        self.assertEqual(f(**{'eggs':'scrambled', 'spam':'fried'}),
                         ((), {'eggs':'scrambled', 'spam':'fried'}))
        self.assertEqual(f(spam='fried', **{'eggs':'scrambled'}),
                         ((), {'eggs':'scrambled', 'spam':'fried'}))

        # argument annotation tests
        def f(x) -> list: pass
        self.assertEqual(f.__annotations__, {'return': list})
        def f(x: int): pass
        self.assertEqual(f.__annotations__, {'x': int})
        def f(*x: str): pass
        self.assertEqual(f.__annotations__, {'x': str})
        def f(**x: float): pass
        self.assertEqual(f.__annotations__, {'x': float})
        def f(x, y: 1+2): pass
        self.assertEqual(f.__annotations__, {'y': 3})
        def f(a, b: 1, c: 2, d): pass
        self.assertEqual(f.__annotations__, {'b': 1, 'c': 2})
        def f(a, b: 1, c: 2, d, e: 3 = 4, f=5, *g: 6): pass
        self.assertEqual(f.__annotations__,
                         {'b': 1, 'c': 2, 'e': 3, 'g': 6})
        def f(a, b: 1, c: 2, d, e: 3 = 4, f=5, *g: 6, h: 7, i=8, j: 9 = 10,
              **k: 11) -> 12: pass
        self.assertEqual(f.__annotations__,
                         {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9,
                          'k': 11, 'return': 12})
        # Check for issue #20625 -- annotations mangling
        class Spam:
            def f(self, *, __kw: 1):
                pass
        class Ham(Spam): pass
        self.assertEqual(Spam.f.__annotations__, {'_Spam__kw': 1})
        self.assertEqual(Ham.f.__annotations__, {'_Spam__kw': 1})
        # Check for SF Bug #1697248 - mixing decorators and a return annotation
        def null(x): return x
        @null
        def f(x) -> list: pass
        self.assertEqual(f.__annotations__, {'return': list})

        # test MAKE_CLOSURE with a variety of oparg's
        closure = 1
        def f(): return closure
        def f(x=1): return closure
        def f(*, k=1): return closure
        def f() -> int: return closure

        # Check ast errors in *args and *kwargs
        check_syntax_error(self, "f(*g(1=2))")
        check_syntax_error(self, "f(**g(1=2))")

        # Check trailing commas are permitted in funcdef argument list
        def f(a,): pass
        def f(*args,): pass
        def f(**kwds,): pass
        def f(a, *args,): pass
        def f(a, **kwds,): pass
        def f(*args, b,): pass
        def f(*, b,): pass
        def f(*args, **kwds,): pass
        def f(a, *args, b,): pass
        def f(a, *, b,): pass
        def f(a, *args, **kwds,): pass
        def f(*args, b, **kwds,): pass
        def f(*, b, **kwds,): pass
        def f(a, *args, b, **kwds,): pass
        def f(a, *, b, **kwds,): pass

    def test_lambdef(self):
        ### lambdef: 'lambda' [varargslist] ':' test
        l1 = lambda : 0
        self.assertEqual(l1(), 0)
        l2 = lambda : a[d] # XXX just testing the expression
        l3 = lambda : [2 < x for x in [-1, 3, 0]]
        self.assertEqual(l3(), [0, 1, 0])
        l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
        self.assertEqual(l4(), 1)
        l5 = lambda x, y, z=2: x + y + z
        self.assertEqual(l5(1, 2), 5)
        self.assertEqual(l5(1, 2, 3), 6)
        check_syntax_error(self, "lambda x: x = 2")
        check_syntax_error(self, "lambda (None,): None")
        l6 = lambda x, y, *, k=20: x+y+k
        self.assertEqual(l6(1,2), 1+2+20)
        self.assertEqual(l6(1,2,k=10), 1+2+10)

        # check that trailing commas are permitted
        l10 = lambda a,: 0
        l11 = lambda *args,: 0
        l12 = lambda **kwds,: 0
        l13 = lambda a, *args,: 0
        l14 = lambda a, **kwds,: 0
        l15 = lambda *args, b,: 0
        l16 = lambda *, b,: 0
        l17 = lambda *args, **kwds,: 0
        l18 = lambda a, *args, b,: 0
        l19 = lambda a, *, b,: 0
        l20 = lambda a, *args, **kwds,: 0
        l21 = lambda *args, b, **kwds,: 0
        l22 = lambda *, b, **kwds,: 0
        l23 = lambda a, *args, b, **kwds,: 0
        l24 = lambda a, *, b, **kwds,: 0


    ### stmt: simple_stmt | compound_stmt
    # Tested below

    def test_simple_stmt(self):
        ### simple_stmt: small_stmt (';' small_stmt)* [';']
        x = 1; pass; del x
        def foo():
            # verify statements that end with semi-colons
            x = 1; pass; del x;
        foo()

    ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt
    # Tested below

    def test_expr_stmt(self):
        # (exprlist '=')* exprlist
        1
        1, 2, 3
        x = 1
        x = 1, 2, 3
        x = y = z = 1, 2, 3
        x, y, z = 1, 2, 3
        abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4)

        check_syntax_error(self, "x + 1 = 1")
        check_syntax_error(self, "a + 1 = b + 2")

    # Check the heuristic for print & exec covers significant cases
    # As well as placing some limits on false positives
    def test_former_statements_refer_to_builtins(self):
        keywords = "print", "exec"
        # Cases where we want the custom error
        cases = [
            "{} foo",
            "{} {{1:foo}}",
            "if 1: {} foo",
            "if 1: {} {{1:foo}}",
            "if 1:\n    {} foo",
            "if 1:\n    {} {{1:foo}}",
        ]
        for keyword in keywords:
            custom_msg = "call to '{}'".format(keyword)
            for case in cases:
                source = case.format(keyword)
                with self.subTest(source=source):
                    with self.assertRaisesRegex(SyntaxError, custom_msg):
                        exec(source)
                source = source.replace("foo", "(foo.)")
                with self.subTest(source=source):
                    with self.assertRaisesRegex(SyntaxError, "invalid syntax"):
                        exec(source)

    def test_del_stmt(self):
        # 'del' exprlist
        abc = [1,2,3]
        x, y, z = abc
        xyz = x, y, z

        del abc
        del x, y, (z, xyz)

    def test_pass_stmt(self):
        # 'pass'
        pass

    # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
    # Tested below

    def test_break_stmt(self):
        # 'break'
        while 1: break

    def test_continue_stmt(self):
        # 'continue'
        i = 1
        while i: i = 0; continue

        msg = ""
        while not msg:
            msg = "ok"
            try:
                continue
                msg = "continue failed to continue inside try"
            except:
                msg = "continue inside try called except block"
        if msg != "ok":
            self.fail(msg)

        msg = ""
        while not msg:
            msg = "finally block not called"
            try:
                continue
            finally:
                msg = "ok"
        if msg != "ok":
            self.fail(msg)

    def test_break_continue_loop(self):
        # This test warrants an explanation. It is a test specifically for SF bugs
        # #463359 and #462937. The bug is that a 'break' statement executed or
        # exception raised inside a try/except inside a loop, *after* a continue
        # statement has been executed in that loop, will cause the wrong number of
        # arguments to be popped off the stack and the instruction pointer reset to
        # a very small number (usually 0.) Because of this, the following test
        # *must* be written as a function, and the tracking vars *must* be function
        # arguments with default values. Otherwise, the test will loop and loop.

        def test_inner(extra_burning_oil = 1, count=0):
            big_hippo = 2
            while big_hippo:
                count += 1
                try:
                    if extra_burning_oil and big_hippo == 1:
                        extra_burning_oil -= 1
                        break
                    big_hippo -= 1
                    continue
                except:
                    raise
            if count > 2 or big_hippo != 1:
                self.fail("continue then break in try/except in loop broken!")
        test_inner()

    def test_return(self):
        # 'return' [testlist]
        def g1(): return
        def g2(): return 1
        g1()
        x = g2()
        check_syntax_error(self, "class foo:return 1")

    def test_yield(self):
        # Allowed as standalone statement
        def g(): yield 1
        def g(): yield from ()
        # Allowed as RHS of assignment
        def g(): x = yield 1
        def g(): x = yield from ()
        # Ordinary yield accepts implicit tuples
        def g(): yield 1, 1
        def g(): x = yield 1, 1
        # 'yield from' does not
        check_syntax_error(self, "def g(): yield from (), 1")
        check_syntax_error(self, "def g(): x = yield from (), 1")
        # Requires parentheses as subexpression
        def g(): 1, (yield 1)
        def g(): 1, (yield from ())
        check_syntax_error(self, "def g(): 1, yield 1")
        check_syntax_error(self, "def g(): 1, yield from ()")
        # Requires parentheses as call argument
        def g(): f((yield 1))
        def g(): f((yield 1), 1)
        def g(): f((yield from ()))
        def g(): f((yield from ()), 1)
        check_syntax_error(self, "def g(): f(yield 1)")
        check_syntax_error(self, "def g(): f(yield 1, 1)")
        check_syntax_error(self, "def g(): f(yield from ())")
        check_syntax_error(self, "def g(): f(yield from (), 1)")
        # Not allowed at top level
        check_syntax_error(self, "yield")
        check_syntax_error(self, "yield from")
        # Not allowed at class scope
        check_syntax_error(self, "class foo:yield 1")
        check_syntax_error(self, "class foo:yield from ()")
        # Check annotation refleak on SyntaxError
        check_syntax_error(self, "def g(a:(yield)): pass")

    def test_raise(self):
        # 'raise' test [',' test]
        try: raise RuntimeError('just testing')
        except RuntimeError: pass
        try: raise KeyboardInterrupt
        except KeyboardInterrupt: pass

    def test_import(self):
        # 'import' dotted_as_names
        import sys
        import time, sys
        # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names)
        from time import time
        from time import (time)
        # not testable inside a function, but already done at top of the module
        # from sys import *
        from sys import path, argv
        from sys import (path, argv)
        from sys import (path, argv,)

    def test_global(self):
        # 'global' NAME (',' NAME)*
        global a
        global a, b
        global one, two, three, four, five, six, seven, eight, nine, ten

    def test_nonlocal(self):
        # 'nonlocal' NAME (',' NAME)*
        x = 0
        y = 0
        def f():
            nonlocal x
            nonlocal x, y

    def test_assert(self):
        # assertTruestmt: 'assert' test [',' test]
        assert 1
        assert 1, 1
        assert lambda x:x
        assert 1, lambda x:x+1

        try:
            assert True
        except AssertionError as e:
            self.fail("'assert True' should not have raised an AssertionError")

        try:
            assert True, 'this should always pass'
        except AssertionError as e:
            self.fail("'assert True, msg' should not have "
                      "raised an AssertionError")

    # these tests fail if python is run with -O, so check __debug__
    @unittest.skipUnless(__debug__, "Won't work if __debug__ is False")
    def testAssert2(self):
        try:
            assert 0, "msg"
        except AssertionError as e:
            self.assertEqual(e.args[0], "msg")
        else:
            self.fail("AssertionError not raised by assert 0")

        try:
            assert False
        except AssertionError as e:
            self.assertEqual(len(e.args), 0)
        else:
            self.fail("AssertionError not raised by 'assert False'")


    ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
    # Tested below

    def test_if(self):
        # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
        if 1: pass
        if 1: pass
        else: pass
        if 0: pass
        elif 0: pass
        if 0: pass
        elif 0: pass
        elif 0: pass
        elif 0: pass
        else: pass

    def test_while(self):
        # 'while' test ':' suite ['else' ':' suite]
        while 0: pass
        while 0: pass
        else: pass

        # Issue1920: "while 0" is optimized away,
        # ensure that the "else" clause is still present.
        x = 0
        while 0:
            x = 1
        else:
            x = 2
        self.assertEqual(x, 2)

    def test_for(self):
        # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
        for i in 1, 2, 3: pass
        for i, j, k in (): pass
        else: pass
        class Squares:
            def __init__(self, max):
                self.max = max
                self.sofar = []
            def __len__(self): return len(self.sofar)
            def __getitem__(self, i):
                if not 0 <= i < self.max: raise IndexError
                n = len(self.sofar)
                while n <= i:
                    self.sofar.append(n*n)
                    n = n+1
                return self.sofar[i]
        n = 0
        for x in Squares(10): n = n+x
        if n != 285:
            self.fail('for over growing sequence')

        result = []
        for x, in [(1,), (2,), (3,)]:
            result.append(x)
        self.assertEqual(result, [1, 2, 3])

    def test_try(self):
        ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
        ###         | 'try' ':' suite 'finally' ':' suite
        ### except_clause: 'except' [expr ['as' expr]]
        try:
            1/0
        except ZeroDivisionError:
            pass
        else:
            pass
        try: 1/0
        except EOFError: pass
        except TypeError as msg: pass
        except RuntimeError as msg: pass
        except: pass
        else: pass
        try: 1/0
        except (EOFError, TypeError, ZeroDivisionError): pass
        try: 1/0
        except (EOFError, TypeError, ZeroDivisionError) as msg: pass
        try: pass
        finally: pass

    def test_suite(self):
        # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
        if 1: pass
        if 1:
            pass
        if 1:
            #
            #
            #
            pass
            pass
            #
            pass
            #

    def test_test(self):
        ### and_test ('or' and_test)*
        ### and_test: not_test ('and' not_test)*
        ### not_test: 'not' not_test | comparison
        if not 1: pass
        if 1 and 1: pass
        if 1 or 1: pass
        if not not not 1: pass
        if not 1 and 1 and 1: pass
        if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass

    def test_comparison(self):
        ### comparison: expr (comp_op expr)*
        ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
        if 1: pass
        x = (1 == 1)
        if 1 == 1: pass
        if 1 != 1: pass
        if 1 < 1: pass
        if 1 > 1: pass
        if 1 <= 1: pass
        if 1 >= 1: pass
        if 1 is 1: pass
        if 1 is not 1: pass
        if 1 in (): pass
        if 1 not in (): pass
        if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass

    def test_binary_mask_ops(self):
        x = 1 & 1
        x = 1 ^ 1
        x = 1 | 1

    def test_shift_ops(self):
        x = 1 << 1
        x = 1 >> 1
        x = 1 << 1 >> 1

    def test_additive_ops(self):
        x = 1
        x = 1 + 1
        x = 1 - 1 - 1
        x = 1 - 1 + 1 - 1 + 1

    def test_multiplicative_ops(self):
        x = 1 * 1
        x = 1 / 1
        x = 1 % 1
        x = 1 / 1 * 1 % 1

    def test_unary_ops(self):
        x = +1
        x = -1
        x = ~1
        x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
        x = -1*1/1 + 1*1 - ---1*1

    def test_selectors(self):
        ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
        ### subscript: expr | [expr] ':' [expr]

        import sys, time
        c = sys.path[0]
        x = time.time()
        x = sys.modules['time'].time()
        a = '01234'
        c = a[0]
        c = a[-1]
        s = a[0:5]
        s = a[:5]
        s = a[0:]
        s = a[:]
        s = a[-5:]
        s = a[:-1]
        s = a[-4:-3]
        # A rough test of SF bug 1333982.  http://python.org/sf/1333982
        # The testing here is fairly incomplete.
        # Test cases should include: commas with 1 and 2 colons
        d = {}
        d[1] = 1
        d[1,] = 2
        d[1,2] = 3
        d[1,2,3] = 4
        L = list(d)
        L.sort(key=lambda x: x if isinstance(x, tuple) else ())
        self.assertEqual(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')

    def test_atoms(self):
        ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING
        ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])

        x = (1)
        x = (1 or 2 or 3)
        x = (1 or 2 or 3, 2, 3)

        x = []
        x = [1]
        x = [1 or 2 or 3]
        x = [1 or 2 or 3, 2, 3]
        x = []

        x = {}
        x = {'one': 1}
        x = {'one': 1,}
        x = {'one' or 'two': 1 or 2}
        x = {'one': 1, 'two': 2}
        x = {'one': 1, 'two': 2,}
        x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}

        x = {'one'}
        x = {'one', 1,}
        x = {'one', 'two', 'three'}
        x = {2, 3, 4,}

        x = x
        x = 'x'
        x = 123

    ### exprlist: expr (',' expr)* [',']
    ### testlist: test (',' test)* [',']
    # These have been exercised enough above

    def test_classdef(self):
        # 'class' NAME ['(' [testlist] ')'] ':' suite
        class B: pass
        class B2(): pass
        class C1(B): pass
        class C2(B): pass
        class D(C1, C2, B): pass
        class C:
            def meth1(self): pass
            def meth2(self, arg): pass
            def meth3(self, a1, a2): pass

        # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
        # decorators: decorator+
        # decorated: decorators (classdef | funcdef)
        def class_decorator(x): return x
        @class_decorator
        class G: pass

    def test_dictcomps(self):
        # dictorsetmaker: ( (test ':' test (comp_for |
        #                                   (',' test ':' test)* [','])) |
        #                   (test (comp_for | (',' test)* [','])) )
        nums = [1, 2, 3]
        self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4})

    def test_listcomps(self):
        # list comprehension tests
        nums = [1, 2, 3, 4, 5]
        strs = ["Apple", "Banana", "Coconut"]
        spcs = ["  Apple", " Banana ", "Coco  nut  "]

        self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco  nut'])
        self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15])
        self.assertEqual([x for x in nums if x > 2], [3, 4, 5])
        self.assertEqual([(i, s) for i in nums for s in strs],
                         [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'),
                          (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'),
                          (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'),
                          (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'),
                          (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')])
        self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]],
                         [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'),
                          (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'),
                          (5, 'Banana'), (5, 'Coconut')])
        self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)],
                         [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]])

        def test_in_func(l):
            return [0 < x < 3 for x in l if x > 2]

        self.assertEqual(test_in_func(nums), [False, False, False])

        def test_nested_front():
            self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]],
                             [[1, 2], [3, 4], [5, 6]])

        test_nested_front()

        check_syntax_error(self, "[i, s for i in nums for s in strs]")
        check_syntax_error(self, "[x if y]")

        suppliers = [
          (1, "Boeing"),
          (2, "Ford"),
          (3, "Macdonalds")
        ]

        parts = [
          (10, "Airliner"),
          (20, "Engine"),
          (30, "Cheeseburger")
        ]

        suppart = [
          (1, 10), (1, 20), (2, 20), (3, 30)
        ]

        x = [
          (sname, pname)
            for (sno, sname) in suppliers
              for (pno, pname) in parts
                for (sp_sno, sp_pno) in suppart
                  if sno == sp_sno and pno == sp_pno
        ]

        self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
                             ('Macdonalds', 'Cheeseburger')])

    def test_genexps(self):
        # generator expression tests
        g = ([x for x in range(10)] for x in range(1))
        self.assertEqual(next(g), [x for x in range(10)])
        try:
            next(g)
            self.fail('should produce StopIteration exception')
        except StopIteration:
            pass

        a = 1
        try:
            g = (a for d in a)
            next(g)
            self.fail('should produce TypeError')
        except TypeError:
            pass

        self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd'])
        self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy'])

        a = [x for x in range(10)]
        b = (x for x in (y for y in a))
        self.assertEqual(sum(b), sum([x for x in range(10)]))

        self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
        self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
        self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
        self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
        self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
        self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)]))
        self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0)
        check_syntax_error(self, "foo(x for x in range(10), 100)")
        check_syntax_error(self, "foo(100, x for x in range(10))")

    def test_comprehension_specials(self):
        # test for outmost iterable precomputation
        x = 10; g = (i for i in range(x)); x = 5
        self.assertEqual(len(list(g)), 10)

        # This should hold, since we're only precomputing outmost iterable.
        x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x))
        x = 5; t = True;
        self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g))

        # Grammar allows multiple adjacent 'if's in listcomps and genexps,
        # even though it's silly. Make sure it works (ifelse broke this.)
        self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
        self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7])

        # verify unpacking single element tuples in listcomp/genexp.
        self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6])
        self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9])

    def test_with_statement(self):
        class manager(object):
            def __enter__(self):
                return (1, 2)
            def __exit__(self, *args):
                pass

        with manager():
            pass
        with manager() as x:
            pass
        with manager() as (x, y):
            pass
        with manager(), manager():
            pass
        with manager() as x, manager() as y:
            pass
        with manager() as x, manager():
            pass

    def test_if_else_expr(self):
        # Test ifelse expressions in various cases
        def _checkeval(msg, ret):
            "helper to check that evaluation of expressions is done correctly"
            print(x)
            return ret

        # the next line is not allowed anymore
        #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True])
        self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True])
        self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True])
        self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5)
        self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5)
        self.assertEqual((5 and 6 if 0 else 1), 1)
        self.assertEqual(((5 and 6) if 0 else 1), 1)
        self.assertEqual((5 and (6 if 1 else 1)), 6)
        self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3)
        self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1)
        self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5)
        self.assertEqual((not 5 if 1 else 1), False)
        self.assertEqual((not 5 if 0 else 1), 1)
        self.assertEqual((6 + 1 if 1 else 2), 7)
        self.assertEqual((6 - 1 if 1 else 2), 5)
        self.assertEqual((6 * 2 if 1 else 4), 12)
        self.assertEqual((6 / 2 if 1 else 3), 3)
        self.assertEqual((6 < 4 if 0 else 2), 2)

    def test_paren_evaluation(self):
        self.assertEqual(16 // (4 // 2), 8)
        self.assertEqual((16 // 4) // 2, 2)
        self.assertEqual(16 // 4 // 2, 2)
        self.assertTrue(False is (2 is 3))
        self.assertFalse((False is 2) is 3)
        self.assertFalse(False is 2 is 3)

    def test_matrix_mul(self):
        # This is not intended to be a comprehensive test, rather just to be few
        # samples of the @ operator in test_grammar.py.
        class M:
            def __matmul__(self, o):
                return 4
            def __imatmul__(self, o):
                self.other = o
                return self
        m = M()
        self.assertEqual(m @ m, 4)
        m @= 42
        self.assertEqual(m.other, 42)

    def test_async_await(self):
        async = 1
        await = 2
        self.assertEqual(async, 1)

        def async():
            nonlocal await
            await = 10
        async()
        self.assertEqual(await, 10)

        #self.assertFalse(bool(async.__code__.co_flags & inspect.CO_COROUTINE))

        async def test():
            def sum():
                pass
            if 1:
                await someobj()

        self.assertEqual(test.__name__, 'test')
        #self.assertTrue(bool(test.__code__.co_flags & inspect.CO_COROUTINE))

        def decorator(func):
            setattr(func, '_marked', True)
            return func

        @decorator
        async def test2():
            return 22
        self.assertTrue(test2._marked)
        self.assertEqual(test2.__name__, 'test2')
        #self.assertTrue(bool(test2.__code__.co_flags & inspect.CO_COROUTINE))

    def test_async_for(self):
        class Done(Exception): pass

        class AIter:
            async def __aiter__(self):
                return self
            async def __anext__(self):
                raise StopAsyncIteration

        async def foo():
            async for i in AIter():
                pass
            async for i, j in AIter():
                pass
            async for i in AIter():
                pass
            else:
                pass
            raise Done

        with self.assertRaises(Done):
            foo().send(None)

    def test_async_with(self):
        class Done(Exception): pass

        class manager:
            async def __aenter__(self):
                return (1, 2)
            async def __aexit__(self, *exc):
                return False

        async def foo():
            async with manager():
                pass
            async with manager() as x:
                pass
            async with manager() as (x, y):
                pass
            async with manager(), manager():
                pass
            async with manager() as x, manager() as y:
                pass
            async with manager() as x, manager():
                pass
            raise Done

        with self.assertRaises(Done):
            foo().send(None)


### END OF COPY ###

GrammarTests.assertRaisesRegex = lambda self, exc, msg: self.assertRaises(exc)

if sys.version_info < (2, 7):
    def assertRaises(self, exc_type, func=None, *args, **kwargs):
        if func is not None:
            return unittest.TestCase.assertRaises(self, exc_type, func, *args, **kwargs)
        @contextlib.contextmanager
        def assertRaisesCM():
            class Result(object):
                exception = exc_type("unexpected EOF")  # see usage above
            try:
                yield Result()
            except exc_type:
                self.assertTrue(True)
            else:
                self.assertTrue(False)
        return assertRaisesCM()
    GrammarTests.assertRaises = assertRaises
    TokenTests.assertRaises = assertRaises


if not hasattr(unittest.TestCase, 'subTest'):
    @contextlib.contextmanager
    def subTest(self, source, **kwargs):
        try:
            yield
        except Exception:
            print(source)
            raise
    GrammarTests.subTest = subTest


if not hasattr(unittest.TestCase, 'assertIn'):
    def assertIn(self, member, container, msg=None):
        self.assertTrue(member in container, msg)
    TokenTests.assertIn = assertIn


# FIXME: disabling some tests for real Cython bugs here
del GrammarTests.test_comprehension_specials  # iterable pre-calculation in generator expression
del GrammarTests.test_funcdef  # annotation mangling

# this test is difficult to enable in Py2.6
if sys.version_info < (2,7):
    del GrammarTests.test_former_statements_refer_to_builtins


if __name__ == '__main__':
    unittest.main()
Cython-0.26.1/tests/run/clear_to_null.pyx
"""
Check that Cython generates a tp_clear function that actually clears object
references to NULL instead of None.

Discussed here: http://article.gmane.org/gmane.comp.python.cython.devel/14833
"""

from cpython.ref cimport PyObject, Py_TYPE

cdef class ExtensionType:
    """
    Just a type which is handled by a specific C type (instead of PyObject)
    to check that tp_clear works when the C pointer is of a type different
    from PyObject *.
    """


# Pull tp_clear for PyTypeObject as I did not find another way to access it
# from Cython code.

cdef extern from "Python.h":
    ctypedef struct PyTypeObject:
        void (*tp_clear)(object)
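# An extern struct declaration only needs to list the members that are used,
# so declaring the tp_clear slot alone is enough to call it below.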


cdef class TpClearFixture:
    """
    An extension type that has a tp_clear method generated to test that it
    actually clears the references to NULL.

    >>> fixture = TpClearFixture()
    >>> isinstance(fixture.extension_type, ExtensionType)
    True
    >>> isinstance(fixture.any_object, str)
    True
    >>> fixture.call_tp_clear()
    >>> fixture.check_any_object_status()
    'NULL'
    >>> fixture.check_extension_type_status()
    'NULL'
    """
    
    cdef readonly object any_object
    cdef readonly ExtensionType extension_type

    def __cinit__(self):
        self.any_object = "Hello World"
        self.extension_type = ExtensionType()

    def call_tp_clear(self):
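        # Invoke the generated tp_clear slot directly: this mimics what the
        # cycle collector does when breaking reference cycles, after which both
        # attributes should read as NULL at the C level (see doctest above).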
        cdef PyTypeObject *pto = Py_TYPE(self)
        pto.tp_clear(self)

    def check_any_object_status(self):
        if <PyObject*>(self.any_object) == NULL:
            return 'NULL'
        elif self.any_object is None:
            return 'None' 
        else:
            return 'not cleared'

    def check_extension_type_status(self):
        if <PyObject*>(self.extension_type) == NULL:
            return 'NULL'
        elif self.extension_type is None:
            return 'None' 
        else:
            return 'not cleared'
Cython-0.26.1/tests/run/public_fused_types.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import b"

######## setup.py ########


from Cython.Build import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## a.pxd ########

cimport cython

cdef extern from "header.h":
    ctypedef int extern_int
    ctypedef long extern_long


cdef struct mystruct_t:
    extern_int a

ctypedef union myunion_t:
    extern_long a

cdef public class MyExt [ type MyExtType, object MyExtObject ]:
    cdef unsigned char a

ctypedef char *string_t
simple_t = cython.fused_type(int, float)
less_simple_t = cython.fused_type(int, float, string_t)
struct_t = cython.fused_type(mystruct_t, myunion_t, MyExt)
builtin_t = cython.fused_type(str, unicode, bytes)

ctypedef fused fusedbunch:
    int
    long
    complex
    string_t
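# The 'ctypedef fused' blocks below are the statement form of
# cython.fused_type(); both create a fused type that is specialised once per
# member type at compile time.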

ctypedef fused fused1:
    short
    string_t

cdef fused fused2:
    float
    double
    string_t

cdef struct_t add_simple(struct_t obj, simple_t simple)
cdef less_simple_t add_to_simple(struct_t obj, less_simple_t simple)
cdef public_optional_args(struct_t obj, simple_t simple = *)

cdef class TestFusedExtMethods(object):
    cdef cython.floating method(self, cython.integral x, cython.floating y)
    cpdef cpdef_method(self, cython.integral x, cython.floating y)

object_t = cython.fused_type(TestFusedExtMethods, object, list)

cpdef public_cpdef(cython.integral x, cython.floating y, object_t z)

######## header.h ########

typedef int extern_int;
typedef long extern_long;

######## a.pyx ########

cimport cython

cdef struct_t add_simple(struct_t obj, simple_t simple):
     obj.a = <int> (obj.a + simple)
     return obj

cdef less_simple_t add_to_simple(struct_t obj, less_simple_t simple):
    return obj.a + simple

cdef public_optional_args(struct_t obj, simple_t simple = 6):
    return obj.a, simple

cdef class TestFusedExtMethods(object):
    cdef cython.floating method(self, cython.integral x, cython.floating y):
        if cython.integral is int:
            x += 1

        if cython.floating is double:
            y += 2.0

        return x + y

    cpdef cpdef_method(self, cython.integral x, cython.floating y):
        return cython.typeof(x), cython.typeof(y)

    def def_method(self, fused1 x, fused2 y):
        if (fused1 is string_t and fused2 is not string_t or
            not fused1 is string_t and fused2 is string_t):
            return x, y
        else:
            return <object> x + y

cpdef public_cpdef(cython.integral x, cython.floating y, object_t z):
    if cython.integral is int:
        pass

    return cython.typeof(x), cython.typeof(y), cython.typeof(z)


######## b.pyx ########

cimport cython
cimport a as a_cmod
from a cimport *

cdef mystruct_t mystruct
cdef myunion_t myunion
cdef MyExt myext = MyExt()

mystruct.a = 5
myunion.a = 5
myext.a = 5

assert add_simple(mystruct, 5).a == 10
assert add_simple(myunion, 5.0).a == 10.0

assert add_to_simple(mystruct, 5.0) == 10.0
assert add_to_simple(myunion, b"spamhameggs") == b"ameggs"
assert add_to_simple(myext, 5) == 10

cdef mystruct_t (*f)(mystruct_t, int)
f = add_simple
assert f(mystruct, 5).a == 10

f = <mystruct_t (*)(mystruct_t, int)> add_simple
assert f(mystruct, 5).a == 10

f = add_simple[mystruct_t, int]
assert f(mystruct, 5).a == 10
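# Three ways of selecting the [mystruct_t, int] specialisation above: implicit
# from the declared type of 'f', an explicit function pointer cast, and
# explicit indexing of the fused function.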

assert public_optional_args(mystruct, 5) == (5, 5)
assert public_optional_args[mystruct_t, int](mystruct) == (5, 6)

assert public_optional_args[mystruct_t, float](mystruct) == (5, 6.0)
assert public_optional_args[mystruct_t, float](mystruct, 7.0) == (5, 7.0)


cdef TestFusedExtMethods obj = TestFusedExtMethods()

cdef int x = 4
cdef float y = 5.0
cdef long a = 6
cdef double b = 7.0

cdef double (*func)(TestFusedExtMethods, long, double)

func = obj.method

result = func(obj, a, b)
assert result == 15.0, result

func = <double (*)(TestFusedExtMethods, long, double)> obj.method
assert func(obj, x, y) == 11.0

func = obj.method[long, double]
assert func(obj, a, y) == 13.0

assert obj.method(x, <double> a) == 13.0
assert obj.method[int, double](x, b) == 14.0


# Test inheritance
cdef class Subclass(TestFusedExtMethods):
    cdef cython.floating method(self, cython.integral x, cython.floating y):
        return -x -y

    cpdef cpdef_method(self, cython.integral x, cython.floating y):
        return x, y

cdef Subclass myobj = Subclass()
assert myobj.method[int, float](5, 5.0) == -10

cdef float (*meth)(Subclass, int, float)
meth = myobj.method
assert meth(myobj, 5, 5.0) == -10

meth = myobj.method[int, float]
assert meth(myobj, 5, 5.0) == -10


# Test cpdef functions and methods
cy = __import__("cython")
import a as a_mod

def ae(result, expected):
    "assert equals"
    if result != expected:
        print 'result  :', result
        print 'expected:', expected

    assert result == expected

ae(a_mod.public_cpdef[int, float, list](5, 6, [7]), ("int", "float", "list object"))

idx = cy.typeof(0), cy.typeof(0.0), cy.typeof([])
ae(a_mod.public_cpdef[idx](5, 6, [7]), ("int", "float", "list object"))

ae(a_mod.public_cpdef[cy.int, cy.double, cython.typeof(obj)](5, 6, obj), ("int", "double", "TestFusedExtMethods"))
ae(a_mod.public_cpdef[cy.int, cy.double, cython.typeof(obj)](5, 6, myobj), ("int", "double", "TestFusedExtMethods"))

ae(public_cpdef[int, float, list](5, 6, [7]), ("int", "float", "list object"))
ae(public_cpdef[int, double, TestFusedExtMethods](5, 6, obj), ("int", "double", "TestFusedExtMethods"))
ae(public_cpdef[int, double, TestFusedExtMethods](5, 6, myobj), ("int", "double", "TestFusedExtMethods"))

ae(obj.cpdef_method(10, 10.0), ("long", "double"))
ae(myobj.cpdef_method(10, 10.0), (10, 10.0))
ae(obj.cpdef_method[int, float](10, 10.0), ("int", "float"))
ae(myobj.cpdef_method[int, float](10, 10.0), (10, 10.0))

s = """\
import cython as cy

ae(obj.cpdef_method[cy.int, cy.float](10, 10.0), ("int", "float"))
ae(myobj.cpdef_method[cy.int, cy.float](10, 10.0), (10, 10.0))
"""

d = {'obj': obj, 'myobj': myobj, 'ae': ae}

# FIXME: uncomment after subclassing CyFunction
#exec s in d

# Test def methods
# ae(obj.def_method(12, 14.9), 26)
# ae(obj.def_method(13, "spam"), (13, "spam"))
# ae(obj.def_method[cy.short, cy.float](13, 16.3), 29)
Cython-0.26.1/tests/run/cdefoptargs.pyx
from cython cimport typeof

def call2():
    """
    >>> call2()
    """
    b(1,2)

def call3():
    """
    >>> call3()
    """
    b(1,2,3)

def call4():
    """
    >>> call4()
    """
    b(1,2,3,4)

# the called function:

cdef b(a, b, c=1, d=2):
    pass


cdef int foo(int a, int b=1, int c=1):
    return a+b*c
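# Worked values for test_foo() below: foo(1) = 1+1*1 = 2, foo(1, 2) = 1+2*1 = 3,
# foo(1, 2, 3) = 1+2*3 = 7, foo(1, foo(2, 3), foo(4)) = 1 + (2+3*1)*(4+1*1) = 26.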

def test_foo():
    """
    >>> test_foo()
    2
    3
    7
    26
    """
    print foo(1)
    print foo(1, 2)
    print foo(1, 2, 3)
    print foo(1, foo(2, 3), foo(4))

cdef class A:
    cpdef method(self):
        """
        >>> A().method()
        'A'
        """
        return typeof(self)

cdef class B(A):
    cpdef method(self, int x = 0):
        """
        >>> B().method()
        ('B', 0)
        >>> B().method(100)
        ('B', 100)
        """
        return typeof(self), x

cdef class C(B):
    cpdef method(self, int x = 10):
        """
        >>> C().method()
        ('C', 10)
        >>> C().method(100)
        ('C', 100)
        """
        return typeof(self), x
Cython-0.26.1/tests/run/unpacklistcomp.pyx
def unpack_normal(l):
    """
    >>> unpack_normal([1,2])
    (1, 2)
    >>> unpack_normal([1,2,3]) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    """
    a,b = l
    return a,b

def unpack_comp(l):
    """
    >>> unpack_comp([1,2])
    (1, 2)
    >>> unpack_comp([1,2,3]) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    """
    a,b = [ n for n in l ]
    return a,b

def unpack_expr(l):
    """
    >>> unpack_expr([1,2])
    (1, 4)
    >>> unpack_expr([1,2,3]) # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ValueError: ...
    """
    a,b = [ n*n for n in l ]
    return a,b
Cython-0.26.1/tests/run/external_inline_declaration.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import a; assert a.test() == 1"

######## setup.py ########

from Cython.Build.Dependencies import cythonize

from distutils.core import setup

setup(
    ext_modules = cythonize("a.py"),
)

######## a.py ########

def inlined_func(x):
    return x

def test():
    return inlined_func(1)

######## a.pxd ########

cdef inline int inlined_func(int x)
Cython-0.26.1/tests/run/str_ascii_auto_encoding.pyx
#cython: c_string_type = str
#cython: c_string_encoding = ascii

"End of first directives"

include "unicode_ascii_auto_encoding.pyx"

auto_string_type = str

def check_auto_string_type():
    """
    >>> check_auto_string_type()
    """
    assert auto_string_type is str
Cython-0.26.1/tests/run/py34_signature.pyx
# cython: binding=True, language_level=3
# mode: run
# tag: cyfunction

import inspect

sig = inspect.Signature.from_function


def signatures_match(f1, f2):
    if sig(f1) == sig(f2):
        return None  # nothing to show in doctest
    return sig(f1), sig(f2)


def b(a, b, c):
    """
    >>> def py_b(a, b, c): pass
    >>> signatures_match(b, py_b)
    """


def c(a, b, c=1):
    """
    >>> def py_c(a, b, c=1): pass
    >>> signatures_match(c, py_c)
    """


def d(a, b, *, c = 88):
    """
    >>> def py_d(a, b, *, c = 88): pass
    >>> signatures_match(d, py_d)
    """


def e(a, b, c = 88, **kwds):
    """
    >>> def py_e(a, b, c = 88, **kwds): pass
    >>> signatures_match(e, py_e)
    """


def f(a, b, *, c, d = 42):
    """
    >>> def py_f(a, b, *, c, d = 42): pass
    >>> signatures_match(f, py_f)
    """


def g(a, b, *, c, d = 42, e = 17, f, **kwds):
    """
    >>> def py_g(a, b, *, c, d = 42, e = 17, f, **kwds): pass
    >>> signatures_match(g, py_g)
    """


def h(a, b, *args, c, d = 42, e = 17, f, **kwds):
    """
    >>> def py_h(a, b, *args, c, d = 42, e = 17, f, **kwds): pass
    >>> signatures_match(h, py_h)
    """


def k(a, b, c=1, *args, d = 42, e = 17, f, **kwds):
    """
    >>> def py_k(a, b, c=1, *args, d = 42, e = 17, f, **kwds): pass
    >>> signatures_match(k, py_k)
    """


def l(*, a, b, c = 88):
    """
    >>> def py_l(*, a, b, c = 88): pass
    >>> signatures_match(l, py_l)
    """


def m(a, *, b, c = 88):
    """
    >>> def py_m(a, *, b, c = 88): pass
    >>> signatures_match(m, py_m)
    """
    a, b, c = b, c, a


def n(a, *, b, c = 88):
    """
    >>> def py_n(a, *, b, c = 88): pass
    >>> signatures_match(n, py_n)
    """
Cython-0.26.1/tests/run/memview_vector.pyx
# mode: run
# tag: cpp

from libcpp.vector cimport vector

def memview_test(L, int i, int x):
    """
    >>> memview_test(range(10), 7, 100)
    [0, 1, 2, 3, 4, 5, 6, 100, 8, 9]
    """
    cdef vector[int] v = L
    cdef int[::1] mv = <int[:v.size()]> &v[0]
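    # The memoryview shares the vector's storage, so the write below through
    # 'mv' is visible in 'v' when it is returned as a Python list.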
    mv[i] = x
    return v
Cython-0.26.1/tests/run/cdef_methods_T462.pyx
# ticket: 462

cimport cython

cdef class cclass:
    def test_self(self):
        """
        >>> cclass().test_self()
        'cclass'
        """
        return cython.typeof(self)

    def test_self_1(self, arg):
        """
        >>> cclass().test_self_1(1)
        ('cclass', 1)
        """
        return cython.typeof(self), arg

    def test_self_args(self, *args):
        """
        >>> cclass().test_self_args(1,2,3)
        ('cclass', (1, 2, 3))
        """
        return cython.typeof(self), args

    def test_args(*args):
        """
        >>> cclass().test_args(1,2,3)
        ('Python object', (1, 2, 3))
        """
        return cython.typeof(args[0]), args[1:]

    def test_args_kwargs(*args, **kwargs):
        """
        >>> cclass().test_args_kwargs(1,2,3, a=4)
        ('Python object', (1, 2, 3), {'a': 4})
        """
        return cython.typeof(args[0]), args[1:], kwargs
Cython-0.26.1/tests/run/external_defs.h
typedef float FloatTypedef;
typedef double DoubleTypedef;
typedef long double LongDoubleTypedef;

typedef char CharTypedef;
typedef short ShortTypedef;
typedef int IntTypedef;
typedef long LongTypedef;
#if defined(T_LONGLONG)
typedef PY_LONG_LONG LongLongTypedef;
#else
typedef long LongLongTypedef;
#endif

typedef unsigned char UCharTypedef;
typedef unsigned short UShortTypedef;
typedef unsigned int UIntTypedef;
typedef unsigned long ULongTypedef;
#if defined(T_LONGLONG)
typedef unsigned PY_LONG_LONG ULongLongTypedef;
#else
typedef unsigned long ULongLongTypedef;
#endif
Cython-0.26.1/tests/run/pxd_syntax.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import a; a.test()"

######## setup.py ########

from Cython.Build.Dependencies import cythonize

from distutils.core import setup

setup(
    ext_modules = cythonize("a.pyx"),
)

######## a.pyx ########

cdef class ExtTypeDocstringPass:
    pass

cdef class ExtTypeDocstring:
    "huhu!"   # this should override the .pxd docstring

cdef class ExtTypePass:
    pass

cdef class ExtTypeDocstringPassString:
    pass

def test():
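    # Expected resolution: a docstring in the .pyx class body overrides one in
    # the .pxd, and a class defined with only a .pxd docstring inherits it.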
    assert not ExtTypePass().__doc__, ExtTypePass().__doc__
    assert ExtTypeDocstring().__doc__ == "huhu!", ExtTypeDocstring().__doc__
    assert ExtTypeDocstringPass().__doc__ == "hoho!", ExtTypeDocstringPass().__doc__
    assert ExtTypeDocstringPassString().__doc__ == "hoho!", ExtTypeDocstringPassString().__doc__

######## a.pxd ########

cdef class ExtTypePass:
    pass

cdef class ExtTypeDocstring:
    """
    hoho
    """

cdef class ExtTypeDocstringPass:
    "hoho!"
    pass

cdef class ExtTypeDocstringPassString:
    "hoho!"
    pass
    "more hoho"
Cython-0.26.1/tests/run/cmp.pyx
def single_py(a, b):
    """
    >>> single_py(1, 2)
    True
    >>> single_py(2, 1)
    False
    """
    return a < b

def cascaded_py(a, b, c):
    """
    >>> cascaded_py(1, 2, 3)
    True
    >>> cascaded_py(1, 2, -1)
    False
    >>> cascaded_py(10, 2, 3)
    False
    """
    return a < b < c

def single_c(int a, int b):
    """
    >>> single_c(1, 2)
    True
    >>> single_c(2, 1)
    False
    """
    return a < b

def cascaded_c(double a, double b, double c):
    """
    >>> cascaded_c(1, 2, 3)
    True
    >>> cascaded_c(1, 2, -1)
    False
    >>> cascaded_c(10, 2, 3)
    False
    """
    return a < b < c

def cascaded_mix_pyleft(a, double b, double c):
    """
    >>> cascaded_mix_pyleft(1, 2, 3)
    True
    >>> cascaded_mix_pyleft(1, 2, -1)
    False
    >>> cascaded_mix_pyleft(10, 2, 3)
    False
    """
    return a < b < c

def cascaded_mix_pyright(double a, double b, c):
    """
    >>> cascaded_mix_pyright(1, 2, 3)
    True
    >>> cascaded_mix_pyright(1, 2, -1)
    False
    >>> cascaded_mix_pyright(10, 2, 3)
    False
    """
    return a < b < c

def typed_cmp(list L):
    """
    >>> typed_cmp([1,2,3])
    False
    False
    False
    False
    """
    print L is Ellipsis
    print Ellipsis is L
    print 1 == L
    print L == 1.5

def pointer_cmp():
    """
    >>> pointer_cmp()
    True
    False
    True
    """
    cdef int* a = NULL
    cdef double* b = NULL
    cdef double** c = NULL
    print a is NULL
    print b is not NULL
    print c == NULL

def c_cmp(double a, int b, long c):
    """
    >>> c_cmp(1, 2, 3)
    True
    >>> c_cmp(1.5, 2, 2)
    True
    >>> c_cmp(1.5, 2, 0)
    False
    >>> c_cmp(1, 1, 3)
    False
    """
    return a < b <= c
Cython-0.26.1/tests/run/pynumber_subtype_conversion.pyx
# mode: run
# tag: python, float, builtin


class MyFloat(float):
    """
    >>> x = MyFloat(1.0)
    >>> x
    1.0
    >>> float(x)
    12.0
    >>> x.float()
    12.0
    """
    def __float__(self):
        return 12.0

    def float(self):
        return float(self)


class MyInt(int):
    """
    >>> x = MyInt(1)
    >>> x
    1
    >>> int(x)
    2
    >>> x.int()
    2
    """
    def __int__(self):
        return 2

    def int(self):
        return int(self)
Cython-0.26.1/tests/run/ext_type_none_arg.pyx
cimport cython


### extension types

cdef class MyExtType:
    cdef object attr
    def __cinit__(self):
        self.attr = 123

cdef attr(MyExtType x):
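    # pre-conditional-expression idiom: returns 321 when x is None and x.attr
    # otherwise (safe because 321 is always truthy); equivalent to
    #     return 321 if x is None else x.attr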
    return x is None and 321 or x.attr


# defaults, without 'not/or None'

def ext_default(MyExtType x): # currently behaves like 'or None'
    """
    >>> ext_default(MyExtType())
    123
    >>> ext_default(None)
    321
    """
    return attr(x)

@cython.allow_none_for_extension_args(False)
def ext_default_none(MyExtType x=None): # special cased default arg
    """
    >>> ext_default_none(MyExtType())
    123
    >>> ext_default_none(None)
    321
    >>> ext_default_none()
    321
    """
    return attr(x)

@cython.allow_none_for_extension_args(True)
def ext_default_check_off(MyExtType x):
    """
    >>> ext_default_check_off(MyExtType())
    123
    >>> ext_default_check_off(None)
    321
    """
    return attr(x)

@cython.allow_none_for_extension_args(False)
def ext_default_check_on(MyExtType x):
    """
    >>> ext_default_check_on(MyExtType())
    123
    >>> ext_default_check_on(None)
    Traceback (most recent call last):
    TypeError: Argument 'x' has incorrect type (expected ext_type_none_arg.MyExtType, got NoneType)
    """
    return attr(x)


# with 'or/not None'

def ext_or_none(MyExtType x or None):
    """
    >>> ext_or_none(MyExtType())
    123
    >>> ext_or_none(None)
    321
    """
    return attr(x)

def ext_not_none(MyExtType x not None):
    """
    >>> ext_not_none(MyExtType())
    123
    >>> ext_not_none(None)
    Traceback (most recent call last):
    TypeError: Argument 'x' has incorrect type (expected ext_type_none_arg.MyExtType, got NoneType)
    """
    return attr(x)


### builtin types (using list)

cdef litem(list L, int item):
    return L is None and 321 or L[item]


# defaults, without 'not/or None'

def builtin_default(list L): # currently behaves like 'or None'
    """
    >>> builtin_default([123])
    123
    >>> builtin_default(None)
    321
    """
    return litem(L, 0)

@cython.allow_none_for_extension_args(False)
def builtin_default_none(list L=None): # special cased default arg
    """
    >>> builtin_default_none([123])
    123
    >>> builtin_default_none(None)
    321
    >>> builtin_default_none()
    321
    """
    return litem(L, 0)

@cython.allow_none_for_extension_args(True)
def builtin_default_check_off(list L):
    """
    >>> builtin_default_check_off([123])
    123
    >>> builtin_default_check_off(None)
    321
    """
    return litem(L, 0)

@cython.allow_none_for_extension_args(False)
def builtin_default_check_on(list L):
    """
    >>> builtin_default_check_on([123])
    123
    >>> builtin_default_check_on(None)
    Traceback (most recent call last):
    TypeError: Argument 'L' has incorrect type (expected list, got NoneType)
    """
    return litem(L, 0)


# with 'or/not None'

def builtin_or_none(list L or None):
    """
    >>> builtin_or_none([123])
    123
    >>> builtin_or_none(None)
    321
    """
    return litem(L, 0)

def builtin_not_none(list L not None):
    """
    >>> builtin_not_none([123])
    123
    >>> builtin_not_none(None)
    Traceback (most recent call last):
    TypeError: Argument 'L' has incorrect type (expected list, got NoneType)
    """
    return litem(L, 0)


## builtin type 'object' - isinstance(None, object) is True!

@cython.allow_none_for_extension_args(False)
def object_default(object o): # always behaves like 'or None'
    """
    >>> object_default(object())
    'object'
    >>> object_default([])
    'list'
    >>> object_default(None)
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def object_default_none(object o=None): # behaves like 'or None'
    """
    >>> object_default_none(object())
    'object'
    >>> object_default_none([])
    'list'
    >>> object_default_none(None)
    'NoneType'
    >>> object_default_none()
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def object_or_none(object o or None):
    """
    >>> object_or_none(object())
    'object'
    >>> object_or_none([])
    'list'
    >>> object_or_none(None)
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def object_not_none(object o not None):
    """
    >>> object_not_none(object())
    'object'
    >>> object_not_none([])
    'list'
    >>> object_not_none(None)
    Traceback (most recent call last):
    TypeError: Argument 'o' must not be None
    """
    return type(o).__name__


## untyped 'object' - isinstance(None, object) is True!

@cython.allow_none_for_extension_args(False)
def notype_default(o): # behaves like 'or None'
    """
    >>> notype_default(object())
    'object'
    >>> notype_default([])
    'list'
    >>> notype_default(None)
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def notype_default_none(o=None): # behaves like 'or None'
    """
    >>> notype_default_none(object())
    'object'
    >>> notype_default_none([])
    'list'
    >>> notype_default_none(None)
    'NoneType'
    >>> notype_default_none()
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def notype_or_none(o or None):
    """
    >>> notype_or_none(object())
    'object'
    >>> notype_or_none([])
    'list'
    >>> notype_or_none(None)
    'NoneType'
    """
    return type(o).__name__

@cython.allow_none_for_extension_args(False)
def notype_not_none(o not None):
    """
    >>> notype_not_none(object())
    'object'
    >>> notype_not_none([])
    'list'
    >>> notype_not_none(None)
    Traceback (most recent call last):
    TypeError: Argument 'o' must not be None
    """
    return type(o).__name__
Cython-0.26.1/tests/run/importfrom.pyx
from distutils import cmd, core, version

def import1():
    """
    >>> import1() == (cmd, core, version)
    True
    """
    from distutils import (

        cmd,

core,                    version)
    return cmd, core, version


def import2():
    """
    >>> import2() == (cmd, core, version)
    True
    """
    from distutils import (cmd,

core,


                           version
)
    return cmd, core, version


def import3():
    """
    >>> import3() == (cmd, core, version)
    True
    """
    from distutils import (cmd, core,version)
    return cmd, core, version

def import4():
    """
    >>> import4() == (cmd, core, version)
    True
    """
    from distutils import cmd, core, version
    return cmd, core, version



def typed_imports():
    """
    >>> typed_imports()
    True
    True
    an integer is required
    Expected type, got int
    """

    import sys
    import types
    cdef long maxunicode
    cdef type t

    from sys import maxunicode
    print(maxunicode == sys.maxunicode)
    from types import ModuleType as t
    print(t is types.ModuleType)

    try:
        from sys import version_info as maxunicode
    except TypeError, e:
        print(e)

    try:
        from sys import maxunicode as t
    except TypeError, e:
        print(e)
Cython-0.26.1/tests/run/builtin_type_inheritance_T608.pyx
# ticket: 608

cdef class MyInt(int):
    """
    >>> MyInt(2) == 2
    True
    >>> MyInt(2).attr is None
    True
    """
    cdef readonly object attr

cdef class MyInt2(int):
    """
    >>> MyInt2(2) == 2
    True
    >>> MyInt2(2).attr is None
    True
    >>> MyInt2(2).test(3)
    5
    """
    cdef readonly object attr

    def test(self, arg):
        return self._test(arg)

    cdef _test(self, arg):
        return self + arg

cdef class MyInt3(MyInt2):
    """
    >>> MyInt3(2) == 2
    True
    >>> MyInt3(2).attr is None
    True
    >>> MyInt3(2).test(3)
    6
    """
    cdef _test(self, arg):
        return self + arg + 1

cdef class MyFloat(float):
    """
    >>> MyFloat(1.0)== 1.0
    True
    >>> MyFloat(1.0).attr is None
    True
    """
    cdef readonly object attr

ustring = u'abc'

cdef class MyUnicode(unicode):
    """
    >>> MyUnicode(ustring) == ustring
    True
    >>> MyUnicode(ustring + ustring) == ustring
    False
    >>> MyUnicode(ustring).attr is None
    True
    """
    cdef readonly object attr

cdef class MyList(list):
    """
    >>> MyList([1,2,3]) == [1,2,3]
    True
    >>> MyList([1,2,3]).attr is None
    True
    """
    cdef readonly object attr

cdef class MyListOverride(list):
    """
    >>> MyListOverride([1,2,3]) == [1,2,3]
    True
    >>> l = MyListOverride([1,2,3])
    >>> l.reverse()
    >>> l
    [1, 2, 3, 5]
    >>> l._reverse()
    >>> l
    [1, 2, 3, 5, 5]
    """
    # not doctested:
    """
    >>> l = MyListOverride([1,2,3])
    >>> l.append(8)
    >>> l
    [1, 2, 3, 0, 8]
    >>> l._append(9)
    >>> l
    [1, 2, 3, 0, 8, 0, 9]
    """
    def reverse(self):
        self[:] = self + [5]

    def _reverse(self):
        self.reverse()

    ## FIXME: this doesn't currently work:

    ## cdef int append(self, value) except -1:
    ##     self[:] = self + [0] + [value]
    ##     return 0

    ## def _append(self, value):
    ##     self.append(value)

cdef class MyDict(dict):
    """
    >>> MyDict({1:2, 3:4}) == {1:2, 3:4}
    True
    >>> MyDict({1:2, 3:4}).attr is None
    True
    """
    cdef readonly object attr

cdef class MyException(Exception):
    """
    >>> raise MyException(3) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    MyException: 3
    """
    cdef readonly int value
    def __cinit__(self, value):
        self.value = value

def test_exception_isinstance(maybe_exn):
    """
    >>> test_exception_isinstance(Exception())
    True
    >>> test_exception_isinstance(MyException(3))
    True
    >>> test_exception_isinstance(3)
    False
    """
    return isinstance(maybe_exn, Exception)

def test_exception_type_cast(Exception maybe_exn):
    """
    >>> test_exception_type_cast(Exception())
    >>> test_exception_type_cast(MyException(3))
    >>> test_exception_type_cast(3)   # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: Argument 'maybe_exn' has incorrect type (expected ...Exception, got int)
    """
    cdef object o = maybe_exn
    cdef Exception e = o
Cython-0.26.1/tests/run/dietachmayer1.pyx
def test():
    """
    >>> test()
    1.0
    """
    cdef float[10][10] v
    v[1][2] = 1.0
    return v[1][2]
Cython-0.26.1/tests/run/cpdef_enums_import.srctree
PYTHON setup.py build_ext --inplace
PYTHON -c "import import_enums_test"

######## setup.py ########

from Cython.Build.Dependencies import cythonize

from distutils.core import setup

setup(
  ext_modules = cythonize(["enums.pyx", "no_enums.pyx"]),
)

######## enums.pyx ########

cpdef enum:
    BAR

cpdef foo(): pass

######## enums.pxd ########

cpdef enum:
    FOO

cpdef enum NamedEnumType:
    NamedEnumValue = 389

cpdef foo()

######## no_enums.pyx ########

from enums cimport *

def get_named_enum_value():
    return NamedEnumType.NamedEnumValue

######## import_enums_test.py ########

# We can import enums with a star import.
from enums import *

print(dir())
assert 'BAR' in dir() and 'FOO' in dir()
assert 'NamedEnumType' in dir()

# enums not generated in the wrong module
import no_enums
print(dir(no_enums))
assert 'FOO' not in dir(no_enums)
assert 'foo' not in dir(no_enums)

assert no_enums.get_named_enum_value() == NamedEnumType.NamedEnumValue
Cython-0.26.1/tests/run/bound_builtin_methods_T589.pyx
# ticket: 589

cimport cython

_set = set # CPython may not define it (in Py2.3), but Cython does :)


def test_set_clear_bound():
    """
    >>> type(test_set_clear_bound()) is _set
    True
    >>> list(test_set_clear_bound())
    []
    """
    cdef set s1 = set([1])
    clear = s1.clear
    clear()
    return s1

text = u'ab jd  sdflk as sa  sadas asdas fsdf '
pipe_sep = u'|'


@cython.test_assert_path_exists(
    "//PythonCapiCallNode",
)
def test_unicode_join_bound(unicode sep, l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( pipe_sep.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( test_unicode_join_bound(pipe_sep, l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    join = sep.join
    return join(l)


def test_unicode_join_bound_no_assignment(unicode sep):
    """
    >>> test_unicode_join_bound_no_assignment(text)
    """
    sep.join


def test_dict_items_bound_no_assignment(dict d):
    """
    >>> test_dict_items_bound_no_assignment({1:2})
    """
    d.items


def list_pop(list l):
    """
    >>> list_pop([1,2,3])
    (2, [1, 3])
    """
    pop = l.pop
    r = pop(1)
    return r, l


def list_pop_literal():
    """
    >>> list_pop_literal()
    (2, [1, 3])
    """
    l = [1,2,3]
    pop = l.pop
    r = pop(1)
    return r, l


def list_pop_reassign():
    """
    >>> list_pop_reassign()
    2
    """
    l = [1,2,3]
    pop = l.pop
    l = None
    r = pop(1)
    return r


def list_insert(list l):
    """
    >>> list_insert([1,2,3])
    (None, [1, 4, 2, 3])
    """
    insert = l.insert
    r = insert(1, 4)
    return r, l


def list_insert_literal():
    """
    >>> list_insert_literal()
    (None, [1, 4, 2, 3])
    """
    l = [1,2,3]
    insert = l.insert
    r = insert(1, 4)
    return r, l


def list_insert_reassign():
    """
    >>> list_insert_reassign()
    (None, [1, 4, 2, 3])
    """
    l = [1,2,3]
    insert = l.insert
    m, l = l, None
    r = insert(1, 4)
    return r, m
Cython-0.26.1/tests/run/unicodeliterals.pyx
# -*- coding: utf-8 -*-

import sys

__doc__ = br"""
    >>> sa
    'abc'
    >>> ua
    u'abc'
    >>> b
    u'123'
    >>> c
    u'S\xf8k ik'
    >>> d
    u'\xfc\xd6\xe4'
    >>> e
    u'\x03g\xf8\uf8d2S\xf8k ik'
    >>> f
    u'\xf8'
    >>> g
    u'\udc00'
    >>> h
    u'\ud800'
    >>> add
    u'S\xf8k ik\xfc\xd6\xe4abc'
    >>> null
    u'\x00'
""".decode("ASCII") + b"""
    >>> len(sa)
    3
    >>> len(ua)
    3
    >>> len(b)
    3
    >>> len(c)
    6
    >>> len(d)
    3
    >>> len(e)
    10
    >>> len(f)
    1
    >>> len(g)
    1
    >>> len(h)
    1
    >>> len(add)
    12
    >>> len(null)
    1
    >>> sys.maxunicode >= 65535
    True
    >>> sys.maxunicode == 65535 and 1 or len(wide_literal) # test for wide build
    1
    >>> sys.maxunicode > 65535 and 2 or len(wide_literal)  # test for narrow build
    2
""".decode("ASCII") + u"""
    >>> ua == u'abc'
    True
    >>> b == u'123'
    True
    >>> c == u'Søk ik'
    True
    >>> d == u'üÖä'
    True
    >>> e == u'\x03\x67\xf8\uf8d2Søk ik'     # unescaped by Cython
    True
    >>> e == u'\\x03\\x67\\xf8\\uf8d2Søk ik' # unescaped by Python
    True
    >>> f == u'\xf8'  # unescaped by Cython
    True
    >>> f == u'\\xf8' # unescaped by Python
    True
    >>> g == u'\\udc00' # unescaped by Python (required by doctest)
    True
    >>> h == u'\\ud800' # unescaped by Python (required by doctest)
    True
    >>> k == u'\\N{SNOWMAN}' == u'\\u2603'
    True
    >>> m == u'abc\\\\xf8\\\\t\\u00f8\\U000000f8'  # unescaped by Python (required by doctest)
    True
    >>> add == u'Søk ik' + u'üÖä' + 'abc'
    True
    >>> null == u'\\x00' # unescaped by Python (required by doctest)
    True
    >>> wide_literal == u'\\U00101234'   # unescaped by Python
    True
"""

if sys.version_info >= (2,6,5):
    # this doesn't work well in older Python versions
    __doc__ += u"""\
    >>> expected = u'\U00101234'    # unescaped by Cython
    >>> if wide_literal == expected: print(True)
    ... else: print(repr(wide_literal), repr(expected), sys.maxunicode)
    True
"""

if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u" u'", u" '")
else:
    __doc__ = __doc__.replace(u" b'", u" '")

sa = 'abc'
ua = u'abc'

b = u'123'
c = u'Søk ik'
d = u'üÖä'
e = u'\x03\x67\xf8\uf8d2Søk ik'
f = u'\xf8'
g = u'\udc00'   # lone trail surrogate
h = u'\ud800'   # lone lead surrogate
k = u'\N{SNOWMAN}'
m = ur'abc\xf8\t\u00f8\U000000f8'

add = u'Søk ik' + u'üÖä' + u'abc'
null = u'\x00'

wide_literal = u'\U00101234'
Cython-0.26.1/tests/run/pyparam_nogil.pyx
def if_list_nogil(list obj):
    """
    >>> if_list_nogil( [] )
    False
    >>> if_list_nogil( [1] )
    True
    >>> if_list_nogil(None)
    False
    """
    return _if_list_nogil(obj)

cdef bint _if_list_nogil(list obj) nogil:
    if obj:
        return True
    else:
        return False

Cython-0.26.1/tests/run/builtin_types_none_T166.pyx
# ticket: 166

__doc__ = u"""
>>> l = None
>>> l.append(2)
Traceback (most recent call last):
AttributeError: 'NoneType' object has no attribute 'append'

"""

def append_to_none():
    """
    >>> append_to_none()
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'append'
    """
    cdef list l = None
    l.append(2)
Cython-0.26.1/tests/run/behnel2.pyx
__doc__ = u"""
    >>> y
    1
    >>> y and {}
    {}
    >>> x
    {}
"""

y = 1
x = y and {}
Cython-0.26.1/tests/run/cpp_nested_classes.pyx
# tag: cpp

cdef extern from "cpp_nested_classes_support.h":
    cdef cppclass A:
        cppclass B:
            int square(int)
            cppclass C:
                int cube(int)
        B* createB()
        ctypedef int my_int
        @staticmethod
        my_int negate(my_int)

    cdef cppclass TypedClass[T]:
        ctypedef T MyType
        struct MyStruct:
            T typed_value
            int int_value
        union MyUnion:
            T typed_value
            int int_value
        enum MyEnum:
            value

    cdef cppclass SpecializedTypedClass(TypedClass[double]):
        pass
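# Members nested inside an extern cppclass (classes, typedefs, structs, unions
# and enums) are referred to from Cython as Outer.Inner, e.g. A.B.C or
# TypedClass[double].MyStruct in the tests below.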


def test_nested_classes():
    """
    >>> test_nested_classes()
    """
    cdef A a
    cdef A.B b
    assert b.square(3) == 9
    cdef A.B.C c
    assert c.cube(3) == 27

    cdef A.B *b_ptr = a.createB()
    assert b_ptr.square(4) == 16
    del b_ptr

def test_nested_typedef(py_x):
    """
    >>> test_nested_typedef(5)
    """
    cdef A.my_int x = py_x
    assert A.negate(x) == -py_x

def test_typed_nested_typedef(x):
    """
    >>> test_typed_nested_typedef(4)
    (4, 4.0)
    """
    cdef TypedClass[int].MyType ix = x
    cdef TypedClass[double].MyType dx = x
    return ix, dx

def test_nested_enum(TypedClass[double].MyEnum x):
    """
    >>> test_nested_enum(4)
    False
    """
    return x == 0

def test_nested_union(x):
    """
    >>> test_nested_union(2)
    2.0
    """
    cdef TypedClass[double].MyUnion u
    u.int_value = x
    assert u.int_value == x
    u.typed_value = x
    return u.typed_value

def test_nested_struct(x):
    """
    >>> test_nested_struct(2)
    2.0
    """
    cdef TypedClass[double].MyStruct s
    s.int_value = x
    assert s.int_value == x
    s.typed_value = x
    return s.typed_value



def test_typed_nested_sub_typedef(x):
    """
    >>> test_typed_nested_sub_typedef(4)
    4.0
    """
    cdef SpecializedTypedClass.MyType dx = x
    return dx

def test_nested_sub_enum(SpecializedTypedClass.MyEnum x):
    """
    >>> test_nested_sub_enum(4)
    False
    """
    return x == 0

def test_nested_sub_union(x):
    """
    >>> test_nested_sub_union(2)
    2.0
    """
    cdef SpecializedTypedClass.MyUnion u
    u.int_value = x
    assert u.int_value == x
    u.typed_value = x
    return u.typed_value

def test_nested_sub_struct(x):
    """
    >>> test_nested_sub_struct(2)
    2.0
    """
    cdef SpecializedTypedClass.MyStruct s
    s.int_value = x
    assert s.int_value == x
    s.typed_value = x
    return s.typed_value
Cython-0.26.1/tests/run/pinard6.pyx
__doc__ = u"""
    >>> x
    (1, 2)
"""

x = 1,
x = 1, 2,
Cython-0.26.1/tests/run/literal_lists.pyx
__doc__ = """
    >>> test_chars(b'yo')
    (b'a', b'bc', b'yo')
    >>> try: test_chars(None)
    ... except TypeError: pass
"""

import sys

if sys.version_info[0] < 3:
    __doc__ = __doc__.replace(u"b'", u"'")

def repeated_literals():
    """
    >>> repeated_literals()
    p1: [4, 4]
    p2: [5, 5]
    """
    cdef int i
    cdef int* p1 = [4, 4]
    cdef int* p2 = [5, 5]

    print "p1: %s" % [ p1[i] for i in range(2) ]
    print "p2: %s" % [ p2[i] for i in range(2) ]

def test_ints(int x):
    """
    >>> test_ints(100)
    (100, 100, 100)
    """
    cdef list L = [1,2,3,x]
    cdef int* Li = [1,2,3,x]
    cdef int** Lii = [Li, &x]
    return L[3], Li[3], Lii[1][0]

def test_chars(foo):
    cdef char** ss = [b"a", b"bc", foo]
    return ss[0], ss[1], ss[2]

cdef struct MyStruct:
    int x
    int y
    double** data

cdef print_struct(MyStruct a):
    print a.x, a.y, a.data == NULL
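# Struct values can be built from bracketed list literals whose items fill the
# fields in declaration order, so [x, y, NULL] sets MyStruct.x, .y and .data.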

def test_struct(int x, y):
    """
    >>> test_struct(-5, -10)
    -5 -10 True
    1 2 False
    """
    cdef MyStruct* aa = [[x,y, NULL], [x+1,y+1,NULL]]
    print_struct(aa[0])
    print_struct([1, 2, 1])

cdef int m_int = -1
cdef int* m_iarray = [4, m_int]
cdef int** m_piarray = [m_iarray, &m_int]
cdef char** m_carray = [b"a", b"bc"]
cdef MyStruct* m_structarray = [[m_int,0,NULL], [1,m_int+1,NULL]]

def test_module_level():
    """
    >>> test_module_level()
    4 -1
    4 -1
    True True
    1 0 True
    """
    print m_iarray[0], m_iarray[1]
    print m_piarray[0][0], m_piarray[1][0]
    print m_carray[0] == b"a", m_carray[1] == b"bc"
    print_struct(m_structarray[1])


# Make sure it's still naturally an object.

[0,1,2,3].append(4)
Cython-0.26.1/tests/run/coverage_nogil.srctree
# mode: run
# tag: coverage,trace,nogil

"""
PYTHON setup.py build_ext -i
PYTHON coverage_test.py
"""

######## setup.py ########

from distutils.core import setup
from Cython.Build import cythonize

setup(ext_modules = cythonize([
    'coverage_test_*.pyx',
]))


######## .coveragerc ########
[run]
plugins = Cython.Coverage


######## coverage_test_nogil.pyx ########
# cython: linetrace=True
# distutils: define_macros=CYTHON_TRACE=1 CYTHON_TRACE_NOGIL=1
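# CYTHON_TRACE enables line tracing for normal code; CYTHON_TRACE_NOGIL also
# instruments nogil functions and blocks, which is what coverage_test.py
# checks via the line numbers noted in the comments below.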

cdef int func1(int a, int b) nogil:
    cdef int x                   #  5
    with gil:                    #  6
        x = 1                    #  7
    cdef int c = func2(a) + b    #  8
    return x + c                 #  9


cdef int func2(int a) with gil:
    return a * 2                 # 13


def call(int a, int b):
    a, b = b, a                  # 17
    with nogil:                  # 18
        result = func1(b, a)     # 19
    return result                # 20


######## coverage_test.py ########

import os.path
try:
    # io.StringIO in Py2.x cannot handle str ...
    from StringIO import StringIO
except ImportError:
    from io import StringIO

from coverage import coverage


import coverage_test_nogil

assert not any(coverage_test_nogil.__file__.endswith(ext)
               for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()), \
    coverage_test_nogil.__file__


def run_coverage(module):
    module_name = module.__name__
    module_path = module_name + '.pyx'

    cov = coverage()
    cov.start()
    assert module.call(1, 2) == (1 * 2) + 2 + 1
    cov.stop()

    out = StringIO()
    cov.report(file=out)
    #cov.report([module], file=out)
    lines = out.getvalue().splitlines()
    assert any(module_path in line for line in lines), \
        "'%s' not found in coverage report:\n\n%s" % (module_path, out.getvalue())

    mod_file, exec_lines, excl_lines, missing_lines, _ = cov.analysis2(os.path.abspath(module_path))
    assert module_path in mod_file

    executed = set(exec_lines) - set(missing_lines)
    # check that everything that runs with the gil owned was executed
    assert all(line in executed for line in [13, 17, 18, 20]), '%s / %s' % (exec_lines, missing_lines)
    # check that everything that runs in nogil sections was executed
    assert all(line in executed for line in [6, 7, 8, 9]), '%s / %s' % (exec_lines, missing_lines)


if __name__ == '__main__':
    run_coverage(coverage_test_nogil)
Cython-0.26.1/tests/run/unsigned.pyx
cdef int i = 1
cdef long l = 2
cdef unsigned int ui = 4
cdef unsigned long ul = 8

def test_add():
    """
    >>> test_add()
    3
    9
    6
    12
    """
    print i + l
    print i + ul
    print ui + l
    print ui + ul

def test_add_sshort_ulong(signed short a, unsigned long b):
    """
    >>> test_add_sshort_ulong(1, 1) == 2
    True
    >>> test_add_sshort_ulong(-1, 1) == 0
    True
    >>> test_add_sshort_ulong(-2, 1) == -1
    False
    """
    return a + b

def test_add_ushort_slonglong(unsigned short a, signed long long b):
    """
    >>> test_add_ushort_slonglong(1, 1) == 2
    True
    >>> test_add_ushort_slonglong(1, -1) == 0
    True
    >>> test_add_ushort_slonglong(1, -2) == -1
    True
    """
    return a + b

def test_add_slong_ulong(signed long a, unsigned long b):
    """
    >>> test_add_slong_ulong(1, 1) == 2
    True
    >>> test_add_slong_ulong(-1, 1) == 0
    True
    >>> test_add_slong_ulong(-2, 1) == -1
    False
    """
    return a + b

Cython-0.26.1/tests/run/callargs.pyx
def c(a=10, b=20, **kwds):
    """
    >>> c()
    10 20 0
    >>> c(1)
    1 20 0
    >>> c(1,2)
    1 2 0
    >>> c(key=None)
    10 20 1
    >>> c(1, key=None)
    1 20 1
    >>> c(1,2, key=None)
    1 2 1
    """
    print a, b, len(kwds)

def d(a, b=1, *args, **kwds):
    """
    >>> d()
    Traceback (most recent call last):
    TypeError: d() takes at least 1 positional argument (0 given)
    >>> d(1)
    1 1 0 0
    >>> d(1,2)
    1 2 0 0
    >>> d(1,2,3)
    1 2 1 0
    >>> d(key=None)
    Traceback (most recent call last):
    TypeError: d() takes at least 1 positional argument (0 given)
    >>> d(1, key=None)
    1 1 0 1
    >>> d(1,2, key=None)
    1 2 0 1
    >>> d(1,2,3, key=None)
    1 2 1 1
    """
    print a, b, len(args), len(kwds)

def e(*args, **kwargs):
    print len(args), len(kwargs)

def f(*args):
    """
    >>> f(1,2, d=5)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'd'
    >>> f(1, d=5)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'd'
    >>> f(d=5)
    Traceback (most recent call last):
    TypeError: f() got an unexpected keyword argument 'd'
    """
    print len(args)

def g(**kwargs):
    """
    >>> g(1,2, d=5)
    Traceback (most recent call last):
    TypeError: g() takes exactly 0 positional arguments (2 given)
    >>> g(1,2)
    Traceback (most recent call last):
    TypeError: g() takes exactly 0 positional arguments (2 given)
    >>> g(1)
    Traceback (most recent call last):
    TypeError: g() takes exactly 0 positional arguments (1 given)
    """
    print len(kwargs)

def h(a, b, c, *args, **kwargs):
    """
    >>> h(1,2, d=5)
    Traceback (most recent call last):
    TypeError: h() takes at least 3 positional arguments (2 given)
    """
    print a, b, c, u'*', len(args), len(kwargs)

args = (9,8,7)

import sys
if sys.version_info[0] >= 3:
    kwargs = {u"test" : u"toast"}
else:
    kwargs = {"test" : u"toast"}

def test_kw_args(f):
    """
    >>> test_kw_args(h)
    1 2 3 * 0 0
    1 2 9 * 2 1
    1 2 7 * 2 1
    1 2 9 * 2 2
    1 2 9 * 2 2
    1 2 9 * 2 3
    >>> test_kw_args(e)
    2 1
    5 1
    5 1
    5 2
    5 2
    5 3
    """
    f(1,2, c=3)
    f(1,2, d=3, *args)
    f(1,2, d=3, *(7,8,9))
    f(1,2, d=3, *args, **kwargs)
    f(1,2, d=3, *args, e=5)
    f(1,2, d=3, *args, e=5, **kwargs)

def test_pos_args(f):
    """
    >>> test_pos_args(h)
    1 2 3 * 0 0
    1 2 9 * 2 0
    1 2 7 * 2 0
    9 8 7 * 0 0
    7 8 9 * 0 0
    >>> test_pos_args(f)
    3
    5
    5
    3
    3
    """
    f(1,2,3)
    f(1,2, *args)
    f(1,2, *(7,8,9))
    f(*args)
    f(*(7,8,9))

def test_kw(f):
    """
    >>> test_kw(e)
    0 1
    0 2
    0 2
    0 1
    >>> test_kw(g)
    1
    2
    2
    1
    """
    f(c=3)
    f(d=3, e=5)
    f(d=3, **kwargs)
    f(**kwargs)

def test_noargs(f):
    """
    >>> test_noargs(e)
    0 0
    >>> test_noargs(f)
    0
    >>> test_noargs(g)
    0

    # and some errors:
    >>> test_noargs(h)
    Traceback (most recent call last):
    TypeError: h() takes at least 3 positional arguments (0 given)
    """
    f()

def test_int_kwargs(f):
    """
    >>> test_int_kwargs(e)     # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    >>> test_int_kwargs(f)     # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    >>> test_int_kwargs(g)     # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    >>> test_int_kwargs(h)     # doctest: +ELLIPSIS
    Traceback (most recent call last):
    TypeError: ...keywords must be strings
    """
    f(a=1,b=2,c=3, **{10:20,30:40})
Cython-0.26.1/tests/run/division_T384.pyx
# ticket: 384

"""
>>> test(3)
(3+1j)
"""

cimport cython

ctypedef Py_ssize_t index_t

ctypedef double complex mycomplex

ctypedef struct MyStruct:
    mycomplex a, b
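# cdivision(False) below requests Python semantics for '//', so 'x // 2'
# floors towards negative infinity as in Python rather than truncating
# towards zero as in C.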

@cython.cdivision(False)
def test(index_t x):
    cdef index_t y = x // 2
    cdef MyStruct s
    s.a = x + y*1j
    return s.a
Cython-0.26.1/tests/run/function_binding_T494.pyx
# ticket: 494

cimport cython
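# binding(True) makes a def function behave like a plain Python function when
# stored on a class: it binds as a method on instance access, as in
# SomeNumber(3).add_to(10).  binding(False) keeps it a builtin-style function,
# so SomeNumber.new and SomeNumber(3).new call it without inserting 'self'.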

class SomeNumber(object):

    def __init__(self, n):
        self._n = n

    def __repr__(self):
        return "SomeNumber(%s)" % self._n

@cython.binding(True)
def add_to_func(self, x):
    """
    >>> add_to_func(SomeNumber(2), 5)
    7
    >>> SomeNumber(3).add_to(10)
    13
    >>> SomeNumber.add_to(SomeNumber(22), 7)
    29
    """
    return self._n + x

@cython.binding(False)
def new_num(n):
    """
    >>> new_num(11)
    SomeNumber(11)
    >>> SomeNumber.new(11)
    SomeNumber(11)
    >>> SomeNumber(3).new(11)
    SomeNumber(11)
    """
    return SomeNumber(n)

SomeNumber.add_to = add_to_func
SomeNumber.new = new_num
Cython-0.26.1/tests/run/cpdef_optargs.pyx
# mode: run
# tag: cyfunction
# cython: binding=True

cimport cython


class PyClass(object):
    a = 2


class PyClass99(object):
    a = 99

    def pymethod(self, x, y=1, z=PyClass):
        """
        >>> obj = PyClass99()
        >>> obj.pymethod(0)
        (0, 1, 2)
        """
        return x, y, z.a


cdef class CyClass:
    cpdef cpmethod(self, x, y=1, z=PyClass):
        """
        >>> obj = CyClass()
        >>> obj.cpmethod(0)
        (0, 1, 2)
        >>> obj.cpmethod(0, 3)
        (0, 3, 2)
        >>> obj.cpmethod(0, 3, PyClass)
        (0, 3, 2)
        >>> obj.cpmethod(0, 3, 5)
        Traceback (most recent call last):
        AttributeError: 'int' object has no attribute 'a'
        """
        return x, y, z.a

    y_value = 3
    p_class = PyClass

    cpdef cpmethod2(self, x, y=y_value, z=p_class):
        """
        >>> obj = CyClass()
        >>> obj.cpmethod2(0)
        (0, 3, 2)
        """
        return x, y, z.a

    def pymethod(self, x, y=y_value, z=p_class):
        """
        >>> obj = CyClass()
        >>> obj.pymethod(0)
        (0, 3, 2)
        """
        return x, y, z.a

    # change values to check that defaults above stay unmodified
    y_value = 98
    p_class = PyClass99


cpdef func(x, y=1, z=PyClass):
    """
    >>> func(0)
    (0, 1, 2)
    >>> func(0, 3)
    (0, 3, 2)
    >>> func(0, 3, PyClass)
    (0, 3, 2)
    >>> func(0, 3, 5)
    Traceback (most recent call last):
    AttributeError: 'int' object has no attribute 'a'
    """
    return x, y, z.a


@cython.ccall
def pyfunc(x, y=1, z=PyClass):
    """
    >>> pyfunc(0)
    (0, 1, 2)
    >>> pyfunc(0, 3)
    (0, 3, 2)
    >>> pyfunc(0, 3, PyClass)
    (0, 3, 2)
    >>> pyfunc(0, 3, 5)
    Traceback (most recent call last):
    AttributeError: 'int' object has no attribute 'a'
    """
    return x, y, z.a
Cython-0.26.1/tests/run/extclassbody.pyx
__doc__ = u"""
>>> s = Spam()
>>> s.a
2
>>> s.c
3
>>> s.test(5)
13
>>> s.b
5
"""

cdef class Spam:
    a = 1
    def test(self, a):
        return a + self.b + self.c
    b = a + 2 # 3
    a = b - 1 # 2
    c = 3     # 3
    b = c + a # 5
Cython-0.26.1/tests/run/set_discard_remove.py
def set_discard():
    """
    >>> sorted(set_discard())
    [1, 2]
    """
    s = set([1,2,3])
    s.discard(3)
    return s


def set_discard_missing():
    """
    >>> sorted(set_discard_missing())
    [1, 2, 3]
    """
    s = set([1,2,3])
    s.discard(4)
    return s


def set_discard_set():
    """
    >>> s = set_discard_set()
    >>> len(s)
    1
    >>> sorted(s.pop())
    [1, 2]
    """
    s = set([frozenset([1,2]), frozenset([2,3])])
    s.discard(set([2,3]))
    return s


def set_remove():
    """
    >>> sorted(set_remove())
    [1, 2]
    """
    s = set([1,2,3])
    s.remove(3)
    return s


def set_remove_missing():
    """
    >>> sorted(set_remove_missing())
    Traceback (most recent call last):
    KeyError: 4
    """
    s = set([1,2,3])
    s.remove(4)
    return s


def set_remove_set():
    """
    >>> s = set_remove_set()
    >>> len(s)
    1
    >>> sorted(s.pop())
    [1, 2]
    """
    s = set([frozenset([1,2]), frozenset([2,3])])
    s.remove(set([2,3]))
    return s
Cython-0.26.1/tests/run/py_ucs4_type.pyx
# -*- coding: iso-8859-1 -*-
# mode: run
# tag: warnings


cimport cython

cdef Py_UCS4 char_ASCII = u'A'
cdef Py_UCS4 char_KLINGON = u'\uF8D2'

u_A = char_ASCII
u_KLINGON = char_KLINGON


def compare_ASCII():
    """
    >>> compare_ASCII()
    True
    False
    False
    """
    print(char_ASCII == u'A')
    print(char_ASCII == u'B')
    print(char_ASCII == u'\uF8D2')


def compare_klingon():
    """
    >>> compare_klingon()
    True
    False
    False
    """
    print(char_KLINGON == u'\uF8D2')
    print(char_KLINGON == u'A')
    print(char_KLINGON == u'B')


def single_uchar_compare():
    """
    >>> single_uchar_compare()
    """
    assert u'\u0100' < u'\u0101'
    assert u'\u0101' > u'\u0100'


from cpython.unicode cimport PyUnicode_FromOrdinal
import sys

u0 = u'\x00'
u1 = u'\x01'
umax = PyUnicode_FromOrdinal(sys.maxunicode)

def unicode_ordinal(Py_UCS4 i):
    """
    >>> ord(unicode_ordinal(0)) == 0
    True
    >>> ord(unicode_ordinal(1)) == 1
    True
    >>> ord(unicode_ordinal(sys.maxunicode)) == sys.maxunicode
    True

    >>> ord(unicode_ordinal(u0)) == 0
    True
    >>> ord(unicode_ordinal(u1)) == 1
    True
    >>> ord(unicode_ordinal(umax)) == sys.maxunicode
    True

    Value too small:
    >>> unicode_ordinal(-1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    Value too large:
    >>> unicode_ordinal(1114111+1) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    OverflowError: ...

    Less than one character:
    >>> unicode_ordinal(u0[:0])
    Traceback (most recent call last):
    ...
    ValueError: only single character unicode strings can be converted to Py_UCS4, got length 0

    More than one character:
    >>> unicode_ordinal(u0+u1)
    Traceback (most recent call last):
    ...
    ValueError: only single character unicode strings can be converted to Py_UCS4, got length 2
    """
    return i


def ord_py_ucs4(Py_UCS4 x):
    """
    >>> ord_py_ucs4(u0)
    0
    >>> ord_py_ucs4(u_A)
    65
    >>> ord_py_ucs4(u_KLINGON)
    63698
    """
    return ord(x)


@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_type_methods(Py_UCS4 uchar):
    """
    >>> unicode_type_methods(ord('A'))
    [True, True, False, False, False, False, False, True, True]
    >>> unicode_type_methods(ord('a'))
    [True, True, False, False, True, False, False, False, False]
    >>> unicode_type_methods(ord('8'))
    [True, False, True, True, False, True, False, False, False]
    >>> unicode_type_methods(ord('\\t'))
    [False, False, False, False, False, False, True, False, False]
    """
    return [
        # character types
        uchar.isalnum(),
        uchar.isalpha(),
        uchar.isdecimal(),
        uchar.isdigit(),
        uchar.islower(),
        uchar.isnumeric(),
        uchar.isspace(),
        uchar.istitle(),
        uchar.isupper(),
        ]

@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_methods(Py_UCS4 uchar):
    """
    >>> unicode_methods(ord('A')) == ['a', 'A', 'A']
    True
    >>> unicode_methods(ord('a')) == ['a', 'A', 'A']
    True
    """
    return [
        # character conversion
        uchar.lower(),
        uchar.upper(),
        uchar.title(),
        ]


@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists(
    '//SimpleCallNode',
    '//CoerceFromPyTypeNode',
)
def unicode_method_return_type(Py_UCS4 uchar):
    """
    >>> unicode_method_return_type(ord('A'))
    [True, False]
    >>> unicode_method_return_type(ord('a'))
    [False, True]
    """
    cdef Py_UCS4 uc, ul
    uc, ul = uchar.upper(), uchar.lower()
    return [uc == uchar, ul == uchar]


@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//PythonCapiCallNode')
def len_uchar(Py_UCS4 uchar):
    """
    >>> len_uchar(ord('A'))
    1
    """
    return len(uchar)

def index_uchar(Py_UCS4 uchar, Py_ssize_t i):
    """
    >>> index_uchar(ord('A'), 0) == ('A', 'A', 'A')
    True
    >>> index_uchar(ord('A'), -1) == ('A', 'A', 'A')
    True
    >>> index_uchar(ord('A'), 1)
    Traceback (most recent call last):
    IndexError: string index out of range
    """
    return uchar[0], uchar[-1], uchar[i]

mixed_ustring = u'AbcDefGhIjKlmnoP'
lower_ustring = mixed_ustring.lower()
upper_ustring = mixed_ustring.upper()

@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//ForInStatNode')
def count_lower_case_characters(unicode ustring):
    """
    >>> count_lower_case_characters(mixed_ustring)
    10
    >>> count_lower_case_characters(lower_ustring)
    16
    """
    cdef Py_ssize_t count = 0
    for uchar in ustring:
         if uchar.islower():
             count += 1
    return count

@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//ForInStatNode')
def count_lower_case_characters_slice(unicode ustring):
    """
    >>> count_lower_case_characters_slice(mixed_ustring)
    10
    >>> count_lower_case_characters_slice(lower_ustring)
    14
    >>> sum([ 1 for uchar in lower_ustring[1:-1] if uchar.islower() ])
    14
    """
    cdef Py_ssize_t count = 0
    for uchar in ustring[1:-1]:
         if uchar.islower():
             count += 1
    return count

@cython.test_assert_path_exists('//PythonCapiCallNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
                                 '//ForInStatNode')
def count_lower_case_characters_slice_reversed(unicode ustring):
    """
    >>> count_lower_case_characters_slice_reversed(mixed_ustring)
    10
    >>> count_lower_case_characters_slice_reversed(lower_ustring)
    14
    >>> sum([ 1 for uchar in lower_ustring[-2:0:-1] if uchar.islower() ])
    14
    """
    cdef Py_ssize_t count = 0
    for uchar in ustring[-2:0:-1]:
         if uchar.islower():
             count += 1
    return count

def loop_object_over_latin1_unicode_literal():
    """
    >>> result = loop_object_over_latin1_unicode_literal()
    >>> print(result[:-1])
    abcdefg
    >>> ord(result[-1]) == 0xD7
    True
    """
    cdef object uchar
    chars = []
    for uchar in u'abcdefg\xD7':
        chars.append(uchar)
    return u''.join(chars)

def loop_object_over_unicode_literal():
    """
    >>> result = loop_object_over_unicode_literal()
    >>> print(result[:-1])
    abcdefg
    >>> ord(result[-1]) == 0xF8FD
    True
    """
    cdef object uchar
    chars = []
    for uchar in u'abcdefg\uF8FD':
        chars.append(uchar)
    return u''.join(chars)

@cython.test_assert_path_exists('//SwitchStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def iter_and_in():
    """
    >>> iter_and_in()
    a
    b
    e
    f
    h
    """
    for c in u'abcdefgh':
        if c in u'abCDefGh':
            print c


@cython.test_fail_if_path_exists('//ForInStatNode')
def iter_inferred():
    """
    >>> iter_inferred()
    a
    b
    c
    d
    e
    """
    uchars = list(u"abcde")
    uchars = u''.join(uchars)
    for c in uchars:
        print c


@cython.test_assert_path_exists('//SwitchStatNode',
                                '//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def index_and_in():
    """
    >>> index_and_in()
    1
    3
    4
    7
    8
    """
    cdef int i
    for i in range(1,9):
        if u'abcdefgh'[-i] in u'abCDefGh':
            print i

# special test for narrow builds

high_uchar = u'\U00012345'
high_ustring0 = u'\U00012345\U00012346abc'
high_ustring1 = u'\U00012346\U00012345abc'
high_ustring_end = u'\U00012346abc\U00012344\U00012345'
high_ustring_no = u'\U00012346\U00012346abc'

def uchar_in(Py_UCS4 uchar, unicode ustring):
    """
    >>> uchar_in(high_uchar, high_ustring0)
    True
    >>> uchar_in(high_uchar, high_ustring1)
    True
    >>> uchar_in(high_uchar, high_ustring_end)
    True
    >>> uchar_in(high_uchar, high_ustring_no)
    False
    """
    assert uchar == 0x12345, ('%X' % uchar)
    return uchar in ustring


def uchar_lookup_in_dict(obj, Py_UCS4 uchar):
    """
    >>> d = {u_KLINGON: 1234, u0: 0, u1: 1, u_A: 2}
    >>> uchar_lookup_in_dict(d, u_KLINGON)
    (1234, 1234)
    >>> uchar_lookup_in_dict(d, u_A)
    (2, 2)
    >>> uchar_lookup_in_dict(d, u0)
    (0, 0)
    >>> uchar_lookup_in_dict(d, u1)
    (1, 1)
    """
    cdef dict d = obj
    dval = d[uchar]
    objval = obj[uchar]
    return dval, objval


_WARNINGS = """
364:16: Item lookup of unicode character codes now always converts to a Unicode string. Use an explicit C integer cast to get back the previous integer lookup behaviour.
"""
Cython-0.26.1/tests/run/builtin_pow.pyx0000664000175000017500000000075512542002467020704 0ustar  stefanstefan00000000000000
def pow3(a,b,c):
    """
    >>> pow3(2,3,5)
    3
    >>> pow3(3,3,5)
    2
    """
    return pow(a,b,c)

def pow3_const():
    """
    >>> pow3_const()
    3
    """
    return pow(2,3,5)

def pow2(a,b):
    """
    >>> pow2(2,3)
    8
    >>> pow2(3,3)
    27
    """
    return pow(a,b)

def pow2_const():
    """
    >>> pow2_const()
    8
    """
    return pow(2,3)

def pow_args(*args):
    """
    >>> pow_args(2,3)
    8
    >>> pow_args(2,3,5)
    3
    """
    return pow(*args)
Cython-0.26.1/tests/run/r_mitch_chapman_2.pyx0000664000175000017500000000027012542002467021676 0ustar  stefanstefan00000000000000def boolExpressionsFail():
    """
    >>> boolExpressionsFail()
    'Not 2b'
    """
    dict = {1: 1}
    if not "2b" in dict:
        return "Not 2b"
    else:
        return "2b?"
Cython-0.26.1/tests/wrappers/0000775000175000017500000000000013151203436016633 5ustar  stefanstefan00000000000000Cython-0.26.1/tests/wrappers/cpp_overload_wrapper.pyx0000664000175000017500000000273612542002467023626 0ustar  stefanstefan00000000000000# tag: cpp

cimport cpp_overload_wrapper_lib as cppwrap_lib

cdef class DoubleKeeper:
    """
    >>> d = DoubleKeeper()
    >>> d.get_number()
    1.0
    >>> d.set_number(5.5)
    >>> d.get_number()
    5.5
    >>> d.set_number(0)
    >>> d.get_number()
    0.0
    """
    cdef cppwrap_lib.DoubleKeeper* keeper

    def __cinit__(self, number=None):
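        # The wrapped C++ class declares both a default and a double-argument
        # constructor; pick the matching overload based on whether a number
        # was passed.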
        if number is None:
            self.keeper = new cppwrap_lib.DoubleKeeper()
        else:
            self.keeper = new cppwrap_lib.DoubleKeeper(number)

    def __dealloc__(self):
        del self.keeper

    def set_number(self, number=None):
        if number is None:
            self.keeper.set_number()
        else:
            self.keeper.set_number(number)

    def get_number(self):
        return self.keeper.get_number()

    def transmogrify(self, double value):
        """
        >>> d = DoubleKeeper(5.5)
        >>> d.transmogrify(1.0)
        5.5
        >>> d.transmogrify(2.0)
        11.0
        """
        return self.keeper.transmogrify(value)


def voidfunc():
    """
    >>> voidfunc()
    """
    cppwrap_lib.voidfunc()

def doublefunc(double x, double y, double z):
    """
    >>> doublefunc(1.0, 2.0, 3.0) == 1.0 + 2.0 + 3.0
    True
    """
    return cppwrap_lib.doublefunc(x, y, z)

def transmogrify_from_cpp(DoubleKeeper obj not None, double value):
    """
    >>> d = DoubleKeeper(2.0)
    >>> d.transmogrify(3.0) == 6.0
    True
    """
    return cppwrap_lib.transmogrify_from_cpp(obj.keeper, value)
Cython-0.26.1/tests/wrappers/cpp_overload_wrapper_lib.h0000664000175000017500000000077412542002467024063 0ustar  stefanstefan00000000000000#ifndef CPP_OVERLOAD_WRAPPER_LIB_H
#define CPP_OVERLOAD_WRAPPER_LIB_H
void voidfunc(void);

double doublefunc (double a, double b, double c);


class DoubleKeeper
{
    double number;

public:
    DoubleKeeper ();
    DoubleKeeper (double number);
    virtual ~DoubleKeeper ();

    void set_number (double num);
    void set_number (void);
    double get_number () const;
    virtual double transmogrify (double value) const;
};

double transmogrify_from_cpp (DoubleKeeper const *obj, double value);
#endif
Cython-0.26.1/tests/wrappers/cppwrap.pyx0000664000175000017500000000225312542002467021057 0ustar  stefanstefan00000000000000# tag: cpp

cimport cppwrap_lib

cdef class DoubleKeeper:
    """
    >>> d = DoubleKeeper(1.0)
    >>> d.get_number() == 1.0
    True
    >>> d.get_number() == 2.0
    False
    >>> d.set_number(2.0)
    >>> d.get_number() == 2.0
    True
    >>> d.transmogrify(3.0) == 6.0
    True
    """
    cdef cppwrap_lib.DoubleKeeper* keeper

    def __cinit__(self, double number):
        self.keeper = new cppwrap_lib.DoubleKeeper(number)

    def __dealloc__(self):
        del self.keeper

    def set_number(self, double number):
        self.keeper.set_number(number)

    def get_number(self):
        return self.keeper.get_number()

    def transmogrify(self, double value):
        return self.keeper.transmogrify(value)


def voidfunc():
    """
    >>> voidfunc()
    """
    cppwrap_lib.voidfunc()

def doublefunc(double x, double y, double z):
    """
    >>> doublefunc(1.0, 2.0, 3.0) == 1.0 + 2.0 + 3.0
    True
    """
    return cppwrap_lib.doublefunc(x, y, z)

def transmogrify_from_cpp(DoubleKeeper obj not None, double value):
    """
    >>> d = DoubleKeeper(2.0)
    >>> d.transmogrify(3.0) == 6.0
    True
    """
    return cppwrap_lib.transmogrify_from_cpp(obj.keeper, value)
Cython-0.26.1/tests/wrappers/cppwrap_lib.h0000664000175000017500000000066112542002467021315 0ustar  stefanstefan00000000000000#ifndef CPPWRAP_LIB_H
#define CPPWRAP_LIB_H
void voidfunc(void);

double doublefunc (double a, double b, double c);


class DoubleKeeper
{
    double number;

public:
    DoubleKeeper (double number);
    virtual ~DoubleKeeper ();

    void set_number (double num);
    double get_number () const;
    virtual double transmogrify (double value) const;
};

double transmogrify_from_cpp (DoubleKeeper const *obj, double value);
#endif
Cython-0.26.1/tests/wrappers/cppwrap_lib.pxd0000664000175000017500000000061012542002467021653 0ustar  stefanstefan00000000000000cdef extern from "cppwrap_lib.cpp":
    pass
cdef extern from "cppwrap_lib.h":
    void voidfunc()
    double doublefunc(double a, double b, double c)

    cdef cppclass DoubleKeeper:
        DoubleKeeper(double factor)
        void set_number(double f)
        double get_number()
        double transmogrify(double value)

    double transmogrify_from_cpp (DoubleKeeper *obj, double value)
Cython-0.26.1/tests/wrappers/cpp_overload_wrapper_lib.pxd0000664000175000017500000000072312542002467024421 0ustar  stefanstefan00000000000000cdef extern from "cpp_overload_wrapper_lib.cpp":
    pass
cdef extern from "cpp_overload_wrapper_lib.h":
    void voidfunc()
    double doublefunc(double a, double b, double c)

    cdef cppclass DoubleKeeper:
        DoubleKeeper()
        DoubleKeeper(double factor)
        void set_number()
        void set_number(double f)
        double get_number()
        double transmogrify(double value)

    double transmogrify_from_cpp (DoubleKeeper *obj, double value)
Cython-0.26.1/tests/wrappers/cppwrap_lib.cpp0000664000175000017500000000106012542002467021642 0ustar  stefanstefan00000000000000
#include "cppwrap_lib.h"

void voidfunc (void)
{
}

double doublefunc (double a, double b, double c)
{
    return a + b + c;
}

DoubleKeeper::DoubleKeeper (double factor)
    : number (factor)
{
}

DoubleKeeper::~DoubleKeeper ()
{
}

double DoubleKeeper::get_number () const
{
    return number;
}

void DoubleKeeper::set_number (double f)
{
    number = f;
}

double
DoubleKeeper::transmogrify (double value) const
{
    return value*number;
}


double
transmogrify_from_cpp (DoubleKeeper const *obj, double value)
{
    return obj->transmogrify (value);
}

Cython-0.26.1/tests/wrappers/cpp_references_helper.h0000664000175000017500000000013412542002467023330 0ustar  stefanstefan00000000000000
int ref_var_value = 10;
int& ref_var = ref_var_value;

int& ref_func(int& x) { return x; }
Cython-0.26.1/tests/wrappers/cpp_references.pyx0000664000175000017500000000240513023021033022346 0ustar  stefanstefan00000000000000# tag: cpp

cimport cython


cdef extern from "cpp_references_helper.h":
    cdef int& ref_func(int&)
    cdef int& except_ref_func "ref_func" (int&) except +

    cdef int ref_var_value
    cdef int& ref_var


def test_ref_func(int x):
    """
    >>> test_ref_func(2)
    2
    >>> test_ref_func(3)
    3
    """
    return ref_func(x)

def test_ref_func_address(int x):
    """
    >>> test_ref_func_address(5)
    5
    >>> test_ref_func_address(7)
    7
    """
    cdef int* i_ptr = &ref_func(x)
    return i_ptr[0]

def test_except_ref_func_address(int x):
    """
    >>> test_except_ref_func_address(5)
    5
    >>> test_except_ref_func_address(7)
    7
    """
    cdef int* i_ptr = &except_ref_func(x)
    return i_ptr[0]

def test_ref_var(int x):
    """
    >>> test_ref_var(11)
    11
    >>> test_ref_var(13)
    13
    """
    ref_var = x
    return ref_var_value

def test_ref_assign(int x):
    """
    >>> test_ref_assign(17)
    17.0
    >>> test_ref_assign(19)
    19.0
    """
    cdef double d = ref_func(x)
    return d

@cython.infer_types(True)
def test_ref_inference(int x):
    """
    >>> test_ref_inference(23)
    23
    >>> test_ref_inference(29)
    29
    """
    z = ref_func(x)
    assert cython.typeof(z) == "int", cython.typeof(z)
    return z
Cython-0.26.1/tests/wrappers/cpp_overload_wrapper_lib.cpp0000664000175000017500000000125412542002467024410 0ustar  stefanstefan00000000000000
#include "cpp_overload_wrapper_lib.h"

void voidfunc (void)
{
}

double doublefunc (double a, double b, double c)
{
    return a + b + c;
}


DoubleKeeper::DoubleKeeper ()
    : number (1.0)
{
}

DoubleKeeper::DoubleKeeper (double factor)
    : number (factor)
{
}

DoubleKeeper::~DoubleKeeper ()
{
}

double DoubleKeeper::get_number () const
{
    return number;
}

void DoubleKeeper::set_number (double f)
{
    number = f;
}

void DoubleKeeper::set_number ()
{
    number = 1.0;
}

double
DoubleKeeper::transmogrify (double value) const
{
    return value*number;
}


double
transmogrify_from_cpp (DoubleKeeper const *obj, double value)
{
    return obj->transmogrify (value);
}

Cython-0.26.1/tests/memoryview/0000775000175000017500000000000013151203436017173 5ustar  stefanstefan00000000000000Cython-0.26.1/tests/memoryview/view_return_errors.pyx0000664000175000017500000000133012542002467023703 0ustar  stefanstefan00000000000000# mode: run
# tag: memoryview


cdef double[:] foo(int i):
    if i == 1:
        raise AttributeError('dummy')
    if i == 2:
        raise RuntimeError('dummy')
    if i == 3:
        raise ValueError('dummy')
    if i == 4:
        raise TypeError('dummy')


def propagate(i):
    """
    >>> propagate(0)
    TypeError('Memoryview return value is not initialized')
    >>> propagate(1)
    AttributeError('dummy')
    >>> propagate(2)
    RuntimeError('dummy')
    >>> propagate(3)
    ValueError('dummy')
    >>> propagate(4)
    TypeError('dummy')
    """
    try:
        foo(i)
    except Exception as e:
        print '%s(%r)' % (e.__class__.__name__, e.args[0])
    else:
        print 'Exception subclass not raised'
Cython-0.26.1/tests/memoryview/memoryview.pyx0000664000175000017500000006044613023021033022137 0ustar  stefanstefan00000000000000# mode: run

u'''
>>> f()
>>> g()
>>> call()
>>> assignmvs()
'''

from cython.view cimport memoryview, array
from cython cimport view

from cpython.object cimport PyObject
from cpython.ref cimport Py_INCREF, Py_DECREF
cimport cython

cdef extern from "Python.h":
    cdef int PyBUF_C_CONTIGUOUS

include "../buffers/mockbuffers.pxi"

#
### Test for some coercions
#
def init_obj():
    return 3

cdef passmvs(float[:,::1] mvs, object foo):
    mvs = array((10,10), itemsize=sizeof(float), format='f')
    foo = init_obj()

cdef object returnobj():
    cdef obj = object()
    return obj

cdef float[::1] returnmvs_inner():
    return array((10,), itemsize=sizeof(float), format='f')

cdef float[::1] returnmvs():
    cdef float[::1] mvs = returnmvs_inner()
    return mvs

def f():
    cdef array arr = array(shape=(10,10), itemsize=sizeof(int), format='i')
    cdef memoryview mv = memoryview(arr, PyBUF_C_CONTIGUOUS)

def g():
    cdef object obj = init_obj()
    cdef int[::1] mview = array((10,), itemsize=sizeof(int), format='i')
    obj = init_obj()
    mview = array((10,), itemsize=sizeof(int), format='i')

cdef class ExtClass(object):
    cdef int[::1] mview

    def __init__(self):
        self.mview = array((10,), itemsize=sizeof(int), format='i')
        self.mview = array((10,), itemsize=sizeof(int), format='i')

class PyClass(object):

    def __init__(self):
        self.mview = array((10,), itemsize=sizeof(long), format='l')

cdef cdg():
    cdef double[::1] dmv = array((10,), itemsize=sizeof(double), format='d')
    dmv = array((10,), itemsize=sizeof(double), format='d')

cdef class TestExcClassExternalDtype(object):
    cdef ext_dtype[:, :] arr_float
    cdef td_h_double[:, :] arr_double

    def __init__(self):
        self.arr_float = array((10, 10), itemsize=sizeof(ext_dtype), format='f')
        self.arr_float[:] = 0.0
        self.arr_float[4, 4] = 2.0

        self.arr_double = array((10, 10), itemsize=sizeof(td_h_double), format='d')
        self.arr_double[:] = 0.0
        self.arr_double[4, 4] = 2.0

def test_external_dtype():
    """
    >>> test_external_dtype()
    2.0
    2.0
    """
    cdef TestExcClassExternalDtype obj = TestExcClassExternalDtype()
    print obj.arr_float[4, 4]
    print obj.arr_double[4, 4]


cdef class ExtClassMockedAttr(object):
    cdef int[:, :] arr

    def __init__(self):
        self.arr = IntMockBuffer("self.arr", range(100), (10, 8))
        self.arr[:] = 0
        self.arr[4, 4] = 2

cdef int[:, :] _coerce_to_temp():
    cdef ExtClassMockedAttr obj = ExtClassMockedAttr()
    return obj.arr

def test_coerce_to_temp():
    """
    >>> test_coerce_to_temp()
    acquired self.arr
    released self.arr
    <BLANKLINE>
    acquired self.arr
    released self.arr
    <BLANKLINE>
    acquired self.arr
    released self.arr
    2
    <BLANKLINE>
    acquired self.arr
    released self.arr
    2
    <BLANKLINE>
    acquired self.arr
    released self.arr
    2
    """
    _coerce_to_temp()[:] = 0
    print
    _coerce_to_temp()[...] = 0
    print
    print _coerce_to_temp()[4, 4]
    print
    print _coerce_to_temp()[..., 4][4]
    print
    print _coerce_to_temp()[4][4]

def test_extclass_attribute_dealloc():
    """
    >>> test_extclass_attribute_dealloc()
    acquired self.arr
    2
    released self.arr
    """
    cdef ExtClassMockedAttr obj = ExtClassMockedAttr()
    print obj.arr[4, 4]

cdef float[:,::1] global_mv = array((10,10), itemsize=sizeof(float), format='f')
global_mv = array((10,10), itemsize=sizeof(float), format='f')
cdef object global_obj

def assignmvs():
    cdef int[::1] mv1, mv2
    cdef int[:] mv3
    mv1 = array((10,), itemsize=sizeof(int), format='i')
    mv2 = mv1
    mv1 = mv2
    mv3 = mv2

def call():
    global global_mv
    passmvs(global_mv, global_obj)
    global_mv = array((3,3), itemsize=sizeof(float), format='f')
    cdef float[::1] getmvs = returnmvs()
    returnmvs()
    cdef object obj = returnobj()
    cdg()
    f = ExtClass()
    pf = PyClass()

cdef ExtClass get_ext_obj():
    print 'get_ext_obj called'
    return ExtClass.__new__(ExtClass)

def test_cdef_attribute():
    """
    >>> test_cdef_attribute()
    Memoryview is not initialized
    local variable 'myview' referenced before assignment
    local variable 'myview' referenced before assignment
    get_ext_obj called
    Memoryview is not initialized
    <MemoryView of 'array' object>
    """
    cdef ExtClass extobj = ExtClass.__new__(ExtClass)
    try:
        print extobj.mview
    except AttributeError, e:
        print e.args[0]
    else:
        print "No AttributeError was raised"

    cdef int[:] myview
    try:
        print myview
    except UnboundLocalError, e:
        print e.args[0]
    else:
        print "No UnboundLocalError was raised"

    cdef int[:] otherview
    try:
         otherview = myview
    except UnboundLocalError, e:
        print e.args[0]

    try:
        print get_ext_obj().mview
    except AttributeError, e:
        print e.args[0]
    else:
        print "No AttributeError was raised"

    print ExtClass().mview

@cython.boundscheck(False)
def test_nogil_unbound_localerror():
    """
    >>> test_nogil_unbound_localerror()
    Traceback (most recent call last):
        ...
    UnboundLocalError: local variable 'm' referenced before assignment
    """
    cdef int[:] m
    with nogil:
        m[0] = 10

def test_nogil_oob():
    """
    >>> test_nogil_oob()
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    """
    cdef int[5] a
    cdef int[:] m = a
    with nogil:
        m[5] = 1

def basic_struct(MyStruct[:] mslice):
    """
    See also buffmt.pyx

    >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
    >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ccqii"))
    [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
    """
    buf = mslice
    print sorted([(k, int(v)) for k, v in buf[0].items()])

def nested_struct(NestedStruct[:] mslice):
    """
    See also buffmt.pyx

    >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    1 2 3 4 5
    >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="T{ii}T{2i}i"))
    1 2 3 4 5
    """
    buf = mslice
    d = buf[0]
    print d['x']['a'], d['x']['b'], d['y']['a'], d['y']['b'], d['z']

def packed_struct(PackedStruct[:] mslice):
    """
    See also buffmt.pyx

    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)]))
    1 2
    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c^i}"))
    1 2
    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c=i}"))
    1 2

    """
    buf = mslice
    print buf[0]['a'], buf[0]['b']

def nested_packed_struct(NestedPackedStruct[:] mslice):
    """
    See also buffmt.pyx

    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    1 2 3 4 5
    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ci^ci@i"))
    1 2 3 4 5
    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="^c@i^ci@i"))
    1 2 3 4 5
    """
    buf = mslice
    d = buf[0]
    print d['a'], d['b'], d['sub']['a'], d['sub']['b'], d['c']


def complex_dtype(long double complex[:] mslice):
    """
    >>> complex_dtype(LongComplexMockBuffer(None, [(0, -1)]))
    -1j
    """
    buf = mslice
    print buf[0]

def complex_inplace(long double complex[:] mslice):
    """
    >>> complex_inplace(LongComplexMockBuffer(None, [(0, -1)]))
    (1+1j)
    """
    buf = mslice
    buf[0] = buf[0] + 1 + 2j
    print buf[0]

def complex_struct_dtype(LongComplex[:] mslice):
    """
    Note that the format string is "Zg" rather than "2g", yet a struct
    is accessed.
    >>> complex_struct_dtype(LongComplexMockBuffer(None, [(0, -1)]))
    0.0 -1.0
    """
    buf = mslice
    print buf[0]['real'], buf[0]['imag']

#
# Getting items and index bounds checking
#
def get_int_2d(int[:, :] mslice, int i, int j):
    """
    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> get_int_2d(C, 1, 1)
    acquired C
    released C
    4

    Check negative indexing:
    >>> get_int_2d(C, -1, 0)
    acquired C
    released C
    3
    >>> get_int_2d(C, -1, -2)
    acquired C
    released C
    4
    >>> get_int_2d(C, -2, -3)
    acquired C
    released C
    0

    Out-of-bounds errors:
    >>> get_int_2d(C, 2, 0)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    >>> get_int_2d(C, 0, -4)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 1)
    """
    buf = mslice
    return buf[i, j]

def set_int_2d(int[:, :] mslice, int i, int j, int value):
    """
    Uses get_int_2d to read back the value afterwards. For a pure
    unit test, one would need to support reading from MockBuffer instead.

    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> set_int_2d(C, 1, 1, 10)
    acquired C
    released C
    >>> get_int_2d(C, 1, 1)
    acquired C
    released C
    10

    Check negative indexing:
    >>> set_int_2d(C, -1, 0, 3)
    acquired C
    released C
    >>> get_int_2d(C, -1, 0)
    acquired C
    released C
    3

    >>> set_int_2d(C, -1, -2, 8)
    acquired C
    released C
    >>> get_int_2d(C, -1, -2)
    acquired C
    released C
    8

    >>> set_int_2d(C, -2, -3, 9)
    acquired C
    released C
    >>> get_int_2d(C, -2, -3)
    acquired C
    released C
    9

    Out-of-bounds errors:
    >>> set_int_2d(C, 2, 0, 19)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    >>> set_int_2d(C, 0, -4, 19)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 1)

    """
    buf = mslice
    buf[i, j] = value


#
# Test all kinds of indexing and flags
#

def writable(unsigned short int[:, :, :] mslice):
    """
    >>> R = UnsignedShortMockBuffer("R", range(27), shape=(3, 3, 3))
    >>> writable(R)
    acquired R
    released R
    >>> [str(x) for x in R.recieved_flags] # Py2/3
    ['FORMAT', 'ND', 'STRIDES', 'WRITABLE']
    """
    buf = mslice
    buf[2, 2, 1] = 23

def strided(int[:] mslice):
    """
    >>> A = IntMockBuffer("A", range(4))
    >>> strided(A)
    acquired A
    released A
    2

    Check that the suboffsets were patched back prior to release.
    >>> A.release_ok
    True
    """
    buf = mslice
    return buf[2]

def c_contig(int[::1] mslice):
    """
    >>> A = IntMockBuffer(None, range(4))
    >>> c_contig(A)
    2
    """
    buf = mslice
    return buf[2]

def c_contig_2d(int[:, ::1] mslice):
    """
    Multi-dim has a separate implementation

    >>> A = IntMockBuffer(None, range(12), shape=(3,4))
    >>> c_contig_2d(A)
    7
    """
    buf = mslice
    return buf[1, 3]

def f_contig(int[::1, :] mslice):
    """
    >>> A = IntMockBuffer(None, range(4), shape=(2, 2), strides=(1, 2))
    >>> f_contig(A)
    2
    """
    buf = mslice
    return buf[0, 1]

def f_contig_2d(int[::1, :] mslice):
    """
    Must set up strides manually to ensure Fortran ordering.

    >>> A = IntMockBuffer(None, range(12), shape=(4,3), strides=(1, 4))
    >>> f_contig_2d(A)
    7
    """
    buf = mslice
    return buf[3, 1]

def generic(int[::view.generic, ::view.generic] mslice1,
            int[::view.generic, ::view.generic] mslice2):
    """
    >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
    >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
    >>> generic(A, B)
    acquired A
    acquired B
    4
    4
    10
    11
    released A
    released B
    """
    buf1, buf2 = mslice1, mslice2

    print buf1[1, 1]
    print buf2[1, 1]

    buf1[2, -1] = 10
    buf2[2, -1] = 11

    print buf1[2, 2]
    print buf2[2, 2]

#def generic_contig(int[::view.generic_contiguous, :] mslice1,
#                   int[::view.generic_contiguous, :] mslice2):
#    """
#    >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
#    >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
#    >>> generic_contig(A, B)
#    acquired A
#    acquired B
#    4
#    4
#    10
#    11
#    released A
#    released B
#    """
#    buf1, buf2 = mslice1, mslice2
#
#    print buf1[1, 1]
#    print buf2[1, 1]
#
#    buf1[2, -1] = 10
#    buf2[2, -1] = 11
#
#    print buf1[2, 2]
#    print buf2[2, 2]

ctypedef int td_cy_int
cdef extern from "bufaccess.h":
    ctypedef td_cy_int td_h_short # Defined as short, but Cython doesn't know this!
    ctypedef float td_h_double # Defined as double
    ctypedef unsigned int td_h_ushort # Defined as unsigned short
ctypedef td_h_short td_h_cy_short

def printbuf_td_cy_int(td_cy_int[:] mslice, shape):
    """
    >>> printbuf_td_cy_int(IntMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_cy_int(ShortMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_cy_int' but got 'short'
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

def printbuf_td_h_short(td_h_short[:] mslice, shape):
    """
    >>> printbuf_td_h_short(ShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_short(IntMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_short' but got 'int'
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

def printbuf_td_h_cy_short(td_h_cy_short[:] mslice, shape):
    """
    >>> printbuf_td_h_cy_short(ShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_cy_short(IntMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_cy_short' but got 'int'
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

def printbuf_td_h_ushort(td_h_ushort[:] mslice, shape):
    """
    >>> printbuf_td_h_ushort(UnsignedShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_ushort(ShortMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_ushort' but got 'short'
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

def printbuf_td_h_double(td_h_double[:] mslice, shape):
    """
    >>> printbuf_td_h_double(DoubleMockBuffer(None, [0.25, 1, 3.125]), (3,))
    0.25 1.0 3.125 END
    >>> printbuf_td_h_double(FloatMockBuffer(None, [0.25, 1, 3.125]), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_double' but got 'float'
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

#
# Object access
#
def addref(*args):
    for item in args: Py_INCREF(item)
def decref(*args):
    for item in args: Py_DECREF(item)

def get_refcount(x):
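    # Read ob_refcnt directly through a PyObject* cast; unlike
    # sys.getrefcount(), this does not add a temporary reference for the
    # call argument.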
    return (<PyObject*>x).ob_refcnt

def printbuf_object(object[:] mslice, shape):
    """
    Only play with unique objects, interned numbers etc. will have
    unpredictable refcounts.

    ObjectMockBuffer doesn't do anything about increfing/decrefing,
    we do the "buffer implementor" refcounting directly in the
    testcase.

    >>> a, b, c = "globally_unique_string_23234123", {4:23}, [34,3]
    >>> get_refcount(a), get_refcount(b), get_refcount(c)
    (2, 2, 2)
    >>> A = ObjectMockBuffer(None, [a, b, c])
    >>> printbuf_object(A, (3,))
    'globally_unique_string_23234123' 2
    {4: 23} 2
    [34, 3] 2
    """
    buf = mslice
    cdef int i
    for i in range(shape[0]):
        print repr(buf[i]), (<PyObject*>buf[i]).ob_refcnt

def assign_to_object(object[:] mslice, int idx, obj):
    """
    See comments on printbuf_object above.

    >>> a, b = [1, 2, 3], [4, 5, 6]
    >>> get_refcount(a), get_refcount(b)
    (2, 2)
    >>> addref(a)
    >>> A = ObjectMockBuffer(None, [1, a]) # 1, ...,otherwise it thinks nested lists...
    >>> get_refcount(a), get_refcount(b)
    (3, 2)
    >>> assign_to_object(A, 1, b)
    >>> get_refcount(a), get_refcount(b)
    (2, 3)
    >>> decref(b)
    """
    buf = mslice
    buf[idx] = obj

def assign_temporary_to_object(object[:] mslice):
    """
    See comments on printbuf_object above.

    >>> a, b = [1, 2, 3], {4:23}
    >>> get_refcount(a)
    2
    >>> addref(a)
    >>> A = ObjectMockBuffer(None, [b, a])
    >>> get_refcount(a)
    3
    >>> assign_temporary_to_object(A)
    >>> get_refcount(a)
    2

    >>> printbuf_object(A, (2,))
    {4: 23} 2
    {1: 8} 2

    To avoid leaking a reference in our testcase we need to
    replace the temporary with something we can manually decref :-)
    >>> assign_to_object(A, 1, a)
    >>> decref(a)
    """
    buf = mslice
    buf[1] = {3-2: 2+(2*4)-2}

def test_generic_slicing(arg, indirect=False):
    """
    Test simple slicing
    >>> test_generic_slicing(IntMockBuffer("A", range(8 * 14 * 11), shape=(8, 14, 11)))
    acquired A
    (3, 9, 2)
    308 -11 1
    -1 -1 -1
    released A

    Test direct slicing, negative slice oob in dim 2
    >>> test_generic_slicing(IntMockBuffer("A", range(1 * 2 * 3), shape=(1, 2, 3)))
    acquired A
    (0, 0, 2)
    12 -3 1
    -1 -1 -1
    released A

    Test indirect slicing
    >>> test_generic_slicing(IntMockBuffer("A", shape_5_3_4_list, shape=(5, 3, 4)), indirect=True)
    acquired A
    (2, 0, 2)
    0 1 -1
    released A

    >>> stride1 = 21 * 14
    >>> stride2 = 21
    >>> test_generic_slicing(IntMockBuffer("A", shape_9_14_21_list, shape=(9, 14, 21)), indirect=True)
    acquired A
    (3, 9, 2)
    10 1 -1
    released A

    """
    cdef int[::view.generic, ::view.generic, :] _a = arg
    a = _a
    b = a[2:8:2, -4:1:-1, 1:3]

    print b.shape

    if indirect:
        print b.suboffsets[0] // sizeof(int *),
        print b.suboffsets[1] // sizeof(int),
        print b.suboffsets[2]
    else:
        print_int_offsets(b.strides[0], b.strides[1], b.strides[2])
        print_int_offsets(b.suboffsets[0], b.suboffsets[1], b.suboffsets[2])

    cdef int i, j, k
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            for k in range(b.shape[2]):
                itemA = a[2 + 2 * i, -4 - j, 1 + k]
                itemB = b[i, j, k]
                assert itemA == itemB, (i, j, k, itemA, itemB)

def test_indirect_slicing(arg):
    """
    Test indirect slicing
    >>> test_indirect_slicing(IntMockBuffer("A", shape_5_3_4_list, shape=(5, 3, 4)))
    acquired A
    (5, 3, 2)
    0 0 -1
    58
    56
    58
    58
    58
    58
    released A

    >>> test_indirect_slicing(IntMockBuffer("A", shape_9_14_21_list, shape=(9, 14, 21)))
    acquired A
    (5, 14, 3)
    0 16 -1
    2412
    2410
    2412
    2412
    2412
    2412
    released A
    """
    cdef int[::view.indirect, ::view.indirect, :] _a = arg
    a = _a
    b = a[-5:, ..., -5:100:2]

    print b.shape
    print_int_offsets(*b.suboffsets)

    print b[4, 2, 1]
    print b[..., 0][4, 2]
    print b[..., 1][4, 2]
    print b[..., 1][4][2]
    print b[4][2][1]
    print b[4, 2][1]

def test_direct_slicing(arg):
    """
    Fused types would be convenient to test this stuff!

    Test simple slicing
    >>> test_direct_slicing(IntMockBuffer("A", range(8 * 14 * 11), shape=(8, 14, 11)))
    acquired A
    (3, 9, 2)
    308 -11 1
    -1 -1 -1
    released A

    Test direct slicing, negative slice oob in dim 2
    >>> test_direct_slicing(IntMockBuffer("A", range(1 * 2 * 3), shape=(1, 2, 3)))
    acquired A
    (0, 0, 2)
    12 -3 1
    -1 -1 -1
    released A
    """
    cdef int[:, :, :] _a = arg
    a = _a
    b = a[2:8:2, -4:1:-1, 1:3]

    print b.shape
    print_int_offsets(*b.strides)
    print_int_offsets(*b.suboffsets)

    cdef int i, j, k
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            for k in range(b.shape[2]):
                itemA = a[2 + 2 * i, -4 - j, 1 + k]
                itemB = b[i, j, k]
                assert itemA == itemB, (i, j, k, itemA, itemB)


def test_slicing_and_indexing(arg):
    """
    >>> a = IntStridedMockBuffer("A", range(10 * 3 * 5), shape=(10, 3, 5))
    >>> test_slicing_and_indexing(a)
    acquired A
    (5, 2)
    15 2
    126 113
    [111]
    released A
    """
    cdef int[:, :, :] _a = arg
    a = _a
    b = a[-5:, 1, 1::2]
    c = b[4:1:-1, ::-1]
    d = c[2, 1:2]

    print b.shape
    print_int_offsets(*b.strides)

    cdef int i, j
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            itemA = a[-5 + i, 1, 1 + 2 * j]
            itemB = b[i, j]
            assert itemA == itemB, (i, j, itemA, itemB)

    print c[1, 1], c[2, 0]
    print [d[i] for i in range(d.shape[0])]

def test_oob():
    """
    >>> test_oob()
    Traceback (most recent call last):
       ...
    IndexError: Index out of bounds (axis 1)
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    print a[:, 20]

def test_acquire_memoryview():
    """
    Segfaulting in 3.2?
    >> test_acquire_memoryview()
    acquired A
    22
    <MemoryView of 'IntMockBuffer' object>
    22
    22
    released A
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    cdef object b = a

    print a[2, 4]

    # Make sure we don't have to keep this around
    del a

    print b
    cdef int[:, :] c = b
    print b[2, 4]
    print c[2, 4]

def test_acquire_memoryview_slice():
    """
    >>> test_acquire_memoryview_slice()
    acquired A
    31
    <MemoryView of 'IntMockBuffer' object>
    31
    31
    released A
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    a = a[1:, :6]

    cdef object b = a

    print a[2, 4]

    # Make sure we don't have to keep this around
    del a

    print b
    cdef int[:, :] c = b
    print b[2, 4]
    print c[2, 4]

class SingleObject(object):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __eq__(self, other):
        return self.value == getattr(other, 'value', None) or self.value == other

def test_assign_scalar(int[:, :] m):
    """
    >>> A = IntMockBuffer("A", [0] * 100, shape=(10, 10))
    >>> test_assign_scalar(A)
    acquired A
    1 1 1 4 1 6 1 1 1 1
    2 2 2 4 2 6 2 2 2 2
    3 3 3 4 3 6 3 3 3 3
    1 1 1 4 1 6 1 1 1 1
    5 5 5 5 5 6 5 5 5 5
    1 1 1 4 1 6 1 1 1 1
    released A
    """
    m[:, :] = 1
    m[1, :] = 2
    m[2, :] = 3
    m[:, 3] = 4
    m[4, ...] = 5
    m[..., 5] = 6

    for i in range(6):
        print " ".join([str(m[i, j]) for j in range(m.shape[1])])


def test_contig_scalar_to_slice_assignment():
    """
    >>> test_contig_scalar_to_slice_assignment()
    14 14 14 14
    20 20 20 20
    """
    cdef int[5][10] a
    cdef int[:, ::1] _m = a
    m = _m

    m[...] = 14
    print m[0, 0], m[-1, -1], m[3, 2], m[4, 9]

    m[:, :] = 20
    print m[0, 0], m[-1, -1], m[3, 2], m[4, 9]

def test_dtype_object_scalar_assignment():
    """
    >>> test_dtype_object_scalar_assignment()
    """
    cdef object[:] m = array((10,), sizeof(PyObject *), 'O')
    m[:] = SingleObject(2)
    assert m[0] == m[4] == m[-1] == 2

    (<object> m)[:] = SingleObject(3)
    assert m[0] == m[4] == m[-1] == 3


def test_assignment_in_conditional_expression(bint left):
    """
    >>> test_assignment_in_conditional_expression(True)
    1.0
    2.0
    1.0
    2.0
    >>> test_assignment_in_conditional_expression(False)
    3.0
    4.0
    3.0
    4.0
    """
    cdef double a[2]
    cdef double b[2]
    a[:] = [1, 2]
    b[:] = [3, 4]

    cdef double[:] A = a
    cdef double[:] B = b
    cdef double[:] C, c

    # assign new memoryview references
    C = A if left else B

    for i in range(C.shape[0]):
        print C[i]

    # create new memoryviews
    c = a if left else b
    for i in range(c.shape[0]):
        print c[i]


def test_cpython_offbyone_issue_23349():
    """
    >>> print(test_cpython_offbyone_issue_23349())
    testing
    """
    cdef unsigned char[:] v = bytearray(b"testing")
    # the following returns 'estingt' without the workaround
    return bytearray(v).decode('ascii')


@cython.test_fail_if_path_exists('//SimpleCallNode')
@cython.test_assert_path_exists(
    '//ReturnStatNode//TupleNode',
    '//ReturnStatNode//TupleNode//CondExprNode',
)
def min_max_tree_restructuring():
    """
    >>> min_max_tree_restructuring()
    (1, 3)
    """
    cdef char a[5]
    a = [1, 2, 3, 4, 5]
    cdef char[:] aview = a

    return max(1, aview[0]), min(5, aview[2])
Cython-0.26.1/tests/memoryview/memoryview_inplace_division.pyx0000664000175000017500000000104412542002467025542 0ustar  stefanstefan00000000000000# mode: run
# tag: memoryview, cdivision, array

cimport cython
from cpython.array cimport array  # make Cython aware of the array type

def div_memoryview(int[:] A):
    """
    >>> from array import array
    >>> x = array('i', [6])
    >>> div_memoryview(x)
    >>> x[0]
    3
    """
    with cython.cdivision(True):
        A[0] /= 2

def div_buffer(object[int, ndim=1] A):
    """
    >>> from array import array
    >>> x = array('i', [6])
    >>> div_buffer(x)
    >>> x[0]
    3
    """
    with cython.cdivision(True):
        A[0] /= 2

Cython-0.26.1/tests/memoryview/numpy_memoryview.pyx0000664000175000017500000004253313023021033023364 0ustar  stefanstefan00000000000000# tag: numpy
# mode: run

"""
Test slicing for memoryviews and memoryviewslices
"""

import sys

cimport numpy as np
import numpy as np
cimport cython
from cython cimport view

include "cythonarrayutil.pxi"
include "../buffers/mockbuffers.pxi"

ctypedef np.int32_t dtype_t

def get_array():
    # We need to type our array to get a __pyx_get_buffer() that typechecks
    # for np.ndarray and calls __getbuffer__ in numpy.pxd
    cdef np.ndarray[dtype_t, ndim=3] a
    a = np.arange(8 * 14 * 11, dtype=np.int32).reshape(8, 14, 11)
    return a

a = get_array()

def ae(*args):
    "assert equals"
    for x in args:
        if x != args[0]:
            raise AssertionError(args)

__test__ = {}
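# Doctests are registered explicitly in this module-level __test__ dict by
# the decorators below, which also lets tests be skipped when the installed
# NumPy version is too old.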

def testcase(f):
    __test__[f.__name__] = f.__doc__
    return f

def testcase_numpy_1_5(f):
    major, minor, *rest = np.__version__.split('.')
    if (int(major), int(minor)) >= (1, 5):
        __test__[f.__name__] = f.__doc__
    return f

#
### Test slicing memoryview slices
#

@testcase
def test_partial_slicing(array):
    """
    >>> test_partial_slicing(a)
    """
    cdef dtype_t[:, :, :] a = array
    obj = array[4]

    cdef dtype_t[:, :] b = a[4, :]
    cdef dtype_t[:, :] c = a[4]

    ae(b.shape[0], c.shape[0], obj.shape[0])
    ae(b.shape[1], c.shape[1], obj.shape[1])
    ae(b.strides[0], c.strides[0], obj.strides[0])
    ae(b.strides[1], c.strides[1], obj.strides[1])

@testcase
def test_ellipsis(array):
    """
    >>> test_ellipsis(a)
    """
    cdef dtype_t[:, :, :] a = array

    cdef dtype_t[:, :] b = a[..., 4]
    b_obj = array[..., 4]

    cdef dtype_t[:, :] c = a[4, ...]
    c_obj = array[4, ...]

    cdef dtype_t[:, :] d = a[2:8, ..., 2]
    d_obj = array[2:8, ..., 2]

    ae(tuple([b.shape[i] for i in range(2)]), b_obj.shape)
    ae(tuple([b.strides[i] for i in range(2)]), b_obj.strides)
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            ae(b[i, j], b_obj[i, j])

    ae(tuple([c.shape[i] for i in range(2)]), c_obj.shape)
    ae(tuple([c.strides[i] for i in range(2)]), c_obj.strides)
    for i in range(c.shape[0]):
        for j in range(c.shape[1]):
            ae(c[i, j], c_obj[i, j])

    ae(tuple([d.shape[i] for i in range(2)]), d_obj.shape)
    ae(tuple([d.strides[i] for i in range(2)]), d_obj.strides)
    for i in range(d.shape[0]):
        for j in range(d.shape[1]):
            ae(d[i, j], d_obj[i, j])

    cdef dtype_t[:] e = a[..., 5, 6]
    e_obj = array[..., 5, 6]
    ae(e.shape[0], e_obj.shape[0])
    ae(e.strides[0], e_obj.strides[0])

#
### Test slicing memoryview objects
#
@testcase
def test_partial_slicing_memoryview(array):
    """
    >>> test_partial_slicing_memoryview(a)
    """
    cdef dtype_t[:, :, :] _a = array
    a = _a
    obj = array[4]

    b = a[4, :]
    c = a[4]

    ae(b.shape[0], c.shape[0], obj.shape[0])
    ae(b.shape[1], c.shape[1], obj.shape[1])
    ae(b.strides[0], c.strides[0], obj.strides[0])
    ae(b.strides[1], c.strides[1], obj.strides[1])

@testcase
def test_ellipsis_memoryview(array):
    """
    >>> test_ellipsis_memoryview(a)
    """
    cdef dtype_t[:, :, :] _a = array
    a = _a

    b = a[..., 4]
    b_obj = array[..., 4]

    c = a[4, ...]
    c_obj = array[4, ...]

    d = a[2:8, ..., 2]
    d_obj = array[2:8, ..., 2]

    ae(tuple([b.shape[i] for i in range(2)]), b_obj.shape)
    ae(tuple([b.strides[i] for i in range(2)]), b_obj.strides)
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            ae(b[i, j], b_obj[i, j])

    ae(tuple([c.shape[i] for i in range(2)]), c_obj.shape)
    ae(tuple([c.strides[i] for i in range(2)]), c_obj.strides)
    for i in range(c.shape[0]):
        for j in range(c.shape[1]):
            ae(c[i, j], c_obj[i, j])

    ae(tuple([d.shape[i] for i in range(2)]), d_obj.shape)
    ae(tuple([d.strides[i] for i in range(2)]), d_obj.strides)
    for i in range(d.shape[0]):
        for j in range(d.shape[1]):
            ae(d[i, j], d_obj[i, j])

    e = a[..., 5, 6]
    e_obj = array[..., 5, 6]
    ae(e.shape[0], e_obj.shape[0])
    ae(e.strides[0], e_obj.strides[0])


@testcase
def test_transpose():
    """
    >>> test_transpose()
    3 4
    (3, 4)
    (3, 4)
    11 11 11 11 11 11
    """
    cdef dtype_t[:, :] a

    numpy_obj = np.arange(4 * 3, dtype=np.int32).reshape(4, 3)

    a = numpy_obj
    a_obj = a

    cdef dtype_t[:, :] b = a.T
    print a.T.shape[0], a.T.shape[1]
    print a_obj.T.shape
    print numpy_obj.T.shape

    cdef dtype_t[:, :] c
    with nogil:
        c = a.T.T

    assert (<object> a).shape == (<object> c).shape
    assert (<object> a).strides == (<object> c).strides

    print a[3, 2], a.T[2, 3], a_obj[3, 2], a_obj.T[2, 3], numpy_obj[3, 2], numpy_obj.T[2, 3]


@testcase
def test_transpose_type(a):
    """
    >>> a = np.zeros((5, 10), dtype=np.float64)
    >>> a[4, 6] = 9
    >>> test_transpose_type(a)
    9.0
    """
    cdef double[:, ::1] m = a
    cdef double[::1, :] m_transpose = a.T
    print m_transpose[6, 4]


@testcase_numpy_1_5
def test_numpy_like_attributes(cyarray):
    """
    For some reason this fails in numpy 1.4, with shape () and strides (40, 8)
    instead of 20, 4 on my machine. Investigate this.

    >>> cyarray = create_array(shape=(8, 5), mode="c")
    >>> test_numpy_like_attributes(cyarray)
    >>> test_numpy_like_attributes(cyarray.memview)
    """
    numarray = np.asarray(cyarray)

    assert cyarray.shape == numarray.shape, (cyarray.shape, numarray.shape)
    assert cyarray.strides == numarray.strides, (cyarray.strides, numarray.strides)
    assert cyarray.ndim == numarray.ndim, (cyarray.ndim, numarray.ndim)
    assert cyarray.size == numarray.size, (cyarray.size, numarray.size)
    assert cyarray.nbytes == numarray.nbytes, (cyarray.nbytes, numarray.nbytes)

    cdef int[:, :] mslice = numarray
    assert (<object> mslice).base is numarray

@testcase_numpy_1_5
def test_copy_and_contig_attributes(a):
    """
    >>> a = np.arange(20, dtype=np.int32).reshape(5, 4)
    >>> test_copy_and_contig_attributes(a)
    """
    cdef np.int32_t[:, :] mslice = a
    m = mslice

    # Test object copy attributes
    assert np.all(a == np.array(m.copy()))
    assert a.strides == m.strides == m.copy().strides

    assert np.all(a == np.array(m.copy_fortran()))
    assert m.copy_fortran().strides == (4, 20)

    # Test object is_*_contig attributes
    assert m.is_c_contig() and m.copy().is_c_contig()
    assert m.copy_fortran().is_f_contig() and not m.is_f_contig()

ctypedef int td_cy_int
cdef extern from "bufaccess.h":
    ctypedef td_cy_int td_h_short # Defined as short, but Cython doesn't know this!
    ctypedef float td_h_double # Defined as double
    ctypedef unsigned int td_h_ushort # Defined as unsigned short
ctypedef td_h_short td_h_cy_short

cdef void dealloc_callback(void *data):
    print "deallocating..."

def build_numarray(array array):
    array.callback_free_data = dealloc_callback
    return np.asarray(array)

def index(array array):
    print build_numarray(array)[3, 2]

@testcase_numpy_1_5
def test_coerce_to_numpy():
    """
    Test coercion to NumPy arrays, especially with automatically
    generated format strings.

    >>> test_coerce_to_numpy()
    [97, 98, 600, 700, 800]
    deallocating...
    (600, 700)
    deallocating...
    ((100, 200), (300, 400), 500)
    deallocating...
    (97, 900)
    deallocating...
    99
    deallocating...
    111
    deallocating...
    222
    deallocating...
    333
    deallocating...
    11.1
    deallocating...
    12.2
    deallocating...
    13.3
    deallocating...
    (14.4+15.5j)
    deallocating...
    (16.6+17.7j)
    deallocating...
    (18.8+19.9j)
    deallocating...
    22
    deallocating...
    33.33
    deallocating...
    44
    deallocating...
    """
    #
    ### First set up some C arrays that will be used to hold data
    #
    cdef MyStruct[20] mystructs
    cdef SmallStruct[20] smallstructs
    cdef NestedStruct[20] nestedstructs
    cdef PackedStruct[20] packedstructs

    cdef signed char[20] chars
    cdef short[20] shorts
    cdef int[20] ints
    cdef long long[20] longlongs
    cdef td_h_short[20] externs

    cdef float[20] floats
    cdef double[20] doubles
    cdef long double[20] longdoubles

    cdef float complex[20] floatcomplex
    cdef double complex[20] doublecomplex
    cdef long double complex[20] longdoublecomplex

    cdef td_h_short[20] h_shorts
    cdef td_h_double[20] h_doubles
    cdef td_h_ushort[20] h_ushorts

    cdef Py_ssize_t idx = 17

    #
    ### Initialize one element in each array
    #
    mystructs[idx] = {
        'a': 'a',
        'b': 'b',
        'c': 600,
        'd': 700,
        'e': 800,
    }

    smallstructs[idx] = { 'a': 600, 'b': 700 }

    nestedstructs[idx] = {
        'x': { 'a': 100, 'b': 200 },
        'y': { 'a': 300, 'b': 400 },
        'z': 500,
    }

    packedstructs[idx] = { 'a': 'a', 'b': 900 }

    chars[idx] = 99
    shorts[idx] = 111
    ints[idx] = 222
    longlongs[idx] = 333
    externs[idx] = 444

    floats[idx] = 11.1
    doubles[idx] = 12.2
    longdoubles[idx] = 13.3

    floatcomplex[idx] = 14.4 + 15.5j
    doublecomplex[idx] = 16.6 + 17.7j
    longdoublecomplex[idx] = 18.8 + 19.9j

    h_shorts[idx] = 22
    h_doubles[idx] = 33.33
    h_ushorts[idx] = 44

    #
    ### Create a NumPy array and see if our element can be correctly retrieved
    #
    mystruct_array = build_numarray(<MyStruct[:]> mystructs)
    print [int(x) for x in mystruct_array[3, 2]]
    del mystruct_array
    index(<SmallStruct[:]> smallstructs)
    index(<NestedStruct[:]> nestedstructs)
    index(<PackedStruct[:]> packedstructs)

    index(<signed char[:]> chars)
    index(<short[:]> shorts)
    index(<int[:]> ints)
    index(<long long[:]> longlongs)

    index(<float[:]> floats)
    index(<double[:]> doubles)
    index(<long double[:]> longdoubles)

    index(<float complex[:]> floatcomplex)
    index(<double complex[:]> doublecomplex)
    index(<long double complex[:]> longdoublecomplex)

    index(<td_h_short[:]> h_shorts)
    index(<td_h_double[:]> h_doubles)
    index(<td_h_ushort[:]> h_ushorts)


@testcase_numpy_1_5
def test_memslice_getbuffer():
    """
    >>> test_memslice_getbuffer()
    [[ 0  2  4]
     [10 12 14]]
    callback called
    """
    cdef int[:, :] array = create_array((4, 5), mode="c", use_callback=True)
    print np.asarray(array)[::2, ::2]

cdef class DeallocateMe(object):
    def __dealloc__(self):
        print "deallocated!"

# Disabled! Reference cycles don't seem to be supported by NumPy
# @testcase
def acquire_release_cycle(obj):
    DISABLED_DOCSTRING = """
    >>> a = np.arange(20, dtype=np.object)
    >>> a[10] = DeallocateMe()
    >>> acquire_release_cycle(a)
    deallocated!
    """
    import gc

    cdef object[:] buf = obj
    buf[1] = buf

    gc.collect()

    del buf

    gc.collect()

cdef packed struct StructArray:
    int a[4]
    signed char b[5]

@testcase_numpy_1_5
def test_memslice_structarray(data, dtype):
    """
    >>> def b(s): return s.encode('ascii')
    >>> def to_byte_values(b):
    ...     if sys.version_info[0] >= 3: return list(b)
    ...     else: return map(ord, b)

    >>> data = [(range(4), b('spam\\0')), (range(4, 8), b('ham\\0\\0')), (range(8, 12), b('eggs\\0'))]
    >>> dtype = np.dtype([('a', '4i'), ('b', '5b')])
    >>> test_memslice_structarray([(L, to_byte_values(s)) for L, s in data], dtype)
    0
    1
    2
    3
    spam
    4
    5
    6
    7
    ham
    8
    9
    10
    11
    eggs

    Test the same thing with the string format specifier

    >>> dtype = np.dtype([('a', '4i'), ('b', 'S5')])
    >>> test_memslice_structarray(data, dtype)
    0
    1
    2
    3
    spam
    4
    5
    6
    7
    ham
    8
    9
    10
    11
    eggs
    """
    a = np.empty((3,), dtype=dtype)
    a[:] = data
    cdef StructArray[:] myslice = a
    cdef int i, j
    for i in range(3):
        for j in range(4):
            print myslice[i].a[j]
        print myslice[i].b.decode('ASCII')

@testcase_numpy_1_5
def test_structarray_errors(StructArray[:] a):
    """
    >>> dtype = np.dtype([('a', '4i'), ('b', '5b')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))

    >>> dtype = np.dtype([('a', '6i'), ('b', '5b')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))
    Traceback (most recent call last):
       ...
    ValueError: Expected a dimension of size 4, got 6

    >>> dtype = np.dtype([('a', '(4,4)i'), ('b', '5b')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))
    Traceback (most recent call last):
       ...
    ValueError: Expected 1 dimension(s), got 2

    Test the same thing with the string format specifier

    >>> dtype = np.dtype([('a', '4i'), ('b', 'S5')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))

    >>> dtype = np.dtype([('a', '6i'), ('b', 'S5')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))
    Traceback (most recent call last):
       ...
    ValueError: Expected a dimension of size 4, got 6

    >>> dtype = np.dtype([('a', '(4,4)i'), ('b', 'S5')])
    >>> test_structarray_errors(np.empty((5,), dtype=dtype))
    Traceback (most recent call last):
       ...
    ValueError: Expected 1 dimension(s), got 2
    """

cdef struct StringStruct:
    signed char c[4][4]

ctypedef signed char String[4][4]

def stringstructtest(StringStruct[:] view):
    pass

def stringtest(String[:] view):
    pass

@testcase_numpy_1_5
def test_string_invalid_dims():
    """
    >>> def b(s): return s.encode('ascii')
    >>> dtype = np.dtype([('a', 'S4')])
    >>> data = [b('spam'), b('eggs')]
    >>> stringstructtest(np.array(data, dtype=dtype))
    Traceback (most recent call last):
       ...
    ValueError: Expected 2 dimensions, got 1
    >>> stringtest(np.array(data, dtype='S4'))
    Traceback (most recent call last):
       ...
    ValueError: Expected 2 dimensions, got 1
    """

ctypedef struct AttributesStruct:
    int attrib1
    float attrib2
    StringStruct attrib3

@testcase_numpy_1_5
def test_struct_attributes():
    """
    >>> test_struct_attributes()
    1
    2.0
    c
    """
    cdef AttributesStruct[10] a
    cdef AttributesStruct[:] myslice = a
    myslice[0].attrib1 = 1
    myslice[0].attrib2 = 2.0
    myslice[0].attrib3.c[0][0] = 'c'

    array = np.asarray(myslice)
    print array[0]['attrib1']
    print array[0]['attrib2']
    print chr(array[0]['attrib3']['c'][0][0])

#
### Test for NULL strides (C contiguous buffers)
#
cdef getbuffer(Buffer self, Py_buffer *info):
    info.buf = &self.m[0, 0]
    info.len = 10 * 20
    info.ndim = 2
    info.shape = self._shape
    info.strides = NULL
    info.suboffsets = NULL
    info.itemsize = 4
    info.readonly = 0
    self.format = b"f"
    info.format = self.format

cdef class Buffer(object):
    cdef Py_ssize_t[2] _shape
    cdef bytes format
    cdef float[:, :] m
    cdef object shape, strides

    def __init__(self):
        a = np.arange(200, dtype=np.float32).reshape(10, 20)
        self.m = a
        self.shape = a.shape
        self.strides = a.strides
        self._shape[0] = 10
        self._shape[1] = 20

    def __getbuffer__(self, Py_buffer *info, int flags):
        getbuffer(self, info)

cdef class SuboffsetsNoStridesBuffer(Buffer):
    def __getbuffer__(self, Py_buffer *info, int flags):
        getbuffer(self, info)
        info.suboffsets = self._shape

@testcase
def test_null_strides(Buffer buffer_obj):
    """
    >>> test_null_strides(Buffer())
    """
    cdef float[:, :] m1 = buffer_obj
    cdef float[:, ::1] m2 = buffer_obj
    cdef float[:, ::view.contiguous] m3 = buffer_obj

    assert (<object> m1).strides == buffer_obj.strides
    assert (<object> m2).strides == buffer_obj.strides, ((<object> m2).strides, buffer_obj.strides)
    assert (<object> m3).strides == buffer_obj.strides

    cdef int i, j
    for i in range(m1.shape[0]):
        for j in range(m1.shape[1]):
            assert m1[i, j] == buffer_obj.m[i, j]
            assert m2[i, j] == buffer_obj.m[i, j], (i, j, m2[i, j], buffer_obj.m[i, j])
            assert m3[i, j] == buffer_obj.m[i, j]

@testcase
def test_null_strides_error(buffer_obj):
    """
    >>> test_null_strides_error(Buffer())
    C-contiguous buffer is not indirect in dimension 1
    C-contiguous buffer is not indirect in dimension 0
    C-contiguous buffer is not contiguous in dimension 0
    C-contiguous buffer is not contiguous in dimension 0
    >>> test_null_strides_error(SuboffsetsNoStridesBuffer())
    Traceback (most recent call last):
        ...
    ValueError: Buffer exposes suboffsets but no strides
    """
    # valid
    cdef float[::view.generic, ::view.generic] full_buf = buffer_obj

    # invalid
    cdef float[:, ::view.indirect] indirect_buf1
    cdef float[::view.indirect, :] indirect_buf2
    cdef float[::1, :] fortran_buf1
    cdef float[::view.contiguous, :] fortran_buf2

    try:
        indirect_buf1 = buffer_obj
    except ValueError, e:
        print e

    try:
        indirect_buf2 = buffer_obj
    except ValueError, e:
        print e

    try:
        fortran_buf1 = buffer_obj
    except ValueError, e:
        print e

    try:
        fortran_buf2 = buffer_obj
    except ValueError, e:
        print e

def test_refcount_GH507():
    """
    >>> test_refcount_GH507()
    """
    a = np.arange(12).reshape([3, 4])
    cdef np.int_t[:,:] a_view = a
    cdef np.int_t[:,:] b = a_view[1:2,:].T
Cython-0.26.1/tests/memoryview/relaxed_strides.pyx0000664000175000017500000000325712542002467023131 0ustar  stefanstefan00000000000000# mode: run
# tag: numpy

"""
Test accepting NumPy arrays with arbitrary strides for zero- or one-sized
dimensions.

Thanks to Nathaniel Smith and Sebastian Berg.

See also:

    Mailing list threads:
      http://thread.gmane.org/gmane.comp.python.cython.devel/14762
      http://thread.gmane.org/gmane.comp.python.cython.devel/14634

    Detailed discussion of the difference between numpy/cython's current
    definition of "contiguity", and the correct definition:
      http://thread.gmane.org/gmane.comp.python.cython.devel/14634/focus=14640

    The PR implementing NPY_RELAXED_STRIDES_CHECKING:
      https://github.com/numpy/numpy/pull/3162

    Another test case:
      https://github.com/numpy/numpy/issues/2956
"""

import numpy as np

numpy_version = np.__version__.split('.')[:2]
try:
    numpy_version = tuple(map(int, numpy_version))
except ValueError:
    numpy_version = (20, 0)

NUMPY_HAS_RELAXED_STRIDES = (
    numpy_version < (1, 8) or
    np.ones((10, 1), order="C").flags.f_contiguous)
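
# Illustrative note (comments only, nothing here is executed by the test):
# with relaxed strides checking, NumPy's contiguity flags ignore the stride
# of any dimension whose extent is 0 or 1, since that stride is never used
# to step through memory.  On such a build:
#
#     np.ones((10, 1), order="C").flags.f_contiguous           # True (checked above)
#     np.arange(10, dtype=np.double)[::100].flags.c_contiguous # True: shape (1,), "odd" stride
#
# The tests below rely on exactly this: zero- and one-sized slices with
# arbitrary strides should still coerce to a contiguous double[::1] memoryview.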


def test_one_sized(array):
    """
    >>> contig = np.ascontiguousarray(np.arange(10, dtype=np.double)[::100])
    >>> test_one_sized(contig)[0]
    1.0
    >>> a = np.arange(10, dtype=np.double)[::100]
    >>> if NUMPY_HAS_RELAXED_STRIDES: print(test_one_sized(a)[0])
    ... else: print(1.0)
    1.0
    """
    cdef double[::1] a = array
    a[0] += 1.
    return array


def test_zero_sized(array):
    """
    >>> contig = np.ascontiguousarray(np.arange(10, dtype=np.double)[100:200:10])
    >>> _ = test_zero_sized(contig)

    >>> a = np.arange(10, dtype=np.double)[100:200:10]
    >>> if NUMPY_HAS_RELAXED_STRIDES: _ = test_zero_sized(a)
    """
    cdef double[::1] a = array
    return a
Cython-0.26.1/tests/memoryview/memslice.pyx0000664000175000017500000015560212574327400021552 0ustar  stefanstefan00000000000000# mode: run

# Note: see also bufaccess.pyx

from __future__ import unicode_literals

from cpython.object cimport PyObject
from cpython.ref cimport Py_INCREF, Py_DECREF

cimport cython
from cython cimport view
from cython.view cimport array
from cython.parallel cimport prange, parallel

import gc
import sys

if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins


__test__ = {}

def testcase(func):
    doctest = func.__doc__
    if sys.version_info >= (3, 0):
        _u = str
    else:
        _u = unicode
    if not isinstance(doctest, _u):
        doctest = doctest.decode('UTF-8')
    __test__[func.__name__] = doctest

    def wrapper(*args, **kwargs):
        gc.collect()
        result = func(*args, **kwargs)
        gc.collect()
        return result

    return wrapper


include "../buffers/mockbuffers.pxi"
include "cythonarrayutil.pxi"

def _print_attributes(memview):
    print "shape: " + " ".join(map(str, memview.shape))
    print "strides: " + " ".join([str(stride // memview.itemsize)
                                      for stride in memview.strides])
    print "suboffsets: " + " ".join(
        [str(suboffset if suboffset < 0 else suboffset // memview.itemsize)
             for suboffset in memview.suboffsets])

#
# Buffer acquire and release tests
#

def nousage():
    """
    The challenge here is just compilation.
    """
    cdef int[:, :] buf

@testcase
def acquire_release(o1, o2):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> B = IntMockBuffer("B", range(6))
    >>> acquire_release(A, B)
    acquired A
    acquired B
    released A
    released B
    >>> acquire_release(None, B)
    acquired B
    released B
    """
    cdef int[:] buf
    buf = o1
    buf = o2

@testcase
def acquire_raise(o):
    """
    Apparently, doctest won't handle mixed exceptions and print
    statements, so we need to circumvent this.

    >>> A = IntMockBuffer("A", range(6))
    >>> A.resetlog()
    >>> acquire_raise(A)
    Traceback (most recent call last):
        ...
    Exception: on purpose
    >>> A.printlog()
    acquired A
    released A

    """
    cdef int[:] buf
    buf = o
    raise Exception("on purpose")

@testcase
def acquire_failure1():
    """
    >>> acquire_failure1()
    acquired working
    0 3
    0 3
    released working
    """
    cdef int[:] buf
    buf = IntMockBuffer("working", range(4))
    print buf[0], buf[3]
    try:
        buf = ErrorBuffer()
        assert False
    except Exception:
        print buf[0], buf[3]

@testcase
def acquire_failure2():
    """
    >>> acquire_failure2()
    acquired working
    0 3
    0 3
    released working
    """
    cdef int[:] buf = IntMockBuffer("working", range(4))
    print buf[0], buf[3]
    try:
        buf = ErrorBuffer()
        assert False
    except Exception:
        print buf[0], buf[3]

@testcase
def acquire_failure3():
    """
    >>> acquire_failure3()
    acquired working
    0 3
    0 3
    released working
    """
    cdef int[:] buf
    buf = IntMockBuffer("working", range(4))
    print buf[0], buf[3]
    try:
        buf = object()
        assert False
    except Exception:
        print buf[0], buf[3]

@testcase
def acquire_nonbuffer1(first, second=None):
    """
    >>> acquire_nonbuffer1(3)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError:... 'int'...
    >>> acquire_nonbuffer1(type)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError:... 'type'...
    >>> acquire_nonbuffer1(None, 2)  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError:... 'int'...
    >>> acquire_nonbuffer1(4, object())  # doctest: +ELLIPSIS
    Traceback (most recent call last):
      ...
    TypeError:... 'int'...
    """
    cdef int[:] buf
    buf = first
    buf = second

@testcase
def acquire_nonbuffer2():
    """
    >>> acquire_nonbuffer2()
    acquired working
    0 3
    0 3
    released working
    """
    cdef int[:] buf = IntMockBuffer("working", range(4))
    print buf[0], buf[3]
    try:
        buf = ErrorBuffer
        assert False
    except Exception:
        print buf[0], buf[3]

@testcase
def as_argument(int[:] bufarg, int n):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> as_argument(A, 6)
    acquired A
    0 1 2 3 4 5 END
    released A
    """
    cdef int i
    for i in range(n):
        print bufarg[i],
    print 'END'

@testcase
def as_argument_defval(int[:] bufarg=IntMockBuffer('default', range(6)), int n=6):
    """
    >>> as_argument_defval()
    0 1 2 3 4 5 END
    >>> A = IntMockBuffer("A", range(6))
    >>> as_argument_defval(A, 6)
    acquired A
    0 1 2 3 4 5 END
    released A
    """
    cdef int i
    for i in range(n):
        print bufarg[i],
    print 'END'

@testcase
def cdef_assignment(obj, n):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> cdef_assignment(A, 6)
    acquired A
    0 1 2 3 4 5 END
    released A

    """
    cdef int[:] buf = obj
    cdef int i
    for i in range(n):
        print buf[i],
    print 'END'

@testcase
def forin_assignment(objs, int pick):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> B = IntMockBuffer("B", range(6))
    >>> forin_assignment([A, B, A, A], 2)
    acquired A
    2
    acquired B
    released A
    2
    acquired A
    released B
    2
    acquired A
    released A
    2
    released A
    """
    cdef int[:] buf
    for buf in objs:
        print buf[pick]

@testcase
def cascaded_buffer_assignment(obj):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> cascaded_buffer_assignment(A)
    acquired A
    released A
    """
    cdef int[:] a, b
    a = b = obj

@testcase
def tuple_buffer_assignment1(a, b):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> B = IntMockBuffer("B", range(6))
    >>> tuple_buffer_assignment1(A, B)
    acquired A
    acquired B
    released A
    released B
    """
    cdef int[:] x, y
    x, y = a, b

@testcase
def tuple_buffer_assignment2(tup):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> B = IntMockBuffer("B", range(6))
    >>> tuple_buffer_assignment2((A, B))
    acquired A
    acquired B
    released A
    released B
    """
    cdef int[:] x, y
    x, y = tup

@testcase
def explicitly_release_buffer():
    """
    >>> explicitly_release_buffer()
    acquired A
    released A
    After release
    """
    cdef int[:] x = IntMockBuffer("A", range(10))
    del x
    print "After release"

#
# Getting items and index bounds checking
#
@testcase
def get_int_2d(int[:, :] buf, int i, int j):
    """
    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> get_int_2d(C, 1, 1)
    acquired C
    released C
    4

    Check negative indexing:
    >>> get_int_2d(C, -1, 0)
    acquired C
    released C
    3
    >>> get_int_2d(C, -1, -2)
    acquired C
    released C
    4
    >>> get_int_2d(C, -2, -3)
    acquired C
    released C
    0

    Out-of-bounds errors:
    >>> get_int_2d(C, 2, 0)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    >>> get_int_2d(C, 0, -4)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 1)
    """
    return buf[i, j]

@testcase
def get_int_2d_uintindex(int[:, :] buf, unsigned int i, unsigned int j):
    """
    Unsigned indexing:
    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> get_int_2d_uintindex(C, 0, 0)
    acquired C
    released C
    0
    >>> get_int_2d_uintindex(C, 1, 2)
    acquired C
    released C
    5
    """
    # This is most interesting with regard to the generated C code.
    return buf[i, j]

@testcase
def set_int_2d(int[:, :] buf, int i, int j, int value):
    """
    Uses get_int_2d to read back the value afterwards. For a pure
    unit test, one should support reading in MockBuffer instead.

    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> set_int_2d(C, 1, 1, 10)
    acquired C
    released C
    >>> get_int_2d(C, 1, 1)
    acquired C
    released C
    10

    Check negative indexing:
    >>> set_int_2d(C, -1, 0, 3)
    acquired C
    released C
    >>> get_int_2d(C, -1, 0)
    acquired C
    released C
    3

    >>> set_int_2d(C, -1, -2, 8)
    acquired C
    released C
    >>> get_int_2d(C, -1, -2)
    acquired C
    released C
    8

    >>> set_int_2d(C, -2, -3, 9)
    acquired C
    released C
    >>> get_int_2d(C, -2, -3)
    acquired C
    released C
    9

    Out-of-bounds errors:
    >>> set_int_2d(C, 2, 0, 19)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    >>> set_int_2d(C, 0, -4, 19)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 1)

    """
    buf[i, j] = value


def _read_int2d(int[:, :] buf, int i, int j):
    return buf[i, j]


@testcase
def schar_index_vars(int[:, :] buf, signed char i, signed char j, int value):
    """
    >>> C = IntMockBuffer("C", range(300*300), (300, 300))  # > sizeof(char)
    >>> schar_index_vars(C, 1, 1, 5)
    acquired C
    reading
    writing
    validated
    released C
    301
    >>> _read_int2d(C, 1, 1)  # validate with int indices
    acquired C
    released C
    5

    >>> schar_index_vars(C, -1, 1, 6)
    acquired C
    reading
    writing
    validated
    released C
    89701
    >>> _read_int2d(C, -1, 1)  # validate with int indices
    acquired C
    released C
    6

    >>> schar_index_vars(C, -1, -2, 7)
    acquired C
    reading
    writing
    validated
    released C
    89998
    >>> _read_int2d(C, -1, -2)  # validate with int indices
    acquired C
    released C
    7

    >>> schar_index_vars(C, -2, -3, 8)
    acquired C
    reading
    writing
    validated
    released C
    89697
    >>> _read_int2d(C, -2, -3)  # validate with int indices
    acquired C
    released C
    8

    >>> C = IntMockBuffer("C", range(6), (2, 3))
    >>> schar_index_vars(C, 5, 1, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 0)
    >>> schar_index_vars(C, 1, 5, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 1)
    >>> schar_index_vars(C, -2, 1, 10)
    acquired C
    reading
    writing
    validated
    released C
    1
    >>> schar_index_vars(C, -3, 1, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 0)
    >>> schar_index_vars(C, 1, -3, 10)
    acquired C
    reading
    writing
    validated
    released C
    3
    >>> schar_index_vars(C, 1, -4, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 1)
    """
    print("reading")
    old_value = buf[i, j]
    print("writing")
    buf[i, j] = value
    if buf[i, j] == value:
        print("validated")
    return old_value


@testcase
def uchar_index_vars(int[:, :] buf, unsigned char i, unsigned char j, int value):
    """
    >>> C = IntMockBuffer("C", range(300*300), (300, 300))  # > sizeof(char)
    >>> uchar_index_vars(C, 1, 1, 5)
    acquired C
    reading
    writing
    validated
    released C
    301
    >>> _read_int2d(C, 1, 1)  # validate with int indices
    acquired C
    released C
    5

    >>> C = IntMockBuffer("C", range(6), (2, 3))
    >>> uchar_index_vars(C, 5, 1, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 0)
    >>> uchar_index_vars(C, 1, 5, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 1)
    """
    print("reading")
    old_value = buf[i, j]
    print("writing")
    buf[i, j] = value
    if buf[i, j] == value:
        print("validated")
    return old_value


@testcase
def char_index_vars(int[:, :] buf, char i, char j, int value):
    """
    >>> C = IntMockBuffer("C", range(300*300), (300, 300))  # > sizeof(char)
    >>> char_index_vars(C, 1, 1, 5)
    acquired C
    reading
    writing
    validated
    released C
    301
    >>> _read_int2d(C, 1, 1)  # validate with int indices
    acquired C
    released C
    5

    >>> C = IntMockBuffer("C", range(6), (2, 3))
    >>> char_index_vars(C, 5, 1, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 0)
    >>> char_index_vars(C, 1, 5, 10)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 1)
    """
    print("reading")
    old_value = buf[i, j]
    print("writing")
    buf[i, j] = value
    if buf[i, j] == value:
        print("validated")
    return old_value


@testcase
def list_comprehension(int[:] buf, len):
    """
    >>> list_comprehension(IntMockBuffer(None, [1,2,3]), 3)
    1|2|3
    """
    cdef int i
    print u"|".join([unicode(buf[i]) for i in range(len)])

@testcase
@cython.wraparound(False)
def wraparound_directive(int[:] buf, int pos_idx, int neg_idx):
    """
    Again, the most interesting thing here is to inspect the C source.

    >>> A = IntMockBuffer(None, range(4))
    >>> wraparound_directive(A, 2, -1)
    5
    >>> wraparound_directive(A, -1, 2)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    """
    cdef int byneg
    with cython.wraparound(True):
        byneg = buf[neg_idx]
    return buf[pos_idx] + byneg


#
# Test all kinds of indexing and flags
#

@testcase
def writable(obj):
    """
    >>> R = UnsignedShortMockBuffer("R", range(27), shape=(3, 3, 3))
    >>> writable(R)
    acquired R
    released R
    >>> [str(x) for x in R.recieved_flags] # Py2/3
    ['FORMAT', 'ND', 'STRIDES', 'WRITABLE']
    """
    cdef unsigned short int[:, :, :] buf = obj
    buf[2, 2, 1] = 23

@testcase
def strided(int[:] buf):
    """
    >>> A = IntMockBuffer("A", range(4))
    >>> strided(A)
    acquired A
    released A
    2
    >>> [str(x) for x in A.recieved_flags] # Py2/3
    ['FORMAT', 'ND', 'STRIDES', 'WRITABLE']

    Check that the suboffsets were patched back prior to release.
    >>> A.release_ok
    True
    """
    return buf[2]

@testcase
def c_contig(int[::1] buf):
    """
    >>> A = IntMockBuffer(None, range(4))
    >>> c_contig(A)
    2
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'ND', 'STRIDES', 'C_CONTIGUOUS', 'WRITABLE']
    """
    return buf[2]

@testcase
def c_contig_2d(int[:, ::1] buf):
    """
    Multi-dim has a separate implementation

    >>> A = IntMockBuffer(None, range(12), shape=(3,4))
    >>> c_contig_2d(A)
    7
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'ND', 'STRIDES', 'C_CONTIGUOUS', 'WRITABLE']
    """
    return buf[1, 3]

@testcase
def f_contig(int[::1, :] buf):
    """
    >>> A = IntMockBuffer(None, range(4), shape=(2, 2), strides=(1, 2))
    >>> f_contig(A)
    2
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'ND', 'STRIDES', 'F_CONTIGUOUS', 'WRITABLE']
    """
    return buf[0, 1]

@testcase
def f_contig_2d(int[::1, :] buf):
    """
    Must set up strides manually to ensure Fortran ordering.

    >>> A = IntMockBuffer(None, range(12), shape=(4,3), strides=(1, 4))
    >>> f_contig_2d(A)
    7
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'ND', 'STRIDES', 'F_CONTIGUOUS', 'WRITABLE']
    """
    return buf[3, 1]

@testcase
def generic(int[::view.generic, ::view.generic] buf1,
            int[::view.generic, ::view.generic] buf2):
    """
    >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
    >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
    >>> generic(A, B)
    acquired A
    acquired B
    4
    4
    10
    11
    released A
    released B
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    >>> [str(x) for x in B.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    """
    print buf1[1, 1]
    print buf2[1, 1]

    buf1[2, -1] = 10
    buf2[2, -1] = 11

    print buf1[2, 2]
    print buf2[2, 2]

# Note: disabled. generic_contiguous isn't very useful (you have to check suboffsets,
#                                                       might as well multiply with strides)
# def generic_contig(int[::view.generic_contiguous, :] buf1,
#                    int[::view.generic_contiguous, :] buf2):
#     """
#     >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
#     >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
#     >>> generic_contig(A, B)
#     acquired A
#     acquired B
#     4
#     4
#     10
#     11
#     released A
#     released B
#     >>> [str(x) for x in A.recieved_flags]
#     ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
#     >>> [str(x) for x in B.recieved_flags]
#     ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
#     """
#     print buf1[1, 1]
#     print buf2[1, 1]
#
#     buf1[2, -1] = 10
#     buf2[2, -1] = 11
#
#     print buf1[2, 2]
#     print buf2[2, 2]

@testcase
def indirect_strided_and_contig(
             int[::view.indirect, ::view.strided] buf1,
             int[::view.indirect, ::view.contiguous] buf2):
    """
    >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
    >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
    >>> indirect_strided_and_contig(A, B)
    acquired A
    acquired B
    4
    4
    10
    11
    released A
    released B
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    >>> [str(x) for x in B.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    """
    print buf1[1, 1]
    print buf2[1, 1]

    buf1[2, -1] = 10
    buf2[2, -1] = 11

    print buf1[2, 2]
    print buf2[2, 2]


@testcase
def indirect_contig(
             int[::view.indirect_contiguous, ::view.contiguous] buf1,
             int[::view.indirect_contiguous, ::view.generic] buf2):
    """
    >>> A = IntMockBuffer("A", [[0,1,2], [3,4,5], [6,7,8]])
    >>> B = IntMockBuffer("B", [[0,1,2], [3,4,5], [6,7,8]], shape=(3, 3), strides=(1, 3))
    >>> indirect_contig(A, B)
    acquired A
    acquired B
    4
    4
    10
    11
    released A
    released B
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    >>> [str(x) for x in B.recieved_flags]
    ['FORMAT', 'INDIRECT', 'ND', 'STRIDES', 'WRITABLE']
    """
    print buf1[1, 1]
    print buf2[1, 1]

    buf1[2, -1] = 10
    buf2[2, -1] = 11

    print buf1[2, 2]
    print buf2[2, 2]



#
# Test compiler options for bounds checking. We create an array with a safe
# "boundary" (memory allocated outside of what the buffer publishes) and then
# check whether we get back what we stored in that memory, or an error.
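#
# Illustrative sketch of the layout used below (assuming IntMockBuffer's
# offset argument simply shifts the published start into the allocation):
#
#     underlying allocation:  0 1 2 3 4 | 5 6 7 | 8 9
#     published buffer:                   5 6 7          (shape=(3,), offset=5)
#
# With bounds checking on, buf[3] and buf[-4] raise IndexError; with
# @cython.boundscheck(False) they silently read the surrounding memory, e.g.
# unsafe_get(A, 3) == 8 and unsafe_get(A, -4) == 4 in the doctests below.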

@testcase
def safe_get(int[:] buf, int idx):
    """
    >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5)

    Validate our testing buffer...
    >>> safe_get(A, 0)
    5
    >>> safe_get(A, 2)
    7
    >>> safe_get(A, -3)
    5

    Access outside it. This is already done above for bounds check
    testing but we include it to tell the story right.

    >>> safe_get(A, -4)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    >>> safe_get(A, 3)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    """
    return buf[idx]

@testcase
@cython.boundscheck(False) # outer decorators should take precedence
@cython.boundscheck(True)
def unsafe_get(int[:] buf, int idx):
    """
    Access outside of the area the buffer publishes.
    >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5)
    >>> unsafe_get(A, -4)
    4
    >>> unsafe_get(A, -5)
    3
    >>> unsafe_get(A, 3)
    8
    """
    return buf[idx]

@testcase
def mixed_get(int[:] buf, int unsafe_idx, int safe_idx):
    """
    >>> A = IntMockBuffer(None, range(10), shape=(3,), offset=5)
    >>> mixed_get(A, -4, 0)
    (4, 5)
    >>> mixed_get(A, 0, -4)
    Traceback (most recent call last):
        ...
    IndexError: Out of bounds on buffer access (axis 0)
    """
    with cython.boundscheck(False):
        one = buf[unsafe_idx]
    with cython.boundscheck(True):
        two = buf[safe_idx]
    return (one, two)

#
# Testing that accessing data using various types of buffer access
# all works.
#

def printbuf_int(int[:] buf, shape):
    # Utility func
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'


@testcase
def printbuf_int_2d(o, shape):
    """
    Strided:

    >>> printbuf_int_2d(IntMockBuffer("A", range(6), (2,3)), (2,3))
    acquired A
    0 1 2 END
    3 4 5 END
    released A
    >>> printbuf_int_2d(IntMockBuffer("A", range(100), (3,3), strides=(20,5)), (3,3))
    acquired A
    0 5 10 END
    20 25 30 END
    40 45 50 END
    released A

    Indirect:
    >>> printbuf_int_2d(IntMockBuffer("A", [[1,2],[3,4]]), (2,2))
    acquired A
    1 2 END
    3 4 END
    released A
    """
    # should make shape builtin
    cdef int[::view.generic, ::view.generic] buf
    buf = o
    cdef int i, j
    for i in range(shape[0]):
        for j in range(shape[1]):
            print buf[i, j],
        print 'END'

@testcase
def printbuf_float(o, shape):
    """
    >>> printbuf_float(FloatMockBuffer("F", [1.0, 1.25, 0.75, 1.0]), (4,))
    acquired F
    1.0 1.25 0.75 1.0 END
    released F
    """

    # should make shape builtin
    cdef float[:] buf
    buf = o
    cdef int i, j
    for i in range(shape[0]):
        print buf[i],
    print "END"


#
# Test assignments
#
@testcase
def inplace_operators(int[:] buf):
    """
    >>> buf = IntMockBuffer(None, [2, 2])
    >>> inplace_operators(buf)
    >>> printbuf_int(buf, (2,))
    0 3 END
    """
    cdef int j = 0
    buf[1] += 1
    buf[j] *= 2
    buf[0] -= 4



#
# Typedefs
#
# Test three layers of typedefs going through a h file for plain int, and
# simply a header file typedef for floats and unsigned.

ctypedef int td_cy_int
cdef extern from "bufaccess.h":
    ctypedef td_cy_int td_h_short # Defined as short, but Cython doesn't know this!
    ctypedef float td_h_double # Defined as double
    ctypedef unsigned int td_h_ushort # Defined as unsigned short
ctypedef td_h_short td_h_cy_short
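
# Illustrative note (an assumption about the mechanism, not something these
# tests assert directly): Cython only sees td_h_short and friends as aliases
# of the types declared above, while bufaccess.h defines them differently.
# The dtype check therefore happens when the buffer is acquired, comparing the
# exporter's format string against the typedef's real size as seen by the C
# compiler, roughly:
#
#     printbuf_td_h_short(ShortMockBuffer(None, range(3)), (3,))  # format 'h': accepted
#     printbuf_td_h_short(IntMockBuffer(None, range(3)), (3,))    # format 'i': ValueError
#
# The doctests of the functions below show the exact error messages.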

@testcase
def printbuf_td_cy_int(td_cy_int[:] buf, shape):
    """
    >>> printbuf_td_cy_int(IntMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_cy_int(ShortMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_cy_int' but got 'short'
    """
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

@testcase
def printbuf_td_h_short(td_h_short[:] buf, shape):
    """
    >>> printbuf_td_h_short(ShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_short(IntMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_short' but got 'int'
    """
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

@testcase
def printbuf_td_h_cy_short(td_h_cy_short[:] buf, shape):
    """
    >>> printbuf_td_h_cy_short(ShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_cy_short(IntMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_cy_short' but got 'int'
    """
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

@testcase
def printbuf_td_h_ushort(td_h_ushort[:] buf, shape):
    """
    >>> printbuf_td_h_ushort(UnsignedShortMockBuffer(None, range(3)), (3,))
    0 1 2 END
    >>> printbuf_td_h_ushort(ShortMockBuffer(None, range(3)), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_ushort' but got 'short'
    """
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'

@testcase
def printbuf_td_h_double(td_h_double[:] buf, shape):
    """
    >>> printbuf_td_h_double(DoubleMockBuffer(None, [0.25, 1, 3.125]), (3,))
    0.25 1.0 3.125 END
    >>> printbuf_td_h_double(FloatMockBuffer(None, [0.25, 1, 3.125]), (3,))
    Traceback (most recent call last):
       ...
    ValueError: Buffer dtype mismatch, expected 'td_h_double' but got 'float'
    """
    cdef int i
    for i in range(shape[0]):
        print buf[i],
    print 'END'


#
# Object access
#
def addref(*args):
    for item in args: Py_INCREF(item)
def decref(*args):
    for item in args: Py_DECREF(item)

def get_refcount(x):
    return (<PyObject*>x).ob_refcnt

@testcase
def printbuf_object(object[:] buf, shape):
    """
    Only play with unique objects; interned numbers etc. will have
    unpredictable refcounts.

    ObjectMockBuffer doesn't do anything about increfing/decrefing;
    we do the "buffer implementor" refcounting directly in the
    testcase.

    >>> a, b, c = "globally_unique_string_23234123", {4:23}, [34,3]
    >>> get_refcount(a), get_refcount(b), get_refcount(c)
    (2, 2, 2)
    >>> A = ObjectMockBuffer(None, [a, b, c])
    >>> printbuf_object(A, (3,))
    'globally_unique_string_23234123' 2
    {4: 23} 2
    [34, 3] 2
    """
    cdef int i
    for i in range(shape[0]):
        print repr(buf[i]), (<PyObject*>buf[i]).ob_refcnt

@testcase
def assign_to_object(object[:] buf, int idx, obj):
    """
    See comments on printbuf_object above.

    >>> a, b = [1, 2, 3], [4, 5, 6]
    >>> get_refcount(a), get_refcount(b)
    (2, 2)
    >>> addref(a)
    >>> A = ObjectMockBuffer(None, [1, a]) # 1, ...,otherwise it thinks nested lists...
    >>> get_refcount(a), get_refcount(b)
    (3, 2)
    >>> assign_to_object(A, 1, b)
    >>> get_refcount(a), get_refcount(b)
    (2, 3)
    >>> decref(b)
    """
    buf[idx] = obj

@testcase
def assign_temporary_to_object(object[:] buf):
    """
    See comments on printbuf_object above.

    >>> a, b = [1, 2, 3], {4:23}
    >>> get_refcount(a)
    2
    >>> addref(a)
    >>> A = ObjectMockBuffer(None, [b, a])
    >>> get_refcount(a)
    3
    >>> assign_temporary_to_object(A)
    >>> get_refcount(a)
    2

    >>> printbuf_object(A, (2,))
    {4: 23} 2
    {1: 8} 2

    To avoid leaking a reference in our testcase we need to
    replace the temporary with something we can manually decref :-)
    >>> assign_to_object(A, 1, a)
    >>> decref(a)
    """
    buf[1] = {3-2: 2+(2*4)-2}

#
# Test __cythonbufferdefaults__
#
@testcase
def bufdefaults1(int[:] buf):
    """
    For IntStridedMockBuffer, the mode should be
    "strided" by default, which should show
    up in the flags.

    >>> A = IntStridedMockBuffer("A", range(10))
    >>> bufdefaults1(A)
    acquired A
    released A
    >>> [str(x) for x in A.recieved_flags]
    ['FORMAT', 'ND', 'STRIDES', 'WRITABLE']
    """
    pass


@testcase
def basic_struct(MyStruct[:] buf):
    """
    See also buffmt.pyx

    >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    1 2 3 4 5
    >>> basic_struct(MyStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ccqii"))
    1 2 3 4 5
    """
    print buf[0].a, buf[0].b, buf[0].c, buf[0].d, buf[0].e

@testcase
def nested_struct(NestedStruct[:] buf):
    """
    See also buffmt.pyx

    >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    1 2 3 4 5
    >>> nested_struct(NestedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="T{ii}T{2i}i"))
    1 2 3 4 5
    """
    print buf[0].x.a, buf[0].x.b, buf[0].y.a, buf[0].y.b, buf[0].z

@testcase
def packed_struct(PackedStruct[:] buf):
    """
    See also buffmt.pyx

    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)]))
    1 2
    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c^i}"))
    1 2
    >>> packed_struct(PackedStructMockBuffer(None, [(1, 2)], format="T{c=i}"))
    1 2

    """
    print buf[0].a, buf[0].b

@testcase
def nested_packed_struct(NestedPackedStruct[:] buf):
    """
    See also buffmt.pyx

    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)]))
    1 2 3 4 5
    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="ci^ci@i"))
    1 2 3 4 5
    >>> nested_packed_struct(NestedPackedStructMockBuffer(None, [(1, 2, 3, 4, 5)], format="^c@i^ci@i"))
    1 2 3 4 5
    """
    print buf[0].a, buf[0].b, buf[0].sub.a, buf[0].sub.b, buf[0].c


@testcase
def complex_dtype(long double complex[:] buf):
    """
    >>> complex_dtype(LongComplexMockBuffer(None, [(0, -1)]))
    -1j
    """
    print buf[0]

@testcase
def complex_inplace(long double complex[:] buf):
    """
    >>> complex_inplace(LongComplexMockBuffer(None, [(0, -1)]))
    (1+1j)
    """
    buf[0] = buf[0] + 1 + 2j
    print buf[0]

@testcase
def complex_struct_dtype(LongComplex[:] buf):
    """
    Note that the format string is "Zg" rather than "2g", yet a struct
    is accessed.
    >>> complex_struct_dtype(LongComplexMockBuffer(None, [(0, -1)]))
    0.0 -1.0
    """
    print buf[0].real, buf[0].imag
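
# Illustrative note on the "Zg" format mentioned above: in the PEP 3118 /
# struct-module notation used for buffer formats, a 'Z' prefix marks a complex
# number, so "Zg" is a single complex long double while "2g" would be two
# separate long doubles.  Both cover 2 * sizeof(long double) bytes, which is
# why a struct with .real/.imag members can also consume a "Zg" buffer here.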

@testcase
def complex_struct_inplace(LongComplex[:] buf):
    """
    >>> complex_struct_inplace(LongComplexMockBuffer(None, [(0, -1)]))
    1.0 1.0
    """
    buf[0].real += 1
    buf[0].imag += 2
    print buf[0].real, buf[0].imag

#
# Nogil
#

@testcase
@cython.boundscheck(False)
def buffer_nogil():
    """
    >>> buffer_nogil()
    (10, 10)
    """
    cdef int[:] buf = IntMockBuffer(None, [1,2,3])
    cdef int[:] buf2 = IntMockBuffer(None, [4,5,6])

    with nogil:
        buf[1] = 10
        buf2 = buf

    return buf[1], buf2[1]

#
### Test cdef functions
#
class UniqueObject(object):
    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return self.value

objs = [[UniqueObject("spam")], [UniqueObject("ham")], [UniqueObject("eggs")]]
addref(*[obj for L in objs for obj in L])
cdef cdef_function(int[:] buf1, object[::view.indirect, :] buf2 = ObjectMockBuffer(None, objs)):
    print 'cdef called'
    print buf1[6], buf2[1, 0]
    buf2[1, 0] = UniqueObject("eggs")

@testcase
def test_cdef_function(o1, o2=None):
    """
    >>> A = IntMockBuffer("A", range(10))
    >>> test_cdef_function(A)
    acquired A
    cdef called
    6 ham
    released A
    acquired A
    cdef called
    6 eggs
    released A

    >>> L = [[x] for x in range(25)]
    >>> addref(*[obj for mylist in L for obj in mylist])
    >>> B = ObjectMockBuffer("B", L, shape=(5, 5))

    >>> test_cdef_function(A, B)
    acquired A
    cdef called
    6 eggs
    released A
    acquired A
    cdef called
    6 eggs
    released A
    acquired A
    acquired B
    cdef called
    6 1
    released A
    released B
    """
    cdef_function(o1)
    cdef_function(o1)

    if o2:
        cdef_function(o1, o2)

cdef int[:] global_A = IntMockBuffer("Global_A", range(10))

addref(*[obj for L in objs for obj in L])
cdef object[::view.indirect, :] global_B = ObjectMockBuffer(None, objs)

cdef cdef_function2(int[:] buf1, object[::view.indirect, :] buf2 = global_B):
    print 'cdef2 called'
    print buf1[6], buf2[1, 0]
    buf2[1, 0] = UniqueObject("eggs")

@testcase
def test_cdef_function2():
    """
    >>> test_cdef_function2()
    cdef2 called
    6 ham
    eggs
    cdef2 called
    6 eggs
    """
    cdef int[:] A = global_A
    cdef object[::view.indirect, :] B = global_B

    cdef_function2(A, B)

    del A
    del B

    print global_B[1, 0]

    cdef_function2(global_A, global_B)

@testcase
def test_generic_slicing(arg, indirect=False):
    """
    Test simple slicing
    >>> test_generic_slicing(IntMockBuffer("A", range(8 * 14 * 11), shape=(8, 14, 11)))
    acquired A
    3 9 2
    308 -11 1
    -1 -1 -1
    released A

    Test direct slicing, negative slice oob in dim 2
    >>> test_generic_slicing(IntMockBuffer("A", range(1 * 2 * 3), shape=(1, 2, 3)))
    acquired A
    0 0 2
    12 -3 1
    -1 -1 -1
    released A

    Test indirect slicing
    >>> test_generic_slicing(IntMockBuffer("A", shape_5_3_4_list, shape=(5, 3, 4)), indirect=True)
    acquired A
    2 0 2
    0 1 -1
    released A

    >>> test_generic_slicing(IntMockBuffer("A", shape_9_14_21_list, shape=(9, 14, 21)), indirect=True)
    acquired A
    3 9 2
    10 1 -1
    released A

    """
    cdef int[::view.generic, ::view.generic, :] a = arg
    cdef int[::view.generic, ::view.generic, :] b = a[2:8:2, -4:1:-1, 1:3]

    print b.shape[0], b.shape[1], b.shape[2]

    if indirect:
        print b.suboffsets[0] // sizeof(int *),
        print b.suboffsets[1] // sizeof(int),
        print b.suboffsets[2]
    else:
        print_int_offsets(b.strides[0], b.strides[1], b.strides[2])
        print_int_offsets(b.suboffsets[0], b.suboffsets[1], b.suboffsets[2])

    cdef int i, j, k
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            for k in range(b.shape[2]):
                itemA = a[2 + 2 * i, -4 - j, 1 + k]
                itemB = b[i, j, k]
                assert itemA == itemB, (i, j, k, itemA, itemB)

@testcase
def test_indirect_slicing(arg):
    """
    Test indirect slicing
    >>> test_indirect_slicing(IntMockBuffer("A", shape_5_3_4_list, shape=(5, 3, 4)))
    acquired A
    5 3 2
    0 0 -1
    58
    56
    58
    index away indirect
    58
    58
    index away generic
    58
    58
    released A

    >>> test_indirect_slicing(IntMockBuffer("A", shape_9_14_21_list, shape=(9, 14, 21)))
    acquired A
    5 14 3
    0 16 -1
    2412
    2410
    2412
    index away indirect
    2412
    2412
    index away generic
    2412
    2412
    released A
    """
    cdef int[::view.indirect, ::view.indirect, :] a = arg
    cdef int[::view.indirect, ::view.indirect, :] b = a[-5:, ..., -5:100:2]
    cdef int[::view.generic , :: view.generic, :] generic_b = a[-5:, ..., -5:100:2]
    cdef int[::view.indirect, ::view.indirect] c = b[..., 0]

    # try indexing away leading indirect dimensions
    cdef int[::view.indirect, :] d = b[4]
    cdef int[:] e = b[4, 2]

    cdef int[::view.generic, :] generic_d = generic_b[4]
    cdef int[:] generic_e = generic_b[4, 2]

    print b.shape[0], b.shape[1], b.shape[2]
    print b.suboffsets[0] // sizeof(int *),
    print b.suboffsets[1] // sizeof(int),
    print b.suboffsets[2]

    print b[4, 2, 1]
    print c[4, 2]
    # test adding offset from last dimension to suboffset
    print b[..., 1][4, 2]

    print "index away indirect"
    print d[2, 1]
    print e[1]

    print "index away generic"
    print generic_d[2, 1]
    print generic_e[1]

cdef class TestIndexSlicingDirectIndirectDims(object):
    "Test a int[:, ::view.indirect, :] slice"

    cdef Py_ssize_t[3] shape, strides, suboffsets

    cdef int[5] c_array
    cdef int *myarray[5][5]
    cdef bytes format

    def __init__(self):
        cdef int i
        self.c_array[3] = 20
        self.myarray[1][2] = self.c_array

        for i in range(3):
            self.shape[i] = 5

        self.strides[0] = sizeof(int *) * 5
        self.strides[1] = sizeof(int *)
        self.strides[2] = sizeof(int)

        self.suboffsets[0] = -1
        self.suboffsets[1] = 0
        self.suboffsets[2] = -1

        self.format = b"i"

    def __getbuffer__(self, Py_buffer *info, int flags):
        info.buf = <void *> self.myarray
        info.len = 5 * 5 * 5
        info.ndim = 3
        info.shape = self.shape
        info.strides = self.strides
        info.suboffsets = self.suboffsets
        info.itemsize = sizeof(int)
        info.readonly = 0
        info.obj = self
        info.format = self.format

@testcase
def test_index_slicing_away_direct_indirect():
    """
    >>> test_index_slicing_away_direct_indirect()
    20
    20
    20
    20
    <BLANKLINE>
    20
    20
    20
    20
    All dimensions preceding dimension 1 must be indexed and not sliced
    """
    cdef int[:, ::view.indirect, :] a = TestIndexSlicingDirectIndirectDims()
    a_obj = a

    print a[1][2][3]
    print a[1, 2, 3]
    print a[1, 2][3]
    print a[..., 3][1, 2]

    print

    print a_obj[1][2][3]
    print a_obj[1, 2, 3]
    print a_obj[1, 2][3]
    print a_obj[..., 3][1, 2]

    try:
        print a_obj[1:, 2][3]
    except IndexError, e:
        print e.args[0]

@testcase
def test_direct_slicing(arg):
    """
    Fused types would be convenient to test this stuff!

    Test simple slicing
    >>> test_direct_slicing(IntMockBuffer("A", range(8 * 14 * 11), shape=(8, 14, 11)))
    acquired A
    3 9 2
    308 -11 1
    -1 -1 -1
    released A

    Test direct slicing, negative slice oob in dim 2
    >>> test_direct_slicing(IntMockBuffer("A", range(1 * 2 * 3), shape=(1, 2, 3)))
    acquired A
    0 0 2
    12 -3 1
    -1 -1 -1
    released A
    """
    cdef int[:, :, ::1] a = arg
    cdef int[:, :, :] b = a[2:8:2, -4:1:-1, 1:3]

    print b.shape[0], b.shape[1], b.shape[2]
    print_int_offsets(b.strides[0], b.strides[1], b.strides[2])
    print_int_offsets(b.suboffsets[0], b.suboffsets[1], b.suboffsets[2])

    cdef int i, j, k
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            for k in range(b.shape[2]):
                itemA = a[2 + 2 * i, -4 - j, 1 + k]
                itemB = b[i, j, k]
                assert itemA == itemB, (i, j, k, itemA, itemB)

@testcase
def test_slicing_and_indexing(arg):
    """
    >>> a = IntStridedMockBuffer("A", range(10 * 3 * 5), shape=(10, 3, 5))
    >>> test_slicing_and_indexing(a)
    acquired A
    5 2
    15 2
    126 113
    [111]
    released A
    """
    cdef int[:, :, :] a = arg
    cdef int[:, :] b = a[-5:, 1, 1::2]
    cdef int[:, :] c = b[4:1:-1, ::-1]
    cdef int[:] d = c[2, 1:2]

    print b.shape[0], b.shape[1]
    print_int_offsets(b.strides[0], b.strides[1])

    cdef int i, j
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            itemA = a[-5 + i, 1, 1 + 2 * j]
            itemB = b[i, j]
            assert itemA == itemB, (i, j, itemA, itemB)

    print c[1, 1], c[2, 0]
    print [d[i] for i in range(d.shape[0])]


@testcase
def test_oob():
    """
    >>> test_oob()
    Traceback (most recent call last):
       ...
    IndexError: Index out of bounds (axis 1)
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    print a[:, 20]


cdef int nogil_oob(int[:, :] a) nogil except 0:
    a[100, 9:]
    return 1

@testcase
def test_nogil_oob1():
    """
    A is acquired at the beginning of the function and released at the end.
    B is acquired as a temporary and as such is immediately released in the
    except clause.
    >>> test_nogil_oob1()
    acquired A
    acquired B
    released B
    Index out of bounds (axis 0)
    Index out of bounds (axis 0)
    released A
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))

    try:
        nogil_oob(IntMockBuffer("B", range(4 * 9), shape=(4, 9)))
    except IndexError, e:
        print e.args[0]

    try:
        with nogil:
            nogil_oob(a)
    except IndexError, e:
        print e.args[0]

@testcase
def test_nogil_oob2():
    """
    >>> test_nogil_oob2()
    Traceback (most recent call last):
       ...
    IndexError: Index out of bounds (axis 0)
    """
    cdef int[:, :] a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    with nogil:
        a[100, 9:]

@cython.boundscheck(False)
cdef int cdef_nogil(int[:, :] a) nogil except 0:
    cdef int i, j
    cdef int[:, :] b = a[::-1, 3:10:2]
    for i in range(b.shape[0]):
        for j in range(b.shape[1]):
            b[i, j] = -b[i, j]

    return 1

@testcase
def test_nogil():
    """
    >>> test_nogil()
    acquired A
    released A
    acquired A
    -25
    released A
    """
    _a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
    cdef_nogil(_a)
    cdef int[:, :] a = _a
    print a[2, 7]

@testcase
def test_convert_slicenode_to_indexnode():
    """
    When indexing with a[i:j] a SliceNode gets created instead of an IndexNode, which
    forces coercion to object and back. This would not only be inefficient, but it would
    also not compile in nogil mode. So instead we mutate it into an IndexNode.

    >>> test_convert_slicenode_to_indexnode()
    acquired A
    2
    released A
    """
    cdef int[:] a = IntMockBuffer("A", range(10), shape=(10,))
    with nogil:
        a = a[2:4]
    print a[0]

@testcase
@cython.boundscheck(False)
@cython.wraparound(False)
def test_memslice_prange(arg):
    """
    >>> test_memslice_prange(IntMockBuffer("A", range(400), shape=(20, 4, 5)))
    acquired A
    released A
    >>> test_memslice_prange(IntMockBuffer("A", range(200), shape=(100, 2, 1)))
    acquired A
    released A
    """
    cdef int[:, :, :] src, dst

    src = arg

    dst = array((<object> src).shape, sizeof(int), format="i")

    cdef int i, j, k

    for i in prange(src.shape[0], nogil=True):
        for j in range(src.shape[1]):
            for k in range(src.shape[2]):
                dst[i, j, k] = src[i, j, k]

    for i in range(src.shape[0]):
        for j in range(src.shape[1]):
            for k in range(src.shape[2]):
                assert src[i, j, k] == dst[i, j, k], (src[i, j, k] == dst[i, j, k])

@testcase
def test_clean_temps_prange(int[:, :] buf):
    """
    Try to access a buffer out of bounds in a parallel section, and make sure any
    temps used by the slicing processes are correctly counted.

    >>> A = IntMockBuffer("A", range(100), (10, 10))
    >>> test_clean_temps_prange(A)
    acquired A
    released A
    """
    cdef int i
    try:
        for i in prange(buf.shape[0], nogil=True):
            buf[1:10, 20] = 0
    except IndexError:
        pass

@testcase
def test_clean_temps_parallel(int[:, :] buf):
    """
    Try to access a buffer out of bounds in a parallel section, and make sure any
    temps used by the slicing processes are correctly counted.

    >>> A = IntMockBuffer("A", range(100), (10, 10))
    >>> test_clean_temps_parallel(A)
    acquired A
    released A
    """
    cdef int i
    try:
        with nogil, parallel():
            try:
                with gil: pass
                for i in prange(buf.shape[0]):
                    buf[1:10, 20] = 0
            finally:
                buf[1:10, 20] = 0
    except IndexError:
        pass


# Test arrays in structs
cdef struct ArrayStruct:
    int ints[10]
    char chars[3]

cdef packed struct PackedArrayStruct:
    int ints[10]
    char chars[3]

cdef fused FusedStruct:
    ArrayStruct
    PackedArrayStruct

@testcase
def test_memslice_struct_with_arrays():
    """
    >>> test_memslice_struct_with_arrays()
    abc
    abc
    """
    cdef ArrayStruct[10] a1
    cdef PackedArrayStruct[10] a2

    test_structs_with_arr(a1)
    test_structs_with_arr(a2)

cdef test_structs_with_arr(FusedStruct array[10]):
    cdef FusedStruct[:] myslice1, myslice2, myslice3, myslice4
    cdef int i, j

    myslice1 = <FusedStruct[:10]> array

    for i in range(10):
        for j in range(10):
            myslice1[i].ints[j] = i
        for j in range(3):
            myslice1[i].chars[j] = 97 + j

    if (2, 7) <= sys.version_info[:2] < (3, 3):
        size1 = sizeof(FusedStruct)
        size2 = len(builtins.memoryview(myslice1)[0])
        assert size1 == size2, (size1, size2, builtins.memoryview(myslice1).format)

        myslice2 = builtins.memoryview(myslice1)
        for i in range(10):
            assert myslice2[i].ints[i] == myslice1[i].ints[i]
            assert myslice2[i].chars[i] == myslice1[i].chars[i]

    myslice3 = <object> myslice1
    myslice4 = myslice1
    for i in range(10):
        for j in range(10):
            assert myslice3[i].ints[j] == myslice4[i].ints[j] == myslice1[i].ints[j]
        for j in range(3):
            assert myslice3[i].chars[j] == myslice4[i].chars[j] == myslice1[i].chars[j]

    print myslice1[0].chars[:3].decode('ascii')

cdef struct TestAttrs:
    int int_attrib
    char char_attrib

@testcase
def test_struct_attributes_format():
    """
    >>> test_struct_attributes_format()
    T{i:int_attrib:c:char_attrib:}
    """
    cdef TestAttrs[10] array
    cdef TestAttrs[:] struct_memview = array

    if sys.version_info[:2] >= (2, 7):
        print builtins.memoryview(struct_memview).format
    else:
        print "T{i:int_attrib:c:char_attrib:}"


# Test padding at the end of structs in the buffer support
cdef struct PaddedAtEnd:
    int a[3]
    char b[3]

cdef struct AlignedNested:
    PaddedAtEnd a
    char chars[1]

cdef struct PaddedAtEndNormal:
    int a
    char b
    char c
    char d

cdef struct AlignedNestedNormal:
    PaddedAtEndNormal a
    char chars

# Test nested structs in a struct and make sure we compute padding correctly
# each time. If the first struct member is itself a struct, align on the first
# member of that struct (recursively).
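#
# Illustrative layout sketch for the structs declared just below (assuming a
# typical platform where double has 8-byte alignment; exact offsets are
# platform dependent):
#
#     struct A { double d; char c; }        # d@0, c@8, pad to 16 -> size 16
#     struct B { char c1; A a; char c2; }   # c1@0, pad to 8, a@8..24, c2@24,
#                                           # pad to 32 -> size 32
#
# The buffer format parser has to reproduce this padding when it walks the
# nested format string, which is what the fused _test_padded cases check.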
cdef struct A:
    double d
    char c

cdef struct B:
    char c1
    A a
    char c2

cdef struct C:
    A a
    char c1

cdef struct D:
    B b
    C cstruct
    int a[2]
    char c

cdef fused FusedPadded:
    ArrayStruct
    PackedArrayStruct
    AlignedNested
    AlignedNestedNormal
    A
    B
    C
    D

@testcase
def test_padded_structs():
    """
    >>> test_padded_structs()
    """
    cdef ArrayStruct[10] a1
    cdef PackedArrayStruct[10] a2
    cdef AlignedNested[10] a3
    cdef AlignedNestedNormal[10] a4
    cdef A[10] a5
    cdef B[10] a6
    cdef C[10] a7
    cdef D[10] a8

    _test_padded(a1)
    _test_padded(a2)
    _test_padded(a3)
    _test_padded(a4)
    _test_padded(a5)
    _test_padded(a6)
    _test_padded(a7)
    # There is a pre-existing bug that doesn't parse the format for this
    # struct properly -- fix this
    #_test_padded(a8)

cdef _test_padded(FusedPadded myarray[10]):
    # test that the buffer format parser accepts our format string...
    cdef FusedPadded[:] myslice = <FusedPadded[:10]> myarray
    obj = myslice
    cdef FusedPadded[:] myotherslice = obj

@testcase
def test_object_indices():
    """
    >>> test_object_indices()
    0
    1
    2
    """
    cdef int[3] array
    cdef int[:] myslice = array
    cdef int j

    for i in range(3):
        myslice[i] = i

    for j in range(3):
        print myslice[j]

cdef fused slice_1d:
    object
    int[:]

cdef fused slice_2d:
    object
    int[:, :]

@testcase
def test_ellipsis_expr():
    """
    >>> test_ellipsis_expr()
    8
    8
    """
    cdef int[10] a
    cdef int[:] m = a

    _test_ellipsis_expr(m)
    _test_ellipsis_expr(<object> m)

cdef _test_ellipsis_expr(slice_1d m):
    m[4] = 8
    m[...] = m[...]
    print m[4]

@testcase
def test_slice_assignment():
    """
    >>> test_slice_assignment()
    """
    cdef int[10][100] carray
    cdef int i, j

    for i in range(10):
        for j in range(100):
            carray[i][j] = i * 100 + j

    cdef int[:, :] m = carray
    cdef int[:, :] copy = m[-6:-1, 60:65].copy()

    _test_slice_assignment(m, copy)
    _test_slice_assignment(<object> m, <object> copy)

cdef _test_slice_assignment(slice_2d m, slice_2d copy):
    cdef int i, j

    m[...] = m[::-1, ::-1]
    m[:, :] = m[::-1, ::-1]
    m[-5:, -5:] = m[-6:-1, 60:65]

    for i in range(5):
        for j in range(5):
            assert copy[i, j] == m[-5 + i, -5 + j], (copy[i, j], m[-5 + i, -5 + j])

@testcase
def test_slice_assignment_broadcast_leading():
    """
    >>> test_slice_assignment_broadcast_leading()
    """
    cdef int[1][10] array1
    cdef int[10] array2
    cdef int i

    for i in range(10):
        array1[0][i] = i

    cdef int[:, :] a = array1
    cdef int[:] b = array2

    _test_slice_assignment_broadcast_leading(a, b)

    for i in range(10):
        array1[0][i] = i

    _test_slice_assignment_broadcast_leading(<object> a, <object> b)

cdef _test_slice_assignment_broadcast_leading(slice_2d a, slice_1d b):
    cdef int i

    b[:] = a[:, :]
    b = b[::-1]
    a[:, :] = b[:]

    for i in range(10):
        assert a[0, i] == b[i] == 10 - 1 - i, (a[0, i], b[i], 10 - 1 - i)

@testcase
def test_slice_assignment_broadcast_strides():
    """
    >>> test_slice_assignment_broadcast_strides()
    """
    cdef int[10] src_array
    cdef int[10][5] dst_array
    cdef int i, j

    for i in range(10):
        src_array[i] = 10 - 1 - i

    cdef int[:] src = src_array
    cdef int[:, :] dst = dst_array
    cdef int[:, :] dst_f = dst.copy_fortran()

    _test_slice_assignment_broadcast_strides(src, dst, dst_f)
    _test_slice_assignment_broadcast_strides(<object> src, <object> dst, <object> dst_f)

cdef _test_slice_assignment_broadcast_strides(slice_1d src, slice_2d dst, slice_2d dst_f):
    cdef int i, j

    dst[1:] = src[-1:-6:-1]
    dst_f[1:] = src[-1:-6:-1]

    for i in range(1, 10):
        for j in range(1, 5):
            assert dst[i, j] == dst_f[i, j] == j, (dst[i, j], dst_f[i, j], j)

    # test overlapping memory with broadcasting
    dst[:, 1:4] = dst[1, :3]
    dst_f[:, 1:4] = dst[1, 1:4]

    for i in range(10):
        for j in range(1, 3):
            assert dst[i, j] == dst_f[i, j] == j - 1, (dst[i, j], dst_f[i, j], j - 1)

@testcase
def test_borrowed_slice():
    """
    Test the difference between borrowed and non-borrowed slices. If you delete or assign
    to a slice in a cdef function, it is not borrowed.

    >>> test_borrowed_slice()
    5
    5
    5
    """
    cdef int i
    cdef int[10] carray
    carray[:] = range(10)
    _borrowed(carray)
    _not_borrowed(carray)
    _not_borrowed2(carray)

cdef _borrowed(int[:] m):
    print m[5]

cdef _not_borrowed(int[:] m):
    print m[5]
    if object():
        del m

cdef _not_borrowed2(int[:] m):
    cdef int[10] carray
    print m[5]
    if object():
        m = carray

class SingleObject(object):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __eq__(self, other):
        return self.value == getattr(other, 'value', None) or self.value == other

cdef _get_empty_object_slice(fill=None):
    cdef array a = array((10,), sizeof(PyObject *), 'O')
    assert a.dtype_is_object
    return a

@testcase
def test_object_dtype_copying():
    """
    >>> test_object_dtype_copying()
    0
    1
    2
    3
    4
    5
    6
    7
    8
    9
    2 5
    1 5
    """
    cdef int i

    unique = object()
    unique_refcount = get_refcount(unique)

    cdef object[:] m1 = _get_empty_object_slice()
    cdef object[:] m2 = _get_empty_object_slice()

    for i in range(10):
        m1[i] = SingleObject(i)

    m2[...] = m1
    del m1
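    # The element-wise copy above took new references, so the objects survive
    # deleting m1 and m2 is now their only owner (the refcounts printed below
    # rely on this).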

    for i in range(10):
        print m2[i]

    obj = m2[5]
    print get_refcount(obj), obj

    del m2
    print get_refcount(obj), obj

    assert unique_refcount == get_refcount(unique), (unique_refcount, get_refcount(unique))

@testcase
def test_scalar_slice_assignment():
    """
    >>> test_scalar_slice_assignment()
    0
    1
    6
    3
    6
    5
    6
    7
    6
    9
    
    0
    1
    6
    3
    6
    5
    6
    7
    6
    9
    """
    cdef int[10] a
    cdef int[:] m = a

    cdef int[5][10] a2
    cdef int[:, ::1] m2 = a2

    _test_scalar_slice_assignment(m, m2)
    print
    _test_scalar_slice_assignment(<slice_1d> m, <slice_2d> m2)

cdef _test_scalar_slice_assignment(slice_1d m, slice_2d m2):
    cdef int i, j
    for i in range(10):
        m[i] = i

    m[-2:0:-2] = 6
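    # The scalar 6 is broadcast over every element selected by the strided
    # slice, i.e. indices 8, 6, 4 and 2.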
    for i in range(10):
        print m[i]

    for i in range(m2.shape[0]):
        for j in range(m2.shape[1]):
            m2[i, j] = i * m2.shape[1] + j

    cdef int x = 2, y = -2
    cdef long value = 1
    m2[::2,    ::-1] = value
    m2[-2::-2, ::-1] = 2
    m2[::2,    -2::-2] = 0
    m2[-2::-2, -2::-2] = 0
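    # Net effect: even columns are 0 everywhere, odd columns hold 1 in even
    # rows and 2 in odd rows; the slices below check each of these patterns.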


    cdef int[:, :] s = m2[..., 1::2]
    for i in range(s.shape[0]):
        for j in range(s.shape[1]):
            assert s[i, j] == i % 2 + 1, (s[i, j], i)

    s = m2[::2, 1::2]
    for i in range(s.shape[0]):
        for j in range(s.shape[1]):
            assert s[i, j] == 1, s[i, j]

    s = m2[1::2, ::2]
    for i in range(s.shape[0]):
        for j in range(s.shape[1]):
            assert s[i, j] == 0, s[i, j]


    m2[...] = 3
    for i in range(m2.shape[0]):
        for j in range(m2.shape[1]):
            assert m2[i, j] == 3, s[i, j]

@testcase
def test_contig_scalar_to_slice_assignment():
    """
    >>> test_contig_scalar_to_slice_assignment()
    14 14 14 14
    20 20 20 20
    """
    cdef int[5][10] a
    cdef int[:, ::1] m = a

    m[...] = 14
    print m[0, 0], m[-1, -1], m[3, 2], m[4, 9]

    m[:, :] = 20
    print m[0, 0], m[-1, -1], m[3, 2], m[4, 9]

@testcase
def test_dtype_object_scalar_assignment():
    """
    >>> test_dtype_object_scalar_assignment()
    """
    cdef object[:] m = array((10,), sizeof(PyObject *), 'O')
    m[:] = SingleObject(2)
    assert m[0] == m[4] == m[-1] == 2

    (<object[:]> m)[:] = SingleObject(3)
    assert m[0] == m[4] == m[-1] == 3

#
### Test slices that are set to None
#

# For attribute access, slicing, indexing, etc. on memoryview slices that are
# set to None, see nonecheck.pyx

@testcase
def test_coerce_to_from_None(double[:] m1, double[:] m2 = None):
    """
    >>> test_coerce_to_from_None(None)
    (None, None)
    >>> test_coerce_to_from_None(None, None)
    (None, None)
    """
    return m1, m2

@testcase
def test_noneslice_compare(double[:] m):
    """
    >>> test_noneslice_compare(None)
    (True, True)
    """
    with cython.nonecheck(True):
        result = m is None

    return result, m is None

cdef class NoneSliceAttr(object):
    cdef double[:] m

@testcase
def test_noneslice_ext_attr():
    """
    >>> test_noneslice_ext_attr()
    AttributeError Memoryview is not initialized
    None
    """
    cdef NoneSliceAttr obj = NoneSliceAttr()

    with cython.nonecheck(True):
        try: print obj.m
        except Exception, e: print type(e).__name__, e.args[0]

        obj.m = None
        print obj.m

@testcase
def test_noneslice_del():
    """
    >>> test_noneslice_del()
    Traceback (most recent call last):
       ...
    UnboundLocalError: local variable 'm' referenced before assignment
    """
    cdef int[10] a
    cdef int[:] m = a

    with cython.nonecheck(True):
        m = None
        del m
        print m

@testcase
def test_noneslice_nogil_check_none(double[:] m):
    """
    >>> test_noneslice_nogil_check_none(None)
    (True, False)
    """
    cdef bint is_none = False
    cdef bint not_none = True

    with nogil:
        is_none = m is None and None is m and m == None and None == m
        not_none = m is not None and None is not m and m != None and None != m

    return is_none, not_none

@testcase
def test_noneslice_not_none(double[:] m not None):
    """
    >>> test_noneslice_not_none(None)
    Traceback (most recent call last):
    TypeError: Argument 'm' must not be None
    """

def get_int():
    return 10

@testcase
def test_inplace_assignment():
    """
    >>> test_inplace_assignment()
    10
    """
    cdef int[10] a
    cdef int[:] m = a

    m[0] = get_int()
    print m[0]

@testcase
def test_newaxis(int[:] one_D):
    """
    >>> A = IntMockBuffer("A", range(6))
    >>> test_newaxis(A)
    acquired A
    3
    3
    3
    3
    released A
    """
    cdef int[:, :] two_D_1 = one_D[None]
    cdef int[:, :] two_D_2 = one_D[None, :]
    cdef int[:, :] two_D_3 = one_D[:, None]
    cdef int[:, :] two_D_4 = one_D[..., None]

    print two_D_1[0, 3]
    print two_D_2[0, 3]
    print two_D_3[3, 0]
    print two_D_4[3, 0]

@testcase
def test_newaxis2(int[:, :] two_D):
    """
    >>> A = IntMockBuffer("A", range(6), shape=(3, 2))
    >>> test_newaxis2(A)
    acquired A
    shape: 3 1 1
    strides: 2 0 0
    suboffsets: -1 -1 -1
    
    shape: 1 2 1
    strides: 0 1 0
    suboffsets: -1 -1 -1
    
    shape: 3 1 1 1
    strides: 2 0 1 0
    suboffsets: -1 -1 -1 -1
    
    shape: 1 2 2 1
    strides: 0 2 1 0
    suboffsets: -1 -1 -1 -1
    released A
    """
    cdef int[:, :, :] a = two_D[..., None, 1, None]
    cdef int[:, :, :] b = two_D[None, 1, ..., None]
    cdef int[:, :, :, :] c = two_D[..., None, 1:, None]
    cdef int[:, :, :, :] d = two_D[None, 1:, ..., None]
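    # Each None in the subscripts above inserts a new axis of length 1 with
    # stride 0, as the shapes and strides printed below show.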

    _print_attributes(a)
    print
    _print_attributes(b)
    print
    _print_attributes(c)
    print
    _print_attributes(d)


Cython-0.26.1/tests/memoryview/compile_declarations.pyx0000664000175000017500000000441312542002467024123 0ustar  stefanstefan00000000000000# mode: compile

cimport cython
# from cython.view cimport contig as foo, full as bar #, follow
from cython cimport view
from cython.view cimport (generic, strided, indirect,
                          contiguous, indirect_contiguous)

cdef char[:] one_dim
cdef char[:,:,:] three_dim
cdef unsigned int[::1, :] view1
cdef unsigned int[:, ::1] view2
cdef long long[::1, :, :, :] fort_contig
cdef unsigned long[:, :, :, ::1] c_contig
cdef unsigned short int[::1] c_and_fort
cdef unsigned long[:, :, :, ::0x0001] c_contig0

cdef int[::generic, ::generic] a1
cdef int[::strided, ::generic] a2
cdef int[::indirect, ::generic] a3
cdef int[::generic, ::strided] a4
cdef int[::strided, ::strided] a5
cdef int[::indirect, ::strided] a6
cdef int[::generic, ::indirect] a7
cdef int[::strided, ::indirect] a8
cdef int[::indirect, ::indirect] a9

cdef int[::generic, ::contiguous] a13
cdef int[::strided, ::contiguous] a14
cdef int[::indirect, ::contiguous] a15
cdef int[::generic, ::indirect_contiguous] a16
cdef int[::strided, ::indirect_contiguous] a17
cdef int[::indirect, ::indirect_contiguous] a18

cdef int[::generic, ::] a19
cdef int[::strided, :] a20
cdef int[::indirect, :] a21
cdef int[::contiguous, :] a23
cdef int[::indirect_contiguous, :] a24

cdef int[::indirect_contiguous, ::1] a25
cdef int[::indirect_contiguous, ::1, :] a26
cdef int[::indirect_contiguous, :, ::1] a27
cdef int[::indirect_contiguous, ::1, :] a28
cdef int[::indirect_contiguous, ::view.contiguous, :] a29
cdef int[::indirect_contiguous, :, ::view.contiguous] a30

cdef int[::indirect, ::1] a31
cdef int[::indirect, ::1, :] a32 = object()
cdef int[::indirect, :, ::1] a33 = object()
cdef int[::indirect, ::1, :] a34
cdef int[::indirect, ::view.contiguous, :] a35
cdef int[::indirect, :, ::view.contiguous] a36

cdef int[::1, :] my_f_contig = a32[0]
cdef int[:, ::1] my_c_contig = a33[0]

my_f_contig = a32[0, :, :]
my_c_contig = a33[0, :, :]

my_f_contig = a32[0, ...]
my_c_contig = a33[0, ...]

# Test casting to cython.view.array
cdef double[:, :] m1 = <double[:10, :10]> NULL
cdef double[:, :] m2 = <double[:10, :10:1]> NULL
cdef double[:, :] m3 = <double[:10:1, :10]> NULL

cdef double[:, :, :] m4 = <double[:10, :10, :10]> NULL
cdef double[:, :, :] m5 = <double[:10, :10, :10:1]> NULL
cdef double[:, :, :] m6 = <double[:10:1, :10, :10]> NULL
Cython-0.26.1/tests/memoryview/error_declarations.pyx0000664000175000017500000000617112542002467023627 0ustar  stefanstefan00000000000000# mode: error

cimport cython
from cython cimport view






cdef signed short[::1, ::1] both
cdef signed short[::1, :, :, ::1] both2
cdef signed char[::2] err0
cdef signed char[::-100] err1
cdef signed char[::-1] err2
cdef long long[01::1, 0x01:, '0'   :, False:] fort_contig0
cdef signed char[1::] bad_start
cdef unsigned long[:,:1] bad_stop
cdef unsigned long[:,::1,:] neither_c_or_f
cdef signed char[::1-1+1] expr_spec
cdef signed char[::blargh] bad_name
cdef double[::alist[0]['view'].full] expr_attribute

cdef object[::1, :] unconformable1 = object()
cdef object[:, ::1] unconformable2 = unconformable1

cdef int[::1, :] dtype_unconformable = object()
unconformable1 = dtype_unconformable

# These are INVALID
cdef int[::view.contiguous, ::1] a1
#cdef int[::view.generic_contiguous, ::1] a2

#cdef int[::view.contiguous, ::view.generic_contiguous] a3
#cdef int[::view.generic_contiguous, ::view.generic_contiguous] a4

cdef int[::view.contiguous, ::view.contiguous] a5
cdef int[:, ::view.contiguous, ::view.indirect_contiguous] a6

#cdef int[::view.generic_contiguous, ::view.contiguous] a7
#cdef int[::view.contiguous, ::view.generic_contiguous] a8

ctypedef int *intp
cdef intp[:, :] myarray

cdef int[:] a10 = <int[:10]> object()
cdef int[:] a11 = <int[:1.2]> <int *> 1

cdef struct Valid:
    int array[1][2][3][4][5][6][7][8]
cdef struct Invalid:
    int array[1][2][3][4][5][6][7][8][9]

cdef Valid[:] validslice
cdef Invalid[:] invalidslice

cdef int[:, :, :, :] four_D
four_D[None, None, None, None]
four_D[None, None, None, None, None]

cdef int[:, :, :, :, :, :, :, :] eight_D = object()

cdef double[:] m
print <long> &m

# These are VALID
cdef int[::view.indirect_contiguous, ::view.contiguous] a9
four_D[None, None, None]

_ERRORS = u'''
11:25: Cannot specify an array that is both C and Fortran contiguous.
12:31: Cannot specify an array that is both C and Fortran contiguous.
13:19: Step must be omitted, 1, or a valid specifier.
14:20: Step must be omitted, 1, or a valid specifier.
15:20: Step must be omitted, 1, or a valid specifier.
16:17: Start must not be given.
17:18: Start must not be given.
18:22: Axis specification only allowed in the 'step' slot.
19:19: Fortran contiguous specifier must follow an indirect dimension
20:22: Invalid axis specification.
21:25: Invalid axis specification.
22:22: no expressions allowed in axis spec, only names and literals.
25:51: Memoryview 'object[::1, :]' not conformable to memoryview 'object[:, ::1]'.
28:36: Different base types for memoryviews (int, Python object)
31:9: Dimension may not be contiguous
37:9: Only one direct contiguous axis may be specified.
38:9:Only dimensions 3 and 2 may be contiguous and direct
44:10: Invalid base type for memoryview slice: intp
46:35: Can only create cython.array from pointer or array
47:24: Cannot assign type 'double' to 'Py_ssize_t'
55:13: Invalid base type for memoryview slice: Invalid
58:6: More dimensions than the maximum number of buffer dimensions were used.
59:6: More dimensions than the maximum number of buffer dimensions were used.
61:9: More dimensions than the maximum number of buffer dimensions were used.
64:13: Cannot take address of memoryview slice
'''
Cython-0.26.1/tests/memoryview/memview_assignments.pyx0000664000175000017500000000021613023021023024004 0ustar  stefanstefan00000000000000# mode: compile
# tag: memoryview

cdef double[::1] contig
# see if we can assign a strided value to a contiguous one
contig[:] = contig[::2]
Cython-0.26.1/tests/memoryview/memoryview_compare_type_pointers.srctree0000664000175000017500000000306312542002467027467 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import test_compare_type_pointers"

######## setup.py ########

from Cython.Build import cythonize
from distutils.core import setup

setup(
  ext_modules = cythonize("*.pyx"),
)

######## test_compare_type_pointers.pyx ########
include "types.pxi"

import other_module

def test_foo_view(Foo[:] m):
    return m[0].f

assert test_foo_view(other_module.fooview_obj) == 5.0
assert test_foo_view(other_module.otherfooview_obj) == 4.0

# Test for type comparison where the memoryview instance check succeeds
cdef OtherFoo[10] otherfooarray
cdef OtherFoo[:] otherfooview = otherfooarray
otherfooview_obj = otherfooview
otherfooview[0].f = 4.0
assert test_foo_view(otherfooview_obj) == 4.0

# Test a simple dtype now
def test_double_view(double[:] m):
    return m[0]

assert test_double_view(other_module.doubleview_obj) == 6.0

######## other_module.pyx ########
include "types.pxi"

cdef Foo[10] fooarray
cdef Foo[:] fooview = fooarray
fooview_obj = fooview

fooview[0].f = 5.0

cdef OtherFoo[10] otherfooarray
cdef OtherFoo[:] otherfooview = otherfooarray
otherfooview_obj = otherfooview

otherfooview[0].f = 4.0

cdef double[10] doublearray
cdef double[:] doubleview = doublearray
doubleview_obj = doubleview

doubleview[0] = 6.0

######## types.pxi ########
ctypedef packed struct Baz:
    double d

ctypedef struct Bar:
    int i

ctypedef struct Foo:
    float f
    double complex dc
    char c
    int i
    Bar b
    char s[20]

ctypedef struct OtherFoo:
    float f
    double complex dc
    char c
    int i
    Bar b
    char s[20]
Cython-0.26.1/tests/memoryview/memoryview_acq_count.srctree0000664000175000017500000000171312542002467025031 0ustar  stefanstefan00000000000000PYTHON setup.py build_ext --inplace
PYTHON -c "import counting_atomic"
PYTHON -c "import counting_locked"

######## setup.py ########

from distutils.core import setup
from Cython.Distutils import build_ext
from Cython.Distutils.extension import Extension

setup(
    ext_modules = [
        Extension("counting_atomic", ["counting_atomic.pyx"]),
        Extension("counting_locked", ["counting_locked.pyx"],
                  define_macros=[('CYTHON_ATOMICS', '0')])
    ],
    cmdclass={'build_ext': build_ext},
)

######## counting_atomic.pyx ########
include "counting.pxi"

######## counting_locked.pyx ########
include "counting.pxi"

######## counting.pxi ########
cimport cython
from cython.parallel cimport prange

cdef int[100] a
cdef int[:] m = a


cdef Py_ssize_t i
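# Every parallel iteration acquires and releases views of 'm'; counting_atomic
# uses atomic acquisition counts, while counting_locked (built with
# CYTHON_ATOMICS=0) falls back to lock-based counting.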
for i in prange(1000000, nogil=True, num_threads=16):
    use_slice(m[::2])

cdef int use_slice(int[:] m) nogil except -1:
    cdef int[:] m2 = m[1:]
    m = m2[:-1]
    del m, m2
    return 0
Cython-0.26.1/tests/memoryview/memoryview_in_subclasses.pyx0000664000175000017500000000212712542002467025063 0ustar  stefanstefan00000000000000"""
Test for memory leaks when adding more memoryview attributes in subtypes.
"""

import gc

from cython.view cimport array


def count_memoryviews():
    gc.collect()
    return sum([1 if 'memoryview' in str(type(o)) else 0
                for o in gc.get_objects()])


def run_test(cls, num_iters):
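    # Create and drop num_iters instances; the result is the number of
    # memoryview objects left behind (0 if every buffer was released).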
    orig_count = count_memoryviews()
    def f():
        x = cls(1024)
    for i in range(num_iters):
        f()
    return count_memoryviews() - orig_count


cdef class BaseType:
    """
    >>> run_test(BaseType, 10)
    0
    """
    cdef double[:] buffer

    def __cinit__(self, n):
        self.buffer = array((n,), sizeof(double), 'd')


cdef class Subtype(BaseType):
    """
    >>> run_test(Subtype, 10)
    0
    """
    cdef double[:] buffer2

    def __cinit__(self, n):
        self.buffer2 = array((n,), sizeof(double), 'd')


cdef class SubtypeWithUserDealloc(BaseType):
    """
    >>> run_test(SubtypeWithUserDealloc, 10)
    0
    """
    cdef double[:] buffer2

    def __cinit__(self, n):
        self.buffer2 = array((n,), sizeof(double), 'd')

    def __dealloc__(self):
        pass
Cython-0.26.1/tests/memoryview/memoryview.pxd0000664000175000017500000000013112542002467022112 0ustar  stefanstefan00000000000000ctypedef float ext_dtype

cdef extern from "bufaccess.h":
    ctypedef float td_h_double
Cython-0.26.1/tests/memoryview/memoryviewattrs.pyx0000664000175000017500000002024013150045407023215 0ustar  stefanstefan00000000000000# mode: run
# tag: numpy

__test__ = {}

def testcase(func):
    __test__[func.__name__] = func.__doc__
    return func


cimport cython
from cython.view cimport array

import numpy as np
cimport numpy as np


@testcase
def test_shape_stride_suboffset():
    u'''
    >>> test_shape_stride_suboffset()
    5 7 11
    77 11 1
    -1 -1 -1
    
    5 7 11
    1 5 35
    -1 -1 -1
    
    5 7 11
    77 11 1
    -1 -1 -1
    '''
    cdef char[:,:,:] larr = array((5,7,11), 1, 'c')
    print larr.shape[0], larr.shape[1], larr.shape[2]
    print larr.strides[0], larr.strides[1], larr.strides[2]
    print larr.suboffsets[0], larr.suboffsets[1], larr.suboffsets[2]
    print

    larr = array((5,7,11), 1, 'c', mode='fortran')
    print larr.shape[0], larr.shape[1], larr.shape[2]
    print larr.strides[0], larr.strides[1], larr.strides[2]
    print larr.suboffsets[0], larr.suboffsets[1], larr.suboffsets[2]
    print

    cdef char[:,:,:] c_contig = larr.copy()
    print c_contig.shape[0], c_contig.shape[1], c_contig.shape[2]
    print c_contig.strides[0], c_contig.strides[1], c_contig.strides[2]
    print c_contig.suboffsets[0], c_contig.suboffsets[1], c_contig.suboffsets[2]


@testcase
def test_copy_to():
    u'''
    >>> test_copy_to()
    0 1 2 3 4 5 6 7
    0 1 2 3 4 5 6 7
    0 1 2 3 4 5 6 7
    '''
    cdef int[:, :, :] from_mvs, to_mvs
    from_mvs = np.arange(8, dtype=np.int32).reshape(2,2,2)

    cdef int *from_data = <int *> from_mvs._data
    print ' '.join(str(from_data[i]) for i in range(2*2*2))

    to_mvs = array((2,2,2), sizeof(int), 'i')
    to_mvs[...] = from_mvs

    # TODO Mark: remove this _data attribute
    cdef int *to_data = <int *>to_mvs._data
    print ' '.join(str(from_data[i]) for i in range(2*2*2))
    print ' '.join(str(to_data[i]) for i in range(2*2*2))


@testcase
def test_overlapping_copy():
    """
    >>> test_overlapping_copy()
    """
    cdef int i, array[10]
    for i in range(10):
        array[i] = i

    cdef int[:] slice = array
    slice[...] = slice[::-1]
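    # Source and destination are the same memory, so the copy has to go through
    # a temporary buffer for the reversal checked below to come out right.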

    for i in range(10):
        assert slice[i] == 10 - 1 - i


@testcase
def test_copy_return_type():
    """
    >>> test_copy_return_type()
    60.0
    60.0
    """
    cdef double[:, :, :] a = np.arange(5 * 5 * 5, dtype=np.float64).reshape(5, 5, 5)
    cdef double[:, ::1] c_contig = a[..., 0].copy()
    cdef double[::1, :] f_contig = a[..., 0].copy_fortran()

    print(c_contig[2, 2])
    print(f_contig[2, 2])


@testcase
def test_partly_overlapping():
    """
    >>> test_partly_overlapping()
    """
    cdef int i, array[10]
    for i in range(10):
        array[i] = i

    cdef int[:] slice = array
    cdef int[:] slice2 = slice[:5]
    slice2[...] = slice[4:9]

    for i in range(5):
        assert slice2[i] == i + 4

@testcase
@cython.nonecheck(True)
def test_nonecheck1():
    u'''
    >>> test_nonecheck1()
    Traceback (most recent call last):
      ...
    UnboundLocalError: local variable 'uninitialized' referenced before assignment
    '''
    cdef int[:,:,:] uninitialized
    print uninitialized.is_c_contig()

@testcase
@cython.nonecheck(True)
def test_nonecheck2():
    u'''
    >>> test_nonecheck2()
    Traceback (most recent call last):
      ...
    UnboundLocalError: local variable 'uninitialized' referenced before assignment
    '''
    cdef int[:,:,:] uninitialized
    print uninitialized.is_f_contig()

@testcase
@cython.nonecheck(True)
def test_nonecheck3():
    u'''
    >>> test_nonecheck3()
    Traceback (most recent call last):
      ...
    UnboundLocalError: local variable 'uninitialized' referenced before assignment
    '''
    cdef int[:,:,:] uninitialized
    uninitialized.copy()

@testcase
@cython.nonecheck(True)
def test_nonecheck4():
    u'''
    >>> test_nonecheck4()
    Traceback (most recent call last):
      ...
    UnboundLocalError: local variable 'uninitialized' referenced before assignment
    '''
    cdef int[:,:,:] uninitialized
    uninitialized.copy_fortran()

@testcase
@cython.nonecheck(True)
def test_nonecheck5():
    u'''
    >>> test_nonecheck5()
    Traceback (most recent call last):
      ...
    UnboundLocalError: local variable 'uninitialized' referenced before assignment
    '''
    cdef int[:,:,:] uninitialized
    uninitialized._data

@testcase
def test_copy_mismatch():
    u'''
    >>> test_copy_mismatch()
    Traceback (most recent call last):
       ...
    ValueError: got differing extents in dimension 0 (got 2 and 3)
    '''
    cdef int[:,:,::1] mv1  = array((2,2,3), sizeof(int), 'i')
    cdef int[:,:,::1] mv2  = array((3,2,3), sizeof(int), 'i')

    mv1[...] = mv2


@testcase
def test_is_contiguous():
    u"""
    >>> test_is_contiguous()
    one sized is_c/f_contig True True
    is_c/f_contig False True
    f_contig.copy().is_c/f_contig True False
    f_contig.copy_fortran().is_c/f_contig False True
    one sized strided contig True True
    strided False
    """
    cdef int[::1, :, :] fort_contig = array((1,1,1), sizeof(int), 'i', mode='fortran')
    cdef int[:,:,:] strided = fort_contig

    print 'one sized is_c/f_contig', fort_contig.is_c_contig(), fort_contig.is_f_contig()
    fort_contig = array((2,2,2), sizeof(int), 'i', mode='fortran')
    print 'is_c/f_contig', fort_contig.is_c_contig(), fort_contig.is_f_contig()

    print 'f_contig.copy().is_c/f_contig', fort_contig.copy().is_c_contig(), \
                                           fort_contig.copy().is_f_contig()
    print 'f_contig.copy_fortran().is_c/f_contig', \
           fort_contig.copy_fortran().is_c_contig(), \
           fort_contig.copy_fortran().is_f_contig()

    print 'one sized strided contig', strided.is_c_contig(), strided.is_f_contig()

    print 'strided', strided[::2].is_c_contig()


@testcase
def call():
    u'''
    >>> call()
    1000 2000 3000
    1000
    2000 3000
    3000
    1 1 1000
    '''
    cdef int[::1] mv1, mv2, mv3
    cdef array arr = array((3,), sizeof(int), 'i')
    mv1 = arr
    cdef int *data
    data = <int *>arr.data
    data[0] = 1000
    data[1] = 2000
    data[2] = 3000

    print (<int *>mv1._data)[0] , (<int *>mv1._data)[1] , (<int *>mv1._data)[2]

    mv2 = mv1.copy()

    print (<int *>mv2._data)[0]


    print (<int *>mv2._data)[1] , (<int *>mv2._data)[2]

    mv3 = mv2

    cdef int *mv3_data = <int *>mv3._data

    print (<int *>mv1._data)[2]

    mv3_data[0] = 1

    print (<int *>mv3._data)[0] , (<int *>mv2._data)[0] , (<int *>mv1._data)[0]

    assert len(mv1) == 3
    assert len(mv2) == 3
    assert len(mv3) == 3


@testcase
def two_dee():
    u'''
    >>> two_dee()
    1 2 3 4
    -4 -4
    1 2 3 -4
    1 2 3 -4
    '''
    cdef long[:,::1] mv1, mv2, mv3
    cdef array arr = array((2,2), sizeof(long), 'l')

    assert len(arr) == 2

    cdef long *arr_data
    arr_data = <long *>arr.data

    mv1 = arr

    arr_data[0] = 1
    arr_data[1] = 2
    arr_data[2] = 3
    arr_data[3] = 4

    print (<long *>mv1._data)[0] , (<long *>mv1._data)[1] , (<long *>mv1._data)[2] , (<long *>mv1._data)[3]

    mv2 = mv1

    arr_data = <long *>mv2._data

    arr_data[3] = -4

    print (<long *>mv2._data)[3] , (<long *>mv1._data)[3]

    mv3 = mv2.copy()

    print (<long *>mv2._data)[0] , (<long *>mv2._data)[1] , (<long *>mv2._data)[2] , (<long *>mv2._data)[3]

    print (<long *>mv3._data)[0] , (<long *>mv3._data)[1] , (<long *>mv3._data)[2] , (<long *>mv3._data)[3]


@testcase
def fort_two_dee():
    u'''
    >>> fort_two_dee()
    1 2 3 4
    -4 -4
    1 2 3 -4
    1 3 2 -4
    1 2 3 -4
    '''
    cdef array arr = array((2,2), sizeof(long), 'l', mode='fortran')
    cdef long[::1,:] mv1, mv2, mv4
    cdef long[:, ::1] mv3

    cdef long *arr_data
    arr_data = <long *>arr.data

    mv1 = arr

    arr_data[0] = 1
    arr_data[1] = 2
    arr_data[2] = 3
    arr_data[3] = 4

    print (<long *>mv1._data)[0], (<long *>mv1._data)[1], (<long *>mv1._data)[2], (<long *>mv1._data)[3]

    mv2 = mv1

    arr_data = <long *>mv2._data

    arr_data[3] = -4

    print (<long *>mv2._data)[3], (<long *>mv1._data)[3]

    mv3 = mv2.copy()

    print (<long *>mv2._data)[0], (<long *>mv2._data)[1], (<long *>mv2._data)[2], (<long *>mv2._data)[3]

    print (<long *>mv3._data)[0], (<long *>mv3._data)[1], (<long *>mv3._data)[2], (<long *>mv3._data)[3]

    mv4 = mv3.copy_fortran()

    print (<long *>mv4._data)[0], (<long *>mv4._data)[1], (<long *>mv4._data)[2], (<long *>mv4._data)[3]
Cython-0.26.1/tests/memoryview/extension_type_memoryview.pyx0000664000175000017500000000120012542002467025272 0ustar  stefanstefan00000000000000# mode: run
# tag: numpy

cimport numpy as np
import numpy as np


cdef class ExtensionType(object):
    cdef public int dummy

    def __init__(self, n):
        self.dummy = n

items = [ExtensionType(1), ExtensionType(2)]
cdef ExtensionType[:] view = np.array(items, dtype=ExtensionType)

def test_getitem():
    """
    >>> test_getitem()
    1
    2
    """
    for i in range(view.shape[0]):
        item = view[i]
        print item.dummy

def test_getitem_typed():
    """
    >>> test_getitem_typed()
    1
    2
    """
    cdef ExtensionType item
    for i in range(view.shape[0]):
        item = view[i]
        print item.dummy
Cython-0.26.1/tests/memoryview/cythonarray.pyx0000664000175000017500000001206513150045407022305 0ustar  stefanstefan00000000000000# mode: run

from __future__ import unicode_literals

# these imports allow testing different ways to access [[cython.]view.]array()
from cython.view cimport array
from cython cimport view as v
cimport cython as cy

include "cythonarrayutil.pxi"


def length(shape):
    """
    >>> len(length((2,)))
    2
    >>> len(length((2,3)))
    2
    >>> len(length((5,3,2)))
    5
    """
    cdef array cvarray = array(shape=shape, itemsize=sizeof(int), format="i", mode='c')
    assert len(cvarray) == shape[0]
    return cvarray


def contiguity():
    '''
    >>> contiguity()
    12 4
    2 3
    2
    
    4 8
    2 3
    2
    '''
    cdef v.array cvarray = cy.view.array(shape=(2,3), itemsize=sizeof(int), format="i", mode='c')
    assert cvarray.len == 2*3*sizeof(int), (cvarray.len, 2*3*sizeof(int))
    assert cvarray.itemsize == sizeof(int)
    print cvarray.strides[0], cvarray.strides[1]
    print cvarray.shape[0], cvarray.shape[1]
    print cvarray.ndim

    print

    cdef v.array farray = v.array(shape=(2,3), itemsize=sizeof(int), format="i", mode='fortran')
    assert farray.len == 2*3*sizeof(int)
    assert farray.itemsize == sizeof(int)
    print farray.strides[0], farray.strides[1]
    print farray.shape[0], farray.shape[1]
    print farray.ndim

def acquire():
    '''
    >>> acquire()
    '''
    cdef object[int, ndim=1, mode="c"] buf1d = \
            array(shape=(10,), itemsize=sizeof(int), format='i', mode='c')
    cdef object[int, ndim=2, mode="c"] buf2d = \
            array(shape=(10,10), itemsize=sizeof(int), format='i')
    cdef object[unsigned long, ndim=3, mode='fortran'] buf3d = \
            array(shape=(1,2,3), itemsize=sizeof(unsigned long), format='L', mode='fortran')
    cdef object[long double, ndim=3, mode='fortran'] bufld = \
            array(shape=(1,2,3), itemsize=sizeof(long double), format='g', mode='fortran')

def full_or_strided():
    '''
    >>> full_or_strided()
    '''
    cdef object[float, ndim=2, mode='full'] fullbuf = \
            array(shape=(10,10), itemsize=sizeof(float), format='f', mode='c')
    cdef object[long long int, ndim=3, mode='strided'] stridedbuf = \
            array(shape=(1,2,3), itemsize=sizeof(long long int), format='q', mode='fortran')

def dont_allocate_buffer():
    """
    >>> dont_allocate_buffer()
    callback called
    """
    cdef array result = array((10, 10), itemsize=sizeof(int), format='i', allocate_buffer=False)
    assert result.data == NULL
    result.callback_free_data = callback
    result = None

def test_cython_array_getbuffer():
    """
    >>> test_cython_array_getbuffer()
    98
    61
    98
    61
    """
    cdef int[:, ::1] cslice = create_array((14, 10), 'c')
    cdef int[::1, :] fslice = create_array((14, 10), 'fortran')

    print cslice[9, 8]
    print cslice[6, 1]

    print fslice[9, 8]
    print fslice[6, 1]

def test_cython_array_index():
    """
    >>> test_cython_array_index()
    98
    61
    98
    61
    """
    c_array = create_array((14, 10), 'c')
    f_array = create_array((14, 10), 'fortran')

    print c_array[9, 8]
    print c_array[6, 1]

    print f_array[9, 8]
    print f_array[6, 1]

cdef int *getp(int dim1=10, int dim2=10, dim3=1) except NULL:
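    # Return a malloc()ed dim1*dim2*dim3 int block filled with 0, 1, 2, ...;
    # ownership passes to the caller, which hands it to view arrays below.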
    print "getp()"

    cdef int *p = <int *> malloc(dim1 * dim2 * dim3 * sizeof(int))

    if p == NULL:
        raise MemoryError

    for i in range(dim1 * dim2 * dim3):
        p[i] = i

    return p

cdef void callback_free_data(void *p):
    print 'callback free data called'
    free(p)

def test_array_from_pointer():
    """
    >>> test_array_from_pointer()
    getp()
    69
    c
    getp()
    callback free data called
    fortran
    getp()
    56
    getp()
    56
    getp()
    119
    callback free data called
    """
    cdef int *p = getp()
    cdef array c_arr = <int[:10, :10]> p
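    # callback_free_data is invoked with the data pointer when the array is
    # deallocated, which is what frees 'p'.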
    c_arr.callback_free_data = callback_free_data
    print c_arr[6, 9]
    print c_arr.mode

    c_arr = (<int[:10:1, :10]> getp())
    print c_arr.mode
    c_arr.callback_free_data = free

    c_arr = <int[:10, :10]> getp()
    c_arr.callback_free_data = free
    cdef int[:, ::1] mslice = c_arr
    print mslice[5, 6]

    c_arr = <int[:12, :10]> getp(12, 10)
    c_arr.callback_free_data = free
    print c_arr[5, 6]

    cdef int m = 12
    cdef int n = 10
    c_arr = <int[:m, :n]> getp(m, n)
    c_arr.callback_free_data = callback_free_data
    print c_arr[m - 1, n - 1]

def test_array_from_pointer_3d():
    """
    >>> test_array_from_pointer_3d()
    getp()
    3 3
    True True
    """
    cdef int *p = getp(2, 2, 2)
    cdef array c_arr = <int[:2, :2, :2]> p
    cdef array f_arr = <int[:2:1, :2, :2]> p

    cdef int[:, :, ::1] m1 = c_arr
    cdef int[::1, :, :] m2 = f_arr

    print m1[0, 1, 1], m2[1, 1, 0]
    print m1.is_c_contig(), m2.is_f_contig()

def test_cyarray_from_carray():
    """
    >>> test_cyarray_from_carray()
    0 8 21
    0 8 21
    """
    cdef int[7][8] a
    for i in range(7):
        for j in range(8):
            a[i][j] = i * 8 + j

    cdef int[:, :] mslice = <int[:, :]> a
    print mslice[0, 0], mslice[1, 0], mslice[2, 5]

    mslice = a
    print mslice[0, 0], mslice[1, 0], mslice[2, 5]
Cython-0.26.1/tests/memoryview/bufaccess.h0000664000175000017500000000016312542002467021306 0ustar  stefanstefan00000000000000/* See bufaccess.pyx */

typedef short td_h_short;
typedef double td_h_double;
typedef unsigned short td_h_ushort;
Cython-0.26.1/tests/memoryview/transpose_refcount.pyx0000664000175000017500000000122312542002467023662 0ustar  stefanstefan00000000000000# mode: run

from cython cimport view

cdef bint print_upper_right(double[:, :] M):
    print M[0, 1]

cdef class MemViewContainer:
    cdef double[:, :] A

    def __init__(self, A):
        self.A = A

    cpdef run(self):
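        # The transposed temporaries must keep the underlying buffer alive;
        # printing from self.A.T twice would read freed memory otherwise.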
        print_upper_right(self.A)
        print_upper_right(self.A.T)
        print_upper_right(self.A.T)

def test_transpose_refcount():
    """
    >>> test_transpose_refcount()
    2.0
    3.0
    3.0
    """
    cdef double[:, :] A = view.array(shape=(2, 2), itemsize=sizeof(double), format="d")
    A[0, 0], A[0, 1], A[1, 0], A[1, 1] = 1., 2., 3., 4.
    cdef MemViewContainer container = MemViewContainer(A)
    container.run()
Cython-0.26.1/tests/errors/0000775000175000017500000000000013151203436016304 5ustar  stefanstefan00000000000000Cython-0.26.1/tests/errors/bufaccess_noassignT444.pyx0000664000175000017500000000030212542002467023264 0ustar  stefanstefan00000000000000# ticket: 444
# mode: error

def test():
     cdef object[int] not_assigned_to
     not_assigned_to[2] = 3

_ERRORS = """
6:20: local variable 'not_assigned_to' referenced before assignment
"""
Cython-0.26.1/tests/errors/e_argdefault.pyx0000664000175000017500000000072012542002467021473 0ustar  stefanstefan00000000000000# mode: error

cdef spam(int i, char *s = "blarg", float f): # can't have default value
	pass

def swallow(x, y = 42, z): # non-default after default
	pass

cdef class Grail:

	def __add__(x, y = 42): # can't have default value
		pass

_ERRORS = u"""
3:10: Non-default argument follows default argument
3:36: Non-default argument following default argument
6:23: Non-default argument following default argument
11:16: This argument cannot have a default value
"""
Cython-0.26.1/tests/errors/w_unreachable_cf.pyx0000664000175000017500000000122212542002467022316 0ustar  stefanstefan00000000000000# mode: error
# tag: werror, unreachable, control-flow

def try_finally():
    try:
        return
    finally:
        return
    print 'oops'

def try_return():
    try:
        return
    except:
        return
    print 'oops'

def for_return(a):
    for i in a:
        return
    else:
        return
    print 'oops'

def while_return(a):
    while a:
        return
    else:
        return
    print 'oops'

def forfrom_return(a):
    for i from 0 <= i <= a:
        return
    else:
        return
    print 'oops'

_ERRORS = """
9:4: Unreachable code
16:4: Unreachable code
23:4: Unreachable code
30:4: Unreachable code
37:4: Unreachable code
"""
Cython-0.26.1/tests/errors/se_badindent2.pyx0000664000175000017500000000021012542002467021544 0ustar  stefanstefan00000000000000# mode: error

def f():
	a = b
		c = d
_ERRORS = u"""
5:0: Possible inconsistent indentation
5:0: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/invalid_cast.pyx0000664000175000017500000000014412542002467021511 0ustar  stefanstefan00000000000000# mode: error

def f():
    a = True

_ERRORS = u"""
4:9: 'foao' is not a type identifier
"""
Cython-0.26.1/tests/errors/e_cdefassign.pyx0000664000175000017500000000044412542002467021466 0ustar  stefanstefan00000000000000# mode: error

cdef class A:
    cdef int value = 3

cdef extern from *:
    cdef struct B:
        int value = 3

_ERRORS = u"""
4:13: Cannot assign default value to fields in cdef classes, structs or unions
8:12: Cannot assign default value to fields in cdef classes, structs or unions
"""
Cython-0.26.1/tests/errors/e_exttype_freelist.pyx0000664000175000017500000000057212542002467022761 0ustar  stefanstefan00000000000000# mode: error
# tag: freelist, werror

cimport cython

@cython.freelist(8)
cdef class ExtType:
    pass

@cython.freelist(8)
cdef class ExtTypeObject(object):
    pass

cdef class ExtSubTypeOk(ExtType):
    pass

@cython.freelist(8)
cdef class ExtSubTypeFail(ExtType):
    pass


_ERRORS = """
18:5: freelists cannot be used on subtypes, only the base class can manage them
"""
Cython-0.26.1/tests/errors/break_outside_loop.pyx0000664000175000017500000000162713143605603022730 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# mode: error

break

class A:
    break

cdef class B:
    break

def test():
    break

try: break
except: pass

try: break
finally: pass

if bool_result():
    break
else:
    break

def bool_result():
    return True


def break_after_loop():
    for _ in range(2):
        pass

    if bool_result():
        break

    try:
        if bool_result():
            break
    except Exception:
        pass

    if bool_result():
        break


_ERRORS = u'''
4:0: break statement not inside loop
7:4: break statement not inside loop
10:4: break statement not inside loop
13:4: break statement not inside loop
15:5: break statement not inside loop
18:5: break statement not inside loop
22:4: break statement not inside loop
24:4: break statement not inside loop
35:8: break statement not inside loop
39:12: break statement not inside loop
44:8: break statement not inside loop
'''
Cython-0.26.1/tests/errors/undefinedname.pyx0000664000175000017500000000041212542002467021651 0ustar  stefanstefan00000000000000# mode: error

i = _this_global_name_does_not_exist_

def test(i):
    return _this_local_name_does_not_exist_

_ERRORS = u"""
3:37:undeclared name not builtin: _this_global_name_does_not_exist_
6:43:undeclared name not builtin: _this_local_name_does_not_exist_
"""
Cython-0.26.1/tests/errors/e_index.pyx0000664000175000017500000000062112542002467020464 0ustar  stefanstefan00000000000000# mode: error

def f(obj1, obj2):
	cdef int int1, int2, int3
	cdef float flt1, *ptr1
	cdef int array1[42]
	int1 = array1[flt1] # error
	int1 = array1[ptr1] # error
	int1 = int2[int3] # error
	obj1 = obj2[ptr1] # error
_ERRORS = u"""
7:14: Invalid index type 'float'
8:14: Invalid index type 'float *'
9:12: Attempting to index non-array type 'int'
10:17: Cannot convert 'float *' to Python object
"""
Cython-0.26.1/tests/errors/e_directives.pyx0000664000175000017500000000052312542002467021517 0ustar  stefanstefan00000000000000# mode: error

# cython: nonexistant = True
# cython: boundscheck = true
# cython: boundscheck = 9

print 3

# Options should not be interpreted any longer:
# cython: boundscheck = true

_ERRORS = u"""
4:0: boundscheck directive must be set to True or False, got 'true'
5:0: boundscheck directive must be set to True or False, got '9'
"""
Cython-0.26.1/tests/errors/e_tuple_args_T692.py0000664000175000017500000000027212542002467022060 0ustar  stefanstefan00000000000000# ticket: 692
# mode: error

def func((a, b)):
    return a + b

_ERRORS = u"""
4:9: Missing argument name
5:13: undeclared name not builtin: a
5:16: undeclared name not builtin: b
"""

Cython-0.26.1/tests/errors/e_nargs.pyx0000664000175000017500000000111112542002467020462 0ustar  stefanstefan00000000000000# mode: error

cdef extern grail(char *s, int i)
cdef extern spam(char *s, int i,...)

cdef f():
	grail() # too few args
	grail("foo") # too few args
	grail("foo", 42, 17) # too many args
	spam() # too few args
	spam("blarg") # too few args
_ERRORS = u"""
7:6: Call with wrong number of arguments (expected 2, got 0)
8:6: Call with wrong number of arguments (expected 2, got 1)
9:6: Call with wrong number of arguments (expected 2, got 3)
10:5: Call with wrong number of arguments (expected at least 2, got 0)
11:5: Call with wrong number of arguments (expected at least 2, got 1)
"""
Cython-0.26.1/tests/errors/e_notnone2.pyx0000664000175000017500000000027612542002467021125 0ustar  stefanstefan00000000000000# mode: error

def eggs(int x not None, char* y not None):
	pass
_ERRORS = u"""
3: 9: Only Python type arguments can have 'not None'
3:25: Only Python type arguments can have 'not None'
"""
Cython-0.26.1/tests/errors/w_numpy_arr_as_cppvec_ref.pyx0000664000175000017500000000175512542002467024303 0ustar  stefanstefan00000000000000# mode: error
# tag: cpp, werror, numpy

import numpy as np
cimport numpy as np
from libcpp.vector cimport vector

cdef extern from *:
    void cpp_function_vector1(vector[int])
    void cpp_function_vector2(vector[int] &)
    void cpp_function_2_vec_refs(vector[int] &, vector[int] &)


def main():
    cdef np.ndarray[int, ndim=1, mode="c"] arr = np.zeros(10, dtype='intc')
    cpp_function_vector1(arr)
    cpp_function_vector2(arr)
    cpp_function_vector2(arr)
    cpp_function_2_vec_refs(arr, arr)

    cdef vector[int] vec
    vec.push_back(0)
    cpp_function_vector2(vec)


_ERRORS = """
17:28: Cannot pass Python object as C++ data structure reference (vector[int] &), will pass by copy.
18:28: Cannot pass Python object as C++ data structure reference (vector[int] &), will pass by copy.
19:31: Cannot pass Python object as C++ data structure reference (vector[int] &), will pass by copy.
19:36: Cannot pass Python object as C++ data structure reference (vector[int] &), will pass by copy.
"""
Cython-0.26.1/tests/errors/e_relative_cimport.pyx0000664000175000017500000000065112542002467022730 0ustar  stefanstefan00000000000000# mode: error
# tag: cimport

from ..relative_cimport cimport some_name
from .e_relative_cimport cimport some_name
from ..cython cimport declare
from . cimport e_relative_cimport


_ERRORS="""
4:0: relative cimport beyond main package is not allowed
5:0: relative cimport beyond main package is not allowed
6:0: relative cimport beyond main package is not allowed
7:0: relative cimport beyond main package is not allowed
"""
Cython-0.26.1/tests/errors/syntax_warnings.pyx0000664000175000017500000000032612542002467022311 0ustar  stefanstefan00000000000000# mode: error
# tag: werror

cdef useless_semicolon():
    cdef int i;
    pass;

ctypedef int x;


_ERRORS="""
5:14: useless trailing semicolon
6:8: useless trailing semicolon
8:14: useless trailing semicolon
"""
Cython-0.26.1/tests/errors/e_arrayassign.pyx0000664000175000017500000000107012542002467021677 0ustar  stefanstefan00000000000000# mode: error

ctypedef int[1] int_array
ctypedef int[2] int_array2


cdef int_array x, y
x = y  # not an error

cdef int_array *x_ptr = &x
x_ptr[0] = y  # not an error

cdef class A:
    cdef int_array value
    def __init__(self):
        self.value = x  # not an error


cdef int_array2 z
z = x  # error
x = z  # error

cdef enum:
    SIZE = 2

ctypedef int[SIZE] int_array_dyn

cdef int_array_dyn d
d = z  # not an error


_ERRORS = u"""
20:2: Assignment to slice of wrong length, expected 2, got 1
21:2: Assignment to slice of wrong length, expected 1, got 2
"""
Cython-0.26.1/tests/errors/w_uninitialized_with.pyx0000664000175000017500000000103512542002467023302 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def with_no_target(m):
    with m:
        print a
        a = 1

def unbound_manager(m1):
    with m2:
        pass
    m2 = m1

def with_target(m):
    with m as f:
        print(f)

def with_mgr(m):
    try:
        with m() as f:
            pass
    except:
        print f

_ERRORS = """
7:15: local variable 'a' referenced before assignment
11:11: local variable 'm2' referenced before assignment
24:15: local variable 'f' might be referenced before assignment
"""
Cython-0.26.1/tests/errors/extended_unpacking_parser.pyx0000664000175000017500000000112412542002467024263 0ustar  stefanstefan00000000000000# mode: error

# wrong size RHS (as handled by the parser)

def length1():
    a, b = [1,2,3]

def length2():
    a, b = [1]

def length3():
    a, b = []

def length4():
    a, *b = []

def length5():
    a, *b, c = []
    a, *b, c = [1]

def length_recursive():
    *(a, b), c  = (1,2)


_ERRORS = u"""
6:4: too many values to unpack (expected 2, got 3)
9:4: need more than 1 value to unpack
12:4: need more than 0 values to unpack
15:4: need more than 0 values to unpack
18:4: need more than 0 values to unpack
19:4: need more than 1 value to unpack
22:6: need more than 1 value to unpack
"""
Cython-0.26.1/tests/errors/pep448_syntax_2.pyx0000664000175000017500000000017412542002467021727 0ustar  stefanstefan00000000000000# mode: error
# tag: pep448

def unpack_wrong_stars():
    [**1]


_ERRORS = """
5:5: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/invalid_uescape0.pyx0000664000175000017500000000011112542002467022256 0ustar  stefanstefan00000000000000# mode: error

u'\u'

_ERRORS = '''
3:2: Invalid unicode escape '\u'
'''
Cython-0.26.1/tests/errors/cpp_comparison.pyx0000664000175000017500000000045212574327400022071 0ustar  stefanstefan00000000000000# mode: error
# tag: cpp

from libcpp.vector cimport vector

def vector_is_none(vector[int] iv):
    # TODO: this isn't strictly wrong, so it might be allowed as a 'feature' at some point
    if iv is None:
        pass


_ERRORS = """
8:10: Invalid types for 'is' (vector[int], Python object)
"""
Cython-0.26.1/tests/errors/w_uninitialized_py3.pyx0000664000175000017500000000061412542002467023044 0ustar  stefanstefan00000000000000# cython: language_level=3, warn.maybe_uninitialized=True
# mode: error
# tag: werror

def ref(obj):
    pass

def list_comp(a):
    r = [i for i in a]
    ref(i)
    i = 0
    return r

def dict_comp(a):
    r = {i: j for i, j in a}
    ref(i)
    i = 0
    return r


_ERRORS = """
10:9: local variable 'i' referenced before assignment
16:9: local variable 'i' referenced before assignment
"""
Cython-0.26.1/tests/errors/cdefspecial.pyx0000664000175000017500000000037312542002467021317 0ustar  stefanstefan00000000000000# mode: error

cdef class Test:
    cdef __cinit__(self):
        pass

    cdef __len__(self):
        pass

_ERRORS = u"""
4:9: Special methods must be declared with 'def', not 'cdef'
7:9: Special methods must be declared with 'def', not 'cdef'
"""
Cython-0.26.1/tests/errors/e_bufaccess_pxd.pxd0000664000175000017500000000005312542002467022140 0ustar  stefanstefan00000000000000# See e_bufaccess2.pyx

ctypedef nothing T
Cython-0.26.1/tests/errors/e_bufaccess.pyx0000664000175000017500000000153312542002467021316 0ustar  stefanstefan00000000000000# mode: error

cdef object[int] buf
cdef class A:
    cdef object[int] buf

def f():
    cdef object[fakeoption=True] buf1
    cdef object[int, -1] buf1b
    cdef object[ndim=-1] buf2
    cdef object[int, 'a'] buf3
    cdef object[int,2,3,4,5,6] buf4
    cdef object[int, 2, 'foo'] buf5
    cdef object[int, 2, well] buf6
    cdef object[x, 1] buf0

_ERRORS = u"""
3:17: Buffer types only allowed as function local variables
5:21: Buffer types only allowed as function local variables
8:31: "fakeoption" is not a buffer option
"""
#TODO:
#7:22: "ndim" must be non-negative
#8:15: "dtype" missing
#9:21: "ndim" must be an integer
#10:15: Too many buffer options
#11:24: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
#12:28: Only allowed buffer modes are "full" or "strided" (as a compile-time string)
#13:17: Invalid type.
#"""

Cython-0.26.1/tests/errors/e_cstruct.pyx0000664000175000017500000000157012542002467021050 0ustar  stefanstefan00000000000000# mode: error

cdef struct Spam:
    int i
    char c
    float[42] *p
    obj             # error - py object

#cdef struct Spam: # error - redefined (not an error in Cython, should it be?)
#    int j

cdef struct Grail

cdef void eggs(Spam s):
    cdef int j
    cdef Grail *gp
    j = s.k # error - undef attribute
    j = s.p # type error
    s.p = j # type error
    j = j.i # no error - coercion to Python object
    j.i = j # no error - coercion to Python object
    j = gp.x # error - incomplete type
    gp.x = j # error - incomplete type


_ERRORS = u"""
7:39: C struct/union member cannot be a Python object
17:9: Object of type 'Spam' has no attribute 'k'
18:9: Cannot assign type 'float (*)[42]' to 'int'
19:24: Cannot assign type 'int' to 'float (*)[42]'
22:10: Cannot select attribute of incomplete type 'Grail'
23:6: Cannot select attribute of incomplete type 'Grail'
"""
Cython-0.26.1/tests/errors/return_outside_function_T135.pyx0000664000175000017500000000134012542002467024544 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# ticket: 135
# mode: error

def _runtime_True():
    return True

return 'bar'

class A:
    return None

cdef class B:
    return None

try: return None
except: pass

try: return None
finally: pass

for i in (1,2):
    return None

while True:
    return None

if _runtime_True():
    return None
else:
    return None


_ERRORS = u'''
8:0: Return not inside a function body
11:4: Return not inside a function body
14:4: Return not inside a function body
16:5: Return not inside a function body
19:5: Return not inside a function body
23:4: Return not inside a function body
26:4: Return not inside a function body
29:4: Return not inside a function body
31:4: Return not inside a function body
'''
Cython-0.26.1/tests/errors/e_return.pyx0000664000175000017500000000044012542002467020673 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# mode: error

cdef void g():
	cdef int i
	return i # error

cdef int h():
	cdef int *p
	return # error
	return p # error
_ERRORS = u"""
6:17: Return with value in void function
10:1: Return value required
11:17: Cannot assign type 'int *' to 'int'
"""
Cython-0.26.1/tests/errors/e_tempcast.pyx0000664000175000017500000000043412542002467021177 0ustar  stefanstefan00000000000000# mode: error

cdef object blarg

def foo(obj):
	cdef void *p
	p = <void *>blarg # ok
	p = <void *>(obj + blarg) # error - temporary

_ERRORS = u"""
8:5: Casting temporary Python object to non-numeric non-Python type
8:5: Storing unsafe C derivative of temporary Python reference
"""
Cython-0.26.1/tests/errors/w_uninitialized_generators.pyx0000664000175000017500000000036112542002467024501 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def unbound_inside_generator(*args):
    for i in args:
        yield x
        x = i + i

_ERRORS = """
7:15: local variable 'x' might be referenced before assignment
"""
Cython-0.26.1/tests/errors/e_cdef_yield.pyx0000664000175000017500000000024012542002467021441 0ustar  stefanstefan00000000000000# mode: error

cdef cdef_yield():
    yield

cpdef cpdef_yield():
    yield

_ERRORS = u"""
4:4: 'yield' not supported here
7:4: 'yield' not supported here
"""
Cython-0.26.1/tests/errors/e_unop.pyx0000664000175000017500000000031512542002467020336 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef int int1
	cdef char *str2
	int1 = -str2 # error
	int1 = ~str2 # error
_ERRORS = u"""
6:8: Invalid operand type for '-' (char *)
7:8: Invalid operand type for '~' (char *)
"""
Cython-0.26.1/tests/errors/extended_unpacking.pyx0000664000175000017500000000154112542002467022712 0ustar  stefanstefan00000000000000# mode: error

# invalid syntax (not handled by the parser)

def syntax1():
    a = b = c = d = e = f = g = h = i = 1 # prevent undefined names

    *a

    *1

    *"abc"

    *a*b

    [*a, *b]

    (a, b, *c, d, e, f, *g, h, i)
    [a, b, *c, d, e, f, *g, h, i]
    {a, b, *c, d, e, f, *g, h, i}


def syntax2():
    list_of_sequences = [[1,2], [3,4]]

    for *a,*b in list_of_sequences:
        pass


def types(l):
    cdef int a,b
    a, *b = (1,2,3,4)
    a, *b = l


_ERRORS = u"""
# syntax1()
 8: 4: starred expression is not allowed here
10: 4: starred expression is not allowed here
12: 4: starred expression is not allowed here
14: 4: starred expression is not allowed here

# syntax2()
26:11: more than 1 starred expression in assignment

# types()
32:15: Cannot coerce list to type 'int'
33:10: starred target must have Python object (list) type
"""
Cython-0.26.1/tests/errors/w_uninitialized_py2.pyx0000664000175000017500000000113112542002467023036 0ustar  stefanstefan00000000000000# cython: language_level=2, warn.maybe_uninitialized=True
# mode: error
# tag: werror

def list_comp(a):
    r = [i for i in a]
    return i

# dict comps are a py3 feature and do not leak their loop variables here
def dict_comp(a):
    r = {i: j for i, j in a}
    return i, j

def dict_comp2(a):
    r = {i: j for i, j in a}
    print i, j
    i, j = 0, 0


_ERRORS = """
7:12: local variable 'i' might be referenced before assignment
12:12: undeclared name not builtin: i
12:15: undeclared name not builtin: j
16:11: local variable 'i' referenced before assignment
16:14: local variable 'j' referenced before assignment
"""
Cython-0.26.1/tests/errors/missing_baseclass_in_predecl_T262.pyx0000664000175000017500000000033312542002467025443 0ustar  stefanstefan00000000000000# ticket: 262
# mode: error

cdef class Album

cdef class SessionStruct:
     cdef Album _create_album(self, void* album, bint take_owner):
          pass

cdef class Album(SessionStruct):
     pass


_ERROR = u"""
"""
Cython-0.26.1/tests/errors/nogilcmeth.pxd0000664000175000017500000000005212542002467021153 0ustar  stefanstefan00000000000000cdef class C:
    cdef void f(self) nogil
Cython-0.26.1/tests/errors/fused_types.pyx0000664000175000017500000000354712542002467021415 0ustar  stefanstefan00000000000000# mode: error

cimport cython
from cython import fused_type

# This is all invalid
# ctypedef foo(int) dtype1
# ctypedef foo.bar(float) dtype2
# ctypedef fused_type(foo) dtype3
dtype4 = cython.fused_type(int, long, kw=None)

# ctypedef public cython.fused_type(int, long) dtype7
# ctypedef api cython.fused_type(int, long) dtype8

int_t = cython.fused_type(short, short, int)
int2_t = cython.fused_type(int, long)
dtype9 = cython.fused_type(int2_t, int)

floating = cython.fused_type(float, double)

cdef func(floating x, int2_t y):
    print x, y

cdef float x = 10.0
cdef int y = 10
func[float](x, y)
func[float][int](x, y)
func[float, int](x)
func[float, int](x, y, y)
func(x, y=y)

ctypedef fused memslice_dtype_t:
    cython.p_int # invalid dtype
    cython.long

def f(memslice_dtype_t[:, :] a):
    pass

lambda cython.integral i: i


cdef cython.floating x

cdef class Foo(object):
    cdef cython.floating attr

def outer(cython.floating f):
    def inner():
        cdef cython.floating g

# This is all valid
dtype5 = fused_type(int, long, float)
dtype6 = cython.fused_type(int, long)
func[float, int](x, y)

cdef fused fused1:
    int
    long long

ctypedef fused fused2:
    int
    long long

func(x, y)


_ERRORS = u"""
10:15: fused_type does not take keyword arguments
15:38: Type specified multiple times
26:4: Invalid use of fused types, type cannot be specialized
26:4: Not enough types specified to specialize the function, int2_t is still fused
27:4: Invalid use of fused types, type cannot be specialized
27:4: Not enough types specified to specialize the function, int2_t is still fused
28:16: Call with wrong number of arguments (expected 2, got 1)
29:16: Call with wrong number of arguments (expected 2, got 3)
36:6: Invalid base type for memoryview slice: int *
39:0: Fused lambdas not allowed
42:5: Fused types not allowed here
45:9: Fused types not allowed here
"""
Cython-0.26.1/tests/errors/nonconst_def.pyx0000664000175000017500000000031512542002467021530 0ustar  stefanstefan00000000000000# mode: error

import os
DEF ospath = os.path

_ERRORS = u"""
4:15: Compile-time name 'os' not defined
4:15: Error in compile-time expression: AttributeError: 'NoneType' object has no attribute 'path'
"""
Cython-0.26.1/tests/errors/continue_outside_loop.pyx0000664000175000017500000000116213143605603023462 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# mode: error

continue

class A:
    continue

cdef class B:
    continue

def test():
    continue

try: continue
except: pass

try: continue
finally: pass

if bool_result():
    continue
else:
    continue

def bool_result():
    return True


_ERRORS = '''
4:0: continue statement not inside loop
7:4: continue statement not inside loop
10:4: continue statement not inside loop
13:4: continue statement not inside loop
15:5: continue statement not inside loop
18:5: continue statement not inside loop
22:4: continue statement not inside loop
24:4: continue statement not inside loop
'''
Cython-0.26.1/tests/errors/uninitialized_lhs.pyx0000664000175000017500000000050212542002467022565 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror
# ticket: 739

def index_lhs(a):
    cdef object idx
    a[idx] = 1

def slice_lhs(a):
    cdef object idx
    a[:idx] = 1

_ERRORS = """
8:9: local variable 'idx' referenced before assignment
12:10: local variable 'idx' referenced before assignment
"""
Cython-0.26.1/tests/errors/cfunc_directive_in_pyclass.pyx0000664000175000017500000000024312542002467024431 0ustar  stefanstefan00000000000000# mode: error
import cython

class Pyclass(object):
    @cython.cfunc
    def bad(self):
        pass

_ERRORS = """
 6:4: cfunc directive is not allowed here
"""
Cython-0.26.1/tests/errors/e_badpyparam.pyx0000664000175000017500000000020312542002467021471 0ustar  stefanstefan00000000000000# mode: error

cdef struct Foo

def f(Foo *p):
	pass
_ERRORS = u"""
5:6: Cannot convert Python object argument to type 'Foo *'
"""
Cython-0.26.1/tests/errors/nogil_buffer_acquisition.pyx0000664000175000017500000000033412542002467024123 0ustar  stefanstefan00000000000000# mode: error

cimport numpy as np

cdef void func(np.ndarray[np.double_t, ndim=1] myarray) nogil:
    pass

_ERRORS = u"""
5:15: Buffer may not be acquired without the GIL. Consider using memoryview slices instead.
"""
Cython-0.26.1/tests/errors/cdef_class_properties_decorated.pyx0000664000175000017500000000134213023021033025407 0ustar  stefanstefan00000000000000# mode: error
# ticket: 264
# tag: property, decorator


from functools import wraps


def wrap_func(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        print("WRAPPED")
        return f(*args, **kwargs)
    return wrap


cdef class Prop:
    @property
    @wrap_func
    def prop1(self):
        return 1

    @property
    def prop2(self):
        return 2

    @wrap_func
    @prop2.setter
    def prop2(self, value):
        pass

    @prop2.setter
    @wrap_func
    def prop2(self, value):
        pass


_ERRORS = """
19:4: Property methods with additional decorators are not supported
27:4: Property methods with additional decorators are not supported
33:4: Property methods with additional decorators are not supported
"""
Cython-0.26.1/tests/errors/invalid_hex_escape1.pyx0000664000175000017500000000010512542002467022741 0ustar  stefanstefan00000000000000# mode: error

'\x1'

_ERRORS = '''
3:1: Invalid hex escape '\x'
'''
Cython-0.26.1/tests/errors/e_fused_closure.pyx0000664000175000017500000000106012542002467022215 0ustar  stefanstefan00000000000000# mode: error

cimport cython

def closure(cython.integral i):
    def inner(cython.floating f):
        pass

def closure2(cython.integral i):
    return lambda cython.integral i: i

def closure3(cython.integral i):
    def inner():
        return lambda cython.floating f: f

def generator(cython.integral i):
    yield i

_ERRORS = u"""
e_fused_closure.pyx:6:4: Cannot nest fused functions
e_fused_closure.pyx:10:11: Fused lambdas not allowed
e_fused_closure.pyx:14:15: Fused lambdas not allowed
e_fused_closure.pyx:16:0: Fused generators not supported
"""
Cython-0.26.1/tests/errors/fused_syntax_ctypedef.pyx0000664000175000017500000000024512542002467023452 0ustar  stefanstefan00000000000000# mode: error

cimport cython

ctypedef cython.fused_type(int, float) fused_t

_ERRORS = u"""
fused_syntax_ctypedef.pyx:5:39: Syntax error in ctypedef statement
"""
Cython-0.26.1/tests/errors/py_ucs4_type_errors.pyx0000664000175000017500000000137412542002467023102 0ustar  stefanstefan00000000000000# mode: error
# -*- coding: iso-8859-1 -*-

cdef Py_UCS4 char_ASCII = u'A'
cdef Py_UCS4 char_KLINGON = u'\uF8D2'

def char_too_long_ASCII():
    cdef Py_UCS4 c = u'AB'

def char_too_long_Unicode():
    cdef Py_UCS4 c = u'A\uF8D2'

def char_too_long_bytes():
    cdef Py_UCS4 c = b'AB'

def char_too_long_latin1():
    cdef Py_UCS4 char_bytes_latin1 = b'\xf6'


_ERRORS = """
 8:21: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
11:21: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
14:21: Only single-character string literals can be coerced into ints.
17:37: Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.
"""
Cython-0.26.1/tests/errors/se_multass.pyx0000664000175000017500000000030412542002467021226 0ustar  stefanstefan00000000000000# mode: error

def f(obj1a, obj2a, obj3a, obj1b, obj2b, obj3b, obj4b):
	obj1a, (obj2a, obj3a) = obj1b, (obj2b, obj3b, obj4b)

_ERRORS = u"""
4:9: too many values to unpack (expected 2, got 3)
"""
Cython-0.26.1/tests/errors/exec_errors.pyx0000664000175000017500000000215412542002467021374 0ustar  stefanstefan00000000000000# mode: error
# tag: exec

def test_exec_tuples():
    exec()
    exec(1,)
    exec(1,2,3,4)

def test_exec_tuples_with_in(d1, d2):
    exec(1,2) in d1
    exec(1,2,3) in d1
    exec(1,2) in d1, d2
    exec(1,2,3) in d1, d2
    exec() in d1, d2
    exec(1,) in d1, d2
    exec(1,2,3,4) in d1, d2


_ERRORS = """
 5:4: expected tuple of length 2 or 3, got length 0
 6:4: expected tuple of length 2 or 3, got length 1
 7:4: expected tuple of length 2 or 3, got length 4
10:14: tuple variant of exec does not support additional 'in' arguments
11:16: tuple variant of exec does not support additional 'in' arguments
12:14: tuple variant of exec does not support additional 'in' arguments
13:16: tuple variant of exec does not support additional 'in' arguments
14:4: expected tuple of length 2 or 3, got length 0
14:11: tuple variant of exec does not support additional 'in' arguments
15:4: expected tuple of length 2 or 3, got length 1
15:13: tuple variant of exec does not support additional 'in' arguments
16:4: expected tuple of length 2 or 3, got length 4
16:18: tuple variant of exec does not support additional 'in' arguments
"""
Cython-0.26.1/tests/errors/w_uninitialized_exc.pyx0000664000175000017500000000434212542002467023112 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def exc_target():
    try:
        {}['foo']
    except KeyError, e:
        pass
    except IndexError, i:
        pass
    return e, i

def exc_body():
    try:
        a = 1
    except Exception:
        pass
    return a

def exc_else_pos():
    try:
        pass
    except Exception, e:
        pass
    else:
        e = 1
    return e

def exc_body_pos(d):
    try:
        a = d['foo']
    except KeyError:
        a = None
    return a

def exc_pos():
    try:
        a = 1
    except Exception:
        a = 1
    return a

def exc_finally():
    try:
        a = 1
    finally:
        pass
    return a

def exc_finally2():
    try:
        pass
    finally:
        a = 1
    return a


def exc_assmt_except(a):
    try:
        x = a
    except:
        return x

def exc_assmt_finally(a):
    try:
        x = a
    except:
        return x

def raise_stat(a):
    try:
        if a < 0:
            raise IndexError
    except IndexError:
        oops = 1
    print oops

def try_loop(args):
    try:
        x = 0
        for i in args:
            if i is 0:
                continue
            elif i is None:
                break
            elif i is False:
                return
            i()
    except ValueError:
        x = 1
    finally:
        return x

def try_finally(a):
    try:
        for i in a:
            if i > 0:
                x = 1
    finally:
        return x

def try_finally_nested(m):
    try:
        try:
            try:
                f = m()
            except:
                pass
        finally:
            pass
    except:
        print f

_ERRORS = """
12:12: local variable 'e' might be referenced before assignment
12:15: local variable 'i' might be referenced before assignment
19:12: local variable 'a' might be referenced before assignment
63:16: local variable 'x' might be referenced before assignment
69:16: local variable 'x' might be referenced before assignment
77:14: local variable 'oops' might be referenced before assignment
93:16: local variable 'x' might be referenced before assignment
101:16: local variable 'x' might be referenced before assignment
113:15: local variable 'f' might be referenced before assignment
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async8.pyx0000664000175000017500000000020412542002467023433 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    await await fut

_ERRORS = """
5:10: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/w_python_list_as_cppset_ref.pyx0000664000175000017500000000066312542002467024656 0ustar  stefanstefan00000000000000# mode: error
# tag: cpp, werror

from libcpp.set cimport set

cdef extern from *:
    void cpp_function_set1(set[int] arg)
    void cpp_function_set2(set[int]& arg)


def pass_py_obj_as_cpp_cont_ref():
    cdef list ordered_set = [0, 0, 0, 0, 0]
    cpp_function_set1(ordered_set)
    cpp_function_set2(ordered_set)


_ERRORS = """
14:33: Cannot pass Python object as C++ data structure reference (set[int] &), will pass by copy.
"""
Cython-0.26.1/tests/errors/def_nogil.pyx0000664000175000017500000000015312542002467020777 0ustar  stefanstefan00000000000000# mode: error

def test() nogil:
    pass

_ERRORS = """
3:0: Python function cannot be declared nogil
"""
Cython-0.26.1/tests/errors/e_int_literals_py2.py0000664000175000017500000000055412542002467022455 0ustar  stefanstefan00000000000000# mode: error
# cython: language_level=2

def int_literals():
    a = 1L  # ok
    b = 10000000000000L # ok
    c = 1UL
    d = 10000000000000UL
    e = 10000000000000LL


_ERRORS = """
7:8: illegal integer literal syntax in Python source file
8:8: illegal integer literal syntax in Python source file
9:8: illegal integer literal syntax in Python source file
"""
Cython-0.26.1/tests/errors/parsed_directive.pyx0000664000175000017500000000067012542002467022371 0ustar  stefanstefan00000000000000# mode: error

cimport cython

cdef class TestClass:
    def foo(self):
        with cython.c_string_encoding("ascii"):
            return


### FIXME: way too many errors for my taste...

_ERRORS = """
7:13: The c_string_encoding compiler directive is not allowed in with statement scope
7:19: 'c_string_encoding' not a valid cython language construct
7:19: 'c_string_encoding' not a valid cython attribute or is being used incorrectly
"""
Cython-0.26.1/tests/errors/fused_syntax.pyx0000664000175000017500000000016512542002467021570 0ustar  stefanstefan00000000000000# mode: error

cdef fused my_fused_type: int a; char b

_ERRORS = u"""
fused_syntax.pyx:3:26: Expected a newline
"""
Cython-0.26.1/tests/errors/nonconst_def_tuple.pyx0000664000175000017500000000042012542002467022736 0ustar  stefanstefan00000000000000# mode: error

DEF t = (1,2,3)
DEF t_const = (1,t,2)
DEF t_non_const = (1,[1,2,3],3,t[4])

x = t_non_const

_ERRORS = u"""
5:32: Error in compile-time expression: IndexError: tuple index out of range
7:15: Invalid type for compile-time constant: [1, 2, 3] (type list)
"""
Cython-0.26.1/tests/errors/cdefkwargs.pyx0000664000175000017500000000055312542002467021175 0ustar  stefanstefan00000000000000# mode: error

__doc__ = u"""
    >>> call2()
    >>> call3()
    >>> call4()
"""

import sys, re
if sys.version_info >= (2,6):
    __doc__ = re.sub(u"Error: (.*)exactly(.*)", u"Error: \\1at most\\2", __doc__)

# the calls:

def call2():
    b(1,2)

def call3():
    b(1,2,3)

def call4():
    b(1,2,3,4)

# the called function:

cdef b(a, b, c=1, d=2):
    pass
Cython-0.26.1/tests/errors/e_badtypeuse.pyx0000664000175000017500000000263712542002467021533 0ustar  stefanstefan00000000000000# mode: error

cdef struct Grail

cdef extern object xobj # Python object cannot be extern
cdef object aobj[42]    # array element cannot be Python object
cdef object *pobj       # pointer base type cannot be Python object

cdef int spam[] # incomplete variable type
cdef Grail g     # incomplete variable type
cdef void nada   # incomplete variable type

cdef int a_spam[17][]  # incomplete element type
cdef Grail a_g[42]     # incomplete element type
cdef void a_nada[88]   # incomplete element type

cdef struct Eggs:
	int spam[]

cdef f(Grail g,   # incomplete argument type
	void v,         # incomplete argument type
	int a[]):
		pass

cdef NoSuchType* ptr
ptr = None             # This should not produce another error

_ERRORS = u"""
5:19: Python object cannot be declared extern
6:16: Array element cannot be a Python object
7:12: Pointer base type cannot be a Python object
9:13: Variable type 'int []' is incomplete
10:11: Variable type 'Grail' is incomplete
11:10: Variable type 'void' is incomplete
13:15: Array element type 'int []' is incomplete
14:14: Array element type 'Grail' is incomplete
15:16: Array element type 'void' is incomplete
18:9: Variable type 'int []' is incomplete
#19:1: Function argument cannot be void
21:1: Use spam() rather than spam(void) to declare a function with no arguments.
20:7: Argument type 'Grail' is incomplete
21:1: Invalid use of 'void'
25:5: 'NoSuchType' is not a type identifier
"""
Cython-0.26.1/tests/errors/w_unreachable.pyx0000664000175000017500000000115612542002467021654 0ustar  stefanstefan00000000000000# mode: error
# tag: werror

def simple_return():
    return
    print 'Where am I?'

def simple_loops(*args):
    for i in args:
        continue
        print 'Never be here'

    while True:
        break
        print 'Never be here'

def conditional(a, b):
    if a:
        return 1
    elif b:
        return 2
    else:
        return 3
    print 'oops'

def try_except():
    try:
        raise TypeError
    except ValueError:
        pass
    else:
        print 'unreachable'


_ERRORS = """
6:4: Unreachable code
11:8: Unreachable code
15:8: Unreachable code
24:4: Unreachable code
32:8: Unreachable code
"""
Cython-0.26.1/tests/errors/e_callspec.pyx0000664000175000017500000000404012542002467021142 0ustar  stefanstefan00000000000000# mode: error

cimport cython

ctypedef int USERTYPE

# Functions

@cython.callspec("")
cdef void h1(): pass

@cython.callspec("__cdecl")
cdef void __cdecl h2(): pass

@cython.callspec("__stdcall")
cdef void __stdcall h3(): pass

@cython.callspec("__fastcall")
cdef void __fastcall h4(): pass

cdef USERTYPE h5(): return 0

cdef USERTYPE __cdecl h6(): return 0

cdef USERTYPE __stdcall h7(): return 0

cdef USERTYPE __fastcall h8(): return 0

@cython.callspec("__cdecl")
cdef void __stdcall herr1(): pass # fail

@cython.callspec("__cdecl")
cdef void __fastcall herr2(): pass # fail

# Pointer typedefs

ctypedef void (*PT1)()
ctypedef void (__cdecl *PT2)()
ctypedef void (__stdcall *PT3)()
ctypedef void (__fastcall *PT4)()
ctypedef USERTYPE (*PT5)()
ctypedef USERTYPE (__cdecl *PT6)()
ctypedef USERTYPE (__stdcall *PT7)()
ctypedef USERTYPE (__fastcall *PT8)()

# Pointers

cdef void (*p1)()
cdef void (__cdecl *p2)()
cdef void (__stdcall *p3)()
cdef void (__fastcall *p4)()
cdef USERTYPE (*p5)()
cdef USERTYPE (__cdecl *p6)()
cdef USERTYPE (__stdcall *p7)()
cdef USERTYPE (__fastcall *p8)()

cdef PT1 pt1
cdef PT2 pt2
cdef PT3 pt3
cdef PT4 pt4
cdef PT5 pt5
cdef PT6 pt6
cdef PT7 pt7
cdef PT8 pt8

# Assignments

p1 = pt1 = p2 = pt2 = h1
p1 = pt1 = p2 = pt2 = h2
p3 = pt3 = h3
p4 = pt4 = h4

p5 = pt5 = p6 = pt6 = h5
p5 = pt5 = p6 = pt6 = h6
p7 = pt7 = h7
p8 = pt8 = h8

#p1 = h2 # fail
#p1 = h3 # fail
#p1 = h4 # fail

#p2 = h1 # fail
#p2 = h3 # fail
#p2 = h4 # fail

_ERRORS = u"""
30:25: cannot have both '__stdcall' and '__cdecl' calling conventions
33:26: cannot have both '__fastcall' and '__cdecl' calling conventions
"""
#31:14: Cannot assign type 'void (__cdecl )(void)' to 'void (*)(void)'
#32:14: Cannot assign type 'void (__stdcall )(void)' to 'void (*)(void)'
#33:14: Cannot assign type 'void (__fastcall )(void)' to 'void (*)(void)'
#35:14: Cannot assign type 'void (void)' to 'void (__cdecl *)(void)'
#36:14: Cannot assign type 'void (__stdcall )(void)' to 'void (__cdecl *)(void)'
#37:14: Cannot assign type 'void (__fastcall )(void)' to 'void (__cdecl *)(void)'
Cython-0.26.1/tests/errors/e_autotestdict.pyx0000664000175000017500000000025512542002467022074 0ustar  stefanstefan00000000000000# mode: error

cimport cython

@cython.autotestdict(False)
def foo():
    pass

_ERRORS = u"""
6:0: The autotestdict compiler directive is not allowed in function scope
"""
Cython-0.26.1/tests/errors/e_nogilcmeth.pxd0000664000175000017500000000004112542002467021455 0ustar  stefanstefan00000000000000cdef class C:
	cdef void f(self)
Cython-0.26.1/tests/errors/e_public_cdef_private_types.pyx0000664000175000017500000000226512542002467024600 0ustar  stefanstefan00000000000000# mode: error

ctypedef char *string_t
ctypedef public char *public_string_t
ctypedef api char *api_string_t

# This should all fail
cdef public pub_func1(string_t x):
    pass

cdef api api_func1(string_t x):
    pass

cdef public string_t pub_func2():
    pass

cdef api string_t api_func2():
    pass

cdef public opt_pub_func(x = None):
    pass

cdef api opt_api_func(x = None):
    pass

# This should all work
cdef public pub_func3(public_string_t x, api_string_t y):
    pass

cdef api api_func3(public_string_t x, api_string_t y):
    pass

cdef opt_func(x = None):
    pass

_ERRORS = u"""
e_public_cdef_private_types.pyx:8:22: Function declared public or api may not have private types
e_public_cdef_private_types.pyx:11:19: Function declared public or api may not have private types
e_public_cdef_private_types.pyx:14:5: Function declared public or api may not have private types
e_public_cdef_private_types.pyx:17:5: Function declared public or api may not have private types
e_public_cdef_private_types.pyx:20:25: Function with optional arguments may not be declared public or api
e_public_cdef_private_types.pyx:23:22: Function with optional arguments may not be declared public or api
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async9.pyx0000664000175000017500000000017112542002467023437 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    await

_ERRORS = """
5:9: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/py_unicode_type_errors.pyx0000664000175000017500000000141612542002467023647 0ustar  stefanstefan00000000000000# mode: error
# -*- coding: iso-8859-1 -*-

cdef Py_UNICODE char_ASCII = u'A'
cdef Py_UNICODE char_KLINGON = u'\uF8D2'

def char_too_long_ASCII():
    cdef Py_UNICODE c = u'AB'

def char_too_long_Unicode():
    cdef Py_UNICODE c = u'A\uF8D2'

def char_too_long_bytes():
    cdef Py_UNICODE c = b'AB'

def char_too_long_latin1():
    cdef Py_UNICODE char_bytes_latin1 = b'\xf6'


_ERRORS = """
 8:24: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
11:24: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
14:24: Only single-character string literals can be coerced into ints.
17:40: Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.
"""
Cython-0.26.1/tests/errors/pep448_syntax_1.pyx0000664000175000017500000000017012542002467021722 0ustar  stefanstefan00000000000000# mode: error
# tag: pep448

def unpack_mix():
    [*1, **1]


_ERRORS = """
5:9: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/e_pyobinstruct.pyx0000664000175000017500000000025412542002467022124 0ustar  stefanstefan00000000000000# mode: error

cdef object x

cdef struct spam:
	object parrot

def f():
	cdef spam s
	s.parrot = x
_ERRORS = u"""
6:8: C struct/union member cannot be a Python object
"""
Cython-0.26.1/tests/errors/extclassattrsetting.pyx0000664000175000017500000000051612542002467023173 0ustar  stefanstefan00000000000000# mode: error

__doc__ = u"""
>>> e = ExtClass()
>>> e.get()
5
"""

cdef class ExtClass:
    cdef int _attribute = 2

    def get(self):
        return self._attribute

    _attribute = 5     # FIXME: this is not currently handled!!!

_ERRORS = u"""
10:13: Cannot assign default value to fields in cdef classes, structs or unions
"""
Cython-0.26.1/tests/errors/e_exceptclause.pyx0000664000175000017500000000034012542002467022040 0ustar  stefanstefan00000000000000# mode: error

try:
    raise KeyError
except KeyError:
    pass
except:
    pass
except:
    pass
except AttributeError:
    pass

_ERRORS = u"""
9:0: default 'except:' must be last
11:0: default 'except:' must be last
"""
Cython-0.26.1/tests/errors/e_slice.pyx0000664000175000017500000000111412542002467020452 0ustar  stefanstefan00000000000000# mode: error

def f(obj2):
    cdef int *ptr1
    obj1 = obj2[ptr1::] # error
    obj1 = obj2[:ptr1:] # error
    obj1 = obj2[::ptr1] # error

cdef int a
cdef int* int_ptr

for a in int_ptr:
    pass
for a in int_ptr[2:]:
    pass
for a in int_ptr[2:2:a]:
    pass

_ERRORS = u"""
5:20: Cannot convert 'int *' to Python object
6:21: Cannot convert 'int *' to Python object
7:22: Cannot convert 'int *' to Python object
12:16: C array iteration requires known end index
14:16: C array iteration requires known end index
16:22: C array iteration requires known step size and end index
"""
Cython-0.26.1/tests/errors/e_cdef_closure.pyx0000664000175000017500000000021512542002467022011 0ustar  stefanstefan00000000000000# mode: error

cpdef cpdef_yield():
    def inner():
        pass

_ERRORS = u"""
3:6: closures inside cpdef functions not yet supported
"""
Cython-0.26.1/tests/errors/compile_time_unraisable_T370.pyx0000664000175000017500000000024112542002467024437 0ustar  stefanstefan00000000000000# ticket: 370
# mode: error

cdef int raiseit():
    raise IndexError

try: raiseit()
except: pass

_ERRORS = u"""
FIXME: provide a good error message here.
"""
Cython-0.26.1/tests/errors/pure_cclass_without_body.py0000664000175000017500000000016013023021033023743 0ustar  stefanstefan00000000000000# mode: error

class Test(object):
    pass

_ERRORS = u"""
3:5: C class 'Test' is declared but not defined
"""
Cython-0.26.1/tests/errors/e_addressof.pyx0000664000175000017500000000145113150045407021326 0ustar  stefanstefan00000000000000# mode: error

cdef class Ext:
    cdef int a
    cdef object o

def f(int a):
    cdef Ext e = Ext()
    x = &a  # ok

    cdef object o = &a  # pointer != object

    po1 = &o        # pointer to Python variable
    po2 = &o.xyz    # pointer to Python expression
    po3 = &e.o      # pointer to Python object
    po4 = &e.a      # ok (C attribute)

    po5 = &(o + 1)  # pointer to non-lvalue Python expression
    po6 = &(a + 1)  # pointer to non-lvalue C expression


_ERRORS="""
11:20: Cannot convert 'int *' to Python object
13:10: Cannot take address of Python variable 'o'
14:10: Cannot take address of Python object attribute 'xyz'
15:10: Cannot take address of Python object attribute 'o'
18:10: Taking address of non-lvalue (type Python object)
19:10: Taking address of non-lvalue (type long)
"""
Cython-0.26.1/tests/errors/empty.pyx0000664000175000017500000000005312542002467020206 0ustar  stefanstefan00000000000000# cython: autotestdict=False
# mode: error
Cython-0.26.1/tests/errors/pep492_badsyntax_async3.pyx0000664000175000017500000000020412542002467023426 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    [i async for i in els]

_ERRORS = """
5:7: Expected ']', found 'async'
"""
Cython-0.26.1/tests/errors/e_decorators.pyx0000664000175000017500000000016712542002467021527 0ustar  stefanstefan00000000000000# mode: error

_ERRORS = u"""
4:4 Expected a newline after decorator
"""


class A:
    pass

@A().a
def f():
    pass
Cython-0.26.1/tests/errors/cdef_syntax.pyx0000664000175000017500000000026712542002467021366 0ustar  stefanstefan00000000000000# mode: error

cdef pass
cdef void
cdef nogil class test: pass

_ERRORS = u"""
3: 5: Expected an identifier, found 'pass'
4: 9: Empty declarator
5:11: Expected ':', found 'class'
"""
Cython-0.26.1/tests/errors/e_del.pyx0000664000175000017500000000130512542002467020121 0ustar  stefanstefan00000000000000# mode: error

cdef struct S:
    int m

def f(a):
    cdef int i, x[2]
    cdef S s
    global j
    del f() # error
    del i # error: deletion of non-Python object
    del j # error: deletion of non-Python object
    del x[i] # error: deletion of non-Python object
    del s.m # error: deletion of non-Python object

def outer(a):
    def inner():
        print a
    del a
    return inner()

cdef object g
del g


_ERRORS = u"""
10:9: Cannot assign to or delete this
11:48: Deletion of non-Python, non-C++ object
13:9: Deletion of non-Python, non-C++ object
14:9: Deletion of non-Python, non-C++ object
19:9: can not delete variable 'a' referenced in nested scope
23:5: Deletion of global C variable
"""
Cython-0.26.1/tests/errors/cpp_no_constructor.pyx0000664000175000017500000000030412542002467022772 0ustar  stefanstefan00000000000000# tag: cpp
# mode: error

cdef extern from *:
    cdef cppclass Foo:
        Foo()
        Foo(int)

new Foo(1, 2)

_ERRORS = u"""
9:7: Call with wrong number of arguments (expected 1, got 2)
"""
Cython-0.26.1/tests/errors/w_uninitialized_for.pyx0000664000175000017500000000343712542002467023125 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def simple_for(n):
    for i in n:
        a = 1
    return a

def simple_for_break(n):
    for i in n:
        a = 1
        break
    return a

def simple_for_pos(n):
    for i in n:
        a = 1
    else:
        a = 0
    return a

def simple_target(n):
    for i in n:
        pass
    return i

def simple_target_f(n):
    for i in n:
        i *= i
    return i

def simple_for_from(n):
    for i from 0 <= i <= n:
        x = i
    else:
        return x

def for_continue(l):
    for i in l:
        if i > 0:
            continue
        x = i
    print x

def for_break(l):
    for i in l:
        if i > 0:
            break
        x = i
    print x

def for_finally_continue(f):
    for i in f:
        try:
            x = i()
        finally:
            print x
            continue

def for_finally_break(f):
    for i in f:
        try:
            x = i()
        finally:
            print x
            break

def for_finally_outer(p, f):
    x = 1
    try:
        for i in f:
            print x
            x = i()
            if x > 0:
                continue
            if x < 0:
                break
    finally:
        del x


_ERRORS = """
8:12: local variable 'a' might be referenced before assignment
14:12: local variable 'a' might be referenced before assignment
26:12: local variable 'i' might be referenced before assignment
31:12: local variable 'i' might be referenced before assignment
37:16: local variable 'x' might be referenced before assignment
44:11: local variable 'x' might be referenced before assignment
51:11: local variable 'x' might be referenced before assignment
58:19: local variable 'x' might be referenced before assignment
66:19: local variable 'x' might be referenced before assignment
"""
Cython-0.26.1/tests/errors/cdef_members_T517.pyx0000664000175000017500000000110412542002467022201 0ustar  stefanstefan00000000000000# ticket: 517
# mode: error

ctypedef void* VoidP

cdef class Spam:
    cdef          VoidP vp0
    cdef readonly VoidP vp2
    cdef public   VoidP vp1

ctypedef struct Foo:
    int i

cdef class Bar:
    cdef          Foo foo0
    cdef readonly Foo foo2
    cdef public   Foo foo1
    pass

_ERRORS = u"""
8:24: C attribute of type 'VoidP' cannot be accessed from Python
8:24: Cannot convert 'VoidP' to Python object
9:24: C attribute of type 'VoidP' cannot be accessed from Python
9:24: Cannot convert 'VoidP' to Python object
9:24: Cannot convert Python object to 'VoidP'
"""

Cython-0.26.1/tests/errors/e_powop.pyx0000664000175000017500000000037612542002467020530 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef char *str1
	cdef float flt1, flt2, flt3
	flt1 = str1 ** flt3 # error
	flt1 = flt2 ** str1 # error

_ERRORS = u"""
6:13: Invalid operand types for '**' (char *; float)
7:13: Invalid operand types for '**' (float; char *)
"""
Cython-0.26.1/tests/errors/cpdef_syntax.pyx0000664000175000017500000000031212542002467021535 0ustar  stefanstefan00000000000000# mode: error

cpdef nogil: pass
cpdef nogil class test: pass

_ERRORS = u"""
3: 6: cdef blocks cannot be declared cpdef
4: 6: cdef blocks cannot be declared cpdef
4:12: Expected ':', found 'class'
"""
Cython-0.26.1/tests/errors/pep448_syntax_3.pyx0000664000175000017500000000020412542002467021722 0ustar  stefanstefan00000000000000# mode: error
# tag: pep448

def unpack_mix_in_set():
    {*1, **2}


_ERRORS = """
5:9: unexpected **item found in set literal
"""
Cython-0.26.1/tests/errors/const_decl_errors.pyx0000664000175000017500000000140312542002467022561 0ustar  stefanstefan00000000000000# mode: error

cdef const object o

# TODO: This requires making the assignment at declaration time.
# (We could fake this case by dropping the const here in the C code,
# as it's not needed for agreeing with external libraries.)
cdef const int x = 10

cdef struct S:
    int member

cdef func(const int a, const int* b, const (int*) c, const S s, int *const d,
          const S *const t):
    a = 10
    c = NULL
    b[0] = 100
    s.member = 1000
    d = NULL
    t = &s

_ERRORS = """
3:5: Const base type cannot be a Python object
8:5: Assignment to const 'x'
15:6: Assignment to const 'a'
16:6: Assignment to const 'c'
17:5: Assignment to const dereference
18:5: Assignment to const attribute 'member'
19:6: Assignment to const 'd'
20:6: Assignment to const 't'
"""
Cython-0.26.1/tests/errors/extended_unpacking_parser2.pyx0000664000175000017500000000033212542002467024345 0ustar  stefanstefan00000000000000# mode: error

# invalid syntax (as handled by the parser)

def syntax():
    *a, *b = 1,2,3,4,5


_ERRORS = u"""
6:4: more than 1 starred expression in assignment
6:8: more than 1 starred expression in assignment
"""
Cython-0.26.1/tests/errors/e2_packedstruct_T290.pyx0000664000175000017500000000016012542002467022647 0ustar  stefanstefan00000000000000# ticket: 290
# mode: error

cdef packed foo:
    pass

_ERRORS = u"""
4:12: Expected 'struct', found 'foo'
"""
Cython-0.26.1/tests/errors/e_func_in_pxd.pyx0000664000175000017500000000040012542002467021644 0ustar  stefanstefan00000000000000# mode: error

cimport e_func_in_pxd_support

_ERRORS = u"""
1:5: function definition in pxd file must be declared 'cdef inline'
4:5: inline function definition in pxd file cannot be 'public'
7:5: inline function definition in pxd file cannot be 'api'
"""
Cython-0.26.1/tests/errors/cimport_attributes.pyx0000664000175000017500000000161013143605603022772 0ustar  stefanstefan00000000000000# mode: error


cimport libcpp
print libcpp.no_such_attribute

cimport libcpp.map
print libcpp.map.no_such_attribute

from libcpp cimport vector
print vector.no_such_attribute

from libcpp cimport vector as my_vector
print my_vector.no_such_attribute

from libcpp cimport vector as my_vector_with_shadow
from libcpp import vector as my_vector_with_shadow
print my_vector_with_shadow.python_attribute   # OK (if such a module existed at runtime)

# Other ordering
from libcpp import map as my_map_with_shadow
from libcpp cimport map as my_map_with_shadow
print my_map_with_shadow.python_attribute   # OK (if such a module existed at runtime)


_ERRORS = u"""
5:12: cimported module has no attribute 'no_such_attribute'
8:16: cimported module has no attribute 'no_such_attribute'
11:12: cimported module has no attribute 'no_such_attribute'
14:15: cimported module has no attribute 'no_such_attribute'
"""
Cython-0.26.1/tests/errors/e_declarations.pyx0000664000175000017500000000065112542002467022030 0ustar  stefanstefan00000000000000# mode: error

cdef extern void fa[5]()
cdef extern int af()[5]
cdef extern int ff()()

cdef void f():
	cdef void *p
	cdef int (*h)()
	h = <int ()()>f # this is an error
	h = <int (*)()>f # this is OK


_ERRORS = u"""
3:20: Template arguments must be a list of names
3:20: Template parameter not a type
5:18: Function cannot return a function
10:10: Function cannot return a function
10:5: Cannot cast to a function type
"""
Cython-0.26.1/tests/errors/e_switch.pyx0000664000175000017500000000027112542002467020657 0ustar  stefanstefan00000000000000# mode: error

cdef int x = 3

if x == NONEXISTING:
    print 2
elif x == 2:
    print 2342
elif x == 4:
    print 34

_ERRORS = u"""
5:19: undeclared name not builtin: NONEXISTING
"""
Cython-0.26.1/tests/errors/missing_self_in_cpdef_method_T156.pyx0000664000175000017500000000017712542002467025447 0ustar  stefanstefan00000000000000# ticket: 156
# mode: error

cdef class B:
    cpdef b():
        pass

_ERRORS = u"""
5:10: C method has no self argument
"""
Cython-0.26.1/tests/errors/invalid_uescape2.pyx0000664000175000017500000000011312542002467022262 0ustar  stefanstefan00000000000000# mode: error

u'\u12'

_ERRORS = '''
3:2: Invalid unicode escape '\u'
'''
Cython-0.26.1/tests/errors/e_cenum.pyx0000664000175000017500000000024012542002467020461 0ustar  stefanstefan00000000000000# mode: error

cdef enum Spam:
	a, b, c

cdef void f():
	global a
	a = 42      # assignment to non-lvalue

_ERRORS = u"""
8:3: Assignment to non-lvalue 'a'
"""
Cython-0.26.1/tests/errors/e_badexcvaltype.pyx0000664000175000017500000000025712542002467022215 0ustar  stefanstefan00000000000000# mode: error

cdef char *spam() except -1:
	pass

_ERRORS = u"""
3:26: Cannot assign type 'long' to 'char *'
3:26: Exception value incompatible with function return type
"""
Cython-0.26.1/tests/errors/extended_unpacking_notuple.pyx0000664000175000017500000000040512542002467024456 0ustar  stefanstefan00000000000000# mode: error

# very common mistake for new users (handled early by the parser)

def no_tuple_assignment():
    *a = [1]

_ERRORS = u"""
6:4: a starred assignment target must be in a list or tuple - maybe you meant to use an index assignment: var[0] = ...
"""
Cython-0.26.1/tests/errors/pxd_signature_mismatch.pyx0000664000175000017500000000162212542002467023614 0ustar  stefanstefan00000000000000# mode: error
# tag: pxd

cdef int wrong_args(int x, int y):
    return 2

cdef int wrong_return_type(int x, int y):
    return 2

cdef int wrong_exception_check(int x, int y) except? 0:
    return 2

cdef int wrong_exception_value(int x, int y) except 1:
    return 2

cdef int wrong_exception_value_check(int x, int y) except? 1:
    return 2

cdef int inherit_exception_value(int x, int y):
    return 2

cdef int inherit_exception_check(int x, int y):
    return 2


_ERRORS = """
4:5: Function signature does not match previous declaration
7:5: Function signature does not match previous declaration
10:5: Function signature does not match previous declaration
13:5: Function signature does not match previous declaration
16:5: Function signature does not match previous declaration
19:5: Function signature does not match previous declaration
22:5: Function signature does not match previous declaration
"""
Cython-0.26.1/tests/errors/nogilfunctype.pyx0000664000175000017500000000034512542002467021742 0ustar  stefanstefan00000000000000# mode: error

cdef extern from *:
    cdef void f()
    cdef void (*fp)() nogil

    cdef void g() nogil
    cdef void (*gp)()

gp = g

fp = f

_ERRORS = u"""
12:6: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
"""
Cython-0.26.1/tests/errors/encoding.pyx0000664000175000017500000000033012542002467020634 0ustar  stefanstefan00000000000000# coding=ASCII
# mode: error

"""
Très bien.
"""

_ERRORS = u"""
5:3: Decoding error, missing or incorrect coding= at top of source (cannot decode with encoding 'ascii': ordinal not in range(128))
"""
Cython-0.26.1/tests/errors/e_nosignword.pyx0000664000175000017500000000072412542002467021552 0ustar  stefanstefan00000000000000# mode: error

cdef signed   float       e
cdef unsigned float       f
cdef signed   double      g
cdef unsigned double      h
cdef signed   long double i
cdef unsigned long double j


_ERRORS = u"""
3:5: Unrecognised type modifier combination
4:5: Unrecognised type modifier combination
5:5: Unrecognised type modifier combination
6:5: Unrecognised type modifier combination
7:5: Unrecognised type modifier combination
8:5: Unrecognised type modifier combination
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async2.pyx0000664000175000017500000000026412542002467023433 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    def foo(a:await list()):
        pass

_ERRORS = """
5:14: 'await' not supported here
5:14: 'await' not supported here
"""
Cython-0.26.1/tests/errors/cython3_bytes.pyx0000664000175000017500000000031412542002467021645 0ustar  stefanstefan00000000000000# mode: error
# -*- coding: utf-8 -*-
# cython: language_level=3

escaped = b'abc\xc3\xbc\xc3\xb6\xc3\xa4'
invalid = b'abcüöä'

_ERRORS = """
6:10: bytes can only contain ASCII literal characters.
"""
Cython-0.26.1/tests/errors/reversed_literal_pyobjs.pyx0000664000175000017500000000137512542002467024001 0ustar  stefanstefan00000000000000# mode: error
# tag: reversed

cdef int i, j
for i in reversed(range([], j, 2)):
    pass
for i in reversed(range([], j, -2)):
    pass
for i in reversed(range(j, [], 2)):
    pass
for i in reversed(range(j, [], -2)):
    pass
for i in reversed(range({}, j, 2)):
    pass
for i in reversed(range({}, j, -2)):
    pass
for i in reversed(range(j, {}, 2)):
    pass
for i in reversed(range(j, {}, -2)):
    pass

_ERRORS = """
5:24: Cannot coerce list to type 'long'
7:24: Cannot coerce list to type 'long'
9:27: Cannot coerce list to type 'long'
11:27: Cannot coerce list to type 'long'
13:24: Cannot interpret dict as type 'long'
15:24: Cannot interpret dict as type 'long'
17:27: Cannot interpret dict as type 'long'
19:27: Cannot interpret dict as type 'long'
"""
Cython-0.26.1/tests/errors/e_int_literals_py3.py0000664000175000017500000000072512542002467022456 0ustar  stefanstefan00000000000000# mode: error
# cython: language_level=3

def int_literals():
    a = 1L
    b = 10000000000000L
    c = 1UL
    d = 10000000000000UL
    e = 10000000000000LL


_ERRORS = """
5:8: illegal integer literal syntax in Python source file
6:8: illegal integer literal syntax in Python source file
7:8: illegal integer literal syntax in Python source file
8:8: illegal integer literal syntax in Python source file
9:8: illegal integer literal syntax in Python source file
"""
Cython-0.26.1/tests/errors/e_nonlocal_T490.pyx0000664000175000017500000000115412542002467021704 0ustar  stefanstefan00000000000000# mode: error

def test_non_existant():
    nonlocal no_such_name
    no_such_name = 1

def redef():
    x = 1
    def f():
        x = 2
        nonlocal x

global_name = 5

def ref_to_global():
    nonlocal global_name
    global_name = 6

def global_in_class_scope():
    class Test():
        nonlocal global_name
        global_name = 6

def redef_in_class_scope():
    x = 1
    class Test():
        x = 2
        nonlocal x


_ERRORS = u"""
 4:4: no binding for nonlocal 'no_such_name' found
11:8: 'x' redeclared as nonlocal
16:4: no binding for nonlocal 'global_name' found
28:8: 'x' redeclared as nonlocal
"""
Cython-0.26.1/tests/errors/e_nogilcmeth.pyx0000664000175000017500000000025612542002467021512 0ustar  stefanstefan00000000000000# mode: error

cdef class C:
	cdef void f(self) nogil:
		pass

_ERRORS = u"""
2:12: Previous declaration is here
4: 6: Signature not compatible with previous declaration
"""
Cython-0.26.1/tests/errors/final_methods.pyx0000664000175000017500000000057312542002467021673 0ustar  stefanstefan00000000000000# mode: error

cimport cython

cdef class BaseClass:
    @cython.final
    cdef cdef_method(self):
        pass

    @cython.final
    cpdef cpdef_method(self):
        pass


cdef class SubType(BaseClass):
    cdef cdef_method(self):
        pass

_ERRORS = """
11:10: Only final types can have final Python (def/cpdef) methods
16:9: Overriding final methods is not allowed
"""
Cython-0.26.1/tests/errors/e_nogilfunctype.pyx0000664000175000017500000000024612542002467022246 0ustar  stefanstefan00000000000000# mode: error

cdef extern from *:
	cdef void f()
	cdef void (*fp)() nogil

fp = f
_ERRORS = u"""
7:6: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
"""
Cython-0.26.1/tests/errors/e_bufaccess2.pyx0000664000175000017500000000156612542002467021406 0ustar  stefanstefan00000000000000# mode: error

cimport e_bufaccess_pxd # was needed to provoke a bug involving ErrorType
import cython

def f():
    cdef object[e_bufaccess_pxd.T] buf

def withnogil_access_fail():
    cdef object[int] buf = None
    with nogil:
        buf[2] = 2

@cython.boundscheck(False)
def withnogil_access_ok():
    cdef object[int] buf = None
    with nogil:
        buf[2] = 2 # No error should be triggered here

@cython.boundscheck(False)
def withnogil_access_fail_2():
    cdef object[object] buf = None
    with nogil:
        buf[2] = 2 # Not OK as dtype is object

def withnogil_acquire(x):
    cdef object[int] buf
    with nogil:
        buf = x

_ERRORS = u"""
 3: 9: 'nothing' is not a type identifier
24:11: Cannot access buffer with object dtype without gil
24:11: Assignment of Python object not allowed without gil
29:12: Assignment of Python object not allowed without gil
"""
Cython-0.26.1/tests/errors/e_multass.pyx0000664000175000017500000000027312542002467021050 0ustar  stefanstefan00000000000000# mode: error

def f(obj1a, obj1b):
	cdef int int1, int2, int3
	cdef int *ptr2
	int1, int3, obj1a = int2, ptr2, obj1b # error
_ERRORS = u"""
6:31: Cannot assign type 'int *' to 'int'
"""
Cython-0.26.1/tests/errors/callingnonexisting_T307.pyx0000664000175000017500000000017612542002467023472 0ustar  stefanstefan00000000000000# ticket: 307
# mode: error

nonexisting(3, with_kw_arg=4)

_ERRORS = u"""
4:11: undeclared name not builtin: nonexisting
"""
Cython-0.26.1/tests/errors/e_notnone.pyx0000664000175000017500000000024412542002467021036 0ustar  stefanstefan00000000000000# mode: error

cdef extern class Grail.Shrubbery

cdef void spam(Shrubbery sh not None):
	pass
_ERRORS = u"""
5:15: 'not None' only allowed in Python functions
"""
Cython-0.26.1/tests/errors/pyobjcastdisallow_T313.pyx0000664000175000017500000000027612542002467023326 0ustar  stefanstefan00000000000000# ticket: 313
# mode: error

a = 3

cdef void* allowed = <void*>a
cdef double* disallowed = <double*>a

_ERRORS = u"""
7:26: Python objects cannot be cast to pointers of primitive types
"""
Cython-0.26.1/tests/errors/e_func_in_pxd_support.pxd0000664000175000017500000000015012542002467023415 0ustar  stefanstefan00000000000000cdef foo():
    return 1

cdef public inline foo2():
    return 1

cdef api inline foo3():
    return 1
Cython-0.26.1/tests/errors/e_invalid_num_threads.pyx0000664000175000017500000000136212542002467023377 0ustar  stefanstefan00000000000000# mode: error

from cython.parallel cimport parallel, prange

cdef int i

# valid
with nogil, parallel(num_threads=None):
    pass

# invalid
with nogil, parallel(num_threads=0):
    pass

with nogil, parallel(num_threads=i):
    pass

with nogil, parallel(num_threads=2):
    for i in prange(10, num_threads=2):
        pass

with nogil, parallel():
    for i in prange(10, num_threads=2):
        pass

# this one is valid
for i in prange(10, nogil=True, num_threads=2):
    pass

_ERRORS = u"""
e_invalid_num_threads.pyx:12:20: argument to num_threads must be greater than 0
e_invalid_num_threads.pyx:19:19: num_threads already declared in outer section
e_invalid_num_threads.pyx:23:19: num_threads must be declared in the parent parallel section
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async1.pyx0000664000175000017500000000022312542002467023425 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    def foo(a=await list()):
        pass

_ERRORS = """
5:14: 'await' not supported here
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async6.pyx0000664000175000017500000000025512542002467023437 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    yield

_ERRORS = """
5:4: 'yield' not allowed in async coroutines (use 'await')
5:4: 'yield' not supported here
"""
Cython-0.26.1/tests/errors/w_uninitialized_while.pyx0000664000175000017500000000217412542002467023444 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def simple_while(n):
    while n > 0:
        n -= 1
        a = 0
    return a

def simple_while_break(n):
    while n > 0:
        n -= 1
        break
    else:
        a = 1
    return a

def simple_while_pos(n):
    while n > 0:
        n -= 1
        a = 0
    else:
        a = 1
    return a

def while_finally_continue(p, f):
    while p():
        try:
            x = f()
        finally:
            print x
            continue

def while_finally_break(p, f):
    while p():
        try:
            x = f()
        finally:
            print x
            break

def while_finally_outer(p, f):
    x = 1
    try:
        while p():
            print x
            x = f()
            if x > 0:
                continue
            if x < 0:
                break
    finally:
        del x


_ERRORS = """
9:12: local variable 'a' might be referenced before assignment
17:12: local variable 'a' might be referenced before assignment
32:19: local variable 'x' might be referenced before assignment
40:19: local variable 'x' might be referenced before assignment
"""
Cython-0.26.1/tests/errors/pxd_cdef_class_declaration_T286.pyx0000664000175000017500000000016512542002467025105 0ustar  stefanstefan00000000000000# ticket: 286
# mode: error

cdef class A:
    pass

_ERRORS = u"""
1:5: C class 'A' is declared but not defined
"""
Cython-0.26.1/tests/errors/cmethbasematch.pyx0000664000175000017500000000213113143605603022016 0ustar  stefanstefan00000000000000# mode: error

cdef class C:
    cdef void f(self):
        pass

cdef class D(C):
    cdef void f(self, int x):
        pass

# These are declared in the pxd.
cdef class Base(object):
  cdef f(self):
    pass

cdef class MissingRedeclaration(Base):
  # Not declared (so assumed cdef) in the pxd.
  cpdef f(self):
    pass

cdef class BadRedeclaration(Base):
  # Declared as cdef in the pxd.
  cpdef f(self):
    pass

cdef class UnneededRedeclaration(Base):
  # This is OK, as it's not declared in the pxd.
  cpdef f(self):
    pass

cdef class NarrowerReturn(Base):
  # This does not require a new vtable entry.
  cdef Base f(self):
    pass


_ERRORS = u"""
8: 9: Signature not compatible with previous declaration
4: 9: Previous declaration is here
# TODO(robertwb): Re-enable these errors.
#18:8: Compatible but non-identical C method 'f' not redeclared in definition part of extension type 'MissingRedeclaration'
#2:9: Previous declaration is here
#23:8: Compatible but non-identical C method 'f' not redeclared in definition part of extension type 'BadRedeclaration'
#2:9: Previous declaration is here
"""
Cython-0.26.1/tests/errors/pxd_signature_mismatch.pxd0000664000175000017500000000055412542002467023572 0ustar  stefanstefan00000000000000
cdef int wrong_args(int x, long y)

cdef long wrong_return_type(int x, int y)

cdef int wrong_exception_check(int x, int y) except 0

cdef int wrong_exception_value(int x, int y) except 0

cdef int wrong_exception_value_check(int x, int y) except 0

cdef int inherit_exception_value(int x, int y) except 0

cdef int inherit_exception_check(int x, int y) except *
Cython-0.26.1/tests/errors/e_strcoerce.pyx0000664000175000017500000000115112542002467021345 0ustar  stefanstefan00000000000000# mode: error

cdef int c1 = "t"     # works
cdef int c2 = "te"    # fails
cdef int cx = "test"  # fails

cdef int x1 =  "\xFF"    # works
cdef int x2 =  "\u0FFF"  # fails

cdef Py_UNICODE u1 = u"\xFF"   # works
cdef int u3 = u"\xFF"          # fails


_ERRORS = """
4:14: Only single-character string literals can be coerced into ints.
5:14: Only single-character string literals can be coerced into ints.
8:15: Only single-character string literals can be coerced into ints.
11:14: Unicode literals do not support coercion to C types other than Py_UNICODE/Py_UCS4 (for characters) or Py_UNICODE* (for strings).
"""
Cython-0.26.1/tests/errors/e_badfuncargtype.pyx0000664000175000017500000000071512542002467022357 0ustar  stefanstefan00000000000000# mode: error

cdef struct Spam

cdef extern int spam(void)           # function argument cannot be void
cdef extern int grail(int i, void v) # function argument cannot be void
cdef int tomato(Spam s):             # incomplete type
	pass

_ERRORS = u"""
5:21: Use spam() rather than spam(void) to declare a function with no arguments.
6:29: Use spam() rather than spam(void) to declare a function with no arguments.
7:16: Argument type 'Spam' is incomplete
"""
Cython-0.26.1/tests/errors/w_cdef_override.pyx0000664000175000017500000000023612542002467022201 0ustar  stefanstefan00000000000000# mode: error
# tag: werror

cdef foo():
    pass

def foo():
    pass

_ERRORS = u"""
7:0: 'foo' redeclared
7:0: Overriding cdef method with def method.
"""
Cython-0.26.1/tests/errors/e_addop.pyx0000664000175000017500000000025612542002467020450 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef int int1, int3
	cdef int *ptr1, *ptr2, *ptr3
	ptr1 = ptr2 + ptr3 # error

_ERRORS = u"""
6:13: Invalid operand types for '+' (int *; int *)
"""
Cython-0.26.1/tests/errors/invalid_uescape.pyx0000664000175000017500000000011412542002467022201 0ustar  stefanstefan00000000000000# mode: error

u'\uXYZ'

_ERRORS = '''
3:2: Invalid unicode escape '\u'
'''
Cython-0.26.1/tests/errors/se_badindent.pyx0000664000175000017500000000016712542002467021475 0ustar  stefanstefan00000000000000# mode: error

def f():
  a = b # space space
 	c = d # space tab
_ERRORS = u"""
5:0: Mixed use of tabs and spaces
"""
Cython-0.26.1/tests/errors/nogilcmeth.pyx0000664000175000017500000000026012542002467021201 0ustar  stefanstefan00000000000000# mode: error

cdef class C:
    cdef void f(self):
        pass

_ERRORS = u"""
2:15: Previous declaration is here
4:9: Signature not compatible with previous declaration
"""
Cython-0.26.1/tests/errors/e_cunion.pyx0000664000175000017500000000053712574327400020660 0ustar  stefanstefan00000000000000# mode: error

cdef union AllCharptr:
    char *s1
    char *s2
    char *s3


def convert_ok():
    cdef AllCharptr u
    u.s1 = b"abc"
    return u


cdef union IllegalMix:
    char *s1
    char *s2
    int i


def convert_nok():
    cdef IllegalMix u
    u.i = 5
    return u


_ERRORS = """
24:12: Cannot convert 'IllegalMix' to Python object
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async10.pyx0000664000175000017500000000066112574327400023515 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def genexpr(it):
    return (await x for x in it)


async def listcomp(it):
    return [await x for x in it]


async def setcomp(it):
    return {await x for x in it}


async def dictcomp(it):
    return {await x:x+1 for x in it}


# NOTE: CPython doesn't allow comprehensions either


_ERRORS = """
5:12: 'await' not allowed in generators (use 'yield')
5:12: 'await' not supported here
"""
Cython-0.26.1/tests/errors/cpp_object_template.pyx0000664000175000017500000000053712542002467023062 0ustar  stefanstefan00000000000000# mode: error

from libcpp.vector cimport vector

cdef class A:
    pass

def main():
    cdef vector[object] vo
    vo.push_back(object())
    cdef vector[A] va
    va.push_back(A())

_ERRORS = u"""
9:16: Python object type 'Python object' cannot be used as a template argument
11:16: Python object type 'A' cannot be used as a template argument
"""
Cython-0.26.1/tests/errors/pure_cclass_without_body.pxd0000664000175000017500000000003713023021033024111 0ustar  stefanstefan00000000000000# mode: error

cdef class Test
Cython-0.26.1/tests/errors/subtyping_final_class.pyx0000664000175000017500000000030212542002467023427 0ustar  stefanstefan00000000000000# mode: error

cimport cython

@cython.final
cdef class FinalClass:
    pass

cdef class SubType(FinalClass):
    pass

_ERRORS = """
9:5: Base class 'FinalClass' of type 'SubType' is final
"""
Cython-0.26.1/tests/errors/e_cdef_keywords_T241.pyx0000664000175000017500000000452612542002467022727 0ustar  stefanstefan00000000000000# ticket: 241
# mode: error

cdef some_function(x, y):
    pass

cdef class A:
    cdef some_method(self, x, y=1):
        pass

from libc.string cimport strcmp

cdef extern from "string.h":
    char *strstr(char*, char*)


# ok
some_function(1, 2)
some_function(1, y=2)

# nok
some_function(1, x=1)
some_function(1, x=2, y=2)
some_function(1, y=2, z=3)
some_function(1, z=3)
some_function(1, 2, z=3)
some_function(x=1, y=2, z=3)
some_function(x=1, y=2, x=1)
some_function(x=1, y=2, x=1, z=3)

cdef A a = A()
# ok
a.some_method(1)
a.some_method(1, 2)
a.some_method(1, y=2)
a.some_method(x=1, y=2)

# nok
a.some_method(1, x=1)
a.some_method(1, 2, x=1)
a.some_method(1, 2, y=2)
a.some_method(1, 2, x=1, y=2)
a.some_method(1, 2, y=2, x=1)
a.some_method(1, y=2, x=1)
a.some_method(1, 2, z=3)
a.some_method(1, y=2, z=3)
a.some_method(x=1, x=1)
a.some_method(x=1, x=1, y=2)
a.some_method(x=1, y=2, x=1)

# ok
strcmp("abc", "cde")
strcmp("abc", s2="cde")
strcmp(s1="abc", s2="cde")
strcmp(s2="cde", s1="abc")

# nok
strcmp("abc", s1="cde")
strcmp("abc", s2="cde", s1="cde")
strcmp(s1="abc", s2="cde", s1="cde")
strcmp(s2="cde", s1="abc", s2="cde")

# ok
strstr("abc", "abcdef")

# nok
strstr("abc", char="abcdef")
strstr("abc", "abcdef", char="xyz")


_ERRORS = u"""
22:18: argument 'x' passed twice
23:18: argument 'x' passed twice
24:23: C function got unexpected keyword argument 'z'
25:18: C function got unexpected keyword argument 'z'
26:21: C function got unexpected keyword argument 'z'
27:25: C function got unexpected keyword argument 'z'
28:25: argument 'x' passed twice
29:25: argument 'x' passed twice
29:30: C function got unexpected keyword argument 'z'

39:18: argument 'x' passed twice
40:21: argument 'x' passed twice
41:21: argument 'y' passed twice
42:21: argument 'x' passed twice
42:26: argument 'y' passed twice
43:21: argument 'y' passed twice
43:26: argument 'x' passed twice
44:23: argument 'x' passed twice
45:21: C function got unexpected keyword argument 'z'
46:23: C function got unexpected keyword argument 'z'
47:20: argument 'x' passed twice
48:20: argument 'x' passed twice
49:25: argument 'x' passed twice

58:16: argument 's1' passed twice
59:26: argument 's1' passed twice
60:29: argument 's1' passed twice
61:29: argument 's2' passed twice

67:18: C function got unexpected keyword argument 'char'
68:28: C function got unexpected keyword argument 'char'
"""
Cython-0.26.1/tests/errors/e_subop.pyx0000664000175000017500000000036412542002467020511 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef int int2
	cdef char *ptr1, *ptr2, *ptr3
	ptr1 = int2 - ptr3 # error
	ptr1 = ptr2 - ptr3 # error
_ERRORS = u"""
6:13: Invalid operand types for '-' (int; char *)
7:13: Cannot assign type 'ptrdiff_t' to 'char *'
"""
Cython-0.26.1/tests/errors/invalid_hex_escape0.pyx0000664000175000017500000000010412542002467022737 0ustar  stefanstefan00000000000000# mode: error

'\x'

_ERRORS = '''
3:1: Invalid hex escape '\x'
'''
Cython-0.26.1/tests/errors/pxd_cdef_class_declaration_T286.pxd0000664000175000017500000000001512542002467025052 0ustar  stefanstefan00000000000000cdef class A
Cython-0.26.1/tests/errors/cpp_no_auto_conversion.pyx0000664000175000017500000000120313023021033023602 0ustar  stefanstefan00000000000000# mode: error
# tag: cpp

# cpp will convert function arguments to a type if it has a suitable constructor
# we do not want that when calling from Cython

cdef extern from "no_such_file.cpp" nogil:
    cppclass wrapped_int:
        long long val
        wrapped_int()
        wrapped_int(long long val)
        wrapped_int& operator=(const wrapped_int &other)
        wrapped_int& operator=(const long long other)

    long long constructor_overload(const wrapped_int& x)
    long long constructor_overload(const wrapped_int x)

cdef long long e = constructor_overload(17)
 

_ERRORS = u"""
18:40: Cannot assign type 'long' to 'wrapped_int'
"""
Cython-0.26.1/tests/errors/e_assnone.pyx0000664000175000017500000000014412542002467021023 0ustar  stefanstefan00000000000000# mode: error

cdef void spam():
	None = 42
_ERRORS = u"""
4:1: Cannot assign to or delete this
"""
Cython-0.26.1/tests/errors/e_ass.pyx0000664000175000017500000000046212542002467020146 0ustar  stefanstefan00000000000000# mode: error

cdef void foo(obj):
    cdef int i1
    cdef char *p1
    cdef int *p2
    i1 = p1 # error
    p2 = obj # error

    obj = p2 # error


_ERRORS = u"""
7:19: Cannot assign type 'char *' to 'int'
8:20: Cannot convert Python object to 'int *'
10:20: Cannot convert 'int *' to Python object
"""
Cython-0.26.1/tests/errors/declareafteruse_T158.pyx0000664000175000017500000000340012542002467022726 0ustar  stefanstefan00000000000000# ticket: 158
# mode: error

def mult_decl_test():
    print "%s" % vv
    print "%s" % s
    cdef str s, vv = "Test"

def def_test():
    cdef int j = 10
    i[0] = j
    cdef int *i = NULL # pointer variables are a special case

cdef cdef_test():
    cdef int j = 10
    i[0] = j
    print "%d" % i[0]
    cdef int *i = NULL

cpdef cpdef_test():
    cdef int j = 10
    i[0] = j
    print "%d" % i[0]
    cdef int *i = NULL

s.upper()
cdef str s = "Test"

class Foo(object):
    def bar(self, x, y):
        cdef unsigned long w = 20
        z = w + t
        cdef int t = 10

cdef class Foo2(object):
    print '%s' % r # check error inside class scope
    cdef str r
    def bar(self, x, y):
        cdef unsigned long w = 20
        self.r = c'r'
        print self.r
        z = w + g(t)
        cdef int t = 10

def g(x):
    return x

cdef int d = 20
baz[0] = d
cdef int *baz

print var[0][0]
cdef unsigned long long[100][100] var

# in 0.11.1 these are warnings
FUTURE_ERRORS = u"""
6:13: cdef variable 's' declared after it is used
6:16: cdef variable 'vv' declared after it is used
11:14: cdef variable 'i' declared after it is used
17:14: cdef variable 'i' declared after it is used
23:14: cdef variable 'i' declared after it is used
26:9: cdef variable 's' declared after it is used
32:17: cdef variable 't' declared after it is used
36:13: cdef variable 'r' declared after it is used
42:17: cdef variable 't' declared after it is used
49:10: cdef variable 'baz' declared after it is used
52:24: cdef variable 'var' declared after it is used
"""

syntax error

_ERRORS = u"""
42:17: cdef variable 't' declared after it is used
49:10: cdef variable 'baz' declared after it is used
52:24: cdef variable 'var' declared after it is used
70:7: Syntax error in simple statement list
"""
Cython-0.26.1/tests/errors/invalid_uescapeN.pyx0000664000175000017500000000014712542002467022325 0ustar  stefanstefan00000000000000# mode: error

u'\N{GIBBET NICH}'

_ERRORS = '''
3:2: Unknown Unicode character name 'GIBBET NICH'
'''
Cython-0.26.1/tests/errors/e_excvalfunctype.pyx0000664000175000017500000000050412542002467022415 0ustar  stefanstefan00000000000000# mode: error

ctypedef int (*spamfunc)(int, char *) except 42
ctypedef int (*grailfunc)(int, char *)

cdef grailfunc grail
cdef spamfunc spam

grail = spam # type mismatch
spam = grail # type mismatch
_ERRORS = u"""
9:28: Cannot assign type 'spamfunc' to 'grailfunc'
10:28: Cannot assign type 'grailfunc' to 'spamfunc'
"""
Cython-0.26.1/tests/errors/e_callnonfunction.pyx0000664000175000017500000000045012542002467022551 0ustar  stefanstefan00000000000000# mode: error

cdef int i
i()

cdef float f
f()

ctypedef struct s:    # FIXME: this might be worth an error ...
    int x
s()

cdef int x():
    return 0

x()()

_ERRORS = u"""
4:1: Calling non-function type 'int'
7:1: Calling non-function type 'float'
16:3: Calling non-function type 'int'
"""
Cython-0.26.1/tests/errors/w_uninitialized_del.pyx0000664000175000017500000000040112542002467023067 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def foo(x):
    a = 1
    del a, b
    b = 2
    return a, b

_ERRORS = """
7:12: local variable 'b' referenced before assignment
9:12: local variable 'a' referenced before assignment
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async5.pyx0000664000175000017500000000017712542002467023441 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

def foo():
    await list()

_ERRORS = """
5:10: Syntax error in simple statement list
"""
Cython-0.26.1/tests/errors/e_cmp.pyx0000664000175000017500000000064512542002467020142 0ustar  stefanstefan00000000000000# mode: error

cdef void foo():
	cdef int bool, int1
	cdef char *ptr2
	cdef int *ptr3
	cdef object i = 5

	bool = i == ptr2  # evaluated in Python space
	bool = ptr3 == i # error
	bool = int1 == ptr2 # error
	bool = ptr2 == ptr3 # error

	bool = 1 in 2 in 3

_ERRORS = u"""
10:13: Invalid types for '==' (int *, Python object)
11:13: Invalid types for '==' (int, char *)
12:13: Invalid types for '==' (char *, int *)
"""
Cython-0.26.1/tests/errors/e_cdef_missing_declarator.pyx0000664000175000017500000000014312542002467024206 0ustar  stefanstefan00000000000000# mode: error

cdef int

cdef extern from *:
	void f(int)
_ERRORS = u"""
3:8: Empty declarator
"""
Cython-0.26.1/tests/errors/tree_assert.pyx0000664000175000017500000000106112542002467021370 0ustar  stefanstefan00000000000000# mode: error

cimport cython

@cython.test_fail_if_path_exists("//SimpleCallNode",
                                 "//NameNode")
@cython.test_assert_path_exists("//ComprehensionNode",
                                "//ComprehensionNode//FuncDefNode")
def test():
    object()


_ERRORS = u"""
9:0: Expected path '//ComprehensionNode' not found in result tree
9:0: Expected path '//ComprehensionNode//FuncDefNode' not found in result tree
9:0: Unexpected path '//NameNode' found in result tree
9:0: Unexpected path '//SimpleCallNode' found in result tree
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async7.pyx0000664000175000017500000000027212542002467023437 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    yield from []

_ERRORS = """
5:4: 'yield from' not supported here
5:4: 'yield' not allowed in async coroutines (use 'await')
"""
Cython-0.26.1/tests/errors/w_uninitialized.pyx0000664000175000017500000000525012542002467022252 0ustar  stefanstefan00000000000000# cython: warn.maybe_uninitialized=True
# mode: error
# tag: werror

def simple():
    print a
    a = 0

def simple2(arg):
    if arg > 0:
        a = 1
    return a

def simple_pos(arg):
    if arg > 0:
        a = 1
    else:
        a = 0
    return a

def ifelif(c1, c2):
    if c1 == 1:
        if c2:
            a = 1
        else:
            a = 2
    elif c1 == 2:
        a = 3
    return a

def nowimpossible(a):
    if a:
        b = 1
    if a:
        print b

def fromclosure():
    def bar():
        print a
    a = 1
    return bar

# Should work ok in both py2 and py3
def list_comp(a):
    return [i for i in a]

def set_comp(a):
    return set(i for i in a)

def dict_comp(a):
    return {i: j for i, j in a}

# args and kwargs
def generic_args_call(*args, **kwargs):
    return args, kwargs

def cascaded(x):
    print a, b
    a = b = x

def from_import():
    print bar
    from foo import bar

def regular_import():
    print foo
    import foo

def raise_stat():
    try:
        raise exc, msg
    except:
        pass
    exc = ValueError
    msg = 'dummy'

def defnode_decorator():
    @decorator
    def foo():
        pass
    def decorator():
        pass

def defnode_default():
    def foo(arg=default()):
        pass
    def default():
        pass

def class_bases():
    class foo(bar):
        pass
    class bar(object):
        pass

def class_decorators():
    @decorator
    class foo(object):
        pass
    def decorator(cls):
        return cls

def class_py3k_metaclass():
    class foo(metaclass=Meta):
        pass
    class Meta(object):
        pass

def class_py3k_args():
    class foo(*args, **kwargs):
        pass
    args = []
    kwargs = {}

_ERRORS = """
6:11: local variable 'a' referenced before assignment
12:12: local variable 'a' might be referenced before assignment
29:12: local variable 'a' might be referenced before assignment
35:15: local variable 'b' might be referenced before assignment
58:11: local variable 'a' referenced before assignment
58:14: local variable 'b' referenced before assignment
62:13: local variable 'bar' referenced before assignment
66:13: local variable 'foo' referenced before assignment
71:17: local variable 'exc' referenced before assignment
71:22: local variable 'msg' referenced before assignment
78:4: local variable 'decorator' referenced before assignment
85:23: local variable 'default' referenced before assignment
91:17: local variable 'bar' referenced before assignment
97:4: local variable 'decorator' referenced before assignment
104:28: local variable 'Meta' referenced before assignment
110:19: local variable 'args' referenced before assignment
110:29: local variable 'kwargs' referenced before assignment
"""
Cython-0.26.1/tests/errors/futurebraces.pyx0000664000175000017500000000012412542002467021541 0ustar  stefanstefan00000000000000# mode: error

from __future__ import braces

_ERRORS = u"""
3:23: not a chance
"""
Cython-0.26.1/tests/errors/notcimportedT418.pyx0000664000175000017500000000022512542002467022141 0ustar  stefanstefan00000000000000# ticket: 418
# mode: error

import somemod.child

cdef somemod.child.something x

_ERRORS = u"""
6:5: 'somemod.child' is not a cimported module
"""
Cython-0.26.1/tests/errors/e_while.pyx0000664000175000017500000000031412542002467020464 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# mode: error

def f(a, b):
	cdef int i
	break # error
	continue # error
_ERRORS = u"""
6:1: break statement not inside loop
7:1: continue statement not inside loop
"""
Cython-0.26.1/tests/errors/se_mixtabspace.pyx0000664000175000017500000000015712542002467022044 0ustar  stefanstefan00000000000000# mode: error

def f():
 a = b # space space
	c = d # tab
_ERRORS = u"""
5:0: Mixed use of tabs and spaces
"""
Cython-0.26.1/tests/errors/void_as_arg.pyx0000664000175000017500000000023212542002467021324 0ustar  stefanstefan00000000000000# mode: error

cdef extern from *:
    void foo(void)

_ERRORS = u"""
4:13:Use spam() rather than spam(void) to declare a function with no arguments.
"""
Cython-0.26.1/tests/errors/e_generators.pyx0000664000175000017500000000042512542002467021530 0ustar  stefanstefan00000000000000# mode: error

def foo():
    yield
    return 0

def bar(a):
    return 0
    yield

yield

class Foo:
    yield

_ERRORS = u"""
#5:4: 'return' with argument inside generator
#9:4: 'yield' outside function
11:0: 'yield' not supported here
14:4: 'yield' not supported here
"""
Cython-0.26.1/tests/errors/e_boolcoerce.pyx0000664000175000017500000000203612542002467021473 0ustar  stefanstefan00000000000000# mode: error

ctypedef struct struct_type_not_boolean:
    int i
    float f

if struct_type_not_boolean:
    print("INVALID CODE")

cdef struct struct_not_boolean:
    int i
    float f

if struct_not_boolean:
    print("INVALID CODE")

ctypedef union union_type_not_boolean:
    int i
    float f

if union_type_not_boolean:
    print("INVALID CODE")

cdef union union_not_boolean:
    int i
    float f

if union_not_boolean:
    print("INVALID CODE")


_ERRORS = u"""
7:26: 'struct_type_not_boolean' is not a constant, variable or function identifier
7:26: Type 'struct_type_not_boolean' not acceptable as a boolean

14:21: 'struct_not_boolean' is not a constant, variable or function identifier
14:21: Type 'struct_not_boolean' not acceptable as a boolean

21:25: 'union_type_not_boolean' is not a constant, variable or function identifier
21:25: Type 'union_type_not_boolean' not acceptable as a boolean

28:20: 'union_not_boolean' is not a constant, variable or function identifier
28:20: Type 'union_not_boolean' not acceptable as a boolean
"""
Cython-0.26.1/tests/errors/cmethbasematch.pxd0000664000175000017500000000026313143605603021775 0ustar  stefanstefan00000000000000cdef class Base(object):
  cdef f(self)

cdef class MissingRedeclaration(Base):
  pass

cdef class BadRedeclaration(Base):
  cdef f(self)

cdef class NarrowerReturn(Base):
  pass
Cython-0.26.1/tests/errors/wraparound_warnings.pyx0000664000175000017500000000406412542002467023150 0ustar  stefanstefan00000000000000# mode: error
# tag: werror

cimport cython

s = "abc"
l = [1, 2, 3]

def normal_wraparound(int i, bytes B not None, list L not None):
    a = s[:]
    a = s[1:2]
    a = s[-2:-1]
    a = "abc"[-2:-1]
    a = "abc"[-2:i]
    a = B[-2:-1]
    a = B[:-1]
    a = B[-2:]

    b = l[1:2]
    b = l[:2]
    b = l[1:]
    b = l[-2:-1]
    b = [1, 2, 3][-2:-1]
    b = [1, 2, 3][-2:i]
    b = L[-2:-1]

@cython.wraparound(False)
def no_wraparound(int i, bytes B not None, list L not None):
    a = s[:]
    a = s[1:2]
    a = s[-2:-1]
    a = "abc"[-2:-1]  # evaluated at compile time
    a = "abc"[-2:i]
    a = B[:]
    a = B[-2:-1]
    a = B[:-1]
    a = B[-2:]

    b = l[1:2]
    b = l[:2]
    b = l[1:]
    b = l[-2:-1]
    b = [1, 2, 3][-2:i]
    b = L[-2:-1]


_ERRORS = """
31:11: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
31:14: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
33:15: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
35:11: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
35:14: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
36:12: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
37:11: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
42:11: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
42:14: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
43:19: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
44:11: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
44:14: the result of using negative indices inside of code sections marked as 'wraparound=False' is undefined
"""
Cython-0.26.1/tests/errors/buffertypedef_T117.pyx0000664000175000017500000000027112542002467022420 0ustar  stefanstefan00000000000000# ticket: 117
# mode: error

ctypedef object[float] mybuffer

_ERRORS = u"""
1:0: Buffer vars not allowed in module scope
4:0: Buffer types only allowed as function local variables
"""
Cython-0.26.1/tests/errors/e_undefexttype.pyx0000664000175000017500000000026212542002467022102 0ustar  stefanstefan00000000000000# mode: error

cdef class Spam
cdef extern class external.Eggs
_ERRORS = u"""
3:5: C class 'Spam' is declared but not defined
4:5: C class 'Eggs' is declared but not defined
"""
Cython-0.26.1/tests/errors/nogil.pyx0000664000175000017500000001261712542002467020171 0ustar  stefanstefan00000000000000# cython: remove_unreachable=False
# mode: error

cdef object f(object x) nogil:
    pass

cdef void g(int x) nogil:
    cdef object z
    z = None

cdef void h(int x) nogil:
    p()

cdef object p() nogil:
    pass

cdef void r() nogil:
    q()

cdef object m():
    cdef object x, y = 0, obj
    cdef int i, j, k
    global fred
    q()
    with nogil:
        r()
        q()
        i = 42
        obj = None
        17L
        7j
        help
        xxx = `"Hello"`
        import fred
        from fred import obj
        for x in obj:
            pass
        obj[i]
        obj[i:j]
        obj[i:j:k]
        obj.fred
        (x, y)
        [x, y]
        {x: y}
        {x, y}
        obj and x
        t(obj)
#        f(42) # Cython handles this internally
        x + obj
        -obj
        x = y = obj
        x, y = y, x
        obj[i] = x
        obj.fred = x
        print obj
        del fred
        return obj
        raise obj
        if obj:
            pass
        while obj:
            pass
        for x <= obj <= y:
            pass
        try:
            pass
        except:
            pass
        try:
            pass
        finally:
            pass

cdef void q():
    pass

cdef class C:
    pass

cdef void t(C c) nogil:
    pass

def ticket_338():
    cdef object obj
    with nogil:
        for obj from 0 <= obj < 4:
            pass

def bare_pyvar_name(object x):
    with nogil:
        x

# For m(), the important thing is that there are errors on all lines in the range 23-69
# except these: 29, 34, 44, 56, 58, 60, 62-64

_ERRORS = u"""
4:5: Function with Python return type cannot be declared nogil
7:5: Function declared nogil has Python locals or temporaries
9:6: Assignment of Python object not allowed without gil
12:5: Discarding owned Python object not allowed without gil
14:5: Function with Python return type cannot be declared nogil
18:5: Calling gil-requiring function not allowed without gil
27:9: Calling gil-requiring function not allowed without gil
29:12: Assignment of Python object not allowed without gil
31:16: Constructing complex number not allowed without gil
33:12: Assignment of Python object not allowed without gil
33:14: Backquote expression not allowed without gil
33:15: Operation not allowed without gil
34:15: Assignment of Python object not allowed without gil
34:15: Operation not allowed without gil
34:15: Python import not allowed without gil
35:8: Operation not allowed without gil
35:13: Python import not allowed without gil
35:25: Constructing Python list not allowed without gil
35:25: Operation not allowed without gil
36:17: Iterating over Python object not allowed without gil
38:11: Discarding owned Python object not allowed without gil
38:11: Indexing Python object not allowed without gil
39:11: Discarding owned Python object not allowed without gil
39:11: Slicing Python object not allowed without gil
40:11: Constructing Python slice object not allowed without gil
40:11: Discarding owned Python object not allowed without gil
40:11: Indexing Python object not allowed without gil
40:13: Converting to Python object not allowed without gil
40:15: Converting to Python object not allowed without gil
40:17: Converting to Python object not allowed without gil
41:11: Accessing Python attribute not allowed without gil
41:11: Discarding owned Python object not allowed without gil
42:9: Constructing Python tuple not allowed without gil
42:9: Discarding owned Python object not allowed without gil
43:8: Constructing Python list not allowed without gil
43:8: Discarding owned Python object not allowed without gil
44:10: Constructing Python dict not allowed without gil
44:10: Discarding owned Python object not allowed without gil
45:10: Constructing Python set not allowed without gil
45:10: Discarding owned Python object not allowed without gil
46:12: Discarding owned Python object not allowed without gil
46:12: Truth-testing Python object not allowed without gil
47:13: Python type test not allowed without gil
49:10: Discarding owned Python object not allowed without gil
49:10: Operation not allowed without gil
50:8: Discarding owned Python object not allowed without gil
50:8: Operation not allowed without gil
51:10: Assignment of Python object not allowed without gil
51:14: Assignment of Python object not allowed without gil
52:9: Assignment of Python object not allowed without gil
52:13: Assignment of Python object not allowed without gil
52:16: Creating temporary Python reference not allowed without gil
52:19: Creating temporary Python reference not allowed without gil
53:11: Assignment of Python object not allowed without gil
53:11: Indexing Python object not allowed without gil
54:11: Accessing Python attribute not allowed without gil
54:11: Assignment of Python object not allowed without gil
55:8: Constructing Python tuple not allowed without gil
55:8: Python print statement not allowed without gil
56:8: Deleting Python object not allowed without gil
57:8: Returning Python object not allowed without gil
58:8: Raising exception not allowed without gil
59:14: Truth-testing Python object not allowed without gil
61:17: Truth-testing Python object not allowed without gil
63:8: For-loop using object bounds or target not allowed without gil
63:14: Coercion from Python not allowed without the GIL
63:25: Coercion from Python not allowed without the GIL
65:8: Try-except statement not allowed without gil
86:8: For-loop using object bounds or target not allowed without gil
"""
Cython-0.26.1/tests/errors/missing_self_in_cpdef_method_T165.pyx0000664000175000017500000000026012542002467025440 0ustar  stefanstefan00000000000000# ticket: 165
# mode: error

cdef class A:
    cpdef a(int not_self):
        pass

_ERRORS = u"""
5:10: Self argument (int) of C method 'a' does not match parent type (A)
"""
Cython-0.26.1/tests/errors/e_cython_parallel.pyx0000664000175000017500000001215712542002467022544 0ustar  stefanstefan00000000000000# mode: error

cimport cython.parallel.parallel as p
from cython.parallel cimport something

import cython.parallel.parallel as p
from cython.parallel import something

from cython.parallel cimport prange

import cython.parallel

prange(1, 2, 3, schedule='dynamic')

cdef int i

with nogil, cython.parallel.parallel():
    for i in prange(10, schedule='invalid_schedule'):
        pass

with cython.parallel.parallel():
    print "hello world!"

cdef int *x = NULL

with nogil, cython.parallel.parallel():
    for j in prange(10):
        pass

    for x[1] in prange(10):
        pass

    for x in prange(10):
        pass

    with cython.parallel.parallel():
        pass

with nogil, cython.parallel.parallel:
    pass

cdef int y

for i in prange(10, nogil=True):
    i = y * 4
    y = i

for i in prange(10, nogil=True):
    y = i
    i = y * 4
    y = i


with nogil, cython.parallel.parallel():
    i = y
    y = i

for i in prange(10, nogil=True):
    y += i
    y *= i

with nogil, cython.parallel.parallel("invalid"):
    pass

with nogil, cython.parallel.parallel(invalid=True):
    pass

def f(x):
    cdef int i

    with nogil, cython.parallel.parallel():
        with gil:
            yield x

        for i in prange(10):
            with gil:
                yield x

# Disabled nesting:

for i in prange(10, nogil=True):
    for y in prange(10):
        pass

with nogil, cython.parallel.parallel():
    for i in prange(10):
        for i in prange(10):
            pass


# Assign to private from parallel block in prange:
cdef int myprivate1, myprivate2

with nogil, cython.parallel.parallel():
    myprivate1 = 1
    for i in prange(10):
        myprivate1 = 3
        myprivate2 = 4
    myprivate2 = 2

# Disallow parallel with block reductions:
i = 0
with nogil, cython.parallel.parallel():
    i += 1

# Use of privates after the parallel with block
with nogil, cython.parallel.parallel():
    i = 1

print i
i = 2
print i

# Reading of reduction variables in the prange block
cdef int sum = 0
for i in prange(10, nogil=True):
    sum += i
    with gil:
        print sum

for pyobj in prange("hello"):
    pass

from cython import parallel
with nogil, parallel.parallel():
    for i in parallel.prange(10):
        pass

cdef int[:] dst, src = object()
for i in prange(10, nogil=True):
    dst = src

for i in prange(10, nogil=True, chunksize=20):
    pass

for i in prange(10, nogil=True, schedule='static', chunksize=-1):
    pass

for i in prange(10, nogil=True, schedule='runtime', chunksize=10):
    pass

cdef int chunksize():
    return 10

for i in prange(10, nogil=True, schedule='static', chunksize=chunksize()):
    pass

with nogil, cython.parallel.parallel():
    with cython.parallel.parallel():
        pass

_ERRORS = u"""
e_cython_parallel.pyx:3:8: cython.parallel.parallel is not a module
e_cython_parallel.pyx:4:0: No such directive: cython.parallel.something
e_cython_parallel.pyx:6:7: cython.parallel.parallel is not a module
e_cython_parallel.pyx:7:0: No such directive: cython.parallel.something
e_cython_parallel.pyx:13:6: prange() can only be used as part of a for loop
e_cython_parallel.pyx:13:6: prange() can only be used without the GIL
e_cython_parallel.pyx:18:19: Invalid schedule argument to prange: invalid_schedule
e_cython_parallel.pyx:21:29: The parallel section may only be used without the GIL
e_cython_parallel.pyx:27:10: target may not be a Python object as we don't have the GIL
e_cython_parallel.pyx:30:9: Can only iterate over an iteration variable
e_cython_parallel.pyx:33:10: Must be of numeric type, not int *
e_cython_parallel.pyx:36:33: Nested parallel with blocks are disallowed
e_cython_parallel.pyx:39:12: The parallel directive must be called
e_cython_parallel.pyx:45:10: local variable 'y' referenced before assignment
e_cython_parallel.pyx:55:9: local variable 'y' referenced before assignment
e_cython_parallel.pyx:60:6: Reduction operator '*' is inconsistent with previous reduction operator '+'
e_cython_parallel.pyx:62:36: cython.parallel.parallel() does not take positional arguments
e_cython_parallel.pyx:65:36: Invalid keyword argument: invalid
e_cython_parallel.pyx:73:12: Yield not allowed in parallel sections
e_cython_parallel.pyx:77:16: Yield not allowed in parallel sections
e_cython_parallel.pyx:97:19: Cannot assign to private of outer parallel block
e_cython_parallel.pyx:98:19: Cannot assign to private of outer parallel block
e_cython_parallel.pyx:104:6: Reductions not allowed for parallel blocks
e_cython_parallel.pyx:110:7: local variable 'i' referenced before assignment
e_cython_parallel.pyx:119:17: Cannot read reduction variable in loop body
e_cython_parallel.pyx:121:20: stop argument must be numeric
e_cython_parallel.pyx:121:19: prange() can only be used without the GIL
e_cython_parallel.pyx:131:8: Memoryview slices can only be shared in parallel sections
e_cython_parallel.pyx:133:42: Must provide schedule with chunksize
e_cython_parallel.pyx:136:62: Chunksize must not be negative
e_cython_parallel.pyx:139:62: Chunksize not valid for the schedule runtime
e_cython_parallel.pyx:145:70: Calling gil-requiring function not allowed without gil
e_cython_parallel.pyx:149:33: Nested parallel with blocks are disallowed
"""
Cython-0.26.1/tests/errors/e_sizeofincomplete.pyx0000664000175000017500000000021512542002467022733 0ustar  stefanstefan00000000000000# mode: error

cdef struct unbekannt
cdef int n
n = sizeof(unbekannt)
_ERRORS = u"""
5:4: Cannot take sizeof incomplete type 'unbekannt'
"""
Cython-0.26.1/tests/errors/typoT304.pyx0000664000175000017500000000023712542002467020422 0ustar  stefanstefan00000000000000# ticket: 304
# mode: error

def f():
    print assert sizeof(int) == sizof(short) == sizeof(long)

_ERRORS = u"""
5:10: Expected an identifier or literal
"""
Cython-0.26.1/tests/errors/e_switch_transform.pyx0000664000175000017500000000032412542002467022751 0ustar  stefanstefan00000000000000# cython: optimize.use_switch=True
# mode: error
# tag: cerror

import cython

cdef extern from *:
    enum:
        ONE "1"
        ONE_AGAIN "1+0"

def is_not_one(int i):
    return i != ONE and i != ONE_AGAIN
Cython-0.26.1/tests/errors/mod_errors.pyx0000664000175000017500000000022412542002467021223 0ustar  stefanstefan00000000000000# mode: error

def mod_complex():
    x = (1.1+2.0j) % 4
    return x

_ERRORS = """
4:19: mod operator not supported for type 'double complex'
"""
Cython-0.26.1/tests/errors/e_bitop.pyx0000664000175000017500000000023512542002467020473 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef int int1, int2
	cdef char *ptr
	int1 = int2 | ptr # error
_ERRORS = u"""
6:13: Invalid operand types for '|' (int; char *)
"""
Cython-0.26.1/tests/errors/charptr_from_temp.pyx0000664000175000017500000000275112542002467022572 0ustar  stefanstefan00000000000000# mode: error
# tag: werror, charptr, conversion, temp, py_unicode_strings

cdef bytes c_s = b"abc"
s = b"abc"

cdef char* cptr

# constant => ok
cptr = b"xyz"

# global cdef variable => ok
cptr = c_s

# pyglobal => warning
cptr = s

# temp => error
cptr = s + b"cba"

# indexing => error (but not clear enough to make it a compiler error)
cptr = s[0]
cdef char* x = s[0]

# slicing => error
cptr = s[:2]


cdef unicode  c_u = u"abc"
u = u"abc"

cdef Py_UNICODE* cuptr

# constant => ok
cuptr = u"xyz"

# global cdef variable => ok
cuptr = c_u

# pyglobal => warning
cuptr = u

# temp => error
cuptr = u + u"cba"


# coercion in conditional expression => ok
boolval = list(u)
cptr = c_s if boolval else c_s

# temp in conditional expression => error
cptr = s + b'x' if boolval else s + b'y'


_ERRORS = """
16:8: Obtaining 'char *' from externally modifiable global Python value
19:9: Storing unsafe C derivative of temporary Python reference
#22:8: Storing unsafe C derivative of temporary Python reference
#23:5: Storing unsafe C derivative of temporary Python reference
#23:15: Casting temporary Python object to non-numeric non-Python type
26:8: Storing unsafe C derivative of temporary Python reference
41:9: Obtaining 'Py_UNICODE *' from externally modifiable global Python value
44:10: Storing unsafe C derivative of temporary Python reference
52:7: Storing unsafe C derivative of temporary Python reference
52:7: Unsafe C derivative of temporary Python reference used in conditional expression
"""
Cython-0.26.1/tests/errors/e_cdefemptysue.pyx0000664000175000017500000000054012542002467022052 0ustar  stefanstefan00000000000000# mode: error

cdef struct spam:
	pass

ctypedef union eggs:
	pass

cdef enum ham:
	pass
_ERRORS = u"""
3:5: Empty struct or union definition not allowed outside a 'cdef extern from' block
6:0: Empty struct or union definition not allowed outside a 'cdef extern from' block
9:5: Empty enum definition not allowed outside a 'cdef extern from' block
"""
Cython-0.26.1/tests/errors/incorrectly_nested_gil_blocks.pyx0000664000175000017500000000121112542002467025134 0ustar  stefanstefan00000000000000# mode: error

with gil:
    pass

with nogil:
    with nogil:
        pass

cdef void without_gil() nogil:
   # This is not an error, as 'func' *may* be called without the GIL, but it
   # may also be held.
    with nogil:
        pass

cdef void with_gil() with gil:
    # This is an error, as the GIL is acquired already
    with gil:
        pass

def func():
    with gil:
        pass

_ERRORS = u'''
3:5: Trying to acquire the GIL while it is already held.
7:9: Trying to release the GIL while it was previously released.
18:9: Trying to acquire the GIL while it is already held.
22:9: Trying to acquire the GIL while it is already held.
'''
Cython-0.26.1/tests/errors/builtin_type_inheritance.pyx0000664000175000017500000000077312542002467024141 0ustar  stefanstefan00000000000000# mode: error

# current restriction: cannot inherit from PyVarObject (see ticket #152)

cdef class MyTuple(tuple):
    pass

cdef class MyBytes(bytes):
    pass

cdef class MyStr(str): # only in Py2, but can't know that during compilation
    pass

_ERRORS = """
5:5: inheritance from PyVarObject types like 'tuple' is not currently supported
8:5: inheritance from PyVarObject types like 'bytes' is not currently supported
11:5: inheritance from PyVarObject types like 'str' is not currently supported
"""
Cython-0.26.1/tests/errors/e_numop.pyx0000664000175000017500000000023312542002467020512 0ustar  stefanstefan00000000000000# mode: error

def f():
	cdef int int1, int2
	cdef int *ptr
	int1 = int2 * ptr # error
_ERRORS = u"""
6:13: Invalid operand types for '*' (int; int *)
"""
Cython-0.26.1/tests/errors/w_unused.pyx0000664000175000017500000000216112542002467020703 0ustar  stefanstefan00000000000000# cython: warn.unused=True, warn.unused_arg=True, warn.unused_result=True
# mode: error
# tag: werror

def unused_variable():
    a = 1

def unused_cascade(arg):
    a, b = arg.split()
    return a

def unused_arg(arg):
    pass

def unused_result():
    r = 1 + 1
    r = 2
    return r

def unused_nested():
    def _unused_one():
        pass

def unused_class():
    class Unused:
        pass

# this should not generate a warning
def used(x, y):
    x.y = 1
    y[0] = 1
    lambda x: x

def unused_and_unassigned():
    cdef object foo
    cdef int i

def unused_generic(*args, **kwargs):
    pass

def unused_in_closure(a,b,c):
    x = 1
    def inner():
        nonlocal c
        c = 1
        y = 2
        return a+b
    return inner()


_ERRORS = """
6:6: Unused entry 'a'
9:9: Unused entry 'b'
12:15: Unused argument 'arg'
16:6: Unused result in 'r'
21:4: Unused entry '_unused_one'
25:4: Unused entry 'Unused'
35:16: Unused entry 'foo'
36:13: Unused entry 'i'
38:20: Unused argument 'args'
38:28: Unused argument 'kwargs'
41:26: Unused argument 'c'
41:26: Unused entry 'c'
42:6: Unused entry 'x'
46:10: Unused entry 'y'
"""
Cython-0.26.1/tests/errors/e_pxdimpl_imported.pxd0000664000175000017500000000105612542002467022713 0ustar  stefanstefan00000000000000
cdef class A:
    cdef int test(self)

    # Should give error:
    def somefunc(self):
        pass

    # While this should *not* be an error...:
    def __getbuffer__(self, Py_buffer* info, int flags):
        pass
    # This neither:
    def __releasebuffer__(self, Py_buffer* info):
        pass

    # Terminate with an error to be sure the compiler is
    # not terminating prior to previous errors
    def terminate(self):
        pass

cdef extern from "foo.h":
    cdef class pxdimpl.B [object MyB]:
        def otherfunc(self):
            pass

Cython-0.26.1/tests/errors/string_assignments.pyx0000664000175000017500000000702312542002467022775 0ustar  stefanstefan00000000000000# mode: error
# coding: ASCII
# tag: py_unicode_strings

# ok:
cdef char* c1   =  "abc"
cdef str s1     =  "abc"

cdef unicode u1 = u"abc"
cdef Py_UNICODE* cu1 = u1

cdef bytes b1 = b"abc"
cdef char* c2 = b"abc"

cdef bytes b2 = c1
cdef char* c3 = b1

cdef basestring bs1  =  "abc"
cdef basestring bs2  = u"abc"

cdef object o1  =  "abc"
cdef object o2  = b"abc"
cdef object o3  = u"abc"

o4 = c1
o5 = b1
o6 = s1
o7 = u1
o8 = cu1
o9 = bs1

u1 = bs1
s1 = bs1

# errors:
cdef char* c_f1   = u"abc"
cdef char* c_f2   = u1
cdef char* c_f3   = s1

cdef Py_UNICODE* cu_f1 = c1
cdef Py_UNICODE* cu_f2 = b1
cdef Py_UNICODE* cu_f3 = s1
cdef Py_UNICODE* cu_f4 = b"abc"

cdef bytes b_f1   = u"abc"
cdef bytes b_f2   = u1
cdef bytes b_f3   = s1
cdef bytes b_f4   = bs1

cdef str s_f1  = b"abc"
cdef str s_f2  = b1
cdef str s_f3  = u"abc"
cdef str s_f4  = u1

cdef unicode u_f1 = "abc"
cdef unicode u_f2 = s1
cdef unicode u_f3 = b"abc"
cdef unicode u_f4 = b1
cdef unicode u_f5 = c1

cdef basestring bs_f1 = b"abc"
cdef basestring bs_f2 = b1

cdef tuple t_f1 =  "abc"
cdef tuple t_f2 = u"abc"
cdef tuple t_f3 = b"abc"

cdef list  l_f1 = s1
cdef list  l_f2 = b1
cdef list  l_f3 = u1

print c1
print c1[1:2]
print c1
print c1[1:2]

_ERRORS = u"""
36:20: Unicode literals do not support coercion to C types other than Py_UNICODE/Py_UCS4 (for characters) or Py_UNICODE* (for strings).
37:22: Unicode objects only support coercion to Py_UNICODE*.
38:22: 'str' objects do not support coercion to C types (use 'bytes'?).

40:27: Cannot assign type 'char *' to 'Py_UNICODE *'
41:27: Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'.
42:27: 'str' objects do not support coercion to C types (use 'unicode'?).
43:25: Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'.

45:20: Cannot convert Unicode string to 'bytes' implicitly, encoding required.
46:22: Cannot convert Unicode string to 'bytes' implicitly, encoding required.
47:22: Cannot convert 'str' to 'bytes' implicitly. This is not portable.
48:23: Cannot convert 'basestring' object to bytes implicitly. This is not portable.

50:17: Cannot convert 'bytes' object to str implicitly. This is not portable to Py3.
51:19: Cannot convert 'bytes' object to str implicitly. This is not portable to Py3.
52:17: Cannot convert Unicode string to 'str' implicitly. This is not portable and requires explicit encoding.
53:19: Cannot convert Unicode string to 'str' implicitly. This is not portable and requires explicit encoding.

55:20: str objects do not support coercion to unicode, use a unicode string literal instead (u'')
56:22: str objects do not support coercion to unicode, use a unicode string literal instead (u'')
57:20: Cannot convert 'bytes' object to unicode implicitly, decoding required
58:22: Cannot convert 'bytes' object to unicode implicitly, decoding required
59:22: Cannot convert 'char*' to unicode implicitly, decoding required

61:24: Cannot convert 'bytes' object to basestring implicitly. This is not portable to Py3.
62:26: Cannot convert 'bytes' object to basestring implicitly. This is not portable to Py3.

64:19: Cannot assign type 'str object' to 'tuple object'
65:18: Cannot assign type 'unicode object' to 'tuple object'
66:18: Cannot assign type 'bytes object' to 'tuple object'

72:13: default encoding required for conversion from 'char *' to 'str object'
73:13: default encoding required for conversion from 'char *' to 'str object'
74:17: Cannot convert 'char*' to unicode implicitly, decoding required
75:17: default encoding required for conversion from 'char *' to 'unicode object'
"""
Cython-0.26.1/tests/errors/e_ctypedefornot.pyx0000664000175000017500000000037312542002467022246 0ustar  stefanstefan00000000000000# mode: error

cdef struct Foo

ctypedef struct Foo:
	int i

ctypedef struct Blarg:
	char c

cdef struct Blarg

cdef Foo f
cdef Blarg b

_ERRORS = u"""
5:0: 'Foo' previously declared using 'cdef'
11:5: 'Blarg' previously declared using 'ctypedef'
"""
Cython-0.26.1/tests/errors/cdefoptargs.pyx0000664000175000017500000000024712542002467021356 0ustar  stefanstefan00000000000000# mode: error

def call5():
    b(1,2,3,4,5)

cdef b(a, b, c=1, d=2):
    pass

_ERRORS = u"""
4:5:Call with wrong number of arguments (expected at most 4, got 5)
"""
Cython-0.26.1/tests/errors/e_extweakref.pyx0000664000175000017500000000126212542002467021524 0ustar  stefanstefan00000000000000# mode: error

cdef class C:
	cdef object __weakref__

cdef class D:
	cdef public object __weakref__

cdef class E:
	cdef readonly object __weakref__

cdef void f():
	cdef C c = C()
	cdef object x
	x = c.__weakref__
	c.__weakref__ = x
_ERRORS = u"""
7:20: Illegal use of special attribute __weakref__
7:20: Illegal use of special attribute __weakref__
7:20: Illegal use of special attribute __weakref__
7:20: Special attribute __weakref__ cannot be exposed to Python
10:22: Illegal use of special attribute __weakref__
10:22: Special attribute __weakref__ cannot be exposed to Python
15:6: Illegal use of special attribute __weakref__
16:2: Illegal use of special attribute __weakref__
"""
Cython-0.26.1/tests/errors/cdef_in_pyclass.pyx0000664000175000017500000000020012542002467022167 0ustar  stefanstefan00000000000000# mode: error

class Pyclass(object):
    cdef bad(self):
        pass

_ERRORS = """
 4:9: cdef statement not allowed here
"""
Cython-0.26.1/tests/errors/pep492_badsyntax_async4.pyx0000664000175000017500000000023312542002467023431 0ustar  stefanstefan00000000000000# mode: error
# tag: pep492, async

async def foo():
    async def foo(): await list()

_ERRORS = """
# ???  - this fails in CPython, not sure why ...
"""
Cython-0.26.1/tests/errors/literal_lists.pyx0000664000175000017500000000030712542002467021724 0ustar  stefanstefan00000000000000# mode: error

def f():
    cdef int* p
    if false():
        p = [1, 2, 3]

def false():
    return False

_ERRORS = u"""
6:10: Literal list must be assigned to pointer at time of declaration
"""
Cython-0.26.1/tests/errors/e_pxdimpl.pyx0000664000175000017500000000041512542002467021033 0ustar  stefanstefan00000000000000# mode: error

cimport e_pxdimpl_imported

_ERRORS = u"""
 6:4: function definition in pxd file must be declared 'cdef inline'
18:4: function definition in pxd file must be declared 'cdef inline'
23:8: function definition in pxd file must be declared 'cdef inline'
"""
Cython-0.26.1/tests/errors/callargs.pyx0000664000175000017500000000102512542002467020640 0ustar  stefanstefan00000000000000# mode: error

def f(*args, **kwargs):
    pass

args   = (1,2,3)
kwargs = {u"test" : "toast"}

def test():
    # ok
    f(1, 2, c=3, *args, d=5, **kwargs, **kwargs)
    f(1, 2, c=3, *args, d=5, **kwargs, x=6)
    f(1, 2, **kwargs, c=3)
    f(1, 2, c=3, *args, *args, **kwargs)
    f(*args, 1, 2, 3)
    # errors
    f(**kwargs, 1, 2, c=3)
    f(*args, **kwargs, *args)
    f(1, 2, c=3, *args, **kwargs, *args)
    f(1=2)


# too bad we don't get more errors here ...

_ERRORS = u"""
17:16: Non-keyword arg following keyword arg
"""
Cython-0.26.1/tests/pypy_bugs.txt0000664000175000017500000000117213143605603017556 0ustar  stefanstefan00000000000000# This file contains tests corresponding to unresolved bugs
# either in PyPy, PyPy's cpyext, or Cython under PyPy,
# which will be skipped in the normal testing run.

broken_exception
bufaccess
memoryview
memslice
sequential_parallel

yield_from_pep380
memoryview_inplace_division

# gc issue?
memoryview_in_subclasses
external_ref_reassignment
run.exttype_dealloc

# bugs in cpyext
run.special_methods_T561
run.special_methods_T561_py2

# tests for things that don't exist in cpyext
compile.pylong
run.datetime_pxd
run.datetime_cimport
run.datetime_members
run.extern_builtins_T258

# refcounting-specific tests
double_dealloc_T796

Cython-0.26.1/tests/bugs.txt0000664000175000017500000000261113023021033016456 0ustar  stefanstefan00000000000000# This file contains tests corresponding to unresolved bugs,
# which will be skipped in the normal testing run.

class_attribute_init_values_T18
unsignedbehaviour_T184
missing_baseclass_in_predecl_T262
cfunc_call_tuple_args_T408
genexpr_iterable_lookup_T600
generator_expressions_in_class
for_from_pyvar_loop_T601
temp_sideeffects_T654    # not really a bug, Cython warns about it
inherited_final_method
cimport_alias_subclass

# CPython regression tests that don't currently work:
pyregr.test_signal
pyregr.test_capi
pyregr.test_socket
pyregr.test_sys
pyregr.test_pep3131
pyregr.test_multiprocessing
pyregr.test_tempfile
pyregr.test_ioctl

# CPython regression tests with threading issues
pyregr.test_threadsignals
pyregr.test_threading
pyregr.test_threaded_import
pyregr.test_logging

# CPython regression tests that don't make sense
pyregr.test_gdb
pyregr.test_support
pyregr.test_peepholer

# the atexit test runs the registered atexit functions => module cleanup => crash
pyregr.test_atexit

# a settrace test bypasses a with statement and currently crashes
pyregr.test_sys_settrace

# tests for exception on infinite recursion - may crash with stack overflow when calling C function
pyregr.test_exceptions

# CPython regression tests that take too long
pyregr.test_subprocess
pyregr.test_zipfile64
pyregr.test_tuple
pyregr.test_urllib2net
pyregr.test_urllibnet

# Inlined generators
inlined_generator_expressions
Cython-0.26.1/tests/cygwin_bugs.txt0000664000175000017500000000016313143605603020054 0ustar  stefanstefan00000000000000complex_numbers_c89_T398_long_double
complex_numbers_T305_long_double
int_float_builtins_as_casts_T400_long_double
Cython-0.26.1/tests/broken/0000775000175000017500000000000013151203436016250 5ustar  stefanstefan00000000000000Cython-0.26.1/tests/broken/r_kwonlyargs.pyx0000664000175000017500000000131112542002467021533 0ustar  stefanstefan00000000000000def pd(d):
    l = []
    i = d.items()
    i.sort()
    for kv in i:
        l.append("%r: %r" % kv)
    return "{%s}" % ", ".join(l)

def c(a, b, c):
    print "a =", a, "b =", b, "c =", c

def d(a, b, *, c = 88):
    print "a =", a, "b =", b, "c =", c

def e(a, b, c = 88, **kwds):
    print "a =", a, "b =", b, "c =", c, "kwds =", pd(kwds)

def f(a, b, *, c, d = 42):
    print "a =", a, "b =", b, "c =", c, "d =", d

def g(a, b, *, c, d = 42, e = 17, f, **kwds):
    print "a =", a, "b =", b, "c =", c, "d =", d, "e =", e, "f =", f, "kwds =", pd(kwds)

def h(a, b, *args, c, d = 42, e = 17, f, **kwds):
    print "a =", a, "b =", b, "args =", args, "c =", c, "d =", d, "e =", e, "f =", f, "kwds =", pd(kwds)
Cython-0.26.1/tests/broken/includepublic.pyx0000664000175000017500000000003012542002467021631 0ustar  stefanstefan00000000000000include "i_public.pxi"

Cython-0.26.1/tests/broken/invalid-module-name.pyx0000664000175000017500000000000012542002467022633 0ustar  stefanstefan00000000000000Cython-0.26.1/tests/broken/r_extweakref.pyx0000664000175000017500000000011312542002467021477 0ustar  stefanstefan00000000000000cdef class Animal:
    cdef object __weakref__
    cdef public object name
Cython-0.26.1/tests/broken/r_cfuncimport.pyx0000664000175000017500000000013212542002467021664 0ustar  stefanstefan00000000000000cimport l_cfuncexport
from l_cfuncexport cimport g

print l_cfuncexport.f(42)
print g(42)
Cython-0.26.1/tests/broken/externsue.pyx0000664000175000017500000000042312542002467021037 0ustar  stefanstefan00000000000000cdef extern from "externsue.h":

    enum Eggs:
        runny, firm, hard

    struct Spam:
        int i

    union Soviet:
        char c

cdef extern Eggs e
cdef extern Spam s
cdef extern Soviet u

cdef void tomato():
    global e
    e = runny
    e = firm
    e = hard

Cython-0.26.1/tests/broken/r_extmember.pyx0000664000175000017500000000047712542002467021337 0ustar  stefanstefan00000000000000cdef class Spam:
    cdef public int tons
    cdef readonly float tastiness
    cdef int temperature

    def __init__(self, tons, tastiness, temperature):
        self.tons = tons
        self.tastiness = tastiness
        self.temperature = temperature

    def get_temperature(self):
        return self.temperature
Cython-0.26.1/tests/broken/r_capi.pyx0000664000175000017500000000021012542002467020244 0ustar  stefanstefan00000000000000cdef extern from "l_capi_api.h":
    float f(float)
    int import_l_capi() except -1

def test():
    print f(3.1415)

import_l_capi()
Cython-0.26.1/tests/broken/r_classmodname.pyx0000664000175000017500000000002512542002467022002 0ustar  stefanstefan00000000000000class Spam:
    pass
Cython-0.26.1/tests/broken/builtinconst.pyx0000664000175000017500000000250712542002467021537 0ustar  stefanstefan00000000000000cdef int f() except -1:
    cdef type t
    cdef object x
    t = buffer
    t = enumerate
    t = file
    t = float
    t = int
    t = long
    t = open
    t = property
    t = str
    t = tuple
    t = xrange
    x = True
    x = False
    x = Ellipsis
    x = Exception
    x = StopIteration
    x = StandardError
    x = ArithmeticError
    x = LookupError
    x = AssertionError
    x = AssertionError
    x = EOFError
    x = FloatingPointError
    x = EnvironmentError
    x = IOError
    x = OSError
    x = ImportError
    x = IndexError
    x = KeyError
    x = KeyboardInterrupt
    x = MemoryError
    x = NameError
    x = OverflowError
    x = RuntimeError
    x = NotImplementedError
    x = SyntaxError
    x = IndentationError
    x = TabError
    x = ReferenceError
    x = SystemError
    x = SystemExit
    x = TypeError
    x = UnboundLocalError
    x = UnicodeError
    x = UnicodeEncodeError
    x = UnicodeDecodeError
    x = UnicodeTranslateError
    x = ValueError
    x = ZeroDivisionError
    x = MemoryErrorInst
    x = Warning
    x = UserWarning
    x = DeprecationWarning
    x = PendingDeprecationWarning
    x = SyntaxWarning
    #x = OverflowWarning # Does not seem to exist in 2.5
    x = RuntimeWarning
    x = FutureWarning
    typecheck(x, Exception)
    try:
        pass
    except ValueError:
        pass
Cython-0.26.1/tests/broken/cexportfunc.pyx0000664000175000017500000000007712542002467021362 0ustar  stefanstefan00000000000000cdef int f():
    pass

cdef char *g(int k, float z):
    pass
Cython-0.26.1/tests/broken/r_excval.pyx0000664000175000017500000000060112542002467020616 0ustar  stefanstefan00000000000000cdef int tomato() except -1:
    print "Entering tomato"
    raise Exception("Eject! Eject! Eject!")
    print "Leaving tomato"

cdef void sandwich():
    print "Entering sandwich"
    tomato()
    print "Leaving sandwich"

def snack():
    print "Entering snack"
    tomato()
    print "Leaving snack"

def lunch():
    print "Entering lunch"
    sandwich()
    print "Leaving lunch"
Cython-0.26.1/tests/broken/r_simpcall.pyx0000664000175000017500000000004012542002467021135 0ustar  stefanstefan00000000000000def f():
    print "Spam!"

f()
Cython-0.26.1/tests/broken/l_capi.pyx0000664000175000017500000000006212542002467020243 0ustar  stefanstefan00000000000000cdef api float f(float x):
    return 0.5 * x * x
Cython-0.26.1/tests/broken/r_getattr3.pyx0000664000175000017500000000010012542002467021063 0ustar  stefanstefan00000000000000def test(obj, attr, dflt):
    return getattr3(obj, attr, dflt)
Cython-0.26.1/tests/broken/getattr.pyx0000664000175000017500000000016312542002467020470 0ustar  stefanstefan00000000000000cdef class Spam:
    cdef public object eggs

    def __getattr__(self, name):
        print "Spam getattr:", name
Cython-0.26.1/tests/broken/r_unpack.pyx0000664000175000017500000000021712542002467020620 0ustar  stefanstefan00000000000000seq = [1, [2, 3]]

def f():
    a, (b, c) = [1, [2, 3]]
    print a
    print b
    print c

def g():
    a, b, c = seq

def h():
    a, = seq
Cython-0.26.1/tests/broken/l_cfuncexport.pyx0000664000175000017500000000011112542002467021662 0ustar  stefanstefan00000000000000cdef int f(int x):
    return x * x

cdef int g(int x):
    return 5 * x
Cython-0.26.1/tests/broken/r_tbfilename.pyx0000664000175000017500000000003712542002467021445 0ustar  stefanstefan00000000000000def foo():
    raise Exception
Cython-0.26.1/tests/broken/fwddeclcclass.pyx0000664000175000017500000000015112542002467021614 0ustar  stefanstefan00000000000000cdef class Widget:
    pass

cdef class Container:
    pass

cdef Widget w
cdef Container c
w.parent = c
Cython-0.26.1/tests/broken/cascadedass.pyx0000664000175000017500000000023112542002467021250 0ustar  stefanstefan00000000000000cdef void foo():
    cdef int i, j, k
    i = j = k
    a = b = c
    i = j = c
    a = b = k
    (a, b), c = (d, e), f = (x, y), z
#	a, b = p, q = x, y
Cython-0.26.1/tests/broken/raise.pyx0000664000175000017500000000021012542002467020112 0ustar  stefanstefan00000000000000def f(a, b, c):
    #raise
    raise a
    raise "spam"
    raise a, b
    raise "spam", 42
    raise a, b, c
    raise "spam", 42, c()
Cython-0.26.1/tests/broken/big_t.pyx0000664000175000017500000000014012542002467020075 0ustar  stefanstefan00000000000000cdef extern from "foo.h":
    ctypedef long long big_t
    cdef void spam(big_t b)

spam(grail)
Cython-0.26.1/tests/broken/cimport.pyx0000664000175000017500000000022612542002467020473 0ustar  stefanstefan00000000000000cimport spam
cimport pkg.eggs

cdef spam.Spam yummy
cdef pkg.eggs.Eggs fried

spam.eat(yummy)
spam.tons = 3.14
ova = pkg.eggs
fried = pkg.eggs.Eggs()
Cython-0.26.1/tests/broken/b_extimpinherit.pyx0000664000175000017500000000021612542002467022207 0ustar  stefanstefan00000000000000cdef class Parrot:

    cdef describe(self):
        print "This is a parrot."

    cdef action(self):
        print "Polly wants a cracker!"
Cython-0.26.1/tests/broken/r_extinherit.pyx0000664000175000017500000000042512542002467021523 0ustar  stefanstefan00000000000000cdef class Parrot:

    cdef object plumage

    def __init__(self):
        self.plumage = "yellow"

    def describe(self):
        print "This bird has lovely", self.plumage, "plumage."


cdef class Norwegian(Parrot):

    def __init__(self):
        self.plumage = "blue"

Cython-0.26.1/tests/broken/builtindict.pyx0000664000175000017500000000041112542002467021324 0ustar  stefanstefan00000000000000cdef int f() except -1:
    cdef dict d
    cdef object x, z
    cdef int i
    z = dict
    d = dict(x)
    d = dict(*x)
    d.clear()
    z = d.copy()
    z = d.items()
    z = d.keys()
    z = d.values()
    d.merge(x, i)
    d.update(x)
    d.merge_pairs(x, i)
Cython-0.26.1/tests/broken/tryexceptelse.pyx0000664000175000017500000000044312542002467021717 0ustar  stefanstefan00000000000000def f():
    cdef int i
    try:
        i = 1
        raise x
        i = 2
    else:
        i = 3
        raise y
        i = 4

def g():
    cdef int i
    try:
        i = 1
        raise x
        i = 2
    except a:
        i = 3
    else:
        i = 4
        raise y
        i = 5
Cython-0.26.1/tests/broken/externfunc.pyx0000664000175000017500000000005212542002467021174 0ustar  stefanstefan00000000000000cdef extern from "foo.h":

    int fred()
Cython-0.26.1/tests/broken/r_inhcmethcall.pyx0000664000175000017500000000042712542002467021775 0ustar  stefanstefan00000000000000cdef class Parrot:

  cdef void describe(self):
    print "This parrot is resting."


cdef class Norwegian(Parrot):

  cdef void describe(self):
    Parrot.describe(self)
    print "Lovely plumage!"


cdef Parrot p1, p2
p1 = Parrot()
p2 = Norwegian()
p1.describe()
p2.describe()
Cython-0.26.1/tests/broken/cimportfrom.pyx0000664000175000017500000000015612542002467021361 0ustar  stefanstefan00000000000000from spam cimport Spam
from pkg.eggs cimport Eggs as ova

cdef extern Spam yummy
cdef ova fried

fried = None
Cython-0.26.1/tests/broken/i_public.pyx0000664000175000017500000000025612542002467020607 0ustar  stefanstefan00000000000000cdef public int grail

cdef public spam(int servings):
    pass

cdef public class sandwich [object sandwich, type sandwich_Type]:
    cdef int tomato
    cdef float lettuce
Cython-0.26.1/tests/broken/r_classdoc.pyx0000664000175000017500000000005412542002467021131 0ustar  stefanstefan00000000000000class Spam:
    """Spam, glorious spam!"""

Cython-0.26.1/tests/broken/tslots.pyx0000664000175000017500000000006112542002467020343 0ustar  stefanstefan00000000000000cdef class Spam:
    pass

def probe():
    pass
Cython-0.26.1/tests/broken/ctypedefextern.pyx0000664000175000017500000000025012542002467022044 0ustar  stefanstefan00000000000000cdef extern from "ctypedefextern.h":

    ctypedef int some_int
    ctypedef some_int *some_ptr

cdef void spam():
    cdef some_int i
    cdef some_ptr p
    p[0] = i
Cython-0.26.1/tests/broken/cimportfunc.pyx0000664000175000017500000000003612542002467021346 0ustar  stefanstefan00000000000000from cexportfunc cimport f, g
Cython-0.26.1/tests/broken/r_traceback.pyx0000664000175000017500000000020112542002467021247 0ustar  stefanstefan00000000000000cdef int spam() except -1:
    raise Exception("Spam error")

cdef int grail() except -1:
    spam()

def tomato():
    grail()

Cython-0.26.1/tests/broken/cimportfrompkgdir.pyx0000664000175000017500000000006212542002467022556 0ustar  stefanstefan00000000000000from package.inpackage cimport Spam

cdef Spam s2
Cython-0.26.1/tests/broken/pkg.cimportfrom.pyx0000664000175000017500000000014412542002467022136 0ustar  stefanstefan00000000000000from spam cimport Spam
from eggs cimport Eggs

cdef extern Spam yummy
cdef Eggs fried

fried = None
Cython-0.26.1/tests/broken/r_extnumeric2.pyx0000664000175000017500000000112612542002467021604 0ustar  stefanstefan00000000000000cdef extern from "numeric.h":

    struct PyArray_Descr:
        int type_num, elsize
        char type

    ctypedef class Numeric.ArrayType [object PyArrayObject]:
        cdef char *data
        cdef int nd
        cdef int *dimensions, *strides
        cdef object base
        cdef PyArray_Descr *descr
        cdef int flags

def ogle(ArrayType a):
    print "No. of dimensions:", a.nd
    print "  Dim Value"
    for i in range(a.nd):
        print "%5d %5d" % (i, a.dimensions[i])
    print "flags:", a.flags
    print "Type no.", a.descr.type_num
    print "Element size:", a.descr.elsize
Cython-0.26.1/tests/broken/r_extproperty.pyx0000664000175000017500000000057512542002467021753 0ustar  stefanstefan00000000000000cdef class CheeseShop:
    cdef object cheeses

    def __cinit__(self):
        self.cheeses = []

    property cheese:

        "A senseless waste of a property."

        def __get__(self):
            return "We don't have: %s" % self.cheeses

        def __set__(self, value):
            self.cheeses.append(value)

        def __del__(self):
            del self.cheeses[:]
Cython-0.26.1/tests/broken/naanou_1.pyx0000664000175000017500000000003412542002467020514 0ustar  stefanstefan00000000000000def f(a, *p, **n):
    pass
Cython-0.26.1/tests/broken/retconvert.pyx0000664000175000017500000000010512542002467021205 0ustar  stefanstefan00000000000000def f():
    return 42

cdef int g():
    cdef object x
    return x
Cython-0.26.1/tests/broken/getattr3ref.pyx0000664000175000017500000000005112542002467021244 0ustar  stefanstefan00000000000000cdef int f() except -1:
    g = getattr3
Cython-0.26.1/tests/broken/intindex.pyx0000664000175000017500000000025012542002467020635 0ustar  stefanstefan00000000000000cdef int f() except -1:
    cdef object x, y, z
    cdef int i
    cdef unsigned int ui
    z = x[y]
    z = x[i]
    x[y] = z
    x[i] = z
    z = x[ui]
    x[ui] = z
Cython-0.26.1/tests/broken/r_import.pyx0000664000175000017500000000013612542002467020651 0ustar  stefanstefan00000000000000import spam
print "Imported spam"
print dir(spam)

import sys
print "Imported sys"
print sys

Cython-0.26.1/tests/broken/cdefemptysue.pyx0000664000175000017500000000031012542002467021505 0ustar  stefanstefan00000000000000cdef extern from "cdefemptysue.h":

    cdef struct spam:
        pass

    ctypedef union eggs:
        pass

    cdef enum ham:
        pass

cdef extern spam s
cdef extern eggs e
cdef extern ham h
Cython-0.26.1/tests/broken/test_include_options.pyx0000664000175000017500000000020212542002467023245 0ustar  stefanstefan00000000000000import sys
from Pyrex.Compiler.Main import main

sys.argv[1:] = "-I spam -Ieggs --include-dir ham".split()
main(command_line = 1)
Cython-0.26.1/tests/broken/r_extimpinherit.pyx0000664000175000017500000000062212542002467022230 0ustar  stefanstefan00000000000000from b_extimpinherit cimport Parrot


cdef class Norwegian(Parrot):

    cdef action(self):
        print "This parrot is resting."

    cdef plumage(self):
        print "Lovely plumage!"


def main():
    cdef Parrot p
    cdef Norwegian n
    p = Parrot()
    n = Norwegian()
    print "Parrot:"
    p.describe()
    p.action()
    print "Norwegian:"
    n.describe()
    n.action()
    n.plumage()
Cython-0.26.1/tests/broken/pkg.cimport.pyx0000664000175000017500000000012312542002467021247 0ustar  stefanstefan00000000000000cimport spam, eggs

cdef extern spam.Spam yummy
cdef eggs.Eggs fried

fried = None
Cython-0.26.1/tests/broken/r_newstyleclass.pyx0000664000175000017500000000017012542002467022235 0ustar  stefanstefan00000000000000class Inquisition(object):
    """Something that nobody expects."""

    def __repr__(self):
        return "Surprise!"
Cython-0.26.1/tests/broken/plex2.pyx0000664000175000017500000000016612542002467020053 0ustar  stefanstefan00000000000000cdef class Spam:
    pass

cdef void foo(object blarg):
    pass

cdef void xyzzy():
    cdef Spam spam
    foo(spam)
Cython-0.26.1/tests/broken/cdefexternblock.pyx0000664000175000017500000000041612542002467022161 0ustar  stefanstefan00000000000000cdef extern from "cheese.h":

    ctypedef int camembert

    struct roquefort:
        int x

    char *swiss

    void cheddar()

    class external.runny [object runny_obj]:
        cdef int a
        def __init__(self):
            pass

cdef runny r
r = x
r.a = 42
Cython-0.26.1/tests/testsupport/0000775000175000017500000000000013151203436017404 5ustar  stefanstefan00000000000000Cython-0.26.1/tests/testsupport/cythonarrayutil.pxi0000664000175000017500000000134312542002467023374 0ustar  stefanstefan00000000000000from libc.stdlib cimport malloc, free
cimport cython
from cython.view cimport array

cdef void callback(void *data):
    print "callback called"
    free(data)

def create_array(shape, mode, use_callback=False):
    cdef array result = array(shape, itemsize=sizeof(int),
                              format='i', mode=mode)
    cdef int *data = <int *> result.data
    cdef int i, j, cidx, fidx

    for i in range(shape[0]):
        for j in range(shape[1]):
            cidx = i * shape[1] + j
            fidx = i + j * shape[0]

            if mode == 'fortran':
                data[fidx] = cidx
            else:
                data[cidx] = cidx

    if use_callback:
        result.callback_free_data = callback

    return result
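
# Illustrative usage sketch (not part of the original file): test modules that
# include this .pxi typically build a small typed array and read it back, e.g.
#
#     a = create_array((2, 3), mode="c")
#     b = create_array((2, 3), mode="fortran", use_callback=True)
#
# Element (i, j) then holds i * shape[1] + j regardless of the memory layout.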
Cython-0.26.1/.gitrev0000664000175000017500000000005113151203435015122 0ustar  stefanstefan0000000000000039c71d912af541229d24da17987d65e65731d2bf
Cython-0.26.1/cygdb.py0000775000175000017500000000017312542002467015300 0ustar  stefanstefan00000000000000#!/usr/bin/env python

import sys

from Cython.Debugger import Cygdb as cygdb

if __name__ == '__main__':
    cygdb.main()
Cython-0.26.1/COPYING.txt0000664000175000017500000000136412542002467015507 0ustar  stefanstefan00000000000000The original Pyrex code as of 2006-04 is licensed under the following
license: "Copyright stuff: Pyrex is free of restrictions. You may use,
redistribute, modify and distribute modified versions."

------------------

Cython, which derives from Pyrex, is licensed under the Apache 2.0
Software License.  More precisely, all modifications and new code
made to go from Pyrex to Cython are so licensed.

See LICENSE.txt for more details.

------------------

The output of a Cython compilation is NOT considered a derivative
work of Cython.  Specifically, though the compilation process may
embed snippets of varying lengths into the final output, these
snippets, as embedded in the output, do not encumber the resulting
output with any license restrictions.
Cython-0.26.1/Cython/0000775000175000017500000000000013151203436015072 5ustar  stefanstefan00000000000000Cython-0.26.1/Cython/Shadow.py0000664000175000017500000003021413151203171016665 0ustar  stefanstefan00000000000000# cython.* namespace for pure mode.
from __future__ import absolute_import

__version__ = "0.26.1"

try:
    from __builtin__ import basestring
except ImportError:
    basestring = str


# BEGIN shameless copy from Cython/minivect/minitypes.py

class _ArrayType(object):

    is_array = True
    subtypes = ['dtype']

    def __init__(self, dtype, ndim, is_c_contig=False, is_f_contig=False,
                 inner_contig=False, broadcasting=None):
        self.dtype = dtype
        self.ndim = ndim
        self.is_c_contig = is_c_contig
        self.is_f_contig = is_f_contig
        self.inner_contig = inner_contig or is_c_contig or is_f_contig
        self.broadcasting = broadcasting

    def __repr__(self):
        axes = [":"] * self.ndim
        if self.is_c_contig:
            axes[-1] = "::1"
        elif self.is_f_contig:
            axes[0] = "::1"

        return "%s[%s]" % (self.dtype, ", ".join(axes))


def index_type(base_type, item):
    """
    Support array type creation by slicing, e.g. double[:, :] specifies
    a 2D strided array of doubles. The syntax is the same as for
    Cython memoryviews.
    """
    class InvalidTypeSpecification(Exception):
        pass

    def verify_slice(s):
        if s.start or s.stop or s.step not in (None, 1):
            raise InvalidTypeSpecification(
                "Only a step of 1 may be provided to indicate C or "
                "Fortran contiguity")

    if isinstance(item, tuple):
        step_idx = None
        for idx, s in enumerate(item):
            verify_slice(s)
            if s.step and (step_idx or idx not in (0, len(item) - 1)):
                raise InvalidTypeSpecification(
                    "Step may only be provided once, and only in the "
                    "first or last dimension.")

            if s.step == 1:
                step_idx = idx

        return _ArrayType(base_type, len(item),
                          is_c_contig=step_idx == len(item) - 1,
                          is_f_contig=step_idx == 0)
    elif isinstance(item, slice):
        verify_slice(item)
        return _ArrayType(base_type, 1, is_c_contig=bool(item.step))
    else:
        # int[8] etc.
        assert int(item) == item  # array size must be a plain integer
        return array(base_type, item)

# END shameless copy
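
# Illustrative sketch (not part of the original module): index_type() backs
# slicing on the emulated typedefs created further down in this file, e.g. in
# pure-Python mode
#
#     spec = double[:, ::1]   # typedef.__getitem__ is index_type
#     repr(spec)              # -> "double[:, ::1]", a 2D C-contiguous array spec
#     arr_t = double[8]       # a plain integer yields an array type instead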


compiled = False

_Unspecified = object()

# Function decorators

def _empty_decorator(x):
    return x

def locals(**arg_types):
    return _empty_decorator

def test_assert_path_exists(*paths):
    return _empty_decorator

def test_fail_if_path_exists(*paths):
    return _empty_decorator

class _EmptyDecoratorAndManager(object):
    def __call__(self, x):
        return x
    def __enter__(self):
        pass
    def __exit__(self, exc_type, exc_value, traceback):
        pass

class _Optimization(object):
    pass

cclass = ccall = cfunc = _EmptyDecoratorAndManager()

returns = wraparound = boundscheck = initializedcheck = nonecheck = \
    overflowcheck = embedsignature = cdivision = cdivision_warnings = \
    always_allows_keywords = profile = linetrace = infer_type = \
    unraisable_tracebacks = freelist = \
        lambda arg: _EmptyDecoratorAndManager()

optimization = _Optimization()

overflowcheck.fold = optimization.use_switch = \
    optimization.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager()

final = internal = type_version_tag = no_gc_clear = no_gc = _empty_decorator
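
# Illustrative sketch (not part of the original module): in pure-Python mode the
# directive decorators above are inert, e.g.
#
#     @boundscheck(False)
#     @wraparound(False)
#     def f(values, i):
#         return values[i]
#
# leaves f unchanged; the directives only take effect when the module is compiled.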


_cython_inline = None
def inline(f, *args, **kwds):
    if isinstance(f, basestring):
        global _cython_inline
        if _cython_inline is None:
            from Cython.Build.Inline import cython_inline as _cython_inline
        return _cython_inline(f, *args, **kwds)
    else:
        assert len(args) == len(kwds) == 0
        return f


def compile(f):
    from Cython.Build.Inline import RuntimeCompiledFunction
    return RuntimeCompiledFunction(f)


# Special functions

def cdiv(a, b):
    # C integer division truncates towards zero, unlike Python's floor division.
    q = a // b
    if q < 0 and q * b != a:
        q += 1
    return q

def cmod(a, b):
    # The C remainder takes the sign of the dividend, not of the divisor.
    r = a % b
    if (a * b) < 0 and r != 0:
        r -= b
    return r
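
# Illustrative sketch (not part of the original module): cdiv() and cmod() mirror
# C integer semantics, which differ from Python's for negative operands, e.g.
#
#     cdiv(-7, 2)   # -> -3   (Python: -7 // 2 == -4)
#     cmod(-7, 2)   # -> -1   (Python: -7 %  2 ==  1)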


# Emulated language constructs

def cast(type, *args, **kwargs):
    kwargs.pop('typecheck', None)
    assert not kwargs
    if hasattr(type, '__call__'):
        return type(*args)
    else:
        return args[0]

def sizeof(arg):
    return 1

def typeof(arg):
    return arg.__class__.__name__
    # return type(arg)

def address(arg):
    return pointer(type(arg))([arg])

def declare(type=None, value=_Unspecified, **kwds):
    if type not in (None, object) and hasattr(type, '__call__'):
        if value is not _Unspecified:
            return type(value)
        else:
            return type()
    else:
        return value

class _nogil(object):
    """Support for 'with nogil' statement
    """
    def __enter__(self):
        pass
    def __exit__(self, exc_class, exc, tb):
        return exc_class is None

nogil = _nogil()
gil = _nogil()
del _nogil
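
# Illustrative sketch (not part of the original module): in pure-Python mode
# "with nogil:" is simply a no-op context manager, e.g.
#
#     with nogil:
#         pass    # compiled code would release the GIL around this block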

# Emulated types

class CythonMetaType(type):

    def __getitem__(type, ix):
        return array(type, ix)

CythonTypeObject = CythonMetaType('CythonTypeObject', (object,), {})

class CythonType(CythonTypeObject):

    def _pointer(self, n=1):
        for i in range(n):
            self = pointer(self)
        return self

class PointerType(CythonType):

    def __init__(self, value=None):
        if isinstance(value, (ArrayType, PointerType)):
            self._items = [cast(self._basetype, a) for a in value._items]
        elif isinstance(value, list):
            self._items = [cast(self._basetype, a) for a in value]
        elif value is None or value == 0:
            self._items = []
        else:
            raise ValueError

    def __getitem__(self, ix):
        if ix < 0:
            raise IndexError("negative indexing not allowed in C")
        return self._items[ix]

    def __setitem__(self, ix, value):
        if ix < 0:
            raise IndexError("negative indexing not allowed in C")
        self._items[ix] = cast(self._basetype, value)

    def __eq__(self, value):
        if value is None and not self._items:
            return True
        elif type(self) != type(value):
            return False
        else:
            return not self._items and not value._items

    def __repr__(self):
        return "%s *" % (self._basetype,)

class ArrayType(PointerType):

    def __init__(self):
        self._items = [None] * self._n


class StructType(CythonType):

    def __init__(self, cast_from=_Unspecified, **data):
        if cast_from is not _Unspecified:
            # do cast
            if len(data) > 0:
                raise ValueError('Cannot accept keyword arguments when casting.')
            if type(cast_from) is not type(self):
                raise ValueError('Cannot cast from %s'%cast_from)
            for key, value in cast_from.__dict__.items():
                setattr(self, key, value)
        else:
            for key, value in data.items():
                setattr(self, key, value)

    def __setattr__(self, key, value):
        if key in self._members:
            self.__dict__[key] = cast(self._members[key], value)
        else:
            raise AttributeError("Struct has no member '%s'" % key)


class UnionType(CythonType):

    def __init__(self, cast_from=_Unspecified, **data):
        if cast_from is not _Unspecified:
            # do type cast
            if len(data) > 0:
                raise ValueError('Cannot accept keyword arguments when casting.')
            if isinstance(cast_from, dict):
                datadict = cast_from
            elif type(cast_from) is type(self):
                datadict = cast_from.__dict__
            else:
                raise ValueError('Cannot cast from %s'%cast_from)
        else:
            datadict = data
        if len(datadict) > 1:
            raise AttributeError("Union can only store one field at a time.")
        for key, value in datadict.items():
            setattr(self, key, value)

    def __setattr__(self, key, value):
        if key == '__dict__':
            CythonType.__setattr__(self, key, value)
        elif key in self._members:
            self.__dict__ = {key: cast(self._members[key], value)}
        else:
            raise AttributeError("Union has no member '%s'" % key)

def pointer(basetype):
    class PointerInstance(PointerType):
        _basetype = basetype
    return PointerInstance

def array(basetype, n):
    class ArrayInstance(ArrayType):
        _basetype = basetype
        _n = n
    return ArrayInstance

def struct(**members):
    class StructInstance(StructType):
        _members = members
    for key in members:
        setattr(StructInstance, key, None)
    return StructInstance

def union(**members):
    class UnionInstance(UnionType):
        _members = members
    for key in members:
        setattr(UnionInstance, key, None)
    return UnionInstance
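
# Illustrative sketch (not part of the original module): struct() and union()
# emulate C aggregates with member checking, e.g.
#
#     Point = struct(x=int, y=int)
#     p = Point(x=1, y=2)       # unknown fields raise AttributeError
#     Value = union(i=int, f=float)
#     v = Value(i=5)            # a union holds only one field at a time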

class typedef(CythonType):

    def __init__(self, type, name=None):
        self._basetype = type
        self.name = name

    def __call__(self, *arg):
        value = cast(self._basetype, *arg)
        return value

    def __repr__(self):
        return self.name or str(self._basetype)

    __getitem__ = index_type

class _FusedType(CythonType):
    pass


def fused_type(*args):
    if not args:
        raise TypeError("Expected at least one type as argument")

    # Find the numeric type with biggest rank if all types are numeric
    rank = -1
    for type in args:
        if type not in (py_int, py_long, py_float, py_complex):
            break

        index = type_ordering.index(type)
        if index > rank:
            rank = index
            result_type = type
    else:
        return result_type

    # Not a simple numeric type, return a fused type instance. The result
    # isn't really meant to be used, as we can't keep track of the context in
    # pure-mode. Casting won't do anything in this case.
    return _FusedType()


def _specialized_from_args(signatures, args, kwargs):
    "Perhaps this should be implemented in a TreeFragment in Cython code"
    raise Exception("yet to be implemented")


py_int = typedef(int, "int")
try:
    py_long = typedef(long, "long")
except NameError:  # Py3
    py_long = typedef(int, "long")
py_float = typedef(float, "float")
py_complex = typedef(complex, "double complex")


# Predefined types

int_types = ['char', 'short', 'Py_UNICODE', 'int', 'Py_UCS4', 'long', 'longlong', 'Py_ssize_t', 'size_t']
float_types = ['longdouble', 'double', 'float']
complex_types = ['longdoublecomplex', 'doublecomplex', 'floatcomplex', 'complex']
other_types = ['bint', 'void']

to_repr = {
    'longlong': 'long long',
    'longdouble': 'long double',
    'longdoublecomplex': 'long double complex',
    'doublecomplex': 'double complex',
    'floatcomplex': 'float complex',
}.get

gs = globals()

# note: cannot simply name the unicode type here as 2to3 gets in the way and replaces it by str
try:
    import __builtin__ as builtins
except ImportError:  # Py3
    import builtins

gs['unicode'] = typedef(getattr(builtins, 'unicode', str), 'unicode')
del builtins

for name in int_types:
    reprname = to_repr(name, name)
    gs[name] = typedef(py_int, reprname)
    if name not in ('Py_UNICODE', 'Py_UCS4') and not name.endswith('size_t'):
        gs['u'+name] = typedef(py_int, "unsigned " + reprname)
        gs['s'+name] = typedef(py_int, "signed " + reprname)

for name in float_types:
    gs[name] = typedef(py_float, to_repr(name, name))

for name in complex_types:
    gs[name] = typedef(py_complex, to_repr(name, name))

bint = typedef(bool, "bint")
void = typedef(int, "void")

for t in int_types + float_types + complex_types + other_types:
    for i in range(1, 4):
        gs["%s_%s" % ('p'*i, t)] = globals()[t]._pointer(i)

void = typedef(None, "void")
NULL = p_void(0)

integral = floating = numeric = _FusedType()

type_ordering = [py_int, py_long, py_float, py_complex]

class CythonDotParallel(object):
    """
    The cython.parallel module.
    """

    __all__ = ['parallel', 'prange', 'threadid']

    def parallel(self, num_threads=None):
        return nogil

    def prange(self, start=0, stop=None, step=1, schedule=None, nogil=False):
        if stop is None:
            stop = start
            start = 0
        return range(start, stop, step)

    def threadid(self):
        return 0

    # def threadsavailable(self):
        # return 1

import sys
sys.modules['cython.parallel'] = CythonDotParallel()
del sys
Cython-0.26.1/Cython/Compiler/0000775000175000017500000000000013151203436016644 5ustar  stefanstefan00000000000000Cython-0.26.1/Cython/Compiler/Lexicon.py0000664000175000017500000001134513023021033020610 0ustar  stefanstefan00000000000000# cython: language_level=3, py2_import=True
#
#   Cython Scanner - Lexical Definitions
#

from __future__ import absolute_import

raw_prefixes = "rR"
bytes_prefixes = "bB"
string_prefixes = "fFuU" + bytes_prefixes
char_prefixes = "cC"
any_string_prefix = raw_prefixes + string_prefixes + char_prefixes
IDENT = 'IDENT'


def make_lexicon():
    from ..Plex import \
        Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
        TEXT, IGNORE, State, Lexicon
    from .Scanning import Method

    letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
    digit = Any("0123456789")
    bindigit = Any("01")
    octdigit = Any("01234567")
    hexdigit = Any("0123456789ABCDEFabcdef")
    indentation = Bol + Rep(Any(" \t"))

    def underscore_digits(d):
        return Rep1(d) + Rep(Str("_") + Rep1(d))
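
    # Illustrative note (not part of the original file): underscore_digits(digit)
    # accepts PEP 515 style literals such as "1_000_000", i.e. digit groups
    # separated by single underscores.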

    decimal = underscore_digits(digit)
    dot = Str(".")
    exponent = Any("Ee") + Opt(Any("+-")) + decimal
    decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)

    name = letter + Rep(letter | digit)
    intconst = decimal | (Str("0") + ((Any("Xx") + underscore_digits(hexdigit)) |
                                      (Any("Oo") + underscore_digits(octdigit)) |
                                      (Any("Bb") + underscore_digits(bindigit)) ))
    intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
    intliteral = intconst + intsuffix
    fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
    imagconst = (intconst | fltconst) + Any("jJ")

    # invalid combinations of prefixes are caught in p_string_literal
    beginstring = Opt(Rep(Any(string_prefixes + raw_prefixes)) |
                      Any(char_prefixes)
                      ) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
    two_oct = octdigit + octdigit
    three_oct = octdigit + octdigit + octdigit
    two_hex = hexdigit + hexdigit
    four_hex = two_hex + two_hex
    escapeseq = Str("\\") + (two_oct | three_oct |
                             Str('N{') + Rep(AnyBut('}')) + Str('}') |
                             Str('u') + four_hex | Str('x') + two_hex |
                             Str('U') + four_hex + four_hex | AnyChar)

    bra = Any("([{")
    ket = Any(")]}")
    punct = Any(":,;+-*/|&<>=.%`~^?!@")
    diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
                    "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
                    "<<=", ">>=", "**=", "//=", "->", "@=")
    spaces = Rep1(Any(" \t\f"))
    escaped_newline = Str("\\\n")
    lineterm = Eol + Opt(Str("\n"))

    comment = Str("#") + Rep(AnyBut("\n"))

    return Lexicon([
        (name, IDENT),
        (intliteral, Method('strip_underscores', symbol='INT')),
        (fltconst, Method('strip_underscores', symbol='FLOAT')),
        (imagconst, Method('strip_underscores', symbol='IMAG')),
        (punct | diphthong, TEXT),

        (bra, Method('open_bracket_action')),
        (ket, Method('close_bracket_action')),
        (lineterm, Method('newline_action')),

        (beginstring, Method('begin_string_action')),

        (comment, IGNORE),
        (spaces, IGNORE),
        (escaped_newline, IGNORE),

        State('INDENT', [
            (comment + lineterm, Method('commentline')),
            (Opt(spaces) + Opt(comment) + lineterm, IGNORE),
            (indentation, Method('indentation_action')),
            (Eof, Method('eof_action'))
        ]),

        State('SQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Str('"'), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str("'"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        State('DQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\n\\')), 'CHARS'),
            (Str("'"), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str('"'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        State('TSQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str("'''"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        State('TDQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\'\n\\')), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str('"""'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        (Eof, Method('eof_action'))
        ],

        # FIXME: Plex 1.9 needs different args here from Plex 1.1.4
        #debug_flags = scanner_debug_flags,
        #debug_file = scanner_dump_file
        )

Cython-0.26.1/Cython/Compiler/Pythran.py0000664000175000017500000001255113143605603020652 0ustar  stefanstefan00000000000000from .PyrexTypes import BufferType, CType, CTypedefType, CStructOrUnionType

_pythran_var_prefix = "__pythran__"
# Pythran/Numpy specific operations
def has_np_pythran(env):
    while env is not None:
        if hasattr(env, "directives") and env.directives.get('np_pythran', False):
            return True
        env = env.outer_scope

def is_pythran_supported_dtype(type_):
    if isinstance(type_, CTypedefType):
        return is_pythran_supported_type(type_.typedef_base_type)
    return type_.is_numeric

def pythran_type(Ty,ptype="ndarray"):
    if Ty.is_buffer:
        ndim,dtype = Ty.ndim, Ty.dtype
        if isinstance(dtype, CStructOrUnionType):
            ctype = dtype.cname
        elif isinstance(dtype, CType):
            ctype = dtype.sign_and_name()
        elif isinstance(dtype, CTypedefType):
            ctype = dtype.typedef_cname
        else:
            raise ValueError("unsupported type %s!" % str(dtype))
        return "pythonic::types::%s<%s,%d>" % (ptype,ctype, ndim)
    from .PyrexTypes import PythranExpr
    if Ty.is_pythran_expr:
        return Ty.pythran_type
    #if Ty.is_none:
    #    return "decltype(pythonic::__builtin__::None)"
    if Ty.is_numeric:
        return Ty.sign_and_name()
    raise ValueError("unsupported pythran type %s (%s)" % (str(Ty), str(type(Ty))))
    return None
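
# Illustrative sketch (not part of the original module): for a 2D double buffer,
# pythran_type() builds a string such as
#
#     "pythonic::types::ndarray<double,2>"
#
# while plain numeric scalar types map to their C name (e.g. "double").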

def type_remove_ref(ty):
    return "typename std::remove_reference<%s>::type" % ty

def pythran_binop_type(op, tA, tB):
    return "decltype(std::declval<%s>() %s std::declval<%s>())" % \
        (pythran_type(tA), op, pythran_type(tB))

def pythran_unaryop_type(op, type_):
    return "decltype(%sstd::declval<%s>())" % (
        op, pythran_type(type_))

def pythran_indexing_type(type_, indices):
    def index_code(idx):
        if idx.is_slice:
            if idx.step.is_none:
                func = "contiguous_slice"
                n = 2
            else:
                func = "slice"
                n = 3
            return "pythonic::types::%s(%s)" % (func,",".join(["0"]*n))
        elif idx.type.is_int:
            return "std::declval<long>()"
        elif idx.type.is_pythran_expr:
            return "std::declval<%s>()" % idx.type.pythran_type
        raise ValueError("unsupported index type %s!" % idx.type)
    indexing = ",".join(index_code(idx) for idx in indices)
    return type_remove_ref("decltype(std::declval<%s>()(%s))" % (pythran_type(type_), indexing))

def pythran_indexing_code(indices):
    def index_code(idx):
        if idx.is_slice:
            values = idx.start, idx.stop, idx.step
            if idx.step.is_none:
                func = "contiguous_slice"
                values = values[:2]
            else:
                func = "slice"
            return "pythonic::types::%s(%s)" % (func,",".join((v.pythran_result() for v in values)))
        elif idx.type.is_int:
            return idx.result()
        elif idx.type.is_pythran_expr:
            return idx.pythran_result()
        raise ValueError("unsupported index type %s!" % str(idx.type))
    return ",".join(index_code(idx) for idx in indices)

def pythran_func_type(func, args):
    args = ",".join(("std::declval<%s>()" % pythran_type(a.type) for a in args))
    return "decltype(pythonic::numpy::functor::%s{}(%s))" % (func, args)

def to_pythran(op,ptype=None):
    op_type = op.type
    if is_type(op_type,["is_pythran_expr", "is_int", "is_numeric", "is_float",
        "is_complex"]):
        return op.result()
    if op.is_none:
        return "pythonic::__builtin__::None"
    if ptype is None:
        ptype = pythran_type(op_type)
    assert(op.type.is_pyobject)
    return "from_python<%s>(%s)" % (ptype, op.py_result())

def from_pythran():
    return "to_python"

def is_type(type_, types):
    for attr in types:
        if getattr(type_, attr, False):
            return True
    return False

def is_pythran_supported_node_or_none(node):
    return node.is_none or is_pythran_supported_type(node.type)

def is_pythran_supported_type(type_):
    pythran_supported = (
        "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_none",
        "is_complex")
    return is_type(type_, pythran_supported) or is_pythran_expr(type_)

def is_pythran_supported_operation_type(type_):
    pythran_supported = (
        "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_complex")
    return is_type(type_,pythran_supported) or is_pythran_expr(type_)

def is_pythran_expr(type_):
    return type_.is_pythran_expr

def is_pythran_buffer(type_):
    return type_.is_numpy_buffer and is_pythran_supported_dtype(type_.dtype) and \
           type_.mode in ("c","strided") and not type_.cast

def include_pythran_generic(env):
    # Generic files
    env.add_include_file("pythonic/core.hpp")
    env.add_include_file("pythonic/python/core.hpp")
    env.add_include_file("pythonic/types/bool.hpp")
    env.add_include_file("pythonic/types/ndarray.hpp")
    env.add_include_file("<new>") # for placement new

    for i in (8,16,32,64):
        env.add_include_file("pythonic/types/uint%d.hpp" % i)
        env.add_include_file("pythonic/types/int%d.hpp" % i)
    for t in ("float", "float32", "float64", "set", "slice", "tuple", "int",
            "long", "complex", "complex64", "complex128"):
        env.add_include_file("pythonic/types/%s.hpp" % t)

def include_pythran_type(env, type_):
    pass

def type_is_numpy(type_):
    if not hasattr(type_, "is_numpy"):
        return False
    return type_.is_numpy
Cython-0.26.1/Cython/Compiler/Buffer.py0000664000175000017500000007222213150045407020435 0ustar  stefanstefan00000000000000from __future__ import absolute_import

from .Visitor import CythonTransform
from .ModuleNode import ModuleNode
from .Errors import CompileError
from .UtilityCode import CythonUtilityCode
from .Code import UtilityCode, TempitaUtilityCode

from . import Options
from . import Interpreter
from . import PyrexTypes
from . import Naming
from . import Symtab

def dedent(text, reindent=0):
    from textwrap import dedent
    text = dedent(text)
    if reindent > 0:
        indent = " " * reindent
        text = '\n'.join([indent + x for x in text.split('\n')])
    return text

class IntroduceBufferAuxiliaryVars(CythonTransform):

    #
    # Entry point
    #

    buffers_exists = False
    using_memoryview = False

    def __call__(self, node):
        assert isinstance(node, ModuleNode)
        self.max_ndim = 0
        result = super(IntroduceBufferAuxiliaryVars, self).__call__(node)
        if self.buffers_exists:
            use_bufstruct_declare_code(node.scope)
            use_py2_buffer_functions(node.scope)
            node.scope.use_utility_code(empty_bufstruct_utility)

        return result


    #
    # Basic operations for transforms
    #
    def handle_scope(self, node, scope):
        # For all buffers, insert extra variables in the scope.
        # The variables are also accessible from the buffer_info
        # on the buffer entry
        scope_items = scope.entries.items()
        bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
        if len(bufvars) > 0:
            bufvars.sort(key=lambda entry: entry.name)
            self.buffers_exists = True

        memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
        if len(memviewslicevars) > 0:
            self.buffers_exists = True


        for (name, entry) in scope_items:
            if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
                self.using_memoryview = True
                break
        del scope_items

        if isinstance(node, ModuleNode) and len(bufvars) > 0:
            # for now...note that pos is wrong
            raise CompileError(node.pos, "Buffer vars not allowed in module scope")
        for entry in bufvars:
            if entry.type.dtype.is_ptr:
                raise CompileError(node.pos, "Buffers with pointer types not yet supported.")

            name = entry.name
            buftype = entry.type
            if buftype.ndim > Options.buffer_max_dims:
                raise CompileError(node.pos,
                        "Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
            if buftype.ndim > self.max_ndim:
                self.max_ndim = buftype.ndim

            # Declare auxiliary vars
            def decvar(type, prefix):
                cname = scope.mangle(prefix, name)
                aux_var = scope.declare_var(name=None, cname=cname,
                                            type=type, pos=node.pos)
                if entry.is_arg:
                    aux_var.used = True # otherwise, NameNode will mark whether it is used

                return aux_var

            auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
                       (PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
            pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]

            entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)

        scope.buffer_entries = bufvars
        self.scope = scope

    def visit_ModuleNode(self, node):
        self.handle_scope(node, node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.handle_scope(node, node.local_scope)
        self.visitchildren(node)
        return node

#
# Analysis
#
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered!
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
buffer_positional_options_count = 1 # anything beyond this needs keyword argument

ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
ERR_BUF_TOO_MANY = 'Too many buffer options'
ERR_BUF_DUP = '"%s" buffer option already supplied'
ERR_BUF_MISSING = '"%s" missing'
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
ERR_BUF_BOOL = '"%s" must be a boolean'

def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
    """
    Must be called during type analysis, as analyse is called
    on the dtype argument.

    posargs and dictargs should consist of a list and a dict
    of tuples (value, pos). Defaults should be a dict of values.

    Returns a dict containing all the options a buffer can have and
    its value (with the positions stripped).
    """
    if defaults is None:
        defaults = buffer_defaults

    posargs, dictargs = Interpreter.interpret_compiletime_options(
        posargs, dictargs, type_env=env, type_args=(0, 'dtype'))

    if len(posargs) > buffer_positional_options_count:
        raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)

    options = {}
    for name, (value, pos) in dictargs.items():
        if not name in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        options[name] = value

    for name, (value, pos) in zip(buffer_options, posargs):
        if not name in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        if name in options:
            raise CompileError(pos, ERR_BUF_DUP % name)
        options[name] = value

    # Check that they are all there and copy defaults
    for name in buffer_options:
        if not name in options:
            try:
                options[name] = defaults[name]
            except KeyError:
                if need_complete:
                    raise CompileError(globalpos, ERR_BUF_MISSING % name)

    dtype = options.get("dtype")
    if dtype and dtype.is_extension_type:
        raise CompileError(globalpos, ERR_BUF_DTYPE)

    ndim = options.get("ndim")
    if ndim and (not isinstance(ndim, int) or ndim < 0):
        raise CompileError(globalpos, ERR_BUF_NDIM)

    mode = options.get("mode")
    if mode and not (mode in ('full', 'strided', 'c', 'fortran')):
        raise CompileError(globalpos, ERR_BUF_MODE)

    def assert_bool(name):
        x = options.get(name)
        if not isinstance(x, bool):
            raise CompileError(globalpos, ERR_BUF_BOOL % name)

    assert_bool('negative_indices')
    assert_bool('cast')

    return options


#
# Code generation
#

class BufferEntry(object):
    def __init__(self, entry):
        self.entry = entry
        self.type = entry.type
        self.cname = entry.buffer_aux.buflocal_nd_var.cname
        self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
        self.buf_ptr_type = entry.type.buffer_ptr_type
        self.init_attributes()

    def init_attributes(self):
        self.shape = self.get_buf_shapevars()
        self.strides = self.get_buf_stridevars()
        self.suboffsets = self.get_buf_suboffsetvars()

    def get_buf_suboffsetvars(self):
        return self._for_all_ndim("%s.diminfo[%d].suboffsets")

    def get_buf_stridevars(self):
        return self._for_all_ndim("%s.diminfo[%d].strides")

    def get_buf_shapevars(self):
        return self._for_all_ndim("%s.diminfo[%d].shape")

    def _for_all_ndim(self, s):
        return [s % (self.cname, i) for i in range(self.type.ndim)]

    def generate_buffer_lookup_code(self, code, index_cnames):
        # Create buffer lookup and return it
        # This is done via utility macros/inline functions, which vary
        # according to the access mode used.
        params = []
        nd = self.type.ndim
        mode = self.type.mode
        if mode == 'full':
            for i, s, o in zip(index_cnames,
                               self.get_buf_stridevars(),
                               self.get_buf_suboffsetvars()):
                params.append(i)
                params.append(s)
                params.append(o)
            funcname = "__Pyx_BufPtrFull%dd" % nd
            funcgen = buf_lookup_full_code
        else:
            if mode == 'strided':
                funcname = "__Pyx_BufPtrStrided%dd" % nd
                funcgen = buf_lookup_strided_code
            elif mode == 'c':
                funcname = "__Pyx_BufPtrCContig%dd" % nd
                funcgen = buf_lookup_c_code
            elif mode == 'fortran':
                funcname = "__Pyx_BufPtrFortranContig%dd" % nd
                funcgen = buf_lookup_fortran_code
            else:
                assert False
            for i, s in zip(index_cnames, self.get_buf_stridevars()):
                params.append(i)
                params.append(s)

        # Make sure the utility code is available
        if funcname not in code.globalstate.utility_codes:
            code.globalstate.utility_codes.add(funcname)
            protocode = code.globalstate['utility_code_proto']
            defcode = code.globalstate['utility_code_def']
            funcgen(protocode, defcode, name=funcname, nd=nd)

        buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
        ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
                                      ", ".join(params))
        return ptrcode


def get_flags(buffer_aux, buffer_type):
    flags = 'PyBUF_FORMAT'
    mode = buffer_type.mode
    if mode == 'full':
        flags += '| PyBUF_INDIRECT'
    elif mode == 'strided':
        flags += '| PyBUF_STRIDES'
    elif mode == 'c':
        flags += '| PyBUF_C_CONTIGUOUS'
    elif mode == 'fortran':
        flags += '| PyBUF_F_CONTIGUOUS'
    else:
        assert False
    if buffer_aux.writable_needed: flags += "| PyBUF_WRITABLE"
    return flags
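
# Illustrative sketch (not part of the original module): a writable buffer in
# mode "strided" yields the flag expression
#
#     "PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE"
#
# which is passed verbatim to the generated __Pyx_GetBufferAndValidate() call.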

def used_buffer_aux_vars(entry):
    buffer_aux = entry.buffer_aux
    buffer_aux.buflocal_nd_var.used = True
    buffer_aux.rcbuf_var.used = True

def put_unpack_buffer_aux_into_scope(buf_entry, code):
    # Generate code to copy the needed struct info into local
    # variables.
    buffer_aux, mode = buf_entry.buffer_aux, buf_entry.type.mode
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname

    fldnames = ['strides', 'shape']
    if mode == 'full':
        fldnames.append('suboffsets')

    ln = []
    for i in range(buf_entry.type.ndim):
        for fldname in fldnames:
            ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % \
                    (pybuffernd_struct, i, fldname,
                     pybuffernd_struct, fldname, i))
    code.putln(' '.join(ln))

def put_init_vars(entry, code):
    bufaux = entry.buffer_aux
    pybuffernd_struct = bufaux.buflocal_nd_var.cname
    pybuffer_struct = bufaux.rcbuf_var.cname
    # init pybuffer_struct
    code.putln("%s.pybuffer.buf = NULL;" % pybuffer_struct)
    code.putln("%s.refcount = 0;" % pybuffer_struct)
    # init the buffer object
    # code.put_init_var_to_py_none(entry)
    # init the pybuffernd_struct
    code.putln("%s.data = NULL;" % pybuffernd_struct)
    code.putln("%s.rcbuffer = &%s;" % (pybuffernd_struct, pybuffer_struct))

def put_acquire_arg_buffer(entry, code, pos):
    code.globalstate.use_utility_code(acquire_utility_code)
    buffer_aux = entry.buffer_aux
    getbuffer = get_getbuffer_call(code, entry.cname, buffer_aux, entry.type)

    # Acquire any new buffer
    code.putln("{")
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
    code.putln(code.error_goto_if("%s == -1" % getbuffer, pos))
    code.putln("}")
    # An exception raised in arg parsing cannot be caught, so there is no
    # need to care about the buffer then.
    put_unpack_buffer_aux_into_scope(entry, code)

def put_release_buffer_code(code, entry):
    code.globalstate.use_utility_code(acquire_utility_code)
    code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname)

def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
    ndim = buffer_type.ndim
    cast = int(buffer_type.cast)
    flags = get_flags(buffer_aux, buffer_type)
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname

    dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)

    return ("__Pyx_GetBufferAndValidate(&%(pybuffernd_struct)s.rcbuffer->pybuffer, "
            "(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, "
            "%(cast)d, __pyx_stack)" % locals())

def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
                         is_initialized, pos, code):
    """
    Generate code for reassigning a buffer variable. This only deals with getting
    the buffer auxiliary structure and variables set up correctly; the assignment
    itself and the refcounting are the responsibility of the caller.

    However, the assignment operation may throw an exception so that the reassignment
    never happens.

    Depending on the circumstances there are two possible outcomes:
    - Old buffer released, new acquired, rhs assigned to lhs
    - Old buffer released, new acquired which fails, reacquire old lhs buffer
      (which may or may not succeed).
    """

    buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
    code.globalstate.use_utility_code(acquire_utility_code)
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    flags = get_flags(buffer_aux, buffer_type)

    code.putln("{")  # Set up necessary stack for getbuffer
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())

    getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below

    if is_initialized:
        # Release any existing buffer
        code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
        # Acquire
        retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
        code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
        # If acquisition failed, attempt to reacquire the old buffer
        # before raising the exception. A failure of reacquisition
        # will cause the reacquisition exception to be reported, one
        # can consider working around this later.
        type, value, tb = [code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
                           for i in range(3)]
        code.putln('PyErr_Fetch(&%s, &%s, &%s);' % (type, value, tb))
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
        code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % (type, value, tb)) # Do not refnanny these!
        code.globalstate.use_utility_code(raise_buffer_fallback_code)
        code.putln('__Pyx_RaiseBufferFallbackError();')
        code.putln('} else {')
        code.putln('PyErr_Restore(%s, %s, %s);' % (type, value, tb))
        for t in (type, value, tb):
            code.funcstate.release_temp(t)
        code.putln('}')
        code.putln('}')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln(code.error_goto_if_neg(retcode_cname, pos))
        code.funcstate.release_temp(retcode_cname)
    else:
        # Our entry had no previous value, so set to None when acquisition fails.
        # In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
        # so it suffices to set the buf field to NULL.
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
        code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
                   (lhs_cname,
                    PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
                    pybuffernd_struct))
        code.putln(code.error_goto(pos))
        code.put('} else {')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln('}')

    code.putln("}") # Release stack


def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
                           pos, code, negative_indices, in_nogil_context):
    """
    Generates code to process indices and calculate an offset into
    a buffer. Returns a C string which gives a pointer which can be
    read from or written to at will (it is an expression so caller should
    store it in a temporary if it is used more than once).

    As the bounds checking can have any number of combinations of unsigned
    arguments, smart optimizations etc., we insert it directly in the function
    body. The lookup however is delegated to an inline function that is instantiated
    once per ndim (lookups with suboffsets tend to get quite complicated).

    entry is a BufferEntry
    """
    negative_indices = directives['wraparound'] and negative_indices

    if directives['boundscheck']:
        # Check bounds and fix negative indices.
        # We allocate a temporary which is initialized to -1, meaning OK (!).
        # If an error occurs, the temp is set to the index dimension the
        # error is occurring at.
        failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = -1;" % failed_dim_temp)
        for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
            if signed != 0:
                # not unsigned, deal with negative index
                code.putln("if (%s < 0) {" % cname)
                if negative_indices:
                    code.putln("%s += %s;" % (cname, shape))
                    code.putln("if (%s) %s = %d;" % (
                        code.unlikely("%s < 0" % cname),
                        failed_dim_temp, dim))
                else:
                    code.putln("%s = %d;" % (failed_dim_temp, dim))
                code.put("} else ")
            # check bounds in positive direction
            if signed != 0:
                cast = ""
            else:
                cast = "(size_t)"
            code.putln("if (%s) %s = %d;" % (
                code.unlikely("%s >= %s%s" % (cname, cast, shape)),
                failed_dim_temp, dim))

        if in_nogil_context:
            code.globalstate.use_utility_code(raise_indexerror_nogil)
            func = '__Pyx_RaiseBufferIndexErrorNogil'
        else:
            code.globalstate.use_utility_code(raise_indexerror_code)
            func = '__Pyx_RaiseBufferIndexError'

        code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
        code.putln('%s(%s);' % (func, failed_dim_temp))
        code.putln(code.error_goto(pos))
        code.putln('}')
        code.funcstate.release_temp(failed_dim_temp)
    elif negative_indices:
        # Only fix negative indices.
        for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
            if signed != 0:
                code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))

    return entry.generate_buffer_lookup_code(code, index_cnames)


def use_bufstruct_declare_code(env):
    env.use_utility_code(buffer_struct_declare_code)


def get_empty_bufstruct_code(max_ndim):
    code = dedent("""
        static Py_ssize_t __Pyx_zeros[] = {%s};
        static Py_ssize_t __Pyx_minusones[] = {%s};
    """) % (", ".join(["0"] * max_ndim), ", ".join(["-1"] * max_ndim))
    return UtilityCode(proto=code)

empty_bufstruct_utility = get_empty_bufstruct_code(Options.buffer_max_dims)

def buf_lookup_full_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride, sub_o_ffset
    macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
    proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))

    funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
    proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
    defin.putln(dedent("""
        static CYTHON_INLINE void* %s_imp(void* buf, %s) {
          char* ptr = (char*)buf;
        """) % (name, funcargs) + "".join([dedent("""\
          ptr += s%d * i%d;
          if (o%d >= 0) ptr = *((char**)ptr) + o%d;
        """) % (i, i, i, i) for i in range(nd)]
        ) + "\nreturn ptr;\n}")

def buf_lookup_strided_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride
    args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
    offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd)])
    proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, args, offset))
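
# Illustrative sketch (not part of the original module): for nd == 2 the strided
# lookup above expands to a macro along the lines of
#
#     #define __Pyx_BufPtrStrided2d(type, buf, i0, s0, i1, s1) \
#         (type)((char*)buf + i0 * s0 + i1 * s1)
#
# (wrapped here for readability; the generated line is emitted in one piece).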

def buf_lookup_c_code(proto, defin, name, nd):
    """
    Similar to strided lookup, but can assume that the last dimension
    doesn't need a stride multiplication, as a C-contiguous buffer is
    contiguous in its last dimension. Still we keep the same signature for now.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
    else:
        args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
        offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd - 1)])
        proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, nd - 1))

def buf_lookup_fortran_code(proto, defin, name, nd):
    """
    Like C lookup, but the first index is optimized instead.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
    else:
        args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
        offset = " + ".join(["i%d * s%d" % (i, i) for i in range(1, nd)])
        proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, 0))


def use_py2_buffer_functions(env):
    env.use_utility_code(GetAndReleaseBufferUtilityCode())

class GetAndReleaseBufferUtilityCode(object):
    # Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
    # For >= 2.6 we do double mode -- use the new buffer interface on objects
    # which have the right tp_flags set, but emulation otherwise.

    requires = None
    is_cython_utility = False

    def __init__(self):
        pass

    def __eq__(self, other):
        return isinstance(other, GetAndReleaseBufferUtilityCode)

    def __hash__(self):
        return 24342342

    def get_tree(self, **kwargs): pass

    def put_code(self, output):
        code = output['utility_code_def']
        proto_code = output['utility_code_proto']
        env = output.module_node.scope
        cython_scope = env.context.cython_scope

        # Search all types for __getbuffer__ overloads
        types = []
        visited_scopes = set()
        def find_buffer_types(scope):
            if scope in visited_scopes:
                return
            visited_scopes.add(scope)
            for m in scope.cimported_modules:
                find_buffer_types(m)
            for e in scope.type_entries:
                if isinstance(e.utility_code_definition, CythonUtilityCode):
                    continue
                t = e.type
                if t.is_extension_type:
                    if scope is cython_scope and not e.used:
                        continue
                    release = get = None
                    for x in t.scope.pyfunc_entries:
                        if x.name == u"__getbuffer__": get = x.func_cname
                        elif x.name == u"__releasebuffer__": release = x.func_cname
                    if get:
                        types.append((t.typeptr_cname, get, release))

        find_buffer_types(env)

        util_code = TempitaUtilityCode.load(
            "GetAndReleaseBuffer", from_file="Buffer.c",
            context=dict(types=types))

        proto = util_code.format_code(util_code.proto)
        impl = util_code.format_code(
            util_code.inject_string_constants(util_code.impl, output)[1])

        proto_code.putln(proto)
        code.putln(impl)


def mangle_dtype_name(dtype):
    # Use prefixes to separate user-defined types from builtins
    # (consider "typedef float unsigned_int")
    if dtype.is_pyobject:
        return "object"
    elif dtype.is_ptr:
        return "ptr"
    else:
        if dtype.is_typedef or dtype.is_struct_or_union:
            prefix = "nn_"
        else:
            prefix = ""
        return prefix + dtype.specialization_name()

def get_type_information_cname(code, dtype, maxdepth=None):
    """
    Output the run-time type information (__Pyx_TypeInfo) for given dtype,
    and return the name of the type info struct.

    Structs with two floats of the same size are encoded as complex numbers.
    One can distinguish complex numbers declared as a struct from those with a
    native encoding by checking whether the fields field of the type is
    filled in.
    """
    namesuffix = mangle_dtype_name(dtype)
    name = "__Pyx_TypeInfo_%s" % namesuffix
    structinfo_name = "__Pyx_StructFields_%s" % namesuffix

    if dtype.is_error: return ""

    # It's critical that walking the type info doesn't use more stack
    # depth than dtype.struct_nesting_depth() returns, so use an assertion for this
    if maxdepth is None: maxdepth = dtype.struct_nesting_depth()
    if maxdepth <= 0:
        assert False

    if name not in code.globalstate.utility_codes:
        code.globalstate.utility_codes.add(name)
        typecode = code.globalstate['typeinfo']

        arraysizes = []
        if dtype.is_array:
            while dtype.is_array:
                arraysizes.append(dtype.size)
                dtype = dtype.base_type

        complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()

        declcode = dtype.empty_declaration_code()
        if dtype.is_simple_buffer_dtype():
            structinfo_name = "NULL"
        elif dtype.is_struct:
            fields = dtype.scope.var_entries
            # Must pre-call all used types in order not to recurse utility code
            # writing.
            assert len(fields) > 0
            types = [get_type_information_cname(code, f.type, maxdepth - 1)
                     for f in fields]
            typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True)
            for f, typeinfo in zip(fields, types):
                typecode.putln('  {&%s, "%s", offsetof(%s, %s)},' %
                           (typeinfo, f.name, dtype.empty_declaration_code(), f.cname), safe=True)
            typecode.putln('  {NULL, NULL, 0}', safe=True)
            typecode.putln("};", safe=True)
        else:
            assert False

        rep = str(dtype)

        flags = "0"
        is_unsigned = "0"
        if dtype is PyrexTypes.c_char_type:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "'H'"
        elif dtype.is_int:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "%s ? 'U' : 'I'" % is_unsigned
        elif complex_possible or dtype.is_complex:
            typegroup = "'C'"
        elif dtype.is_float:
            typegroup = "'R'"
        elif dtype.is_struct:
            typegroup = "'S'"
            if dtype.packed:
                flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
        elif dtype.is_pyobject:
            typegroup = "'O'"
        else:
            assert False, dtype

        typeinfo = ('static __Pyx_TypeInfo %s = '
                        '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
        tup = (name, rep, structinfo_name, declcode,
               ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
               typegroup, is_unsigned, flags)
        typecode.putln(typeinfo % tup, safe=True)

    return name

def load_buffer_utility(util_code_name, context=None, **kwargs):
    if context is None:
        return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
    else:
        return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)

context = dict(max_dims=str(Options.buffer_max_dims))
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare",
                                                 context=context)


# Utility function to set the right exception
# The caller should immediately goto_error
raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")

raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")
buffer_structs_code = load_buffer_utility(
        "BufferFormatStructs", proto_block='utility_code_proto_before_types')
acquire_utility_code = load_buffer_utility("BufferFormatCheck",
                                           context=context,
                                           requires=[buffer_structs_code,
                                                     UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c")])

# See utility code BufferFormatFromTypeInfo
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat", context={},
                                               requires=[buffer_structs_code])
typeinfo_compare_code = load_buffer_utility("TypeInfoCompare", context={},
                                            requires=[buffer_structs_code])
Cython-0.26.1/Cython/Compiler/CodeGeneration.py0000664000175000017500000000212412542002467022107 0ustar  stefanstefan00000000000000from __future__ import absolute_import

from .Visitor import VisitorTransform
from .Nodes import StatListNode


class ExtractPxdCode(VisitorTransform):
    """
    Finds nodes in a pxd file that should generate code, and
    returns them in a StatListNode.

    The result is a tuple (StatListNode, ModuleScope), i.e.
    everything that is needed from the pxd after it is processed.

    A purer approach would be to separately compile the pxd code,
    but the result would have to be slightly more sophisticated
    than pure strings (functions + wanted interned strings +
    wanted utility code + wanted cached objects) so for now this
    approach is taken.
    """

    def __call__(self, root):
        self.funcs = []
        self.visitchildren(root)
        return (StatListNode(root.pos, stats=self.funcs), root.scope)

    def visit_FuncDefNode(self, node):
        self.funcs.append(node)
        # Do not visit children, nested funcdefnodes will
        # also be moved by this action...
        return node

    def visit_Node(self, node):
        self.visitchildren(node)
        return node
Cython-0.26.1/Cython/Compiler/Main.py0000664000175000017500000007423113143605603020114 0ustar  stefanstefan00000000000000#
#   Cython Top Level
#

from __future__ import absolute_import

import os
import re
import sys
import io

if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 2):
    sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.2+, found %d.%d\n" % tuple(sys.version_info[:2]))
    sys.exit(1)

try:
    from __builtin__ import basestring
except ImportError:
    basestring = str

from . import Errors
# Do not import Parsing here, import it when needed, because Parsing imports
# Nodes, which globally needs debug command line options initialized to set a
# conditional metaclass. These options are processed by CmdLine called from
# main() in this file.
# import Parsing
from .StringEncoding import EncodedString
from .Scanning import PyrexScanner, FileSourceDescriptor
from .Errors import PyrexError, CompileError, error, warning
from .Symtab import ModuleScope
from .. import Utils
from . import Options

from . import Version  # legacy import needed by old PyTables versions
version = Version.version  # legacy attribute - use "Cython.__version__" instead

module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")

verbose = 0

class CompilationData(object):
    #  Bundles the information that is passed from transform to transform.
    #  (For now, this is only a plain data container.)

    #  While Context contains every pxd ever loaded, path information etc.,
    #  this only contains the data related to a single compilation pass
    #
    #  pyx                   ModuleNode              Main code tree of this compilation.
    #  pxds                  {string : ModuleNode}   Trees for the pxds used in the pyx.
    #  codewriter            CCodeWriter             Where to output final code.
    #  options               CompilationOptions
    #  result                CompilationResult
    pass

class Context(object):
    #  This class encapsulates the context needed for compiling
    #  one or more Cython implementation files along with their
    #  associated and imported declaration files. It includes
    #  the root of the module import namespace and the list
    #  of directories to search for include files.
    #
    #  modules               {string : ModuleScope}
    #  include_directories   [string]
    #  future_directives     [object]
    #  language_level        int     currently 2 or 3 for Python 2/3

    cython_scope = None

    def __init__(self, include_directories, compiler_directives, cpp=False,
                 language_level=2, options=None, create_testscope=True):
        # create_testscope is a hack, set to False by subclasses, in order to break
        # an infinite loop.
        # Better code organization would fix it.

        from . import Builtin, CythonScope
        self.modules = {"__builtin__" : Builtin.builtin_scope}
        self.cython_scope = CythonScope.create_cython_scope(self)
        self.modules["cython"] = self.cython_scope
        self.include_directories = include_directories
        self.future_directives = set()
        self.compiler_directives = compiler_directives
        self.cpp = cpp
        self.options = options

        self.pxds = {}  # full name -> node tree
        self._interned = {}  # (type(value), value, *key_args) -> interned_value

        standard_include_path = os.path.abspath(os.path.normpath(
            os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
        self.include_directories = include_directories + [standard_include_path]

        self.set_language_level(language_level)

        self.gdb_debug_outputwriter = None

    def set_language_level(self, level):
        self.language_level = level
        if level >= 3:
            from .Future import print_function, unicode_literals, absolute_import, division
            self.future_directives.update([print_function, unicode_literals, absolute_import, division])
            self.modules['builtins'] = self.modules['__builtin__']

    def intern_ustring(self, value, encoding=None):
        key = (EncodedString, value, encoding)
        try:
            return self._interned[key]
        except KeyError:
            pass
        value = EncodedString(value)
        if encoding:
            value.encoding = encoding
        self._interned[key] = value
        return value

    def intern_value(self, value, *key):
        key = (type(value), value) + key
        try:
            return self._interned[key]
        except KeyError:
            pass
        self._interned[key] = value
        return value
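
    # For illustration (hypothetical values): both helpers above implement a
    # simple cache keyed on (type, value, extra key parts), so repeated requests
    # for the same constant hand back the identical object, e.g.
    #
    #     context.intern_value(42, 'c_long') is context.intern_value(42, 'c_long')
    #     -> True once the first call has populated self._interned
    #
    # intern_ustring additionally wraps the value in an EncodedString and records
    # the source encoding on it.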

    # pipeline creation functions can now be found in Pipeline.py

    def process_pxd(self, source_desc, scope, module_name):
        from . import Pipeline
        if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
            source = CompilationSource(source_desc, module_name, os.getcwd())
            result_sink = create_default_resultobj(source, self.options)
            pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink)
            result = Pipeline.run_pipeline(pipeline, source)
        else:
            pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name)
            result = Pipeline.run_pipeline(pipeline, source_desc)
        return result

    def nonfatal_error(self, exc):
        return Errors.report_error(exc)

    def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1,
                    absolute_fallback=True):
        # Finds and returns the module scope corresponding to
        # the given relative or absolute module name. If this
        # is the first time the module has been requested, finds
        # the corresponding .pxd file and processes it.
        # If relative_to is not None, it must be a module scope,
        # and the module will first be searched for relative to
        # that module, provided its name is not a dotted name.
        debug_find_module = 0
        if debug_find_module:
            print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
                module_name, relative_to, pos, need_pxd))

        scope = None
        pxd_pathname = None
        if relative_to:
            if module_name:
                # from .module import ...
                qualified_name = relative_to.qualify_name(module_name)
            else:
                # from . import ...
                qualified_name = relative_to.qualified_name
                scope = relative_to
                relative_to = None
        else:
            qualified_name = module_name

        if not module_name_pattern.match(qualified_name):
            raise CompileError(pos or (module_name, 0, 0),
                               "'%s' is not a valid module name" % module_name)

        if relative_to:
            if debug_find_module:
                print("...trying relative import")
            scope = relative_to.lookup_submodule(module_name)
            if not scope:
                pxd_pathname = self.find_pxd_file(qualified_name, pos)
                if pxd_pathname:
                    scope = relative_to.find_submodule(module_name)
        if not scope:
            if debug_find_module:
                print("...trying absolute import")
            if absolute_fallback:
                qualified_name = module_name
            scope = self
            for name in qualified_name.split("."):
                scope = scope.find_submodule(name)

        if debug_find_module:
            print("...scope = %s" % scope)
        if not scope.pxd_file_loaded:
            if debug_find_module:
                print("...pxd not loaded")
            if not pxd_pathname:
                if debug_find_module:
                    print("...looking for pxd file")
                # Only look in sys.path if we are explicitly looking
                # for a .pxd file.
                pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=need_pxd)
                if debug_find_module:
                    print("......found %s" % pxd_pathname)
                if not pxd_pathname and need_pxd:
                    # Set pxd_file_loaded such that we don't need to
                    # look for the non-existing pxd file next time.
                    scope.pxd_file_loaded = True
                    package_pathname = self.search_include_directories(qualified_name, ".py", pos)
                    if package_pathname and package_pathname.endswith('__init__.py'):
                        pass
                    else:
                        error(pos, "'%s.pxd' not found" % qualified_name.replace('.', os.sep))
            if pxd_pathname:
                scope.pxd_file_loaded = True
                try:
                    if debug_find_module:
                        print("Context.find_module: Parsing %s" % pxd_pathname)
                    rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
                    if not pxd_pathname.endswith(rel_path):
                        rel_path = pxd_pathname  # safety measure to prevent printing incorrect paths
                    source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
                    err, result = self.process_pxd(source_desc, scope, qualified_name)
                    if err:
                        raise err
                    (pxd_codenodes, pxd_scope) = result
                    self.pxds[module_name] = (pxd_codenodes, pxd_scope)
                except CompileError:
                    pass
        return scope
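
    # For illustration (hypothetical module names): with relative_to set to the
    # scope of package "pkg.sub", find_module("mod", relative_to=...) first tries
    # the relative name "pkg.sub.mod"; only if no submodule or .pxd file is found
    # there, and absolute_fallback is enabled, does it fall back to looking up
    # the absolute name "mod" from the top-level module namespace.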

    def find_pxd_file(self, qualified_name, pos, sys_path=True):
        # Search include path (and sys.path if sys_path is True) for
        # the .pxd file corresponding to the given fully-qualified
        # module name.
        # Will find either a dotted filename or a file in a
        # package directory. If a source file position is given,
        # the directory containing the source file is searched first
        # for a dotted filename, and its containing package root
        # directory is searched first for a non-dotted filename.
        pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
        if pxd is None: # XXX Keep this until Includes/Deprecated is removed
            if (qualified_name.startswith('python') or
                qualified_name in ('stdlib', 'stdio', 'stl')):
                standard_include_path = os.path.abspath(os.path.normpath(
                        os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
                deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
                self.include_directories.append(deprecated_include_path)
                try:
                    pxd = self.search_include_directories(qualified_name, ".pxd", pos)
                finally:
                    self.include_directories.pop()
                if pxd:
                    name = qualified_name
                    if name.startswith('python'):
                        warning(pos, "'%s' is deprecated, use 'cpython'" % name, 1)
                    elif name in ('stdlib', 'stdio'):
                        warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
                    elif name == 'stl':
                        warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
        if pxd is None and Options.cimport_from_pyx:
            return self.find_pyx_file(qualified_name, pos)
        return pxd

    def find_pyx_file(self, qualified_name, pos):
        # Search include path for the .pyx file corresponding to the
        # given fully-qualified module name, as for find_pxd_file().
        return self.search_include_directories(qualified_name, ".pyx", pos)

    def find_include_file(self, filename, pos):
        # Search list of include directories for filename.
        # Reports an error and returns None if not found.
        path = self.search_include_directories(filename, "", pos,
                                               include=True)
        if not path:
            error(pos, "'%s' not found" % filename)
        return path

    def search_include_directories(self, qualified_name, suffix, pos,
                                   include=False, sys_path=False):
        return Utils.search_include_directories(
            tuple(self.include_directories), qualified_name, suffix, pos, include, sys_path)

    def find_root_package_dir(self, file_path):
        return Utils.find_root_package_dir(file_path)

    def check_package_dir(self, dir, package_names):
        return Utils.check_package_dir(dir, tuple(package_names))

    def c_file_out_of_date(self, source_path, output_path):
        if not os.path.exists(output_path):
            return 1
        c_time = Utils.modification_time(output_path)
        if Utils.file_newer_than(source_path, c_time):
            return 1
        pos = [source_path]
        pxd_path = Utils.replace_suffix(source_path, ".pxd")
        if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
            return 1
        for kind, name in self.read_dependency_file(source_path):
            if kind == "cimport":
                dep_path = self.find_pxd_file(name, pos)
            elif kind == "include":
                dep_path = self.search_include_directories(name, "", pos, include=True)
            else:
                continue
            if dep_path and Utils.file_newer_than(dep_path, c_time):
                return 1
        return 0
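
    # For illustration (hypothetical file names): the check above treats the
    # generated C file as stale whenever any of its inputs is newer, roughly
    #
    #     c_time = Utils.modification_time("module.c")
    #     stale = any(Utils.file_newer_than(dep, c_time)
    #                 for dep in ("module.pyx", "module.pxd", "cimported.pxd"))
    #
    # with the cimport/include dependencies taken from the ".dep" file.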

    def find_cimported_module_names(self, source_path):
        return [ name for kind, name in self.read_dependency_file(source_path)
                 if kind == "cimport" ]

    def is_package_dir(self, dir_path):
        return Utils.is_package_dir(dir_path)

    def read_dependency_file(self, source_path):
        dep_path = Utils.replace_suffix(source_path, ".dep")
        if os.path.exists(dep_path):
            f = open(dep_path, "rU")
            chunks = [ line.strip().split(" ", 1)
                       for line in f.readlines()
                       if " " in line.strip() ]
            f.close()
            return chunks
        else:
            return ()

    def lookup_submodule(self, name):
        # Look up a top-level module. Returns None if not found.
        return self.modules.get(name, None)

    def find_submodule(self, name):
        # Find a top-level module, creating a new one if needed.
        scope = self.lookup_submodule(name)
        if not scope:
            scope = ModuleScope(name,
                parent_module = None, context = self)
            self.modules[name] = scope
        return scope

    def parse(self, source_desc, scope, pxd, full_module_name):
        if not isinstance(source_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        source_filename = source_desc.filename
        scope.cpp = self.cpp
        # Parse the given source file and return a parse tree.
        num_errors = Errors.num_errors
        try:
            with Utils.open_source_file(source_filename) as f:
                from . import Parsing
                s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
                                 scope = scope, context = self)
                tree = Parsing.p_module(s, pxd, full_module_name)
                if self.options.formal_grammar:
                    try:
                        from ..Parser import ConcreteSyntaxTree
                    except ImportError:
                        raise RuntimeError(
                            "Formal grammer can only be used with compiled Cython with an available pgen.")
                    ConcreteSyntaxTree.p_module(source_filename)
        except UnicodeDecodeError as e:
            #import traceback
            #traceback.print_exc()
            raise self._report_decode_error(source_desc, e)

        if Errors.num_errors > num_errors:
            raise CompileError()
        return tree

    def _report_decode_error(self, source_desc, exc):
        msg = exc.args[-1]
        position = exc.args[2]
        encoding = exc.args[0]

        line = 1
        column = idx = 0
        with io.open(source_desc.filename, "r", encoding='iso8859-1', newline='') as f:
            for line, data in enumerate(f, 1):
                idx += len(data)
                if idx >= position:
                    column = position - (idx - len(data)) + 1
                    break

        return error((source_desc, line, column),
                     "Decoding error, missing or incorrect coding= "
                     "at top of source (cannot decode with encoding %r: %s)" % (encoding, msg))

    def extract_module_name(self, path, options):
        # Find the fully qualified module name from the full pathname
        # of a source file.
        dir, filename = os.path.split(path)
        module_name, _ = os.path.splitext(filename)
        if "." in module_name:
            return module_name
        names = [module_name]
        while self.is_package_dir(dir):
            parent, package_name = os.path.split(dir)
            if parent == dir:
                break
            names.append(package_name)
            dir = parent
        names.reverse()
        return ".".join(names)

    def setup_errors(self, options, result):
        Errors.reset()  # clear any remaining error state
        if options.use_listing_file:
            path = result.listing_file = Utils.replace_suffix(result.main_source_file, ".lis")
        else:
            path = None
        Errors.open_listing_file(path=path,
                                 echo_to_stderr=options.errors_to_stderr)

    def teardown_errors(self, err, options, result):
        source_desc = result.compilation_source.source_desc
        if not isinstance(source_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        Errors.close_listing_file()
        result.num_errors = Errors.num_errors
        if result.num_errors > 0:
            err = True
        if err and result.c_file:
            try:
                Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
            except EnvironmentError:
                pass
            result.c_file = None

def get_output_filename(source_filename, cwd, options):
    if options.cplus:
        c_suffix = ".cpp"
    else:
        c_suffix = ".c"
    suggested_file_name = Utils.replace_suffix(source_filename, c_suffix)
    if options.output_file:
        out_path = os.path.join(cwd, options.output_file)
        if os.path.isdir(out_path):
            return os.path.join(out_path, os.path.basename(suggested_file_name))
        else:
            return out_path
    else:
        return suggested_file_name
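
# Illustrative sketch of the suffix-replacement rule above; the helper name is
# hypothetical and relies on the module-level "import os" of this file.
def _suggest_output_name_sketch(source_filename, cplus=False):
    # ".pyx"/".py" becomes ".c", or ".cpp" when compiling in C++ mode.
    base, _ = os.path.splitext(source_filename)
    return base + (".cpp" if cplus else ".c")

# Example:
#     _suggest_output_name_sketch("pkg/mod.pyx")             -> "pkg/mod.c"
#     _suggest_output_name_sketch("pkg/mod.pyx", cplus=True) -> "pkg/mod.cpp"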

def create_default_resultobj(compilation_source, options):
    result = CompilationResult()
    result.main_source_file = compilation_source.source_desc.filename
    result.compilation_source = compilation_source
    source_desc = compilation_source.source_desc
    result.c_file = get_output_filename(source_desc.filename,
                        compilation_source.cwd, options)
    result.embedded_metadata = options.embedded_metadata
    return result

def run_pipeline(source, options, full_module_name=None, context=None):
    from . import Pipeline

    source_ext = os.path.splitext(source)[1]
    options.configure_language_defaults(source_ext[1:]) # py/pyx
    if context is None:
        context = options.create_context()

    # Set up source object
    cwd = os.getcwd()
    abs_path = os.path.abspath(source)
    full_module_name = full_module_name or context.extract_module_name(source, options)

    if options.relative_path_in_code_position_comments:
        rel_path = full_module_name.replace('.', os.sep) + source_ext
        if not abs_path.endswith(rel_path):
            rel_path = source # safety measure to prevent printing incorrect paths
    else:
        rel_path = abs_path
    source_desc = FileSourceDescriptor(abs_path, rel_path)
    source = CompilationSource(source_desc, full_module_name, cwd)

    # Set up result object
    result = create_default_resultobj(source, options)

    if options.annotate is None:
        # By default, decide based on whether an html file already exists.
        html_filename = os.path.splitext(result.c_file)[0] + ".html"
        if os.path.exists(html_filename):
            with io.open(html_filename, "r", encoding="UTF-8") as html_file:
                if u'<!-- Generated by Cython' in html_file.read(100):
                    options.annotate = True
            
            
                
                Cython: {filename}
                
                
            
            
            

Generated by Cython {watermark}{more_info}

Yellow lines hint at Python interaction.
Click on a line that starts with a "+" to see the C code that Cython generated for it.

''').format(css=self._css(), js=self._js, watermark=Version.watermark, filename=os.path.basename(source_filename) if source_filename else '', more_info=coverage_info) ] if c_file: outlist.append(u'

Raw output: %s

\n' % (c_file, c_file)) return outlist def _save_annotation_footer(self): return (u'\n',) def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None): """ lines : original cython source code split by lines generated_code : generated c code keyed by line number in original file target filename : name of the file in which to store the generated html c_file : filename in which the c_code has been written """ if coverage_xml is not None and source_filename: coverage_timestamp = coverage_xml.get('timestamp', '').strip() covered_lines = self._get_line_coverage(coverage_xml, source_filename) else: coverage_timestamp = covered_lines = None annotation_items = dict(self.annotations[source_filename]) scopes = dict(self.scopes[source_filename]) outlist = [] outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp)) outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines)) outlist.extend(self._save_annotation_footer()) return ''.join(outlist) def _get_line_coverage(self, coverage_xml, source_filename): coverage_data = None for entry in coverage_xml.iterfind('.//class'): if not entry.get('filename'): continue if (entry.get('filename') == source_filename or os.path.abspath(entry.get('filename')) == source_filename): coverage_data = entry break elif source_filename.endswith(entry.get('filename')): coverage_data = entry # but we might still find a better match... if coverage_data is None: return None return dict( (int(line.get('number')), int(line.get('hits'))) for line in coverage_data.iterfind('lines/line') ) def _htmlify_code(self, code): try: from pygments import highlight from pygments.lexers import CythonLexer from pygments.formatters import HtmlFormatter except ImportError: # no Pygments, just escape the code return html_escape(code) html_code = highlight( code, CythonLexer(stripnl=False, stripall=False), HtmlFormatter(nowrap=True)) return html_code def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None): outlist = [u'
'] pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n' new_calls_map = dict( (name, 0) for name in 'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split() ).copy self.mark_pos(None) def annotate(match): group_name = match.lastgroup calls[group_name] += 1 return u"%s" % ( group_name, match.group(group_name)) lines = self._htmlify_code(cython_code).splitlines() lineno_width = len(str(len(lines))) if not covered_lines: covered_lines = None for k, line in enumerate(lines, 1): try: c_code = generated_code[k] except KeyError: c_code = '' else: c_code = _replace_pos_comment(pos_comment_marker, c_code) if c_code.startswith(pos_comment_marker): c_code = c_code[len(pos_comment_marker):] c_code = html_escape(c_code) calls = new_calls_map() c_code = _parse_code(annotate, c_code) score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] + calls['py_macro_api'] + calls['pyx_macro_api']) if c_code: onclick = " onclick='toggleDiv(this)'" expandsymbol = '+' else: onclick = '' expandsymbol = ' ' covered = '' if covered_lines is not None and k in covered_lines: hits = covered_lines[k] if hits is not None: covered = 'run' if hits else 'mis' outlist.append( u'
'
                # generate line number with expand symbol in front,
                # and the right number of digits
                u'{expandsymbol}{line:0{lineno_width}d}: {code}
\n'.format( score=score, expandsymbol=expandsymbol, covered=covered, lineno_width=lineno_width, line=k, code=line.rstrip(), onclick=onclick, )) if c_code: outlist.append(u"
{code}
".format( score=score, covered=covered, code=c_code)) outlist.append(u"
") return outlist _parse_code = re.compile(( br'(?P__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|' br'(?P__Pyx_Trace[A-Za-z]+)|' br'(?:' br'(?P__Pyx_[A-Z][A-Z_]+)|' br'(?P(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|' br'(?PPy[A-Z][a-z]+_[A-Z][A-Z_]+)|' br'(?PPy[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)' br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement br'(?P(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))' ).decode('ascii')).sub _replace_pos_comment = re.compile( # this matches what Cython generates as code line marker comment br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'), re.M ).sub class AnnotationItem(object): def __init__(self, style, text, tag="", size=0): self.style = style self.text = text self.tag = tag self.size = size def start(self): return u"%s" % (self.style, self.text, self.tag) def end(self): return self.size, u"" Cython-0.26.1/Cython/Compiler/Builtin.py0000664000175000017500000005304613143605603020637 0ustar stefanstefan00000000000000# # Builtin Definitions # from __future__ import absolute_import from .Symtab import BuiltinScope, StructOrUnionScope from .Code import UtilityCode from .TypeSlots import Signature from . import PyrexTypes from . import Options # C-level implementations of builtin types, functions and methods iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c") getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c") getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c") pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c") pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c") globals_utility_code = UtilityCode.load("Globals", "Builtins.c") builtin_utility_code = { 'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"), } # mapping from builtins to their C-level equivalents class _BuiltinOverride(object): def __init__(self, py_name, args, ret_type, cname, py_equiv="*", utility_code=None, sig=None, func_type=None, is_strict_signature=False, builtin_return_type=None): self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv self.args, self.ret_type = args, ret_type self.func_type, self.sig = func_type, sig self.builtin_return_type = builtin_return_type self.is_strict_signature = is_strict_signature self.utility_code = utility_code def build_func_type(self, sig=None, self_arg=None): if sig is None: sig = Signature(self.args, self.ret_type) sig.exception_check = False # not needed for the current builtins func_type = sig.function_type(self_arg) if self.is_strict_signature: func_type.is_strict_signature = True if self.builtin_return_type: func_type.return_type = builtin_types[self.builtin_return_type] return func_type class BuiltinAttribute(object): def __init__(self, py_name, cname=None, field_type=None, field_type_name=None): self.py_name = py_name self.cname = cname or py_name self.field_type_name = field_type_name # can't do the lookup before the type is declared! 
self.field_type = field_type def declare_in_type(self, self_type): if self.field_type_name is not None: # lazy type lookup field_type = builtin_scope.lookup(self.field_type_name).type else: field_type = self.field_type or PyrexTypes.py_object_type entry = self_type.scope.declare(self.py_name, self.cname, field_type, None, 'private') entry.is_variable = True class BuiltinFunction(_BuiltinOverride): def declare_in_scope(self, scope): func_type, sig = self.func_type, self.sig if func_type is None: func_type = self.build_func_type(sig) scope.declare_builtin_cfunction(self.py_name, func_type, self.cname, self.py_equiv, self.utility_code) class BuiltinMethod(_BuiltinOverride): def declare_in_type(self, self_type): method_type, sig = self.func_type, self.sig if method_type is None: # override 'self' type (first argument) self_arg = PyrexTypes.CFuncTypeArg("", self_type, None) self_arg.not_none = True self_arg.accept_builtin_subtypes = True method_type = self.build_func_type(sig, self_arg) self_type.scope.declare_builtin_cfunction( self.py_name, method_type, self.cname, utility_code=self.utility_code) builtin_function_table = [ # name, args, return, C API func, py equiv = "*" BuiltinFunction('abs', "d", "d", "fabs", is_strict_signature = True), BuiltinFunction('abs', "f", "f", "fabsf", is_strict_signature = True), ] + list( # uses getattr to get PyrexTypes.c_uint_type etc to allow easy iteration over a list BuiltinFunction('abs', None, None, "__Pyx_abs_{0}".format(t), utility_code = UtilityCode.load("abs_{0}".format(t), "Builtins.c"), func_type = PyrexTypes.CFuncType( getattr(PyrexTypes,"c_u{0}_type".format(t)), [ PyrexTypes.CFuncTypeArg("arg", getattr(PyrexTypes,"c_{0}_type".format(t)), None) ], is_strict_signature = True, nogil=True)) for t in ("int", "long", "longlong") ) + list( BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix), func_type = PyrexTypes.CFuncType( t.real_type, [ PyrexTypes.CFuncTypeArg("arg", t, None) ], is_strict_signature = True, nogil=True)) for t in (PyrexTypes.c_float_complex_type, PyrexTypes.c_double_complex_type, PyrexTypes.c_longdouble_complex_type) ) + [ BuiltinFunction('abs', "O", "O", "PyNumber_Absolute"), #('all', "", "", ""), #('any', "", "", ""), #('ascii', "", "", ""), #('bin', "", "", ""), BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check", utility_code = UtilityCode.load("CallableCheck", "ObjectHandling.c")), #('chr', "", "", ""), #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result) #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start) BuiltinFunction('delattr', "OO", "r", "PyObject_DelAttr"), BuiltinFunction('dir', "O", "O", "PyObject_Dir"), BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod"), BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals", utility_code = pyexec_globals_utility_code), BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2", utility_code = pyexec_utility_code), BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3", utility_code = pyexec_utility_code), #('eval', "", "", ""), #('execfile', "", "", ""), #('filter', "", "", ""), BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr", utility_code=getattr3_utility_code), # Pyrex legacy BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3", utility_code=getattr3_utility_code), BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr", utility_code=getattr_utility_code), BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr", utility_code = UtilityCode.load("HasAttr", 
"Builtins.c")), BuiltinFunction('hash', "O", "h", "PyObject_Hash"), #('hex', "", "", ""), #('id', "", "", ""), #('input', "", "", ""), BuiltinFunction('intern', "O", "O", "__Pyx_Intern", utility_code = UtilityCode.load("Intern", "Builtins.c")), BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"), BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"), BuiltinFunction('iter', "OO", "O", "PyCallIter_New"), BuiltinFunction('iter', "O", "O", "PyObject_GetIter"), BuiltinFunction('len', "O", "z", "PyObject_Length"), BuiltinFunction('locals', "", "O", "__pyx_locals"), #('map', "", "", ""), #('max', "", "", ""), #('min', "", "", ""), BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next", utility_code = iter_next_utility_code), # not available in Py2 => implemented here BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2", utility_code = iter_next_utility_code), # not available in Py2 => implemented here #('oct', "", "", ""), #('open', "ss", "O", "PyFile_FromString"), # not in Py3 ] + [ BuiltinFunction('ord', None, None, "__Pyx_long_cast", func_type=PyrexTypes.CFuncType( PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)], is_strict_signature=True)) for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type] ] + [ BuiltinFunction('ord', None, None, "__Pyx_uchar_cast", func_type=PyrexTypes.CFuncType( PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)], is_strict_signature=True)) for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type] ] + [ BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord", utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"), func_type=PyrexTypes.CFuncType( PyrexTypes.c_long_type, [ PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None) ], exception_value="(long)(Py_UCS4)-1")), BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"), BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2", utility_code = UtilityCode.load("pow2", "Builtins.c")), #('range', "", "", ""), #('raw_input', "", "", ""), #('reduce', "", "", ""), BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"), BuiltinFunction('repr', "O", "O", "PyObject_Repr", builtin_return_type='str'), #('round', "", "", ""), BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"), #('sum', "", "", ""), #('sorted', "", "", ""), #('type', "O", "O", "PyObject_Type"), #('unichr', "", "", ""), #('unicode', "", "", ""), #('vars', "", "", ""), #('zip', "", "", ""), # Can't do these easily until we have builtin type entries. #('typecheck', "OO", "i", "PyObject_TypeCheck", False), #('issubtype', "OO", "i", "PyType_IsSubtype", False), # Put in namespace append optimization. BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"), # This is conditionally looked up based on a compiler directive. BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals", utility_code=globals_utility_code), ] # Builtin types # bool # buffer # classmethod # dict # enumerate # file # float # int # list # long # object # property # slice # staticmethod # super # str # tuple # type # xrange builtin_types_table = [ ("type", "PyType_Type", []), # This conflicts with the C++ bool type, and unfortunately # C++ is too liberal about PyObject* <-> bool conversions, # resulting in unintuitive runtime behavior and segfaults. 
# ("bool", "PyBool_Type", []), ("int", "PyInt_Type", []), ("long", "PyLong_Type", []), ("float", "PyFloat_Type", []), ("complex", "PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'), BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type), BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type), ]), ("basestring", "PyBaseString_Type", [ BuiltinMethod("join", "TO", "T", "__Pyx_PyBaseString_Join", utility_code=UtilityCode.load("StringJoin", "StringTools.c")), ]), ("bytearray", "PyByteArray_Type", [ ]), ("bytes", "PyBytes_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join", utility_code=UtilityCode.load("StringJoin", "StringTools.c")), ]), ("str", "PyString_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), BuiltinMethod("join", "TO", "O", "__Pyx_PyString_Join", builtin_return_type='basestring', utility_code=UtilityCode.load("StringJoin", "StringTools.c")), ]), ("unicode", "PyUnicode_Type", [BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"), BuiltinMethod("join", "TO", "T", "PyUnicode_Join"), ]), ("tuple", "PyTuple_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), ]), ("list", "PyList_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), BuiltinMethod("insert", "TzO", "r", "PyList_Insert"), BuiltinMethod("reverse", "T", "r", "PyList_Reverse"), BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append", utility_code=UtilityCode.load("ListAppend", "Optimize.c")), BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend", utility_code=UtilityCode.load("ListExtend", "Optimize.c")), ]), ("dict", "PyDict_Type", [BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"), BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"), BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items", utility_code=UtilityCode.load("py_dict_items", "Builtins.c")), BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys", utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")), BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values", utility_code=UtilityCode.load("py_dict_values", "Builtins.c")), BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems", utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")), BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys", utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")), BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues", utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")), BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems", utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")), BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys", utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")), BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues", utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")), BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear", utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")), BuiltinMethod("copy", "T", "T", "PyDict_Copy")]), ("slice", "PySlice_Type", [BuiltinAttribute('start'), BuiltinAttribute('stop'), BuiltinAttribute('step'), ]), # ("file", "PyFile_Type", []), # not in Py3 ("set", "PySet_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), BuiltinMethod("clear", "T", "r", "PySet_Clear"), # discard() and remove() have a special treatment for unhashable values # BuiltinMethod("discard", 
"TO", "r", "PySet_Discard"), # update is actually variadic (see Github issue #1645) # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update", # utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")), BuiltinMethod("add", "TO", "r", "PySet_Add"), BuiltinMethod("pop", "T", "O", "PySet_Pop")]), ("frozenset", "PyFrozenSet_Type", []), ("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []), ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []), ] types_that_construct_their_instance = set([ # some builtin types do not always return an instance of # themselves - these do: 'type', 'bool', 'long', 'float', 'complex', 'bytes', 'unicode', 'bytearray', 'tuple', 'list', 'dict', 'set', 'frozenset' # 'str', # only in Py3.x # 'file', # only in Py2.x ]) builtin_structs_table = [ ('Py_buffer', 'Py_buffer', [("buf", PyrexTypes.c_void_ptr_type), ("obj", PyrexTypes.py_object_type), ("len", PyrexTypes.c_py_ssize_t_type), ("itemsize", PyrexTypes.c_py_ssize_t_type), ("readonly", PyrexTypes.c_bint_type), ("ndim", PyrexTypes.c_int_type), ("format", PyrexTypes.c_char_ptr_type), ("shape", PyrexTypes.c_py_ssize_t_ptr_type), ("strides", PyrexTypes.c_py_ssize_t_ptr_type), ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type), ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)), ("internal", PyrexTypes.c_void_ptr_type), ]), ('Py_complex', 'Py_complex', [('real', PyrexTypes.c_double_type), ('imag', PyrexTypes.c_double_type), ]) ] # set up builtin scope builtin_scope = BuiltinScope() def init_builtin_funcs(): for bf in builtin_function_table: bf.declare_in_scope(builtin_scope) builtin_types = {} def init_builtin_types(): global builtin_types for name, cname, methods in builtin_types_table: utility = builtin_utility_code.get(name) if name == 'frozenset': objstruct_cname = 'PySetObject' elif name == 'bool': objstruct_cname = None elif name == 'Exception': objstruct_cname = "PyBaseExceptionObject" elif name == 'StopAsyncIteration': objstruct_cname = "PyBaseExceptionObject" else: objstruct_cname = 'Py%sObject' % name.capitalize() the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname) builtin_types[name] = the_type for method in methods: method.declare_in_type(the_type) def init_builtin_structs(): for name, cname, attribute_types in builtin_structs_table: scope = StructOrUnionScope(name) for attribute_name, attribute_type in attribute_types: scope.declare_var(attribute_name, attribute_type, None, attribute_name, allow_pyobject=True) builtin_scope.declare_struct_or_union( name, "struct", scope, 1, None, cname = cname) def init_builtins(): init_builtin_structs() init_builtin_types() init_builtin_funcs() builtin_scope.declare_var( '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type), pos=None, cname='(!Py_OptimizeFlag)', is_cdef=True) global list_type, tuple_type, dict_type, set_type, frozenset_type global bytes_type, str_type, unicode_type, basestring_type, slice_type global float_type, bool_type, type_type, complex_type, bytearray_type type_type = builtin_scope.lookup('type').type list_type = builtin_scope.lookup('list').type tuple_type = builtin_scope.lookup('tuple').type dict_type = builtin_scope.lookup('dict').type set_type = builtin_scope.lookup('set').type frozenset_type = builtin_scope.lookup('frozenset').type slice_type = builtin_scope.lookup('slice').type bytes_type = builtin_scope.lookup('bytes').type str_type = builtin_scope.lookup('str').type unicode_type = builtin_scope.lookup('unicode').type basestring_type = 
builtin_scope.lookup('basestring').type bytearray_type = builtin_scope.lookup('bytearray').type float_type = builtin_scope.lookup('float').type bool_type = builtin_scope.lookup('bool').type complex_type = builtin_scope.lookup('complex').type init_builtins() Cython-0.26.1/Cython/Compiler/Interpreter.py0000664000175000017500000000407212542002467021530 0ustar stefanstefan00000000000000""" This module deals with interpreting the parse tree as Python would have done, in the compiler. For now this only covers parse tree to value conversion of compile-time values. """ from __future__ import absolute_import from .Nodes import * from .ExprNodes import * from .Errors import CompileError class EmptyScope(object): def lookup(self, name): return None empty_scope = EmptyScope() def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()): """ Tries to interpret a list of compile time option nodes. The result will be a tuple (optlist, optdict) but where all expression nodes have been interpreted. The result is in the form of tuples (value, pos). optlist is a list of nodes, while optdict is a DictNode (the result optdict is a dict) If type_env is set, all type nodes will be analysed and the resulting type set. Otherwise only interpretateable ExprNodes are allowed, other nodes raises errors. A CompileError will be raised if there are problems. """ def interpret(node, ix): if ix in type_args: if type_env: type = node.analyse_as_type(type_env) if not type: raise CompileError(node.pos, "Invalid type.") return (type, node.pos) else: raise CompileError(node.pos, "Type not allowed here.") else: if (sys.version_info[0] >=3 and isinstance(node, StringNode) and node.unicode_value is not None): return (node.unicode_value, node.pos) return (node.compile_time_value(empty_scope), node.pos) if optlist: optlist = [interpret(x, ix) for ix, x in enumerate(optlist)] if optdict: assert isinstance(optdict, DictNode) new_optdict = {} for item in optdict.key_value_pairs: new_key, dummy = interpret(item.key, None) new_optdict[new_key] = interpret(item.value, item.key.value) optdict = new_optdict return (optlist, new_optdict) Cython-0.26.1/Cython/Compiler/ExprNodes.py0000664000175000017500000200220013150045407021122 0ustar stefanstefan00000000000000# # Parse tree nodes for expressions # from __future__ import absolute_import import cython cython.declare(error=object, warning=object, warn_once=object, InternalError=object, CompileError=object, UtilityCode=object, TempitaUtilityCode=object, StringEncoding=object, operator=object, Naming=object, Nodes=object, PyrexTypes=object, py_object_type=object, list_type=object, tuple_type=object, set_type=object, dict_type=object, unicode_type=object, str_type=object, bytes_type=object, type_type=object, Builtin=object, Symtab=object, Utils=object, find_coercion_error=object, debug_disposal_code=object, debug_temp_alloc=object, debug_coercion=object, bytearray_type=object, slice_type=object, _py_int_types=object, IS_PYTHON3=cython.bint) import sys import copy import os.path import operator from .Errors import error, warning, warn_once, InternalError, CompileError from .Errors import hold_errors, release_errors, held_errors, report_error from .Code import UtilityCode, TempitaUtilityCode from . import StringEncoding from . import Naming from . import Nodes from .Nodes import Node, utility_code_for_imports from . import PyrexTypes from .PyrexTypes import py_object_type, c_long_type, typecast, error_type, \ unspecified_type from . 
import TypeSlots from .Builtin import list_type, tuple_type, set_type, dict_type, type_type, \ unicode_type, str_type, bytes_type, bytearray_type, basestring_type, slice_type from . import Builtin from . import Symtab from .. import Utils from .Annotate import AnnotationItem from . import Future from ..Debugging import print_call_chain from .DebugFlags import debug_disposal_code, debug_temp_alloc, \ debug_coercion from .Pythran import to_pythran, is_pythran_supported_type, is_pythran_supported_operation_type, \ is_pythran_expr, pythran_func_type, pythran_binop_type, pythran_unaryop_type, has_np_pythran, \ pythran_indexing_code, pythran_indexing_type, is_pythran_supported_node_or_none, pythran_type from .PyrexTypes import PythranExpr try: from __builtin__ import basestring except ImportError: # Python 3 basestring = str any_string_type = (bytes, str) else: # Python 2 any_string_type = (bytes, unicode) if sys.version_info[0] >= 3: IS_PYTHON3 = True _py_int_types = int else: IS_PYTHON3 = False _py_int_types = (int, long) class NotConstant(object): _obj = None def __new__(cls): if NotConstant._obj is None: NotConstant._obj = super(NotConstant, cls).__new__(cls) return NotConstant._obj def __repr__(self): return "" not_a_constant = NotConstant() constant_value_not_set = object() # error messages when coercing from key[0] to key[1] coercion_error_dict = { # string related errors (unicode_type, str_type): ("Cannot convert Unicode string to 'str' implicitly." " This is not portable and requires explicit encoding."), (unicode_type, bytes_type): "Cannot convert Unicode string to 'bytes' implicitly, encoding required.", (unicode_type, PyrexTypes.c_char_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", (unicode_type, PyrexTypes.c_const_char_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", (unicode_type, PyrexTypes.c_uchar_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", (unicode_type, PyrexTypes.c_const_uchar_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", (bytes_type, unicode_type): "Cannot convert 'bytes' object to unicode implicitly, decoding required", (bytes_type, str_type): "Cannot convert 'bytes' object to str implicitly. This is not portable to Py3.", (bytes_type, basestring_type): ("Cannot convert 'bytes' object to basestring implicitly." " This is not portable to Py3."), (bytes_type, PyrexTypes.c_py_unicode_ptr_type): "Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'.", (bytes_type, PyrexTypes.c_const_py_unicode_ptr_type): ( "Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'."), (basestring_type, bytes_type): "Cannot convert 'basestring' object to bytes implicitly. This is not portable.", (str_type, unicode_type): ("str objects do not support coercion to unicode," " use a unicode string literal instead (u'')"), (str_type, bytes_type): "Cannot convert 'str' to 'bytes' implicitly. 
This is not portable.", (str_type, PyrexTypes.c_char_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", (str_type, PyrexTypes.c_const_char_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", (str_type, PyrexTypes.c_uchar_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", (str_type, PyrexTypes.c_const_uchar_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", (str_type, PyrexTypes.c_py_unicode_ptr_type): "'str' objects do not support coercion to C types (use 'unicode'?).", (str_type, PyrexTypes.c_const_py_unicode_ptr_type): ( "'str' objects do not support coercion to C types (use 'unicode'?)."), (PyrexTypes.c_char_ptr_type, unicode_type): "Cannot convert 'char*' to unicode implicitly, decoding required", (PyrexTypes.c_const_char_ptr_type, unicode_type): ( "Cannot convert 'char*' to unicode implicitly, decoding required"), (PyrexTypes.c_uchar_ptr_type, unicode_type): "Cannot convert 'char*' to unicode implicitly, decoding required", (PyrexTypes.c_const_uchar_ptr_type, unicode_type): ( "Cannot convert 'char*' to unicode implicitly, decoding required"), } def find_coercion_error(type_tuple, default, env): err = coercion_error_dict.get(type_tuple) if err is None: return default elif (env.directives['c_string_encoding'] and any(t in type_tuple for t in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_uchar_ptr_type, PyrexTypes.c_const_char_ptr_type, PyrexTypes.c_const_uchar_ptr_type))): if type_tuple[1].is_pyobject: return default elif env.directives['c_string_encoding'] in ('ascii', 'default'): return default else: return "'%s' objects do not support coercion to C types with non-ascii or non-default c_string_encoding" % type_tuple[0].name else: return err def default_str_type(env): return { 'bytes': bytes_type, 'bytearray': bytearray_type, 'str': str_type, 'unicode': unicode_type }.get(env.directives['c_string_type']) def check_negative_indices(*nodes): """ Raise a warning on nodes that are known to have negative numeric values. Used to find (potential) bugs inside of "wraparound=False" sections. """ for node in nodes: if node is None or ( not isinstance(node.constant_result, _py_int_types) and not isinstance(node.constant_result, float)): continue if node.constant_result < 0: warning(node.pos, "the result of using negative indices inside of " "code sections marked as 'wraparound=False' is " "undefined", level=1) def infer_sequence_item_type(env, seq_node, index_node=None, seq_type=None): if not seq_node.is_sequence_constructor: if seq_type is None: seq_type = seq_node.infer_type(env) if seq_type is tuple_type: # tuples are immutable => we can safely follow assignments if seq_node.cf_state and len(seq_node.cf_state) == 1: try: seq_node = seq_node.cf_state[0].rhs except AttributeError: pass if seq_node is not None and seq_node.is_sequence_constructor: if index_node is not None and index_node.has_constant_result(): try: item = seq_node.args[index_node.constant_result] except (ValueError, TypeError, IndexError): pass else: return item.infer_type(env) # if we're lucky, all items have the same type item_types = set([item.infer_type(env) for item in seq_node.args]) if len(item_types) == 1: return item_types.pop() return None def get_exception_handler(exception_value): if exception_value is None: return "__Pyx_CppExn2PyErr();" elif exception_value.type.is_pyobject: return 'try { throw; } catch(const std::exception& exn) { PyErr_SetString(%s, exn.what()); } catch(...) 
{ PyErr_SetNone(%s); }' % ( exception_value.entry.cname, exception_value.entry.cname) else: return '%s(); if (!PyErr_Occurred()) PyErr_SetString(PyExc_RuntimeError , "Error converting c++ exception.");' % exception_value.entry.cname def translate_cpp_exception(code, pos, inside, exception_value, nogil): raise_py_exception = get_exception_handler(exception_value) code.putln("try {") code.putln("%s" % inside) code.putln("} catch(...) {") if nogil: code.put_ensure_gil(declare_gilstate=True) code.putln(raise_py_exception) if nogil: code.put_release_ensured_gil() code.putln(code.error_goto(pos)) code.putln("}") # Used to handle the case where an lvalue expression and an overloaded assignment # both have an exception declaration. def translate_double_cpp_exception(code, pos, lhs_type, lhs_code, rhs_code, lhs_exc_val, assign_exc_val, nogil): handle_lhs_exc = get_exception_handler(lhs_exc_val) handle_assignment_exc = get_exception_handler(assign_exc_val) code.putln("try {") code.putln(lhs_type.declaration_code("__pyx_local_lvalue = %s;" % lhs_code)) code.putln("try {") code.putln("__pyx_local_lvalue = %s;" % rhs_code) # Catch any exception from the overloaded assignment. code.putln("} catch(...) {") if nogil: code.put_ensure_gil(declare_gilstate=True) code.putln(handle_assignment_exc) if nogil: code.put_release_ensured_gil() code.putln(code.error_goto(pos)) code.putln("}") # Catch any exception from evaluating lhs. code.putln("} catch(...) {") if nogil: code.put_ensure_gil(declare_gilstate=True) code.putln(handle_lhs_exc) if nogil: code.put_release_ensured_gil() code.putln(code.error_goto(pos)) code.putln('}') class ExprNode(Node): # subexprs [string] Class var holding names of subexpr node attrs # type PyrexType Type of the result # result_code string Code fragment # result_ctype string C type of result_code if different from type # is_temp boolean Result is in a temporary variable # is_sequence_constructor # boolean Is a list or tuple constructor expression # is_starred boolean Is a starred expression (e.g. '*a') # saved_subexpr_nodes # [ExprNode or [ExprNode or None] or None] # Cached result of subexpr_nodes() # use_managed_ref boolean use ref-counted temps/assignments/etc. # result_is_used boolean indicates that the result will be dropped and the # is_numpy_attribute boolean Is a Numpy module attribute # result_code/temp_result can safely be set to None result_ctype = None type = None temp_code = None old_temp = None # error checker for multiple frees etc. use_managed_ref = True # can be set by optimisation transforms result_is_used = True is_numpy_attribute = False # The Analyse Expressions phase for expressions is split # into two sub-phases: # # Analyse Types # Determines the result type of the expression based # on the types of its sub-expressions, and inserts # coercion nodes into the expression tree where needed. # Marks nodes which will need to have temporary variables # allocated. # # Allocate Temps # Allocates temporary variables where needed, and fills # in the result_code field of each node. # # ExprNode provides some convenience routines which # perform both of the above phases. These should only # be called from statement nodes, and only when no # coercion nodes need to be added around the expression # being analysed. In that case, the above two phases # should be invoked separately. # # Framework code in ExprNode provides much of the common # processing for the various phases. 
It makes use of the # 'subexprs' class attribute of ExprNodes, which should # contain a list of the names of attributes which can # hold sub-nodes or sequences of sub-nodes. # # The framework makes use of a number of abstract methods. # Their responsibilities are as follows. # # Declaration Analysis phase # # analyse_target_declaration # Called during the Analyse Declarations phase to analyse # the LHS of an assignment or argument of a del statement. # Nodes which cannot be the LHS of an assignment need not # implement it. # # Expression Analysis phase # # analyse_types # - Call analyse_types on all sub-expressions. # - Check operand types, and wrap coercion nodes around # sub-expressions where needed. # - Set the type of this node. # - If a temporary variable will be required for the # result, set the is_temp flag of this node. # # analyse_target_types # Called during the Analyse Types phase to analyse # the LHS of an assignment or argument of a del # statement. Similar responsibilities to analyse_types. # # target_code # Called by the default implementation of allocate_target_temps. # Should return a C lvalue for assigning to the node. The default # implementation calls calculate_result_code. # # check_const # - Check that this node and its subnodes form a # legal constant expression. If so, do nothing, # otherwise call not_const. # # The default implementation of check_const # assumes that the expression is not constant. # # check_const_addr # - Same as check_const, except check that the # expression is a C lvalue whose address is # constant. Otherwise, call addr_not_const. # # The default implementation of calc_const_addr # assumes that the expression is not a constant # lvalue. # # Code Generation phase # # generate_evaluation_code # - Call generate_evaluation_code for sub-expressions. # - Perform the functions of generate_result_code # (see below). # - If result is temporary, call generate_disposal_code # on all sub-expressions. # # A default implementation of generate_evaluation_code # is provided which uses the following abstract methods: # # generate_result_code # - Generate any C statements necessary to calculate # the result of this node from the results of its # sub-expressions. # # calculate_result_code # - Should return a C code fragment evaluating to the # result. This is only called when the result is not # a temporary. # # generate_assignment_code # Called on the LHS of an assignment. # - Call generate_evaluation_code for sub-expressions. # - Generate code to perform the assignment. # - If the assignment absorbed a reference, call # generate_post_assignment_code on the RHS, # otherwise call generate_disposal_code on it. # # generate_deletion_code # Called on an argument of a del statement. # - Call generate_evaluation_code for sub-expressions. # - Generate code to perform the deletion. # - Call generate_disposal_code on all sub-expressions. 
# # is_sequence_constructor = False is_dict_literal = False is_set_literal = False is_string_literal = False is_attribute = False is_subscript = False is_slice = False is_buffer_access = False is_memview_index = False is_memview_slice = False is_memview_broadcast = False is_memview_copy_assignment = False saved_subexpr_nodes = None is_temp = False is_target = False is_starred = False constant_result = constant_value_not_set child_attrs = property(fget=operator.attrgetter('subexprs')) def not_implemented(self, method_name): print_call_chain(method_name, "not implemented") ### raise InternalError( "%s.%s not implemented" % (self.__class__.__name__, method_name)) def is_lvalue(self): return 0 def is_addressable(self): return self.is_lvalue() and not self.type.is_memoryviewslice def is_ephemeral(self): # An ephemeral node is one whose result is in # a Python temporary and we suspect there are no # other references to it. Certain operations are # disallowed on such values, since they are # likely to result in a dangling pointer. return self.type.is_pyobject and self.is_temp def subexpr_nodes(self): # Extract a list of subexpression nodes based # on the contents of the subexprs class attribute. nodes = [] for name in self.subexprs: item = getattr(self, name) if item is not None: if type(item) is list: nodes.extend(item) else: nodes.append(item) return nodes def result(self): if self.is_temp: #if not self.temp_code: # pos = (os.path.basename(self.pos[0].get_description()),) + self.pos[1:] if self.pos else '(?)' # raise RuntimeError("temp result name not set in %s at %r" % ( # self.__class__.__name__, pos)) return self.temp_code else: return self.calculate_result_code() def pythran_result(self, type_=None): if is_pythran_supported_node_or_none(self): return to_pythran(self) assert(type_ is not None) return to_pythran(self, type_) def is_c_result_required(self): """ Subtypes may return False here if result temp allocation can be skipped. """ return True def result_as(self, type = None): # Return the result code cast to the specified C type. if (self.is_temp and self.type.is_pyobject and type != py_object_type): # Allocated temporaries are always PyObject *, which may not # reflect the actual type (e.g. an extension type) return typecast(type, py_object_type, self.result()) return typecast(type, self.ctype(), self.result()) def py_result(self): # Return the result code cast to PyObject *. return self.result_as(py_object_type) def ctype(self): # Return the native C type of the result (i.e. the # C type of the result_code expression). return self.result_ctype or self.type def get_constant_c_result_code(self): # Return the constant value of this node as a result code # string, or None if the node is not constant. This method # can be called when the constant result code is required # before the code generation phase. # # The return value is a string that can represent a simple C # value, a constant C name or a constant C expression. If the # node type depends on Python code, this must return None. return None def calculate_constant_result(self): # Calculate the constant compile time result value of this # expression and store it in ``self.constant_result``. Does # nothing by default, thus leaving ``self.constant_result`` # unknown. If valid, the result can be an arbitrary Python # value. # # This must only be called when it is assured that all # sub-expressions have a valid constant_result value. The # ConstantFolding transform will do this. 
pass def has_constant_result(self): return self.constant_result is not constant_value_not_set and \ self.constant_result is not not_a_constant def compile_time_value(self, denv): # Return value of compile-time expression, or report error. error(self.pos, "Invalid compile-time expression") def compile_time_value_error(self, e): error(self.pos, "Error in compile-time expression: %s: %s" % ( e.__class__.__name__, e)) # ------------- Declaration Analysis ---------------- def analyse_target_declaration(self, env): error(self.pos, "Cannot assign to or delete this") # ------------- Expression Analysis ---------------- def analyse_const_expression(self, env): # Called during the analyse_declarations phase of a # constant expression. Analyses the expression's type, # checks whether it is a legal const expression, # and determines its value. node = self.analyse_types(env) node.check_const() return node def analyse_expressions(self, env): # Convenience routine performing both the Type # Analysis and Temp Allocation phases for a whole # expression. return self.analyse_types(env) def analyse_target_expression(self, env, rhs): # Convenience routine performing both the Type # Analysis and Temp Allocation phases for the LHS of # an assignment. return self.analyse_target_types(env) def analyse_boolean_expression(self, env): # Analyse expression and coerce to a boolean. node = self.analyse_types(env) bool = node.coerce_to_boolean(env) return bool def analyse_temp_boolean_expression(self, env): # Analyse boolean expression and coerce result into # a temporary. This is used when a branch is to be # performed on the result and we won't have an # opportunity to ensure disposal code is executed # afterwards. By forcing the result into a temporary, # we ensure that all disposal has been done by the # time we get the result. node = self.analyse_types(env) return node.coerce_to_boolean(env).coerce_to_simple(env) # --------------- Type Inference ----------------- def type_dependencies(self, env): # Returns the list of entries whose types must be determined # before the type of self can be inferred. if hasattr(self, 'type') and self.type is not None: return () return sum([node.type_dependencies(env) for node in self.subexpr_nodes()], ()) def infer_type(self, env): # Attempt to deduce the type of self. # Differs from analyse_types as it avoids unnecessary # analysis of subexpressions, but can assume everything # in self.type_dependencies() has been resolved. if hasattr(self, 'type') and self.type is not None: return self.type elif hasattr(self, 'entry') and self.entry is not None: return self.entry.type else: self.not_implemented("infer_type") def nonlocally_immutable(self): # Returns whether this variable is a safe reference, i.e. # can't be modified as part of globals or closures. return self.is_literal or self.is_temp or self.type.is_array or self.type.is_cfunction def inferable_item_node(self, index=0): """ Return a node that represents the (type) result of an indexing operation, e.g. for tuple unpacking or iteration. """ return IndexNode(self.pos, base=self, index=IntNode( self.pos, value=str(index), constant_result=index, type=PyrexTypes.c_py_ssize_t_type)) # --------------- Type Analysis ------------------ def analyse_as_module(self, env): # If this node can be interpreted as a reference to a # cimported module, return its scope, else None. return None def analyse_as_type(self, env): # If this node can be interpreted as a reference to a # type, return that type, else None. 
return None def analyse_as_extension_type(self, env): # If this node can be interpreted as a reference to an # extension type or builtin type, return its type, else None. return None def analyse_types(self, env): self.not_implemented("analyse_types") def analyse_target_types(self, env): return self.analyse_types(env) def nogil_check(self, env): # By default, any expression based on Python objects is # prevented in nogil environments. Subtypes must override # this if they can work without the GIL. if self.type and self.type.is_pyobject: self.gil_error() def gil_assignment_check(self, env): if env.nogil and self.type.is_pyobject: error(self.pos, "Assignment of Python object not allowed without gil") def check_const(self): self.not_const() return False def not_const(self): error(self.pos, "Not allowed in a constant expression") def check_const_addr(self): self.addr_not_const() return False def addr_not_const(self): error(self.pos, "Address is not constant") # ----------------- Result Allocation ----------------- def result_in_temp(self): # Return true if result is in a temporary owned by # this node or one of its subexpressions. Overridden # by certain nodes which can share the result of # a subnode. return self.is_temp def target_code(self): # Return code fragment for use as LHS of a C assignment. return self.calculate_result_code() def calculate_result_code(self): self.not_implemented("calculate_result_code") # def release_target_temp(self, env): # # Release temporaries used by LHS of an assignment. # self.release_subexpr_temps(env) def allocate_temp_result(self, code): if self.temp_code: raise RuntimeError("Temp allocated multiple times in %r: %r" % (self.__class__.__name__, self.pos)) type = self.type if not type.is_void: if type.is_pyobject: type = PyrexTypes.py_object_type elif not (self.result_is_used or type.is_memoryviewslice or self.is_c_result_required()): self.temp_code = None return self.temp_code = code.funcstate.allocate_temp( type, manage_ref=self.use_managed_ref) else: self.temp_code = None def release_temp_result(self, code): if not self.temp_code: if not self.result_is_used: # not used anyway, so ignore if not set up return pos = (os.path.basename(self.pos[0].get_description()),) + self.pos[1:] if self.pos else '(?)' if self.old_temp: raise RuntimeError("temp %s released multiple times in %s at %r" % ( self.old_temp, self.__class__.__name__, pos)) else: raise RuntimeError("no temp, but release requested in %s at %r" % ( self.__class__.__name__, pos)) code.funcstate.release_temp(self.temp_code) self.old_temp = self.temp_code self.temp_code = None # ---------------- Code Generation ----------------- def make_owned_reference(self, code): """ If result is a pyobject, make sure we own a reference to it. If the result is in a temp, it is already a new reference. """ if self.type.is_pyobject and not self.result_in_temp(): code.put_incref(self.result(), self.ctype()) def make_owned_memoryviewslice(self, code): """ Make sure we own the reference to this memoryview slice. """ if not self.result_in_temp(): code.put_incref_memoryviewslice(self.result(), have_gil=self.in_nogil_context) def generate_evaluation_code(self, code): # Generate code to evaluate this node and # its sub-expressions, and dispose of any # temporary results of its sub-expressions. 
self.generate_subexpr_evaluation_code(code) code.mark_pos(self.pos) if self.is_temp: self.allocate_temp_result(code) self.generate_result_code(code) if self.is_temp and not (self.type.is_string or self.type.is_pyunicode_ptr): # If we are temp we do not need to wait until this node is disposed # before disposing children. self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) def generate_subexpr_evaluation_code(self, code): for node in self.subexpr_nodes(): node.generate_evaluation_code(code) def generate_result_code(self, code): self.not_implemented("generate_result_code") def generate_disposal_code(self, code): if self.is_temp: if self.type.is_string or self.type.is_pyunicode_ptr: # postponed from self.generate_evaluation_code() self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) if self.result(): if self.type.is_pyobject: code.put_decref_clear(self.result(), self.ctype()) elif self.type.is_memoryviewslice: code.put_xdecref_memoryviewslice( self.result(), have_gil=not self.in_nogil_context) code.putln("%s.memview = NULL;" % self.result()) code.putln("%s.data = NULL;" % self.result()) else: # Already done if self.is_temp self.generate_subexpr_disposal_code(code) def generate_subexpr_disposal_code(self, code): # Generate code to dispose of temporary results # of all sub-expressions. for node in self.subexpr_nodes(): node.generate_disposal_code(code) def generate_post_assignment_code(self, code): if self.is_temp: if self.type.is_string or self.type.is_pyunicode_ptr: # postponed from self.generate_evaluation_code() self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) elif self.type.is_pyobject: code.putln("%s = 0;" % self.result()) elif self.type.is_memoryviewslice: code.putln("%s.memview = NULL;" % self.result()) code.putln("%s.data = NULL;" % self.result()) else: self.generate_subexpr_disposal_code(code) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): # Stub method for nodes which are not legal as # the LHS of an assignment. An error will have # been reported earlier. pass def generate_deletion_code(self, code, ignore_nonexisting=False): # Stub method for nodes that are not legal as # the argument of a del statement. An error # will have been reported earlier. pass def free_temps(self, code): if self.is_temp: if not self.type.is_void: self.release_temp_result(code) else: self.free_subexpr_temps(code) def free_subexpr_temps(self, code): for sub in self.subexpr_nodes(): sub.free_temps(code) def generate_function_definitions(self, env, code): pass # ---------------- Annotation --------------------- def annotate(self, code): for node in self.subexpr_nodes(): node.annotate(code) # ----------------- Coercion ---------------------- def coerce_to(self, dst_type, env): # Coerce the result so that it can be assigned to # something of type dst_type. If processing is necessary, # wraps this node in a coercion node and returns that. # Otherwise, returns this node unchanged. # # This method is called during the analyse_expressions # phase of the src_node's processing. # # Note that subclasses that override this (especially # ConstNodes) must not (re-)set their own .type attribute # here. Since expression nodes may turn up in different # places in the tree (e.g. inside of CloneNodes in cascaded # assignments), this method must return a new node instance # if it changes the type. 
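        #
        # As a usage sketch (not part of the contract above): a caller
        # that needs a Python object from an arbitrary expression would
        # typically do something like
        #
        #     node = node.analyse_types(env)
        #     node = node.coerce_to(PyrexTypes.py_object_type, env)
        #     # equivalently: node = node.coerce_to_pyobject(env)
        #
        # and then keep working with the returned node, which is either
        # the original node or a coercion node wrapped around it.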
# src = self src_type = self.type if self.check_for_coercion_error(dst_type, env): return self used_as_reference = dst_type.is_reference if used_as_reference and not src_type.is_reference: dst_type = dst_type.ref_base_type if src_type.is_const: src_type = src_type.const_base_type if src_type.is_fused or dst_type.is_fused: # See if we are coercing a fused function to a pointer to a # specialized function if (src_type.is_cfunction and not dst_type.is_fused and dst_type.is_ptr and dst_type.base_type.is_cfunction): dst_type = dst_type.base_type for signature in src_type.get_all_specialized_function_types(): if signature.same_as(dst_type): src.type = signature src.entry = src.type.entry src.entry.used = True return self if src_type.is_fused: error(self.pos, "Type is not specialized") else: error(self.pos, "Cannot coerce to a type that is not specialized") self.type = error_type return self if self.coercion_type is not None: # This is purely for error checking purposes! node = NameNode(self.pos, name='', type=self.coercion_type) node.coerce_to(dst_type, env) if dst_type.is_memoryviewslice: from . import MemoryView if not src.type.is_memoryviewslice: if src.type.is_pyobject: src = CoerceToMemViewSliceNode(src, dst_type, env) elif src.type.is_array: src = CythonArrayNode.from_carray(src, env).coerce_to(dst_type, env) elif not src_type.is_error: error(self.pos, "Cannot convert '%s' to memoryviewslice" % (src_type,)) elif not src.type.conforms_to(dst_type, broadcast=self.is_memview_broadcast, copying=self.is_memview_copy_assignment): if src.type.dtype.same_as(dst_type.dtype): msg = "Memoryview '%s' not conformable to memoryview '%s'." tup = src.type, dst_type else: msg = "Different base types for memoryviews (%s, %s)" tup = src.type.dtype, dst_type.dtype error(self.pos, msg % tup) elif dst_type.is_pyobject: if not src.type.is_pyobject: if dst_type is bytes_type and src.type.is_int: src = CoerceIntToBytesNode(src, env) else: src = CoerceToPyTypeNode(src, env, type=dst_type) if not src.type.subtype_of(dst_type): if src.constant_result is not None: src = PyTypeTestNode(src, dst_type, env) elif is_pythran_expr(dst_type) and is_pythran_supported_type(src.type): # We let the compiler decide whether this is valid return src elif is_pythran_expr(src.type): if is_pythran_supported_type(dst_type): # Match the case were a pythran expr is assigned to a value, or vice versa. # We let the C++ compiler decide whether this is valid or not! return src # Else, we need to convert the Pythran expression to a Python object src = CoerceToPyTypeNode(src, env, type=dst_type) elif src.type.is_pyobject: if used_as_reference and dst_type.is_cpp_class: warning( self.pos, "Cannot pass Python object as C++ data structure reference (%s &), will pass by copy." % dst_type) src = CoerceFromPyTypeNode(dst_type, src, env) elif (dst_type.is_complex and src_type != dst_type and dst_type.assignable_from(src_type)): src = CoerceToComplexNode(src, dst_type, env) else: # neither src nor dst are py types # Added the string comparison, since for c types that # is enough, but Cython gets confused when the types are # in different pxi files. # TODO: Remove this hack and require shared declarations. 
if not (src.type == dst_type or str(src.type) == str(dst_type) or dst_type.assignable_from(src_type)): self.fail_assignment(dst_type) return src def fail_assignment(self, dst_type): error(self.pos, "Cannot assign type '%s' to '%s'" % (self.type, dst_type)) def check_for_coercion_error(self, dst_type, env, fail=False, default=None): if fail and not default: default = "Cannot assign type '%(FROM)s' to '%(TO)s'" message = find_coercion_error((self.type, dst_type), default, env) if message is not None: error(self.pos, message % {'FROM': self.type, 'TO': dst_type}) return True if fail: self.fail_assignment(dst_type) return True return False def coerce_to_pyobject(self, env): return self.coerce_to(PyrexTypes.py_object_type, env) def coerce_to_boolean(self, env): # Coerce result to something acceptable as # a boolean value. # if it's constant, calculate the result now if self.has_constant_result(): bool_value = bool(self.constant_result) return BoolNode(self.pos, value=bool_value, constant_result=bool_value) type = self.type if type.is_enum or type.is_error: return self elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float: return CoerceToBooleanNode(self, env) elif type.is_cpp_class: return SimpleCallNode( self.pos, function=AttributeNode( self.pos, obj=self, attribute='operator bool'), args=[]).analyse_types(env) elif type.is_ctuple: bool_value = len(type.components) == 0 return BoolNode(self.pos, value=bool_value, constant_result=bool_value) else: error(self.pos, "Type '%s' not acceptable as a boolean" % type) return self def coerce_to_integer(self, env): # If not already some C integer type, coerce to longint. if self.type.is_int: return self else: return self.coerce_to(PyrexTypes.c_long_type, env) def coerce_to_temp(self, env): # Ensure that the result is in a temporary. if self.result_in_temp(): return self else: return CoerceToTempNode(self, env) def coerce_to_simple(self, env): # Ensure that the result is simple (see is_simple). if self.is_simple(): return self else: return self.coerce_to_temp(env) def is_simple(self): # A node is simple if its result is something that can # be referred to without performing any operations, e.g. # a constant, local var, C global var, struct member # reference, or temporary. return self.result_in_temp() def may_be_none(self): if self.type and not (self.type.is_pyobject or self.type.is_memoryviewslice): return False if self.has_constant_result(): return self.constant_result is not None return True def as_cython_attribute(self): return None def as_none_safe_node(self, message, error="PyExc_TypeError", format_args=()): # Wraps the node in a NoneCheckNode if it is not known to be # not-None (e.g. because it is a Python literal). if self.may_be_none(): return NoneCheckNode(self, error, message, format_args) else: return self @classmethod def from_node(cls, node, **kwargs): """Instantiate this node class from another node, properly copying over all attributes that one would forget otherwise. """ attributes = "cf_state cf_maybe_null cf_is_null constant_result".split() for attr_name in attributes: if attr_name in kwargs: continue try: value = getattr(node, attr_name) except AttributeError: pass else: kwargs[attr_name] = value return cls(node.pos, **kwargs) class AtomicExprNode(ExprNode): # Abstract base class for expression nodes which have # no sub-expressions. 
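    #
    # Because 'subexprs' is empty for these nodes, the generic
    # sub-expression handling can be skipped.  A hypothetical leaf node
    # (name and value invented for illustration) could be as small as:
    #
    #     class HypotheticalAnswerNode(AtomicExprNode):
    #         type = PyrexTypes.c_int_type
    #
    #         def analyse_types(self, env):
    #             return self
    #
    #         def generate_result_code(self, code):
    #             pass                    # nothing to compute at run time
    #
    #         def calculate_result_code(self):
    #             return "42"             # C code fragment for the result
    #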
subexprs = [] # Override to optimize -- we know we have no children def generate_subexpr_evaluation_code(self, code): pass def generate_subexpr_disposal_code(self, code): pass class PyConstNode(AtomicExprNode): # Abstract base class for constant Python values. is_literal = 1 type = py_object_type def is_simple(self): return 1 def may_be_none(self): return False def analyse_types(self, env): return self def calculate_result_code(self): return self.value def generate_result_code(self, code): pass class NoneNode(PyConstNode): # The constant value None is_none = 1 value = "Py_None" constant_result = None nogil_check = None def compile_time_value(self, denv): return None def may_be_none(self): return True class EllipsisNode(PyConstNode): # '...' in a subscript list. value = "Py_Ellipsis" constant_result = Ellipsis def compile_time_value(self, denv): return Ellipsis class ConstNode(AtomicExprNode): # Abstract base type for literal constant nodes. # # value string C code fragment is_literal = 1 nogil_check = None def is_simple(self): return 1 def nonlocally_immutable(self): return 1 def may_be_none(self): return False def analyse_types(self, env): return self # Types are held in class variables def check_const(self): return True def get_constant_c_result_code(self): return self.calculate_result_code() def calculate_result_code(self): return str(self.value) def generate_result_code(self, code): pass class BoolNode(ConstNode): type = PyrexTypes.c_bint_type # The constant value True or False def calculate_constant_result(self): self.constant_result = self.value def compile_time_value(self, denv): return self.value def calculate_result_code(self): if self.type.is_pyobject: return self.value and 'Py_True' or 'Py_False' else: return str(int(self.value)) def coerce_to(self, dst_type, env): if dst_type.is_pyobject and self.type.is_int: return BoolNode( self.pos, value=self.value, constant_result=self.constant_result, type=Builtin.bool_type) if dst_type.is_int and self.type.is_pyobject: return BoolNode( self.pos, value=self.value, constant_result=self.constant_result, type=PyrexTypes.c_bint_type) return ConstNode.coerce_to(self, dst_type, env) class NullNode(ConstNode): type = PyrexTypes.c_null_ptr_type value = "NULL" constant_result = 0 def get_constant_c_result_code(self): return self.value class CharNode(ConstNode): type = PyrexTypes.c_char_type def calculate_constant_result(self): self.constant_result = ord(self.value) def compile_time_value(self, denv): return ord(self.value) def calculate_result_code(self): return "'%s'" % StringEncoding.escape_char(self.value) class IntNode(ConstNode): # unsigned "" or "U" # longness "" or "L" or "LL" # is_c_literal True/False/None creator considers this a C integer literal unsigned = "" longness = "" is_c_literal = None # unknown def __init__(self, pos, **kwds): ExprNode.__init__(self, pos, **kwds) if 'type' not in kwds: self.type = self.find_suitable_type_for_value() def find_suitable_type_for_value(self): if self.constant_result is constant_value_not_set: try: self.calculate_constant_result() except ValueError: pass # we ignore 'is_c_literal = True' and instead map signed 32bit # integers as C long values if self.is_c_literal or \ not self.has_constant_result() or \ self.unsigned or self.longness == 'LL': # clearly a C literal rank = (self.longness == 'LL') and 2 or 1 suitable_type = PyrexTypes.modifiers_and_name_to_type[not self.unsigned, rank, "int"] if self.type: suitable_type = PyrexTypes.widest_numeric_type(suitable_type, self.type) else: # C literal or 
Python literal - split at 32bit boundary if -2**31 <= self.constant_result < 2**31: if self.type and self.type.is_int: suitable_type = self.type else: suitable_type = PyrexTypes.c_long_type else: suitable_type = PyrexTypes.py_object_type return suitable_type def coerce_to(self, dst_type, env): if self.type is dst_type: return self elif dst_type.is_float: if self.has_constant_result(): return FloatNode(self.pos, value='%d.0' % int(self.constant_result), type=dst_type, constant_result=float(self.constant_result)) else: return FloatNode(self.pos, value=self.value, type=dst_type, constant_result=not_a_constant) if dst_type.is_numeric and not dst_type.is_complex: node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, type=dst_type, is_c_literal=True, unsigned=self.unsigned, longness=self.longness) return node elif dst_type.is_pyobject: node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, type=PyrexTypes.py_object_type, is_c_literal=False, unsigned=self.unsigned, longness=self.longness) else: # FIXME: not setting the type here to keep it working with # complex numbers. Should they be special cased? node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, unsigned=self.unsigned, longness=self.longness) # We still need to perform normal coerce_to processing on the # result, because we might be coercing to an extension type, # in which case a type test node will be needed. return ConstNode.coerce_to(node, dst_type, env) def coerce_to_boolean(self, env): return IntNode( self.pos, value=self.value, constant_result=self.constant_result, type=PyrexTypes.c_bint_type, unsigned=self.unsigned, longness=self.longness) def generate_evaluation_code(self, code): if self.type.is_pyobject: # pre-allocate a Python version of the number plain_integer_string = str(Utils.str_to_number(self.value)) self.result_code = code.get_py_int(plain_integer_string, self.longness) else: self.result_code = self.get_constant_c_result_code() def get_constant_c_result_code(self): unsigned, longness = self.unsigned, self.longness literal = self.value_as_c_integer_string() if not (unsigned or longness) and self.type.is_int and literal[0] == '-' and literal[1] != '0': # negative decimal literal => guess longness from type to prevent wrap-around if self.type.rank >= PyrexTypes.c_longlong_type.rank: longness = 'LL' elif self.type.rank >= PyrexTypes.c_long_type.rank: longness = 'L' return literal + unsigned + longness def value_as_c_integer_string(self): value = self.value if len(value) <= 2: # too short to go wrong (and simplifies code below) return value neg_sign = '' if value[0] == '-': neg_sign = '-' value = value[1:] if value[0] == '0': literal_type = value[1] # 0'o' - 0'b' - 0'x' # 0x123 hex literals and 0123 octal literals work nicely in C # but C-incompatible Py3 oct/bin notations need conversion if neg_sign and literal_type in 'oOxX0123456789' and value[2:].isdigit(): # negative hex/octal literal => prevent C compiler from using # unsigned integer types by converting to decimal (see C standard 6.4.4.1) value = str(Utils.str_to_number(value)) elif literal_type in 'oO': value = '0' + value[2:] # '0o123' => '0123' elif literal_type in 'bB': value = str(int(value[2:], 2)) elif value.isdigit() and not self.unsigned and not self.longness: if not neg_sign: # C compilers do not consider unsigned types for decimal literals, # but they do for hex (see C standard 6.4.4.1) value = '0x%X' % int(value) return neg_sign + value def calculate_result_code(self): return 
self.result_code def calculate_constant_result(self): self.constant_result = Utils.str_to_number(self.value) def compile_time_value(self, denv): return Utils.str_to_number(self.value) class FloatNode(ConstNode): type = PyrexTypes.c_double_type def calculate_constant_result(self): self.constant_result = float(self.value) def compile_time_value(self, denv): return float(self.value) def coerce_to(self, dst_type, env): if dst_type.is_pyobject and self.type.is_float: return FloatNode( self.pos, value=self.value, constant_result=self.constant_result, type=Builtin.float_type) if dst_type.is_float and self.type.is_pyobject: return FloatNode( self.pos, value=self.value, constant_result=self.constant_result, type=dst_type) return ConstNode.coerce_to(self, dst_type, env) def calculate_result_code(self): return self.result_code def get_constant_c_result_code(self): strval = self.value assert isinstance(strval, basestring) cmpval = repr(float(strval)) if cmpval == 'nan': return "(Py_HUGE_VAL * 0)" elif cmpval == 'inf': return "Py_HUGE_VAL" elif cmpval == '-inf': return "(-Py_HUGE_VAL)" else: return strval def generate_evaluation_code(self, code): c_value = self.get_constant_c_result_code() if self.type.is_pyobject: self.result_code = code.get_py_float(self.value, c_value) else: self.result_code = c_value def _analyse_name_as_type(name, pos, env): type = PyrexTypes.parse_basic_type(name) if type is not None: return type hold_errors() from .TreeFragment import TreeFragment pos = (pos[0], pos[1], pos[2]-7) try: declaration = TreeFragment(u"sizeof(%s)" % name, name=pos[0].filename, initial_pos=pos) except CompileError: sizeof_node = None else: sizeof_node = declaration.root.stats[0].expr sizeof_node = sizeof_node.analyse_types(env) release_errors(ignore=True) if isinstance(sizeof_node, SizeofTypeNode): return sizeof_node.arg_type return None class BytesNode(ConstNode): # A char* or bytes literal # # value BytesLiteral is_string_literal = True # start off as Python 'bytes' to support len() in O(1) type = bytes_type def calculate_constant_result(self): self.constant_result = self.value def as_sliced_node(self, start, stop, step=None): value = StringEncoding.bytes_literal(self.value[start:stop:step], self.value.encoding) return BytesNode(self.pos, value=value, constant_result=value) def compile_time_value(self, denv): return self.value.byteencode() def analyse_as_type(self, env): return _analyse_name_as_type(self.value.decode('ISO8859-1'), self.pos, env) def can_coerce_to_char_literal(self): return len(self.value) == 1 def coerce_to_boolean(self, env): # This is special because testing a C char* for truth directly # would yield the wrong result. 
bool_value = bool(self.value) return BoolNode(self.pos, value=bool_value, constant_result=bool_value) def coerce_to(self, dst_type, env): if self.type == dst_type: return self if dst_type.is_int: if not self.can_coerce_to_char_literal(): error(self.pos, "Only single-character string literals can be coerced into ints.") return self if dst_type.is_unicode_char: error(self.pos, "Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.") return self return CharNode(self.pos, value=self.value, constant_result=ord(self.value)) node = BytesNode(self.pos, value=self.value, constant_result=self.constant_result) if dst_type.is_pyobject: if dst_type in (py_object_type, Builtin.bytes_type): node.type = Builtin.bytes_type else: self.check_for_coercion_error(dst_type, env, fail=True) return node elif dst_type in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_const_char_ptr_type): node.type = dst_type return node elif dst_type in (PyrexTypes.c_uchar_ptr_type, PyrexTypes.c_const_uchar_ptr_type, PyrexTypes.c_void_ptr_type): node.type = (PyrexTypes.c_const_char_ptr_type if dst_type == PyrexTypes.c_const_uchar_ptr_type else PyrexTypes.c_char_ptr_type) return CastNode(node, dst_type) elif dst_type.assignable_from(PyrexTypes.c_char_ptr_type): node.type = dst_type return node # We still need to perform normal coerce_to processing on the # result, because we might be coercing to an extension type, # in which case a type test node will be needed. return ConstNode.coerce_to(node, dst_type, env) def generate_evaluation_code(self, code): if self.type.is_pyobject: result = code.get_py_string_const(self.value) elif self.type.is_const: result = code.get_string_const(self.value) else: # not const => use plain C string literal and cast to mutable type literal = self.value.as_c_string_literal() # C++ may require a cast result = typecast(self.type, PyrexTypes.c_void_ptr_type, literal) self.result_code = result def get_constant_c_result_code(self): return None # FIXME def calculate_result_code(self): return self.result_code class UnicodeNode(ConstNode): # A Py_UNICODE* or unicode literal # # value EncodedString # bytes_value BytesLiteral the literal parsed as bytes string # ('-3' unicode literals only) is_string_literal = True bytes_value = None type = unicode_type def calculate_constant_result(self): self.constant_result = self.value def analyse_as_type(self, env): return _analyse_name_as_type(self.value, self.pos, env) def as_sliced_node(self, start, stop, step=None): if StringEncoding.string_contains_surrogates(self.value[:stop]): # this is unsafe as it may give different results # in different runtimes return None value = StringEncoding.EncodedString(self.value[start:stop:step]) value.encoding = self.value.encoding if self.bytes_value is not None: bytes_value = StringEncoding.bytes_literal( self.bytes_value[start:stop:step], self.bytes_value.encoding) else: bytes_value = None return UnicodeNode( self.pos, value=value, bytes_value=bytes_value, constant_result=value) def coerce_to(self, dst_type, env): if dst_type is self.type: pass elif dst_type.is_unicode_char: if not self.can_coerce_to_char_literal(): error(self.pos, "Only single-character Unicode string literals or " "surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.") return self int_value = ord(self.value) return IntNode(self.pos, type=dst_type, value=str(int_value), constant_result=int_value) elif not dst_type.is_pyobject: if dst_type.is_string and self.bytes_value is not None: # special case: '-3' enforced unicode literal used in a # C 
char* context return BytesNode(self.pos, value=self.bytes_value ).coerce_to(dst_type, env) if dst_type.is_pyunicode_ptr: node = UnicodeNode(self.pos, value=self.value) node.type = dst_type return node error(self.pos, "Unicode literals do not support coercion to C types other " "than Py_UNICODE/Py_UCS4 (for characters) or Py_UNICODE* " "(for strings).") elif dst_type not in (py_object_type, Builtin.basestring_type): self.check_for_coercion_error(dst_type, env, fail=True) return self def can_coerce_to_char_literal(self): return len(self.value) == 1 ## or (len(self.value) == 2 ## and (0xD800 <= self.value[0] <= 0xDBFF) ## and (0xDC00 <= self.value[1] <= 0xDFFF)) def coerce_to_boolean(self, env): bool_value = bool(self.value) return BoolNode(self.pos, value=bool_value, constant_result=bool_value) def contains_surrogates(self): return StringEncoding.string_contains_surrogates(self.value) def generate_evaluation_code(self, code): if self.type.is_pyobject: if self.contains_surrogates(): # surrogates are not really portable and cannot be # decoded by the UTF-8 codec in Py3.3 self.result_code = code.get_py_const(py_object_type, 'ustring') data_cname = code.get_pyunicode_ptr_const(self.value) code = code.get_cached_constants_writer() code.mark_pos(self.pos) code.putln( "%s = PyUnicode_FromUnicode(%s, (sizeof(%s) / sizeof(Py_UNICODE))-1); %s" % ( self.result_code, data_cname, data_cname, code.error_goto_if_null(self.result_code, self.pos))) code.put_error_if_neg( self.pos, "__Pyx_PyUnicode_READY(%s)" % self.result_code) else: self.result_code = code.get_py_string_const(self.value) else: self.result_code = code.get_pyunicode_ptr_const(self.value) def calculate_result_code(self): return self.result_code def compile_time_value(self, env): return self.value class StringNode(PyConstNode): # A Python str object, i.e. 
a byte string in Python 2.x and a # unicode string in Python 3.x # # value BytesLiteral (or EncodedString with ASCII content) # unicode_value EncodedString or None # is_identifier boolean type = str_type is_string_literal = True is_identifier = None unicode_value = None def calculate_constant_result(self): if self.unicode_value is not None: # only the Unicode value is portable across Py2/3 self.constant_result = self.unicode_value def analyse_as_type(self, env): return _analyse_name_as_type(self.unicode_value or self.value.decode('ISO8859-1'), self.pos, env) def as_sliced_node(self, start, stop, step=None): value = type(self.value)(self.value[start:stop:step]) value.encoding = self.value.encoding if self.unicode_value is not None: if StringEncoding.string_contains_surrogates(self.unicode_value[:stop]): # this is unsafe as it may give different results in different runtimes return None unicode_value = StringEncoding.EncodedString( self.unicode_value[start:stop:step]) else: unicode_value = None return StringNode( self.pos, value=value, unicode_value=unicode_value, constant_result=value, is_identifier=self.is_identifier) def coerce_to(self, dst_type, env): if dst_type is not py_object_type and not str_type.subtype_of(dst_type): # if dst_type is Builtin.bytes_type: # # special case: bytes = 'str literal' # return BytesNode(self.pos, value=self.value) if not dst_type.is_pyobject: return BytesNode(self.pos, value=self.value).coerce_to(dst_type, env) if dst_type is not Builtin.basestring_type: self.check_for_coercion_error(dst_type, env, fail=True) return self def can_coerce_to_char_literal(self): return not self.is_identifier and len(self.value) == 1 def generate_evaluation_code(self, code): self.result_code = code.get_py_string_const( self.value, identifier=self.is_identifier, is_str=True, unicode_value=self.unicode_value) def get_constant_c_result_code(self): return None def calculate_result_code(self): return self.result_code def compile_time_value(self, env): if self.value.is_unicode: return self.value if not IS_PYTHON3: # use plain str/bytes object in Py2 return self.value.byteencode() # in Py3, always return a Unicode string if self.unicode_value is not None: return self.unicode_value return self.value.decode('iso8859-1') class IdentifierStringNode(StringNode): # A special str value that represents an identifier (bytes in Py2, # unicode in Py3). is_identifier = True class ImagNode(AtomicExprNode): # Imaginary number literal # # value string imaginary part (float value) type = PyrexTypes.c_double_complex_type def calculate_constant_result(self): self.constant_result = complex(0.0, float(self.value)) def compile_time_value(self, denv): return complex(0.0, float(self.value)) def analyse_types(self, env): self.type.create_declaration_utility_code(env) return self def may_be_none(self): return False def coerce_to(self, dst_type, env): if self.type is dst_type: return self node = ImagNode(self.pos, value=self.value) if dst_type.is_pyobject: node.is_temp = 1 node.type = Builtin.complex_type # We still need to perform normal coerce_to processing on the # result, because we might be coercing to an extension type, # in which case a type test node will be needed. 
return AtomicExprNode.coerce_to(node, dst_type, env) gil_message = "Constructing complex number" def calculate_result_code(self): if self.type.is_pyobject: return self.result() else: return "%s(0, %r)" % (self.type.from_parts, float(self.value)) def generate_result_code(self, code): if self.type.is_pyobject: code.putln( "%s = PyComplex_FromDoubles(0.0, %r); %s" % ( self.result(), float(self.value), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class NewExprNode(AtomicExprNode): # C++ new statement # # cppclass node c++ class to create type = None def infer_type(self, env): type = self.cppclass.analyse_as_type(env) if type is None or not type.is_cpp_class: error(self.pos, "new operator can only be applied to a C++ class") self.type = error_type return self.cpp_check(env) constructor = type.scope.lookup(u'') if constructor is None: func_type = PyrexTypes.CFuncType( type, [], exception_check='+', nogil=True) type.scope.declare_cfunction(u'', func_type, self.pos) constructor = type.scope.lookup(u'') self.class_type = type self.entry = constructor self.type = constructor.type return self.type def analyse_types(self, env): if self.type is None: self.infer_type(env) return self def may_be_none(self): return False def generate_result_code(self, code): pass def calculate_result_code(self): return "new " + self.class_type.empty_declaration_code() class NameNode(AtomicExprNode): # Reference to a local or global variable name. # # name string Python name of the variable # entry Entry Symbol table entry # type_entry Entry For extension type names, the original type entry # cf_is_null boolean Is uninitialized before this node # cf_maybe_null boolean Maybe uninitialized before this node # allow_null boolean Don't raise UnboundLocalError # nogil boolean Whether it is used in a nogil context is_name = True is_cython_module = False cython_attribute = None lhs_of_first_assignment = False # TODO: remove me is_used_as_rvalue = 0 entry = None type_entry = None cf_maybe_null = True cf_is_null = False allow_null = False nogil = False inferred_type = None def as_cython_attribute(self): return self.cython_attribute def type_dependencies(self, env): if self.entry is None: self.entry = env.lookup(self.name) if self.entry is not None and self.entry.type.is_unspecified: return (self,) else: return () def infer_type(self, env): if self.entry is None: self.entry = env.lookup(self.name) if self.entry is None or self.entry.type is unspecified_type: if self.inferred_type is not None: return self.inferred_type return py_object_type elif (self.entry.type.is_extension_type or self.entry.type.is_builtin_type) and \ self.name == self.entry.type.name: # Unfortunately the type attribute of type objects # is used for the pointer to the type they represent. return type_type elif self.entry.type.is_cfunction: if self.entry.scope.is_builtin_scope: # special case: optimised builtin functions must be treated as Python objects return py_object_type else: # special case: referring to a C function must return its pointer return PyrexTypes.CPtrType(self.entry.type) else: # If entry is inferred as pyobject it's safe to use local # NameNode's inferred_type. 
if self.entry.type.is_pyobject and self.inferred_type: # Overflow may happen if integer if not (self.inferred_type.is_int and self.entry.might_overflow): return self.inferred_type return self.entry.type def compile_time_value(self, denv): try: return denv.lookup(self.name) except KeyError: error(self.pos, "Compile-time name '%s' not defined" % self.name) def get_constant_c_result_code(self): if not self.entry or self.entry.type.is_pyobject: return None return self.entry.cname def coerce_to(self, dst_type, env): # If coercing to a generic pyobject and this is a builtin # C function with a Python equivalent, manufacture a NameNode # referring to the Python builtin. #print "NameNode.coerce_to:", self.name, dst_type ### if dst_type is py_object_type: entry = self.entry if entry and entry.is_cfunction: var_entry = entry.as_variable if var_entry: if var_entry.is_builtin and var_entry.is_const: var_entry = env.declare_builtin(var_entry.name, self.pos) node = NameNode(self.pos, name = self.name) node.entry = var_entry node.analyse_rvalue_entry(env) return node return super(NameNode, self).coerce_to(dst_type, env) def analyse_as_module(self, env): # Try to interpret this as a reference to a cimported module. # Returns the module scope, or None. entry = self.entry if not entry: entry = env.lookup(self.name) if entry and entry.as_module: return entry.as_module return None def analyse_as_type(self, env): if self.cython_attribute: type = PyrexTypes.parse_basic_type(self.cython_attribute) else: type = PyrexTypes.parse_basic_type(self.name) if type: return type entry = self.entry if not entry: entry = env.lookup(self.name) if entry and entry.is_type: return entry.type else: return None def analyse_as_extension_type(self, env): # Try to interpret this as a reference to an extension type. # Returns the extension type, or None. entry = self.entry if not entry: entry = env.lookup(self.name) if entry and entry.is_type: if entry.type.is_extension_type or entry.type.is_builtin_type: return entry.type return None def analyse_target_declaration(self, env): if not self.entry: self.entry = env.lookup_here(self.name) if not self.entry: if env.directives['warn.undeclared']: warning(self.pos, "implicit declaration of '%s'" % self.name, 1) if env.directives['infer_types'] != False: type = unspecified_type else: type = py_object_type self.entry = env.declare_var(self.name, type, self.pos) if self.entry.is_declared_generic: self.result_ctype = py_object_type if self.entry.as_module: # cimported modules namespace can shadow actual variables self.entry.is_variable = 1 def analyse_types(self, env): self.initialized_check = env.directives['initializedcheck'] if self.entry is None: self.entry = env.lookup(self.name) if not self.entry: self.entry = env.declare_builtin(self.name, self.pos) if not self.entry: self.type = PyrexTypes.error_type return self entry = self.entry if entry: entry.used = 1 if entry.type.is_buffer: from . 
import Buffer Buffer.used_buffer_aux_vars(entry) self.analyse_rvalue_entry(env) return self def analyse_target_types(self, env): self.analyse_entry(env, is_target=True) entry = self.entry if entry.is_cfunction and entry.as_variable: # FIXME: unify "is_overridable" flags below if (entry.is_overridable or entry.type.is_overridable) or not self.is_lvalue() and entry.fused_cfunction: # We need this for assigning to cpdef names and for the fused 'def' TreeFragment entry = self.entry = entry.as_variable self.type = entry.type if self.type.is_const: error(self.pos, "Assignment to const '%s'" % self.name) if self.type.is_reference: error(self.pos, "Assignment to reference '%s'" % self.name) if not self.is_lvalue(): error(self.pos, "Assignment to non-lvalue '%s'" % self.name) self.type = PyrexTypes.error_type entry.used = 1 if entry.type.is_buffer: from . import Buffer Buffer.used_buffer_aux_vars(entry) return self def analyse_rvalue_entry(self, env): #print "NameNode.analyse_rvalue_entry:", self.name ### #print "Entry:", self.entry.__dict__ ### self.analyse_entry(env) entry = self.entry if entry.is_declared_generic: self.result_ctype = py_object_type if entry.is_pyglobal or entry.is_builtin: if entry.is_builtin and entry.is_const: self.is_temp = 0 else: self.is_temp = 1 self.is_used_as_rvalue = 1 elif entry.type.is_memoryviewslice: self.is_temp = False self.is_used_as_rvalue = True self.use_managed_ref = True return self def nogil_check(self, env): self.nogil = True if self.is_used_as_rvalue: entry = self.entry if entry.is_builtin: if not entry.is_const: # cached builtins are ok self.gil_error() elif entry.is_pyglobal: self.gil_error() gil_message = "Accessing Python global or builtin" def analyse_entry(self, env, is_target=False): #print "NameNode.analyse_entry:", self.name ### self.check_identifier_kind() entry = self.entry type = entry.type if (not is_target and type.is_pyobject and self.inferred_type and self.inferred_type.is_builtin_type): # assume that type inference is smarter than the static entry type = self.inferred_type self.type = type def check_identifier_kind(self): # Check that this is an appropriate kind of name for use in an # expression. Also finds the variable entry associated with # an extension type. entry = self.entry if entry.is_type and entry.type.is_extension_type: self.type_entry = entry if entry.is_type and entry.type.is_enum: py_entry = Symtab.Entry(self.name, None, py_object_type) py_entry.is_pyglobal = True py_entry.scope = self.entry.scope self.entry = py_entry elif not (entry.is_const or entry.is_variable or entry.is_builtin or entry.is_cfunction or entry.is_cpp_class): if self.entry.as_variable: self.entry = self.entry.as_variable elif not self.is_cython_module: error(self.pos, "'%s' is not a constant, variable or function identifier" % self.name) def is_cimported_module_without_shadow(self, env): if self.is_cython_module or self.cython_attribute: return False entry = self.entry or env.lookup(self.name) return entry.as_module and not entry.is_variable def is_simple(self): # If it's not a C variable, it'll be in a temp. 
return 1 def may_be_none(self): if self.cf_state and self.type and (self.type.is_pyobject or self.type.is_memoryviewslice): # gard against infinite recursion on self-dependencies if getattr(self, '_none_checking', False): # self-dependency - either this node receives a None # value from *another* node, or it can not reference # None at this point => safe to assume "not None" return False self._none_checking = True # evaluate control flow state to see if there were any # potential None values assigned to the node so far may_be_none = False for assignment in self.cf_state: if assignment.rhs.may_be_none(): may_be_none = True break del self._none_checking return may_be_none return super(NameNode, self).may_be_none() def nonlocally_immutable(self): if ExprNode.nonlocally_immutable(self): return True entry = self.entry if not entry or entry.in_closure: return False return entry.is_local or entry.is_arg or entry.is_builtin or entry.is_readonly def calculate_target_results(self, env): pass def check_const(self): entry = self.entry if entry is not None and not (entry.is_const or entry.is_cfunction or entry.is_builtin): self.not_const() return False return True def check_const_addr(self): entry = self.entry if not (entry.is_cglobal or entry.is_cfunction or entry.is_builtin): self.addr_not_const() return False return True def is_lvalue(self): return ( self.entry.is_variable and not self.entry.is_readonly ) or ( self.entry.is_cfunction and self.entry.is_overridable ) def is_addressable(self): return self.entry.is_variable and not self.type.is_memoryviewslice def is_ephemeral(self): # Name nodes are never ephemeral, even if the # result is in a temporary. return 0 def calculate_result_code(self): entry = self.entry if not entry: return "" # There was an error earlier return entry.cname def generate_result_code(self, code): assert hasattr(self, 'entry') entry = self.entry if entry is None: return # There was an error earlier if entry.is_builtin and entry.is_const: return # Lookup already cached elif entry.is_pyclass_attr: assert entry.type.is_pyobject, "Python global or builtin not a Python object" interned_cname = code.intern_identifier(self.entry.name) if entry.is_builtin: namespace = Naming.builtins_cname else: # entry.is_pyglobal namespace = entry.scope.namespace_cname if not self.cf_is_null: code.putln( '%s = PyObject_GetItem(%s, %s);' % ( self.result(), namespace, interned_cname)) code.putln('if (unlikely(!%s)) {' % self.result()) code.putln('PyErr_Clear();') code.globalstate.use_utility_code( UtilityCode.load_cached("GetModuleGlobalName", "ObjectHandling.c")) code.putln( '%s = __Pyx_GetModuleGlobalName(%s);' % ( self.result(), interned_cname)) if not self.cf_is_null: code.putln("}") code.putln(code.error_goto_if_null(self.result(), self.pos)) code.put_gotref(self.py_result()) elif entry.is_builtin and not entry.scope.is_module_scope: # known builtin assert entry.type.is_pyobject, "Python global or builtin not a Python object" interned_cname = code.intern_identifier(self.entry.name) code.globalstate.use_utility_code( UtilityCode.load_cached("GetBuiltinName", "ObjectHandling.c")) code.putln( '%s = __Pyx_GetBuiltinName(%s); %s' % ( self.result(), interned_cname, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) elif entry.is_pyglobal or (entry.is_builtin and entry.scope.is_module_scope): # name in class body, global name or unknown builtin assert entry.type.is_pyobject, "Python global or builtin not a Python object" interned_cname = 
code.intern_identifier(self.entry.name) if entry.scope.is_module_scope: code.globalstate.use_utility_code( UtilityCode.load_cached("GetModuleGlobalName", "ObjectHandling.c")) code.putln( '%s = __Pyx_GetModuleGlobalName(%s); %s' % ( self.result(), interned_cname, code.error_goto_if_null(self.result(), self.pos))) else: # FIXME: is_pyglobal is also used for class namespace code.globalstate.use_utility_code( UtilityCode.load_cached("GetNameInClass", "ObjectHandling.c")) code.putln( '%s = __Pyx_GetNameInClass(%s, %s); %s' % ( self.result(), entry.scope.namespace_cname, interned_cname, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) elif entry.is_local or entry.in_closure or entry.from_closure or entry.type.is_memoryviewslice: # Raise UnboundLocalError for objects and memoryviewslices raise_unbound = ( (self.cf_maybe_null or self.cf_is_null) and not self.allow_null) null_code = entry.type.check_for_null_code(entry.cname) memslice_check = entry.type.is_memoryviewslice and self.initialized_check if null_code and raise_unbound and (entry.type.is_pyobject or memslice_check): code.put_error_if_unbound(self.pos, entry, self.in_nogil_context) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): #print "NameNode.generate_assignment_code:", self.name ### entry = self.entry if entry is None: return # There was an error earlier if (self.entry.type.is_ptr and isinstance(rhs, ListNode) and not self.lhs_of_first_assignment and not rhs.in_module_scope): error(self.pos, "Literal list must be assigned to pointer at time of declaration") # is_pyglobal seems to be True for module level-globals only. # We use this to access class->tp_dict if necessary. if entry.is_pyglobal: assert entry.type.is_pyobject, "Python global or builtin not a Python object" interned_cname = code.intern_identifier(self.entry.name) namespace = self.entry.scope.namespace_cname if entry.is_member: # if the entry is a member we have to cheat: SetAttr does not work # on types, so we create a descriptor which is then added to tp_dict setter = 'PyDict_SetItem' namespace = '%s->tp_dict' % namespace elif entry.scope.is_module_scope: setter = 'PyDict_SetItem' namespace = Naming.moddict_cname elif entry.is_pyclass_attr: setter = 'PyObject_SetItem' else: assert False, repr(entry) code.put_error_if_neg( self.pos, '%s(%s, %s, %s)' % ( setter, namespace, interned_cname, rhs.py_result())) if debug_disposal_code: print("NameNode.generate_assignment_code:") print("...generating disposal code for %s" % rhs) rhs.generate_disposal_code(code) rhs.free_temps(code) if entry.is_member: # in Py2.6+, we need to invalidate the method cache code.putln("PyType_Modified(%s);" % entry.scope.parent_type.typeptr_cname) else: if self.type.is_memoryviewslice: self.generate_acquire_memoryviewslice(rhs, code) elif self.type.is_buffer: # Generate code for doing the buffer release/acquisition. # This might raise an exception in which case the assignment (done # below) will not happen. # # The reason this is not in a typetest-like node is because the # variables that the acquired buffer info is stored to is allocated # per entry and coupled with it. 
self.generate_acquire_buffer(rhs, code) assigned = False if self.type.is_pyobject: #print "NameNode.generate_assignment_code: to", self.name ### #print "...from", rhs ### #print "...LHS type", self.type, "ctype", self.ctype() ### #print "...RHS type", rhs.type, "ctype", rhs.ctype() ### if self.use_managed_ref: rhs.make_owned_reference(code) is_external_ref = entry.is_cglobal or self.entry.in_closure or self.entry.from_closure if is_external_ref: if not self.cf_is_null: if self.cf_maybe_null: code.put_xgotref(self.py_result()) else: code.put_gotref(self.py_result()) assigned = True if entry.is_cglobal: code.put_decref_set( self.result(), rhs.result_as(self.ctype())) else: if not self.cf_is_null: if self.cf_maybe_null: code.put_xdecref_set( self.result(), rhs.result_as(self.ctype())) else: code.put_decref_set( self.result(), rhs.result_as(self.ctype())) else: assigned = False if is_external_ref: code.put_giveref(rhs.py_result()) if not self.type.is_memoryviewslice: if not assigned: if overloaded_assignment: result = rhs.result() if exception_check == '+': translate_cpp_exception(code, self.pos, '%s = %s;' % (self.result(), result), exception_value, self.in_nogil_context) else: code.putln('%s = %s;' % (self.result(), result)) else: result = rhs.result_as(self.ctype()) code.putln('%s = %s;' % (self.result(), result)) if debug_disposal_code: print("NameNode.generate_assignment_code:") print("...generating post-assignment code for %s" % rhs) rhs.generate_post_assignment_code(code) elif rhs.result_in_temp(): rhs.generate_post_assignment_code(code) rhs.free_temps(code) def generate_acquire_memoryviewslice(self, rhs, code): """ Slices, coercions from objects, return values etc are new references. We have a borrowed reference in case of dst = src """ from . import MemoryView MemoryView.put_acquire_memoryviewslice( lhs_cname=self.result(), lhs_type=self.type, lhs_pos=self.pos, rhs=rhs, code=code, have_gil=not self.in_nogil_context, first_assignment=self.cf_is_null) def generate_acquire_buffer(self, rhs, code): # rhstmp is only used in case the rhs is a complicated expression leading to # the object, to avoid repeating the same C expression for every reference # to the rhs. It does NOT hold a reference. pretty_rhs = isinstance(rhs, NameNode) or rhs.is_temp if pretty_rhs: rhstmp = rhs.result_as(self.ctype()) else: rhstmp = code.funcstate.allocate_temp(self.entry.type, manage_ref=False) code.putln('%s = %s;' % (rhstmp, rhs.result_as(self.ctype()))) from . 
import Buffer Buffer.put_assign_to_buffer(self.result(), rhstmp, self.entry, is_initialized=not self.lhs_of_first_assignment, pos=self.pos, code=code) if not pretty_rhs: code.putln("%s = 0;" % rhstmp) code.funcstate.release_temp(rhstmp) def generate_deletion_code(self, code, ignore_nonexisting=False): if self.entry is None: return # There was an error earlier elif self.entry.is_pyclass_attr: namespace = self.entry.scope.namespace_cname interned_cname = code.intern_identifier(self.entry.name) if ignore_nonexisting: key_error_code = 'PyErr_Clear(); else' else: # minor hack: fake a NameError on KeyError key_error_code = ( '{ PyErr_Clear(); PyErr_Format(PyExc_NameError, "name \'%%s\' is not defined", "%s"); }' % self.entry.name) code.putln( 'if (unlikely(PyObject_DelItem(%s, %s) < 0)) {' ' if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) %s' ' %s ' '}' % (namespace, interned_cname, key_error_code, code.error_goto(self.pos))) elif self.entry.is_pyglobal: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) interned_cname = code.intern_identifier(self.entry.name) del_code = '__Pyx_PyObject_DelAttrStr(%s, %s)' % ( Naming.module_cname, interned_cname) if ignore_nonexisting: code.putln( 'if (unlikely(%s < 0)) {' ' if (likely(PyErr_ExceptionMatches(PyExc_AttributeError))) PyErr_Clear(); else %s ' '}' % (del_code, code.error_goto(self.pos))) else: code.put_error_if_neg(self.pos, del_code) elif self.entry.type.is_pyobject or self.entry.type.is_memoryviewslice: if not self.cf_is_null: if self.cf_maybe_null and not ignore_nonexisting: code.put_error_if_unbound(self.pos, self.entry) if self.entry.type.is_pyobject: if self.entry.in_closure: # generator if ignore_nonexisting and self.cf_maybe_null: code.put_xgotref(self.result()) else: code.put_gotref(self.result()) if ignore_nonexisting and self.cf_maybe_null: code.put_xdecref(self.result(), self.ctype()) else: code.put_decref(self.result(), self.ctype()) code.putln('%s = NULL;' % self.result()) else: code.put_xdecref_memoryviewslice(self.entry.cname, have_gil=not self.nogil) else: error(self.pos, "Deletion of C names not supported") def annotate(self, code): if hasattr(self, 'is_called') and self.is_called: pos = (self.pos[0], self.pos[1], self.pos[2] - len(self.name) - 1) if self.type.is_pyobject: style, text = 'py_call', 'python function (%s)' else: style, text = 'c_call', 'c function (%s)' code.annotate(pos, AnnotationItem(style, text % self.type, size=len(self.name))) class BackquoteNode(ExprNode): # `expr` # # arg ExprNode type = py_object_type subexprs = ['arg'] def analyse_types(self, env): self.arg = self.arg.analyse_types(env) self.arg = self.arg.coerce_to_pyobject(env) self.is_temp = 1 return self gil_message = "Backquote expression" def calculate_constant_result(self): self.constant_result = repr(self.arg.constant_result) def generate_result_code(self, code): code.putln( "%s = PyObject_Repr(%s); %s" % ( self.result(), self.arg.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class ImportNode(ExprNode): # Used as part of import statement implementation. # Implements result = # __import__(module_name, globals(), None, name_list, level) # # module_name StringNode dotted name of module. 
Empty module # name means importing the parent package according # to level # name_list ListNode or None list of names to be imported # level int relative import level: # -1: attempt both relative import and absolute import; # 0: absolute import; # >0: the number of parent directories to search # relative to the current module. # None: decide the level according to language level and # directives type = py_object_type subexprs = ['module_name', 'name_list'] def analyse_types(self, env): if self.level is None: if (env.directives['py2_import'] or Future.absolute_import not in env.global_scope().context.future_directives): self.level = -1 else: self.level = 0 module_name = self.module_name.analyse_types(env) self.module_name = module_name.coerce_to_pyobject(env) if self.name_list: name_list = self.name_list.analyse_types(env) self.name_list = name_list.coerce_to_pyobject(env) self.is_temp = 1 return self gil_message = "Python import" def generate_result_code(self, code): if self.name_list: name_list_code = self.name_list.py_result() else: name_list_code = "0" code.globalstate.use_utility_code(UtilityCode.load_cached("Import", "ImportExport.c")) import_code = "__Pyx_Import(%s, %s, %d)" % ( self.module_name.py_result(), name_list_code, self.level) if (self.level <= 0 and self.module_name.is_string_literal and self.module_name.value in utility_code_for_imports): helper_func, code_name, code_file = utility_code_for_imports[self.module_name.value] code.globalstate.use_utility_code(UtilityCode.load_cached(code_name, code_file)) import_code = '%s(%s)' % (helper_func, import_code) code.putln("%s = %s; %s" % ( self.result(), import_code, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class IteratorNode(ExprNode): # Used as part of for statement implementation. 
# # Implements result = iter(sequence) # # sequence ExprNode type = py_object_type iter_func_ptr = None counter_cname = None cpp_iterator_cname = None reversed = False # currently only used for list/tuple types (see Optimize.py) is_async = False subexprs = ['sequence'] def analyse_types(self, env): self.sequence = self.sequence.analyse_types(env) if (self.sequence.type.is_array or self.sequence.type.is_ptr) and \ not self.sequence.type.is_string: # C array iteration will be transformed later on self.type = self.sequence.type elif self.sequence.type.is_cpp_class: self.analyse_cpp_types(env) else: self.sequence = self.sequence.coerce_to_pyobject(env) if self.sequence.type in (list_type, tuple_type): self.sequence = self.sequence.as_none_safe_node("'NoneType' object is not iterable") self.is_temp = 1 return self gil_message = "Iterating over Python object" _func_iternext_type = PyrexTypes.CPtrType(PyrexTypes.CFuncType( PyrexTypes.py_object_type, [ PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None), ])) def type_dependencies(self, env): return self.sequence.type_dependencies(env) def infer_type(self, env): sequence_type = self.sequence.infer_type(env) if sequence_type.is_array or sequence_type.is_ptr: return sequence_type elif sequence_type.is_cpp_class: begin = sequence_type.scope.lookup("begin") if begin is not None: return begin.type.return_type elif sequence_type.is_pyobject: return sequence_type return py_object_type def analyse_cpp_types(self, env): sequence_type = self.sequence.type if sequence_type.is_ptr: sequence_type = sequence_type.base_type begin = sequence_type.scope.lookup("begin") end = sequence_type.scope.lookup("end") if (begin is None or not begin.type.is_cfunction or begin.type.args): error(self.pos, "missing begin() on %s" % self.sequence.type) self.type = error_type return if (end is None or not end.type.is_cfunction or end.type.args): error(self.pos, "missing end() on %s" % self.sequence.type) self.type = error_type return iter_type = begin.type.return_type if iter_type.is_cpp_class: if env.lookup_operator_for_types( self.pos, "!=", [iter_type, end.type.return_type]) is None: error(self.pos, "missing operator!= on result of begin() on %s" % self.sequence.type) self.type = error_type return if env.lookup_operator_for_types(self.pos, '++', [iter_type]) is None: error(self.pos, "missing operator++ on result of begin() on %s" % self.sequence.type) self.type = error_type return if env.lookup_operator_for_types(self.pos, '*', [iter_type]) is None: error(self.pos, "missing operator* on result of begin() on %s" % self.sequence.type) self.type = error_type return self.type = iter_type elif iter_type.is_ptr: if not (iter_type == end.type.return_type): error(self.pos, "incompatible types for begin() and end()") self.type = iter_type else: error(self.pos, "result type of begin() on %s must be a C++ class or pointer" % self.sequence.type) self.type = error_type return def generate_result_code(self, code): sequence_type = self.sequence.type if sequence_type.is_cpp_class: if self.sequence.is_name: # safe: C++ won't allow you to reassign to class references begin_func = "%s.begin" % self.sequence.result() else: sequence_type = PyrexTypes.c_ptr_type(sequence_type) self.cpp_iterator_cname = code.funcstate.allocate_temp(sequence_type, manage_ref=False) code.putln("%s = &%s;" % (self.cpp_iterator_cname, self.sequence.result())) begin_func = "%s->begin" % self.cpp_iterator_cname # TODO: Limit scope. 
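# Rough sketch of the C++ iteration protocol used here (temporary names are
# assumptions for illustration):
#     cdef vector[int] v
#     for x in v: ...
# emits, via this method, something along the lines of
#     __pyx_t_1 = __pyx_v_v.begin();
# while generate_iter_next_result_code() below emits the per-item step:
#     if (!(__pyx_t_1 != __pyx_v_v.end())) break;
#     x = *__pyx_t_1;  ++__pyx_t_1;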
code.putln("%s = %s();" % (self.result(), begin_func)) return if sequence_type.is_array or sequence_type.is_ptr: raise InternalError("for in carray slice not transformed") is_builtin_sequence = sequence_type in (list_type, tuple_type) if not is_builtin_sequence: # reversed() not currently optimised (see Optimize.py) assert not self.reversed, "internal error: reversed() only implemented for list/tuple objects" self.may_be_a_sequence = not sequence_type.is_builtin_type if self.may_be_a_sequence: code.putln( "if (likely(PyList_CheckExact(%s)) || PyTuple_CheckExact(%s)) {" % ( self.sequence.py_result(), self.sequence.py_result())) if is_builtin_sequence or self.may_be_a_sequence: self.counter_cname = code.funcstate.allocate_temp( PyrexTypes.c_py_ssize_t_type, manage_ref=False) if self.reversed: if sequence_type is list_type: init_value = 'PyList_GET_SIZE(%s) - 1' % self.result() else: init_value = 'PyTuple_GET_SIZE(%s) - 1' % self.result() else: init_value = '0' code.putln("%s = %s; __Pyx_INCREF(%s); %s = %s;" % ( self.result(), self.sequence.py_result(), self.result(), self.counter_cname, init_value)) if not is_builtin_sequence: self.iter_func_ptr = code.funcstate.allocate_temp(self._func_iternext_type, manage_ref=False) if self.may_be_a_sequence: code.putln("%s = NULL;" % self.iter_func_ptr) code.putln("} else {") code.put("%s = -1; " % self.counter_cname) code.putln("%s = PyObject_GetIter(%s); %s" % ( self.result(), self.sequence.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) # PyObject_GetIter() fails if "tp_iternext" is not set, but the check below # makes it visible to the C compiler that the pointer really isn't NULL, so that # it can distinguish between the special cases and the generic case code.putln("%s = Py_TYPE(%s)->tp_iternext; %s" % ( self.iter_func_ptr, self.py_result(), code.error_goto_if_null(self.iter_func_ptr, self.pos))) if self.may_be_a_sequence: code.putln("}") def generate_next_sequence_item(self, test_name, result_name, code): assert self.counter_cname, "internal error: counter_cname temp not prepared" final_size = 'Py%s_GET_SIZE(%s)' % (test_name, self.py_result()) if self.sequence.is_sequence_constructor: item_count = len(self.sequence.args) if self.sequence.mult_factor is None: final_size = item_count elif isinstance(self.sequence.mult_factor.constant_result, _py_int_types): final_size = item_count * self.sequence.mult_factor.constant_result code.putln("if (%s >= %s) break;" % (self.counter_cname, final_size)) if self.reversed: inc_dec = '--' else: inc_dec = '++' code.putln("#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS") code.putln( "%s = Py%s_GET_ITEM(%s, %s); __Pyx_INCREF(%s); %s%s; %s" % ( result_name, test_name, self.py_result(), self.counter_cname, result_name, self.counter_cname, inc_dec, # use the error label to avoid C compiler warnings if we only use it below code.error_goto_if_neg('0', self.pos) )) code.putln("#else") code.putln( "%s = PySequence_ITEM(%s, %s); %s%s; %s" % ( result_name, self.py_result(), self.counter_cname, self.counter_cname, inc_dec, code.error_goto_if_null(result_name, self.pos))) code.put_gotref(result_name) code.putln("#endif") def generate_iter_next_result_code(self, result_name, code): sequence_type = self.sequence.type if self.reversed: code.putln("if (%s < 0) break;" % self.counter_cname) if sequence_type.is_cpp_class: if self.cpp_iterator_cname: end_func = "%s->end" % self.cpp_iterator_cname else: end_func = "%s.end" % self.sequence.result() # TODO: Cache end() 
call? code.putln("if (!(%s != %s())) break;" % ( self.result(), end_func)) code.putln("%s = *%s;" % ( result_name, self.result())) code.putln("++%s;" % self.result()) return elif sequence_type is list_type: self.generate_next_sequence_item('List', result_name, code) return elif sequence_type is tuple_type: self.generate_next_sequence_item('Tuple', result_name, code) return if self.may_be_a_sequence: code.putln("if (likely(!%s)) {" % self.iter_func_ptr) code.putln("if (likely(PyList_CheckExact(%s))) {" % self.py_result()) self.generate_next_sequence_item('List', result_name, code) code.putln("} else {") self.generate_next_sequence_item('Tuple', result_name, code) code.putln("}") code.put("} else ") code.putln("{") code.putln( "%s = %s(%s);" % ( result_name, self.iter_func_ptr, self.py_result())) code.putln("if (unlikely(!%s)) {" % result_name) code.putln("PyObject* exc_type = PyErr_Occurred();") code.putln("if (exc_type) {") code.putln("if (likely(exc_type == PyExc_StopIteration ||" " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();") code.putln("else %s" % code.error_goto(self.pos)) code.putln("}") code.putln("break;") code.putln("}") code.put_gotref(result_name) code.putln("}") def free_temps(self, code): if self.counter_cname: code.funcstate.release_temp(self.counter_cname) if self.iter_func_ptr: code.funcstate.release_temp(self.iter_func_ptr) self.iter_func_ptr = None if self.cpp_iterator_cname: code.funcstate.release_temp(self.cpp_iterator_cname) ExprNode.free_temps(self, code) class NextNode(AtomicExprNode): # Used as part of for statement implementation. # Implements result = next(iterator) # Created during analyse_types phase. # The iterator is not owned by this node. # # iterator IteratorNode def __init__(self, iterator): AtomicExprNode.__init__(self, iterator.pos) self.iterator = iterator def nogil_check(self, env): # ignore - errors (if any) are already handled by IteratorNode pass def type_dependencies(self, env): return self.iterator.type_dependencies(env) def infer_type(self, env, iterator_type=None): if iterator_type is None: iterator_type = self.iterator.infer_type(env) if iterator_type.is_ptr or iterator_type.is_array: return iterator_type.base_type elif iterator_type.is_cpp_class: item_type = env.lookup_operator_for_types(self.pos, "*", [iterator_type]).type.return_type if item_type.is_reference: item_type = item_type.ref_base_type if item_type.is_const: item_type = item_type.const_base_type return item_type else: # Avoid duplication of complicated logic. fake_index_node = IndexNode( self.pos, base=self.iterator.sequence, index=IntNode(self.pos, value='PY_SSIZE_T_MAX', type=PyrexTypes.c_py_ssize_t_type)) return fake_index_node.infer_type(env) def analyse_types(self, env): self.type = self.infer_type(env, self.iterator.type) self.is_temp = 1 return self def generate_result_code(self, code): self.iterator.generate_iter_next_result_code(self.result(), code) class AsyncIteratorNode(ExprNode): # Used as part of 'async for' statement implementation. 
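# Hedged usage sketch: for
#     async for item in obj: ...
# this node emits the __Pyx_Coroutine_GetAsyncIter() call that obtains the
# async iterator, and AsyncNextNode below emits the per-iteration
# __Pyx_Coroutine_AsyncIterNext() call whose result is then awaited; both
# helpers come from the "AsyncIter" utility code in Coroutine.c used in the
# generate_result_code() methods that follow.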
# # Implements result = sequence.__aiter__() # # sequence ExprNode subexprs = ['sequence'] is_async = True type = py_object_type is_temp = 1 def infer_type(self, env): return py_object_type def analyse_types(self, env): self.sequence = self.sequence.analyse_types(env) if not self.sequence.type.is_pyobject: error(self.pos, "async for loops not allowed on C/C++ types") self.sequence = self.sequence.coerce_to_pyobject(env) return self def generate_result_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c")) code.putln("%s = __Pyx_Coroutine_GetAsyncIter(%s); %s" % ( self.result(), self.sequence.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) class AsyncNextNode(AtomicExprNode): # Used as part of 'async for' statement implementation. # Implements result = iterator.__anext__() # Created during analyse_types phase. # The iterator is not owned by this node. # # iterator IteratorNode type = py_object_type is_temp = 1 def __init__(self, iterator): AtomicExprNode.__init__(self, iterator.pos) self.iterator = iterator def infer_type(self, env): return py_object_type def analyse_types(self, env): return self def generate_result_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c")) code.putln("%s = __Pyx_Coroutine_AsyncIterNext(%s); %s" % ( self.result(), self.iterator.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) class WithExitCallNode(ExprNode): # The __exit__() call of a 'with' statement. Used in both the # except and finally clauses. # with_stat WithStatNode the surrounding 'with' statement # args TupleNode or ResultStatNode the exception info tuple # await AwaitExprNode the await expression of an 'async with' statement subexprs = ['args', 'await'] test_if_run = True await = None def analyse_types(self, env): self.args = self.args.analyse_types(env) if self.await: self.await = self.await.analyse_types(env) self.type = PyrexTypes.c_bint_type self.is_temp = True return self def generate_evaluation_code(self, code): if self.test_if_run: # call only if it was not already called (and decref-cleared) code.putln("if (%s) {" % self.with_stat.exit_var) self.args.generate_evaluation_code(code) result_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False) code.mark_pos(self.pos) code.globalstate.use_utility_code(UtilityCode.load_cached( "PyObjectCall", "ObjectHandling.c")) code.putln("%s = __Pyx_PyObject_Call(%s, %s, NULL);" % ( result_var, self.with_stat.exit_var, self.args.result())) code.put_decref_clear(self.with_stat.exit_var, type=py_object_type) self.args.generate_disposal_code(code) self.args.free_temps(code) code.putln(code.error_goto_if_null(result_var, self.pos)) code.put_gotref(result_var) if self.await: # FIXME: result_var temp currently leaks into the closure self.await.generate_evaluation_code(code, source_cname=result_var, decref_source=True) code.putln("%s = %s;" % (result_var, self.await.py_result())) self.await.generate_post_assignment_code(code) self.await.free_temps(code) if self.result_is_used: self.allocate_temp_result(code) code.putln("%s = __Pyx_PyObject_IsTrue(%s);" % (self.result(), result_var)) code.put_decref_clear(result_var, type=py_object_type) if self.result_is_used: code.put_error_if_neg(self.pos, self.result()) code.funcstate.release_temp(result_var) if self.test_if_run: code.putln("}") class ExcValueNode(AtomicExprNode): # Node created during analyse_types phase # of 
an ExceptClauseNode to fetch the current # exception value. type = py_object_type def __init__(self, pos): ExprNode.__init__(self, pos) def set_var(self, var): self.var = var def calculate_result_code(self): return self.var def generate_result_code(self, code): pass def analyse_types(self, env): return self class TempNode(ExprNode): # Node created during analyse_types phase # of some nodes to hold a temporary value. # # Note: One must call "allocate" and "release" on # the node during code generation to get/release the temp. # This is because the temp result is often used outside of # the regular cycle. subexprs = [] def __init__(self, pos, type, env=None): ExprNode.__init__(self, pos) self.type = type if type.is_pyobject: self.result_ctype = py_object_type self.is_temp = 1 def analyse_types(self, env): return self def analyse_target_declaration(self, env): pass def generate_result_code(self, code): pass def allocate(self, code): self.temp_cname = code.funcstate.allocate_temp(self.type, manage_ref=True) def release(self, code): code.funcstate.release_temp(self.temp_cname) self.temp_cname = None def result(self): try: return self.temp_cname except: assert False, "Remember to call allocate/release on TempNode" raise # Do not participate in normal temp alloc/dealloc: def allocate_temp_result(self, code): pass def release_temp_result(self, code): pass class PyTempNode(TempNode): # TempNode holding a Python value. def __init__(self, pos, env): TempNode.__init__(self, pos, PyrexTypes.py_object_type, env) class RawCNameExprNode(ExprNode): subexprs = [] def __init__(self, pos, type=None, cname=None): ExprNode.__init__(self, pos, type=type) if cname is not None: self.cname = cname def analyse_types(self, env): return self def set_cname(self, cname): self.cname = cname def result(self): return self.cname def generate_result_code(self, code): pass #------------------------------------------------------------------- # # F-strings # #------------------------------------------------------------------- class JoinedStrNode(ExprNode): # F-strings # # values [UnicodeNode|FormattedValueNode] Substrings of the f-string # type = unicode_type is_temp = True subexprs = ['values'] def analyse_types(self, env): self.values = [v.analyse_types(env).coerce_to_pyobject(env) for v in self.values] return self def may_be_none(self): # PyUnicode_Join() always returns a Unicode string or raises an exception return False def generate_evaluation_code(self, code): code.mark_pos(self.pos) num_items = len(self.values) list_var = code.funcstate.allocate_temp(py_object_type, manage_ref=True) ulength_var = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False) max_char_var = code.funcstate.allocate_temp(PyrexTypes.c_py_ucs4_type, manage_ref=False) code.putln('%s = PyTuple_New(%s); %s' % ( list_var, num_items, code.error_goto_if_null(list_var, self.pos))) code.put_gotref(list_var) code.putln("%s = 0;" % ulength_var) code.putln("%s = 127;" % max_char_var) # at least ASCII character range for i, node in enumerate(self.values): node.generate_evaluation_code(code) node.make_owned_reference(code) ulength = "__Pyx_PyUnicode_GET_LENGTH(%s)" % node.py_result() max_char_value = "__Pyx_PyUnicode_MAX_CHAR_VALUE(%s)" % node.py_result() is_ascii = False if isinstance(node, UnicodeNode): try: node.value.encode('iso8859-1') max_char_value = '255' node.value.encode('us-ascii') is_ascii = True except UnicodeEncodeError: pass else: ulength = str(len(node.value)) elif isinstance(node, FormattedValueNode) and 
node.value.type.is_numeric: is_ascii = True # formatted C numbers are always ASCII if not is_ascii: code.putln("%s = (%s > %s) ? %s : %s;" % ( max_char_var, max_char_value, max_char_var, max_char_value, max_char_var)) code.putln("%s += %s;" % (ulength_var, ulength)) code.put_giveref(node.py_result()) code.putln('PyTuple_SET_ITEM(%s, %s, %s);' % (list_var, i, node.py_result())) node.generate_post_assignment_code(code) node.free_temps(code) code.mark_pos(self.pos) self.allocate_temp_result(code) code.globalstate.use_utility_code(UtilityCode.load_cached("JoinPyUnicode", "StringTools.c")) code.putln('%s = __Pyx_PyUnicode_Join(%s, %d, %s, %s); %s' % ( self.result(), list_var, num_items, ulength_var, max_char_var, code.error_goto_if_null(self.py_result(), self.pos))) code.put_gotref(self.py_result()) code.put_decref_clear(list_var, py_object_type) code.funcstate.release_temp(list_var) code.funcstate.release_temp(ulength_var) code.funcstate.release_temp(max_char_var) class FormattedValueNode(ExprNode): # {}-delimited portions of an f-string # # value ExprNode The expression itself # conversion_char str or None Type conversion (!s, !r, !a, or none) # format_spec JoinedStrNode or None Format string passed to __format__ # c_format_spec str or None If not None, formatting can be done at the C level subexprs = ['value', 'format_spec'] type = unicode_type is_temp = True c_format_spec = None find_conversion_func = { 's': 'PyObject_Str', 'r': 'PyObject_Repr', 'a': 'PyObject_ASCII', # NOTE: mapped to PyObject_Repr() in Py2 }.get def may_be_none(self): # PyObject_Format() always returns a Unicode string or raises an exception return False def analyse_types(self, env): self.value = self.value.analyse_types(env) if not self.format_spec or self.format_spec.is_string_literal: c_format_spec = self.format_spec.value if self.format_spec else self.value.type.default_format_spec if self.value.type.can_coerce_to_pystring(env, format_spec=c_format_spec): self.c_format_spec = c_format_spec if self.format_spec: self.format_spec = self.format_spec.analyse_types(env).coerce_to_pyobject(env) if self.c_format_spec is None: self.value = self.value.coerce_to_pyobject(env) if not self.format_spec and not self.conversion_char: if self.value.type is unicode_type and not self.value.may_be_none(): # value is definitely a unicode string and we don't format it any special return self.value return self def generate_result_code(self, code): if self.c_format_spec is not None and not self.value.type.is_pyobject: convert_func_call = self.value.type.convert_to_pystring( self.value.result(), code, self.c_format_spec) code.putln("%s = %s; %s" % ( self.result(), convert_func_call, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) return value_result = self.value.py_result() value_is_unicode = self.value.type is unicode_type and not self.value.may_be_none() if self.format_spec: format_func = '__Pyx_PyObject_Format' format_spec = self.format_spec.py_result() else: # common case: expect simple Unicode pass-through if no format spec format_func = '__Pyx_PyObject_FormatSimple' # passing a Unicode format string in Py2 forces PyObject_Format() to also return a Unicode string format_spec = Naming.empty_unicode conversion_char = self.conversion_char if conversion_char == 's' and value_is_unicode: # no need to pipe unicode strings through str() conversion_char = None if conversion_char: fn = self.find_conversion_func(conversion_char) assert fn is not None, "invalid conversion character found: '%s'" % conversion_char 
value_result = '%s(%s)' % (fn, value_result) code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectFormatAndDecref", "StringTools.c")) format_func += 'AndDecref' elif self.format_spec: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectFormat", "StringTools.c")) else: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectFormatSimple", "StringTools.c")) code.putln("%s = %s(%s, %s); %s" % ( self.result(), format_func, value_result, format_spec, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) #------------------------------------------------------------------- # # Parallel nodes (cython.parallel.thread(savailable|id)) # #------------------------------------------------------------------- class ParallelThreadsAvailableNode(AtomicExprNode): """ Note: this is disabled and not a valid directive at this moment Implements cython.parallel.threadsavailable(). If we are called from the sequential part of the application, we need to call omp_get_max_threads(), and in the parallel part we can just call omp_get_num_threads() """ type = PyrexTypes.c_int_type def analyse_types(self, env): self.is_temp = True # env.add_include_file("omp.h") return self def generate_result_code(self, code): code.putln("#ifdef _OPENMP") code.putln("if (omp_in_parallel()) %s = omp_get_max_threads();" % self.temp_code) code.putln("else %s = omp_get_num_threads();" % self.temp_code) code.putln("#else") code.putln("%s = 1;" % self.temp_code) code.putln("#endif") def result(self): return self.temp_code class ParallelThreadIdNode(AtomicExprNode): #, Nodes.ParallelNode): """ Implements cython.parallel.threadid() """ type = PyrexTypes.c_int_type def analyse_types(self, env): self.is_temp = True # env.add_include_file("omp.h") return self def generate_result_code(self, code): code.putln("#ifdef _OPENMP") code.putln("%s = omp_get_thread_num();" % self.temp_code) code.putln("#else") code.putln("%s = 0;" % self.temp_code) code.putln("#endif") def result(self): return self.temp_code #------------------------------------------------------------------- # # Trailer nodes # #------------------------------------------------------------------- class _IndexingBaseNode(ExprNode): # Base class for indexing nodes. # # base ExprNode the value being indexed def is_ephemeral(self): # in most cases, indexing will return a safe reference to an object in a container, # so we consider the result safe if the base object is return self.base.is_ephemeral() or self.base.type in ( basestring_type, str_type, bytes_type, unicode_type) def check_const_addr(self): return self.base.check_const_addr() and self.index.check_const() def is_lvalue(self): # NOTE: references currently have both is_reference and is_ptr # set. Since pointers and references have different lvalue # rules, we must be careful to separate the two. if self.type.is_reference: if self.type.ref_base_type.is_array: # fixed-sized arrays aren't l-values return False elif self.type.is_ptr: # non-const pointers can always be reassigned return True # Just about everything else returned by the index operator # can be an lvalue. return True class IndexNode(_IndexingBaseNode): # Sequence indexing. 
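# A few illustrative constructs that start out as an IndexNode before
# analyse_types() dispatches them (the example names are assumptions):
#     obj[key]            # generic Python object indexing
#     carray[i]           # C array / pointer indexing
#     ctuple_val[0]       # C tuple component access
#     vector[int]         # indexing a template name with a type
#     fused_func[double]  # specialising a fused cdef function
# Buffer and memoryview indexing is split off into BufferIndexNode /
# MemoryView*Node replacements further below.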
# # base ExprNode # index ExprNode # type_indices [PyrexType] # # is_fused_index boolean Whether the index is used to specialize a # c(p)def function subexprs = ['base', 'index'] type_indices = None is_subscript = True is_fused_index = False def __init__(self, pos, index, **kw): ExprNode.__init__(self, pos, index=index, **kw) self._index = index def calculate_constant_result(self): self.constant_result = self.base.constant_result[self.index.constant_result] def compile_time_value(self, denv): base = self.base.compile_time_value(denv) index = self.index.compile_time_value(denv) try: return base[index] except Exception as e: self.compile_time_value_error(e) def is_simple(self): base = self.base return (base.is_simple() and self.index.is_simple() and base.type and (base.type.is_ptr or base.type.is_array)) def may_be_none(self): base_type = self.base.type if base_type: if base_type.is_string: return False if isinstance(self.index, SliceNode): # slicing! if base_type in (bytes_type, str_type, unicode_type, basestring_type, list_type, tuple_type): return False return ExprNode.may_be_none(self) def analyse_target_declaration(self, env): pass def analyse_as_type(self, env): base_type = self.base.analyse_as_type(env) if base_type and not base_type.is_pyobject: if base_type.is_cpp_class: if isinstance(self.index, TupleNode): template_values = self.index.args else: template_values = [self.index] type_node = Nodes.TemplatedTypeNode( pos=self.pos, positional_args=template_values, keyword_args=None) return type_node.analyse(env, base_type=base_type) else: index = self.index.compile_time_value(env) if index is not None: return PyrexTypes.CArrayType(base_type, int(index)) error(self.pos, "Array size must be a compile time constant") return None def type_dependencies(self, env): return self.base.type_dependencies(env) + self.index.type_dependencies(env) def infer_type(self, env): base_type = self.base.infer_type(env) if self.index.is_slice: # slicing! if base_type.is_string: # sliced C strings must coerce to Python return bytes_type elif base_type.is_pyunicode_ptr: # sliced Py_UNICODE* strings must coerce to Python return unicode_type elif base_type in (unicode_type, bytes_type, str_type, bytearray_type, list_type, tuple_type): # slicing these returns the same type return base_type else: # TODO: Handle buffers (hopefully without too much redundancy). return py_object_type index_type = self.index.infer_type(env) if index_type and index_type.is_int or isinstance(self.index, IntNode): # indexing! if base_type is unicode_type: # Py_UCS4 will automatically coerce to a unicode string # if required, so this is safe. We only infer Py_UCS4 # when the index is a C integer type. Otherwise, we may # need to use normal Python item access, in which case # it's faster to return the one-char unicode string than # to receive it, throw it away, and potentially rebuild it # on a subsequent PyObject coercion. 
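# Hedged illustration of the inference rule described above:
#     cdef unicode s
#     cdef int i
#     c = s[i]      # inferred as Py_UCS4 (C integer index)
#     c = s[obj]    # non-integer index: inferred as a unicode object instead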
return PyrexTypes.c_py_ucs4_type elif base_type is str_type: # always returns str - Py2: bytes, Py3: unicode return base_type elif base_type is bytearray_type: return PyrexTypes.c_uchar_type elif isinstance(self.base, BytesNode): #if env.global_scope().context.language_level >= 3: # # inferring 'char' can be made to work in Python 3 mode # return PyrexTypes.c_char_type # Py2/3 return different types on indexing bytes objects return py_object_type elif base_type in (tuple_type, list_type): # if base is a literal, take a look at its values item_type = infer_sequence_item_type( env, self.base, self.index, seq_type=base_type) if item_type is not None: return item_type elif base_type.is_ptr or base_type.is_array: return base_type.base_type elif base_type.is_ctuple and isinstance(self.index, IntNode): if self.index.has_constant_result(): index = self.index.constant_result if index < 0: index += base_type.size if 0 <= index < base_type.size: return base_type.components[index] if base_type.is_cpp_class: class FakeOperand: def __init__(self, **kwds): self.__dict__.update(kwds) operands = [ FakeOperand(pos=self.pos, type=base_type), FakeOperand(pos=self.pos, type=index_type), ] index_func = env.lookup_operator('[]', operands) if index_func is not None: return index_func.type.return_type # may be slicing or indexing, we don't know if base_type in (unicode_type, str_type): # these types always returns their own type on Python indexing/slicing return base_type else: # TODO: Handle buffers (hopefully without too much redundancy). return py_object_type def analyse_types(self, env): return self.analyse_base_and_index_types(env, getting=True) def analyse_target_types(self, env): node = self.analyse_base_and_index_types(env, setting=True) if node.type.is_const: error(self.pos, "Assignment to const dereference") if node is self and not node.is_lvalue(): error(self.pos, "Assignment to non-lvalue of type '%s'" % node.type) return node def analyse_base_and_index_types(self, env, getting=False, setting=False, analyse_base=True): # Note: This might be cleaned up by having IndexNode # parsed in a saner way and only construct the tuple if # needed. if analyse_base: self.base = self.base.analyse_types(env) if self.base.type.is_error: # Do not visit child tree if base is undeclared to avoid confusing # error messages self.type = PyrexTypes.error_type return self is_slice = self.index.is_slice if not env.directives['wraparound']: if is_slice: check_negative_indices(self.index.start, self.index.stop) else: check_negative_indices(self.index) # Potentially overflowing index value. 
if not is_slice and isinstance(self.index, IntNode) and Utils.long_literal(self.index.value): self.index = self.index.coerce_to_pyobject(env) is_memslice = self.base.type.is_memoryviewslice # Handle the case where base is a literal char* (and we expect a string, not an int) if not is_memslice and (isinstance(self.base, BytesNode) or is_slice): if self.base.type.is_string or not (self.base.type.is_ptr or self.base.type.is_array): self.base = self.base.coerce_to_pyobject(env) replacement_node = self.analyse_as_buffer_operation(env, getting) if replacement_node is not None: return replacement_node self.nogil = env.nogil base_type = self.base.type if not base_type.is_cfunction: self.index = self.index.analyse_types(env) self.original_index_type = self.index.type if base_type.is_unicode_char: # we infer Py_UNICODE/Py_UCS4 for unicode strings in some # cases, but indexing must still work for them if setting: warning(self.pos, "cannot assign to Unicode string index", level=1) elif self.index.constant_result in (0, -1): # uchar[0] => uchar return self.base self.base = self.base.coerce_to_pyobject(env) base_type = self.base.type if base_type.is_pyobject: return self.analyse_as_pyobject(env, is_slice, getting, setting) elif base_type.is_ptr or base_type.is_array: return self.analyse_as_c_array(env, is_slice) elif base_type.is_cpp_class: return self.analyse_as_cpp(env, setting) elif base_type.is_cfunction: return self.analyse_as_c_function(env) elif base_type.is_ctuple: return self.analyse_as_c_tuple(env, getting, setting) else: error(self.pos, "Attempting to index non-array type '%s'" % base_type) self.type = PyrexTypes.error_type return self def analyse_as_pyobject(self, env, is_slice, getting, setting): base_type = self.base.type if self.index.type.is_unicode_char and base_type is not dict_type: # TODO: eventually fold into case below and remove warning, once people have adapted their code warning(self.pos, "Item lookup of unicode character codes now always converts to a Unicode string. 
" "Use an explicit C integer cast to get back the previous integer lookup behaviour.", level=1) self.index = self.index.coerce_to_pyobject(env) self.is_temp = 1 elif self.index.type.is_int and base_type is not dict_type: if (getting and (base_type in (list_type, tuple_type, bytearray_type)) and (not self.index.type.signed or not env.directives['wraparound'] or (isinstance(self.index, IntNode) and self.index.has_constant_result() and self.index.constant_result >= 0)) and not env.directives['boundscheck']): self.is_temp = 0 else: self.is_temp = 1 self.index = self.index.coerce_to(PyrexTypes.c_py_ssize_t_type, env).coerce_to_simple(env) self.original_index_type.create_to_py_utility_code(env) else: self.index = self.index.coerce_to_pyobject(env) self.is_temp = 1 if self.index.type.is_int and base_type is unicode_type: # Py_UNICODE/Py_UCS4 will automatically coerce to a unicode string # if required, so this is fast and safe self.type = PyrexTypes.c_py_ucs4_type elif self.index.type.is_int and base_type is bytearray_type: if setting: self.type = PyrexTypes.c_uchar_type else: # not using 'uchar' to enable fast and safe error reporting as '-1' self.type = PyrexTypes.c_int_type elif is_slice and base_type in (bytes_type, str_type, unicode_type, list_type, tuple_type): self.type = base_type else: item_type = None if base_type in (list_type, tuple_type) and self.index.type.is_int: item_type = infer_sequence_item_type( env, self.base, self.index, seq_type=base_type) if item_type is None: item_type = py_object_type self.type = item_type if base_type in (list_type, tuple_type, dict_type): # do the None check explicitly (not in a helper) to allow optimising it away self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") self.wrap_in_nonecheck_node(env, getting) return self def analyse_as_c_array(self, env, is_slice): base_type = self.base.type self.type = base_type.base_type if is_slice: self.type = base_type elif self.index.type.is_pyobject: self.index = self.index.coerce_to(PyrexTypes.c_py_ssize_t_type, env) elif not self.index.type.is_int: error(self.pos, "Invalid index type '%s'" % self.index.type) return self def analyse_as_cpp(self, env, setting): base_type = self.base.type function = env.lookup_operator("[]", [self.base, self.index]) if function is None: error(self.pos, "Indexing '%s' not supported for index type '%s'" % (base_type, self.index.type)) self.type = PyrexTypes.error_type self.result_code = "" return self func_type = function.type if func_type.is_ptr: func_type = func_type.base_type self.exception_check = func_type.exception_check self.exception_value = func_type.exception_value if self.exception_check: if not setting: self.is_temp = True if self.exception_value is None: env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) self.index = self.index.coerce_to(func_type.args[0].type, env) self.type = func_type.return_type if setting and not func_type.return_type.is_reference: error(self.pos, "Can't set non-reference result '%s'" % self.type) return self def analyse_as_c_function(self, env): base_type = self.base.type if base_type.is_fused: self.parse_indexed_fused_cdef(env) else: self.type_indices = self.parse_index_as_types(env) self.index = None # FIXME: use a dedicated Node class instead of generic IndexNode if base_type.templates is None: error(self.pos, "Can only parameterize template functions.") self.type = error_type elif self.type_indices is None: # Error recorded earlier. 
self.type = error_type elif len(base_type.templates) != len(self.type_indices): error(self.pos, "Wrong number of template arguments: expected %s, got %s" % ( (len(base_type.templates), len(self.type_indices)))) self.type = error_type else: self.type = base_type.specialize(dict(zip(base_type.templates, self.type_indices))) # FIXME: use a dedicated Node class instead of generic IndexNode return self def analyse_as_c_tuple(self, env, getting, setting): base_type = self.base.type if isinstance(self.index, IntNode) and self.index.has_constant_result(): index = self.index.constant_result if -base_type.size <= index < base_type.size: if index < 0: index += base_type.size self.type = base_type.components[index] else: error(self.pos, "Index %s out of bounds for '%s'" % (index, base_type)) self.type = PyrexTypes.error_type return self else: self.base = self.base.coerce_to_pyobject(env) return self.analyse_base_and_index_types(env, getting=getting, setting=setting, analyse_base=False) def analyse_as_buffer_operation(self, env, getting): """ Analyse buffer indexing and memoryview indexing/slicing """ if isinstance(self.index, TupleNode): indices = self.index.args else: indices = [self.index] base_type = self.base.type replacement_node = None if base_type.is_memoryviewslice: # memoryviewslice indexing or slicing from . import MemoryView have_slices, indices, newaxes = MemoryView.unellipsify(indices, base_type.ndim) if have_slices: replacement_node = MemoryViewSliceNode(self.pos, indices=indices, base=self.base) else: replacement_node = MemoryViewIndexNode(self.pos, indices=indices, base=self.base) elif base_type.is_buffer or base_type.is_pythran_expr: if base_type.is_pythran_expr or len(indices) == base_type.ndim: # Buffer indexing is_buffer_access = True indices = [index.analyse_types(env) for index in indices] if base_type.is_pythran_expr: do_replacement = all(index.type.is_int or index.is_slice or index.type.is_pythran_expr for index in indices) if do_replacement: for i,index in enumerate(indices): if index.is_slice: index = SliceIntNode(index.pos, start=index.start, stop=index.stop, step=index.step) index = index.analyse_types(env) indices[i] = index else: do_replacement = all(index.type.is_int for index in indices) if do_replacement: replacement_node = BufferIndexNode(self.pos, indices=indices, base=self.base) # On cloning, indices is cloned. Otherwise, unpack index into indices. assert not isinstance(self.index, CloneNode) if replacement_node is not None: replacement_node = replacement_node.analyse_types(env, getting) return replacement_node def wrap_in_nonecheck_node(self, env, getting): if not env.directives['nonecheck'] or not self.base.may_be_none(): return self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") def parse_index_as_types(self, env, required=True): if isinstance(self.index, TupleNode): indices = self.index.args else: indices = [self.index] type_indices = [] for index in indices: type_indices.append(index.analyse_as_type(env)) if type_indices[-1] is None: if required: error(index.pos, "not parsable as a type") return None return type_indices def parse_indexed_fused_cdef(self, env): """ Interpret fused_cdef_func[specific_type1, ...] Note that if this method is called, we are an indexed cdef function with fused argument types, and this IndexNode will be replaced by the NameNode with specific entry just after analysis of expressions by AnalyseExpressionsTransform. 
""" self.type = PyrexTypes.error_type self.is_fused_index = True base_type = self.base.type positions = [] if self.index.is_name or self.index.is_attribute: positions.append(self.index.pos) elif isinstance(self.index, TupleNode): for arg in self.index.args: positions.append(arg.pos) specific_types = self.parse_index_as_types(env, required=False) if specific_types is None: self.index = self.index.analyse_types(env) if not self.base.entry.as_variable: error(self.pos, "Can only index fused functions with types") else: # A cpdef function indexed with Python objects self.base.entry = self.entry = self.base.entry.as_variable self.base.type = self.type = self.entry.type self.base.is_temp = True self.is_temp = True self.entry.used = True self.is_fused_index = False return for i, type in enumerate(specific_types): specific_types[i] = type.specialize_fused(env) fused_types = base_type.get_fused_types() if len(specific_types) > len(fused_types): return error(self.pos, "Too many types specified") elif len(specific_types) < len(fused_types): t = fused_types[len(specific_types)] return error(self.pos, "Not enough types specified to specialize " "the function, %s is still fused" % t) # See if our index types form valid specializations for pos, specific_type, fused_type in zip(positions, specific_types, fused_types): if not any([specific_type.same_as(t) for t in fused_type.types]): return error(pos, "Type not in fused type") if specific_type is None or specific_type.is_error: return fused_to_specific = dict(zip(fused_types, specific_types)) type = base_type.specialize(fused_to_specific) if type.is_fused: # Only partially specific, this is invalid error(self.pos, "Index operation makes function only partially specific") else: # Fully specific, find the signature with the specialized entry for signature in self.base.type.get_all_specialized_function_types(): if type.same_as(signature): self.type = signature if self.base.is_attribute: # Pretend to be a normal attribute, for cdef extension # methods self.entry = signature.entry self.is_attribute = True self.obj = self.base.obj self.type.entry.used = True self.base.type = signature self.base.entry = signature.entry break else: # This is a bug raise InternalError("Couldn't find the right signature") gil_message = "Indexing Python object" def calculate_result_code(self): if self.base.type in (list_type, tuple_type, bytearray_type): if self.base.type is list_type: index_code = "PyList_GET_ITEM(%s, %s)" elif self.base.type is tuple_type: index_code = "PyTuple_GET_ITEM(%s, %s)" elif self.base.type is bytearray_type: index_code = "((unsigned char)(PyByteArray_AS_STRING(%s)[%s]))" else: assert False, "unexpected base type in indexing: %s" % self.base.type elif self.base.type.is_cfunction: return "%s<%s>" % ( self.base.result(), ",".join([param.empty_declaration_code() for param in self.type_indices])) elif self.base.type.is_ctuple: index = self.index.constant_result if index < 0: index += self.base.type.size return "%s.f%s" % (self.base.result(), index) else: if (self.type.is_ptr or self.type.is_array) and self.type == self.base.type: error(self.pos, "Invalid use of pointer slice") return index_code = "(%s[%s])" return index_code % (self.base.result(), self.index.result()) def extra_index_params(self, code): if self.index.type.is_int: is_list = self.base.type is list_type wraparound = ( bool(code.globalstate.directives['wraparound']) and self.original_index_type.signed and not (isinstance(self.index.constant_result, _py_int_types) and self.index.constant_result >= 
0)) boundscheck = bool(code.globalstate.directives['boundscheck']) return ", %s, %d, %s, %d, %d, %d" % ( self.original_index_type.empty_declaration_code(), self.original_index_type.signed and 1 or 0, self.original_index_type.to_py_function, is_list, wraparound, boundscheck) else: return "" def generate_result_code(self, code): if not self.is_temp: # all handled in self.calculate_result_code() return if self.type.is_pyobject: error_value = 'NULL' if self.index.type.is_int: if self.base.type is list_type: function = "__Pyx_GetItemInt_List" elif self.base.type is tuple_type: function = "__Pyx_GetItemInt_Tuple" else: function = "__Pyx_GetItemInt" code.globalstate.use_utility_code( TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c")) else: if self.base.type is dict_type: function = "__Pyx_PyDict_GetItem" code.globalstate.use_utility_code( UtilityCode.load_cached("DictGetItem", "ObjectHandling.c")) else: function = "PyObject_GetItem" elif self.type.is_unicode_char and self.base.type is unicode_type: assert self.index.type.is_int function = "__Pyx_GetItemInt_Unicode" error_value = '(Py_UCS4)-1' code.globalstate.use_utility_code( UtilityCode.load_cached("GetItemIntUnicode", "StringTools.c")) elif self.base.type is bytearray_type: assert self.index.type.is_int assert self.type.is_int function = "__Pyx_GetItemInt_ByteArray" error_value = '-1' code.globalstate.use_utility_code( UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c")) elif not (self.base.type.is_cpp_class and self.exception_check): assert False, "unexpected type %s and base type %s for indexing" % ( self.type, self.base.type) if self.index.type.is_int: index_code = self.index.result() else: index_code = self.index.py_result() if self.base.type.is_cpp_class and self.exception_check: translate_cpp_exception(code, self.pos, "%s = %s[%s];" % (self.result(), self.base.result(), self.index.result()), self.exception_value, self.in_nogil_context) else: error_check = '!%s' if error_value == 'NULL' else '%%s == %s' % error_value code.putln( "%s = %s(%s, %s%s); %s" % ( self.result(), function, self.base.py_result(), index_code, self.extra_index_params(code), code.error_goto_if(error_check % self.result(), self.pos))) if self.type.is_pyobject: code.put_gotref(self.py_result()) def generate_setitem_code(self, value_code, code): if self.index.type.is_int: if self.base.type is bytearray_type: code.globalstate.use_utility_code( UtilityCode.load_cached("SetItemIntByteArray", "StringTools.c")) function = "__Pyx_SetItemInt_ByteArray" else: code.globalstate.use_utility_code( UtilityCode.load_cached("SetItemInt", "ObjectHandling.c")) function = "__Pyx_SetItemInt" index_code = self.index.result() else: index_code = self.index.py_result() if self.base.type is dict_type: function = "PyDict_SetItem" # It would seem that we could specialized lists/tuples, but that # shouldn't happen here. # Both PyList_SetItem() and PyTuple_SetItem() take a Py_ssize_t as # index instead of an object, and bad conversion here would give # the wrong exception. Also, tuples are supposed to be immutable, # and raise a TypeError when trying to set their entries # (PyTuple_SetItem() is for creating new tuples from scratch). 
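# Illustrative summary of the dispatch in this method (helper names as used
# above/below; the example statements are assumptions):
#     lst[i] = v        # C integer index        -> __Pyx_SetItemInt
#     ba[i] = v         # bytearray, int index   -> __Pyx_SetItemInt_ByteArray
#     d[key] = v        # dict, non-integer key  -> PyDict_SetItem
#     obj[key] = v      # generic fallback       -> PyObject_SetItem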
else: function = "PyObject_SetItem" code.putln(code.error_goto_if_neg( "%s(%s, %s, %s%s)" % ( function, self.base.py_result(), index_code, value_code, self.extra_index_params(code)), self.pos)) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): self.generate_subexpr_evaluation_code(code) if self.type.is_pyobject: self.generate_setitem_code(rhs.py_result(), code) elif self.base.type is bytearray_type: value_code = self._check_byte_value(code, rhs) self.generate_setitem_code(value_code, code) elif self.base.type.is_cpp_class and self.exception_check and self.exception_check == '+': if overloaded_assignment and exception_check and \ self.exception_value != exception_value: # Handle the case that both the index operator and the assignment # operator have a c++ exception handler and they are not the same. translate_double_cpp_exception(code, self.pos, self.type, self.result(), rhs.result(), self.exception_value, exception_value, self.in_nogil_context) else: # Handle the case that only the index operator has a # c++ exception handler, or that # both exception handlers are the same. translate_cpp_exception(code, self.pos, "%s = %s;" % (self.result(), rhs.result()), self.exception_value, self.in_nogil_context) else: code.putln( "%s = %s;" % (self.result(), rhs.result())) self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) rhs.generate_disposal_code(code) rhs.free_temps(code) def _check_byte_value(self, code, rhs): # TODO: should we do this generally on downcasts, or just here? assert rhs.type.is_int, repr(rhs.type) value_code = rhs.result() if rhs.has_constant_result(): if 0 <= rhs.constant_result < 256: return value_code needs_cast = True # make at least the C compiler happy warning(rhs.pos, "value outside of range(0, 256)" " when assigning to byte: %s" % rhs.constant_result, level=1) else: needs_cast = rhs.type != PyrexTypes.c_uchar_type if not self.nogil: conditions = [] if rhs.is_literal or rhs.type.signed: conditions.append('%s < 0' % value_code) if (rhs.is_literal or not (rhs.is_temp and rhs.type in ( PyrexTypes.c_uchar_type, PyrexTypes.c_char_type, PyrexTypes.c_schar_type))): conditions.append('%s > 255' % value_code) if conditions: code.putln("if (unlikely(%s)) {" % ' || '.join(conditions)) code.putln( 'PyErr_SetString(PyExc_ValueError,' ' "byte must be in range(0, 256)"); %s' % code.error_goto(self.pos)) code.putln("}") if needs_cast: value_code = '((unsigned char)%s)' % value_code return value_code def generate_deletion_code(self, code, ignore_nonexisting=False): self.generate_subexpr_evaluation_code(code) #if self.type.is_pyobject: if self.index.type.is_int: function = "__Pyx_DelItemInt" index_code = self.index.result() code.globalstate.use_utility_code( UtilityCode.load_cached("DelItemInt", "ObjectHandling.c")) else: index_code = self.index.py_result() if self.base.type is dict_type: function = "PyDict_DelItem" else: function = "PyObject_DelItem" code.putln(code.error_goto_if_neg( "%s(%s, %s%s)" % ( function, self.base.py_result(), index_code, self.extra_index_params(code)), self.pos)) self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) class BufferIndexNode(_IndexingBaseNode): """ Indexing of buffers and memoryviews. This node is created during type analysis from IndexNode and replaces it. 
Attributes: base - base node being indexed indices - list of indexing expressions """ subexprs = ['base', 'indices'] is_buffer_access = True # Whether we're assigning to a buffer (in that case it needs to be writable) writable_needed = False def analyse_target_types(self, env): self.analyse_types(env, getting=False) def analyse_types(self, env, getting=True): """ Analyse types for buffer indexing only. Overridden by memoryview indexing and slicing subclasses """ # self.indices are already analyzed if not self.base.is_name and not is_pythran_expr(self.base.type): error(self.pos, "Can only index buffer variables") self.type = error_type return self if not getting: if not self.base.entry.type.writable: error(self.pos, "Writing to readonly buffer") else: self.writable_needed = True if self.base.type.is_buffer: self.base.entry.buffer_aux.writable_needed = True self.none_error_message = "'NoneType' object is not subscriptable" self.analyse_buffer_index(env, getting) self.wrap_in_nonecheck_node(env) return self def analyse_buffer_index(self, env, getting): if is_pythran_expr(self.base.type): self.type = PythranExpr(pythran_indexing_type(self.base.type, self.indices)) else: self.base = self.base.coerce_to_simple(env) self.type = self.base.type.dtype self.buffer_type = self.base.type if getting and (self.type.is_pyobject or self.type.is_pythran_expr): self.is_temp = True def analyse_assignment(self, rhs): """ Called by IndexNode when this node is assigned to, with the rhs of the assignment """ def wrap_in_nonecheck_node(self, env): if not env.directives['nonecheck'] or not self.base.may_be_none(): return self.base = self.base.as_none_safe_node(self.none_error_message) def nogil_check(self, env): if self.is_buffer_access or self.is_memview_index: if env.directives['boundscheck']: warning(self.pos, "Use boundscheck(False) for faster access", level=1) if self.type.is_pyobject: error(self.pos, "Cannot access buffer with object dtype without gil") self.type = error_type def calculate_result_code(self): return "(*%s)" % self.buffer_ptr_code def buffer_entry(self): base = self.base if self.base.is_nonecheck: base = base.arg return base.type.get_entry(base) def get_index_in_temp(self, code, ivar): ret = code.funcstate.allocate_temp( PyrexTypes.widest_numeric_type( ivar.type, PyrexTypes.c_ssize_t_type if ivar.type.signed else PyrexTypes.c_size_t_type), manage_ref=False) code.putln("%s = %s;" % (ret, ivar.result())) return ret def buffer_lookup_code(self, code): """ ndarray[1, 2, 3] and memslice[1, 2, 3] """ # Assign indices to temps of at least (s)size_t to allow further index calculations. index_temps = [self.get_index_in_temp(code,ivar) for ivar in self.indices] # Generate buffer access code using these temps from . 
import Buffer buffer_entry = self.buffer_entry() if buffer_entry.type.is_buffer: negative_indices = buffer_entry.type.negative_indices else: negative_indices = Buffer.buffer_defaults['negative_indices'] return buffer_entry, Buffer.put_buffer_lookup_code( entry=buffer_entry, index_signeds=[ivar.type.signed for ivar in self.indices], index_cnames=index_temps, directives=code.globalstate.directives, pos=self.pos, code=code, negative_indices=negative_indices, in_nogil_context=self.in_nogil_context) def generate_assignment_code(self, rhs, code, overloaded_assignment=False): self.generate_subexpr_evaluation_code(code) self.generate_buffer_setitem_code(rhs, code) self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) rhs.generate_disposal_code(code) rhs.free_temps(code) def generate_buffer_setitem_code(self, rhs, code, op=""): base_type = self.base.type if is_pythran_expr(base_type) and is_pythran_supported_type(rhs.type): obj = code.funcstate.allocate_temp(PythranExpr(pythran_type(self.base.type)), manage_ref=False) # We have got to do this because we have to declare pythran objects # at the beggining of the functions. # Indeed, Cython uses "goto" statement for error management, and # RAII doesn't work with that kind of construction. # Moreover, the way Pythran expressions are made is that they don't # support move-assignation easily. # This, we explicitly destroy then in-place new objects in this # case. code.putln("__Pyx_call_destructor(%s);" % obj) code.putln("new (&%s) decltype(%s){%s};" % (obj, obj, self.base.pythran_result())) code.putln("%s(%s) %s= %s;" % ( obj, pythran_indexing_code(self.indices), op, rhs.pythran_result())) return # Used from generate_assignment_code and InPlaceAssignmentNode buffer_entry, ptrexpr = self.buffer_lookup_code(code) if self.buffer_type.dtype.is_pyobject: # Must manage refcounts. Decref what is already there # and incref what we put in. ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type, manage_ref=False) rhs_code = rhs.result() code.putln("%s = %s;" % (ptr, ptrexpr)) code.put_gotref("*%s" % ptr) code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % ( rhs_code, ptr)) code.putln("*%s %s= %s;" % (ptr, op, rhs_code)) code.put_giveref("*%s" % ptr) code.funcstate.release_temp(ptr) else: # Simple case code.putln("*%s %s= %s;" % (ptrexpr, op, rhs.result())) def generate_result_code(self, code): if is_pythran_expr(self.base.type): res = self.result() code.putln("__Pyx_call_destructor(%s);" % res) code.putln("new (&%s) decltype(%s){%s(%s)};" % ( res, res, self.base.pythran_result(), pythran_indexing_code(self.indices))) return buffer_entry, self.buffer_ptr_code = self.buffer_lookup_code(code) if self.type.is_pyobject: # is_temp is True, so must pull out value and incref it. # NOTE: object temporary results for nodes are declared # as PyObject *, so we need a cast code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) class MemoryViewIndexNode(BufferIndexNode): is_memview_index = True is_buffer_access = False warned_untyped_idx = False def analyse_types(self, env, getting=True): # memoryviewslice indexing or slicing from . 
import MemoryView self.is_pythran_mode = has_np_pythran(env) indices = self.indices have_slices, indices, newaxes = MemoryView.unellipsify(indices, self.base.type.ndim) self.memslice_index = (not newaxes and len(indices) == self.base.type.ndim) axes = [] index_type = PyrexTypes.c_py_ssize_t_type new_indices = [] if len(indices) - len(newaxes) > self.base.type.ndim: self.type = error_type error(indices[self.base.type.ndim].pos, "Too many indices specified for type %s" % self.base.type) return self axis_idx = 0 for i, index in enumerate(indices[:]): index = index.analyse_types(env) if index.is_none: self.is_memview_slice = True new_indices.append(index) axes.append(('direct', 'strided')) continue access, packing = self.base.type.axes[axis_idx] axis_idx += 1 if index.is_slice: self.is_memview_slice = True if index.step.is_none: axes.append((access, packing)) else: axes.append((access, 'strided')) # Coerce start, stop and step to temps of the right type for attr in ('start', 'stop', 'step'): value = getattr(index, attr) if not value.is_none: value = value.coerce_to(index_type, env) #value = value.coerce_to_temp(env) setattr(index, attr, value) new_indices.append(value) elif index.type.is_int or index.type.is_pyobject: if index.type.is_pyobject and not self.warned_untyped_idx: warning(index.pos, "Index should be typed for more efficient access", level=2) MemoryViewIndexNode.warned_untyped_idx = True self.is_memview_index = True index = index.coerce_to(index_type, env) indices[i] = index new_indices.append(index) else: self.type = error_type error(index.pos, "Invalid index for memoryview specified, type %s" % index.type) return self ### FIXME: replace by MemoryViewSliceNode if is_memview_slice ? self.is_memview_index = self.is_memview_index and not self.is_memview_slice self.indices = new_indices # All indices with all start/stop/step for slices. # We need to keep this around. self.original_indices = indices self.nogil = env.nogil self.analyse_operation(env, getting, axes) self.wrap_in_nonecheck_node(env) return self def analyse_operation(self, env, getting, axes): self.none_error_message = "Cannot index None memoryview slice" self.analyse_buffer_index(env, getting) def analyse_broadcast_operation(self, rhs): """ Support broadcasting for slice assignment. E.g. m_2d[...] = m_1d # or, m_1d[...] = m_2d # if the leading dimension has extent 1 """ if self.type.is_memoryviewslice: lhs = self if lhs.is_memview_broadcast or rhs.is_memview_broadcast: lhs.is_memview_broadcast = True rhs.is_memview_broadcast = True def analyse_as_memview_scalar_assignment(self, rhs): lhs = self.analyse_assignment(rhs) if lhs: rhs.is_memview_copy_assignment = lhs.is_memview_copy_assignment return lhs return self class MemoryViewSliceNode(MemoryViewIndexNode): is_memview_slice = True # No-op slicing operation, this node will be replaced is_ellipsis_noop = False is_memview_scalar_assignment = False is_memview_index = False is_memview_broadcast = False def analyse_ellipsis_noop(self, env, getting): """Slicing operations needing no evaluation, i.e. m[...] or m[:, :]""" ### FIXME: replace directly self.is_ellipsis_noop = all( index.is_slice and index.start.is_none and index.stop.is_none and index.step.is_none for index in self.indices) if self.is_ellipsis_noop: self.type = self.base.type def analyse_operation(self, env, getting, axes): from . 
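# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# Sketch of the condition tested by analyse_ellipsis_noop() above: a slicing
# operation needs no evaluation when every index is a bare ':' slice, i.e.
# start, stop and step are all None (m[...] expands to exactly that).
# The helper name is hypothetical.
def _sketch_is_ellipsis_noop(indices):
    return all(
        isinstance(index, slice)
        and index.start is None and index.stop is None and index.step is None
        for index in indices)

assert _sketch_is_ellipsis_noop((slice(None), slice(None)))    # m[:, :]
assert not _sketch_is_ellipsis_noop((slice(None), 0))          # m[:, 0]
# ----------------------------------------------------------------------------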
import MemoryView if not getting: self.is_memview_broadcast = True self.none_error_message = "Cannot assign to None memoryview slice" else: self.none_error_message = "Cannot slice None memoryview slice" self.analyse_ellipsis_noop(env, getting) if self.is_ellipsis_noop: return self.index = None self.is_temp = True self.use_managed_ref = True if not MemoryView.validate_axes(self.pos, axes): self.type = error_type return self.type = PyrexTypes.MemoryViewSliceType(self.base.type.dtype, axes) if not (self.base.is_simple() or self.base.result_in_temp()): self.base = self.base.coerce_to_temp(env) def analyse_assignment(self, rhs): if not rhs.type.is_memoryviewslice and ( self.type.dtype.assignable_from(rhs.type) or rhs.type.is_pyobject): # scalar assignment return MemoryCopyScalar(self.pos, self) else: return MemoryCopySlice(self.pos, self) def is_simple(self): if self.is_ellipsis_noop: # TODO: fix SimpleCallNode.is_simple() return self.base.is_simple() or self.base.result_in_temp() return self.result_in_temp() def calculate_result_code(self): """This is called in case this is a no-op slicing node""" return self.base.result() def generate_result_code(self, code): if self.is_ellipsis_noop: return ### FIXME: remove buffer_entry = self.buffer_entry() have_gil = not self.in_nogil_context # TODO Mark: this is insane, do it better have_slices = False it = iter(self.indices) for index in self.original_indices: if index.is_slice: have_slices = True if not index.start.is_none: index.start = next(it) if not index.stop.is_none: index.stop = next(it) if not index.step.is_none: index.step = next(it) else: next(it) assert not list(it) buffer_entry.generate_buffer_slice_code( code, self.original_indices, self.result(), have_gil=have_gil, have_slices=have_slices, directives=code.globalstate.directives) def generate_assignment_code(self, rhs, code, overloaded_assignment=False): if self.is_ellipsis_noop: self.generate_subexpr_evaluation_code(code) else: self.generate_evaluation_code(code) if self.is_memview_scalar_assignment: self.generate_memoryviewslice_assign_scalar_code(rhs, code) else: self.generate_memoryviewslice_setslice_code(rhs, code) if self.is_ellipsis_noop: self.generate_subexpr_disposal_code(code) else: self.generate_disposal_code(code) rhs.generate_disposal_code(code) rhs.free_temps(code) class MemoryCopyNode(ExprNode): """ Wraps a memoryview slice for slice assignment. dst: destination mememoryview slice """ subexprs = ['dst'] def __init__(self, pos, dst): super(MemoryCopyNode, self).__init__(pos) self.dst = dst self.type = dst.type def generate_assignment_code(self, rhs, code, overloaded_assignment=False): self.dst.generate_evaluation_code(code) self._generate_assignment_code(rhs, code) self.dst.generate_disposal_code(code) rhs.generate_disposal_code(code) rhs.free_temps(code) class MemoryCopySlice(MemoryCopyNode): """ Copy the contents of slice src to slice dst. Does not support indirect slices. memslice1[...] = memslice2 memslice1[:] = memslice2 """ is_memview_copy_assignment = True copy_slice_cname = "__pyx_memoryview_copy_contents" def _generate_assignment_code(self, src, code): dst = self.dst src.type.assert_direct_dims(src.pos) dst.type.assert_direct_dims(dst.pos) code.putln(code.error_goto_if_neg( "%s(%s, %s, %d, %d, %d)" % (self.copy_slice_cname, src.result(), dst.result(), src.type.ndim, dst.type.ndim, dst.type.dtype.is_pyobject), dst.pos)) class MemoryCopyScalar(MemoryCopyNode): """ Assign a scalar to a slice. 
dst must be simple, scalar will be assigned to a correct type and not just something assignable. memslice1[...] = 0.0 memslice1[:] = 0.0 """ def __init__(self, pos, dst): super(MemoryCopyScalar, self).__init__(pos, dst) self.type = dst.type.dtype def _generate_assignment_code(self, scalar, code): from . import MemoryView self.dst.type.assert_direct_dims(self.dst.pos) dtype = self.dst.type.dtype type_decl = dtype.declaration_code("") slice_decl = self.dst.type.declaration_code("") code.begin_block() code.putln("%s __pyx_temp_scalar = %s;" % (type_decl, scalar.result())) if self.dst.result_in_temp() or self.dst.is_simple(): dst_temp = self.dst.result() else: code.putln("%s __pyx_temp_slice = %s;" % (slice_decl, self.dst.result())) dst_temp = "__pyx_temp_slice" slice_iter_obj = MemoryView.slice_iter(self.dst.type, dst_temp, self.dst.type.ndim, code) p = slice_iter_obj.start_loops() if dtype.is_pyobject: code.putln("Py_DECREF(*(PyObject **) %s);" % p) code.putln("*((%s *) %s) = __pyx_temp_scalar;" % (type_decl, p)) if dtype.is_pyobject: code.putln("Py_INCREF(__pyx_temp_scalar);") slice_iter_obj.end_loops() code.end_block() class SliceIndexNode(ExprNode): # 2-element slice indexing # # base ExprNode # start ExprNode or None # stop ExprNode or None # slice ExprNode or None constant slice object subexprs = ['base', 'start', 'stop', 'slice'] slice = None def infer_type(self, env): base_type = self.base.infer_type(env) if base_type.is_string or base_type.is_cpp_class: return bytes_type elif base_type.is_pyunicode_ptr: return unicode_type elif base_type in (bytes_type, str_type, unicode_type, basestring_type, list_type, tuple_type): return base_type elif base_type.is_ptr or base_type.is_array: return PyrexTypes.c_array_type(base_type.base_type, None) return py_object_type def inferable_item_node(self, index=0): # slicing shouldn't change the result type of the base, but the index might if index is not not_a_constant and self.start: if self.start.has_constant_result(): index += self.start.constant_result else: index = not_a_constant return self.base.inferable_item_node(index) def may_be_none(self): base_type = self.base.type if base_type: if base_type.is_string: return False if base_type in (bytes_type, str_type, unicode_type, basestring_type, list_type, tuple_type): return False return ExprNode.may_be_none(self) def calculate_constant_result(self): if self.start is None: start = None else: start = self.start.constant_result if self.stop is None: stop = None else: stop = self.stop.constant_result self.constant_result = self.base.constant_result[start:stop] def compile_time_value(self, denv): base = self.base.compile_time_value(denv) if self.start is None: start = 0 else: start = self.start.compile_time_value(denv) if self.stop is None: stop = None else: stop = self.stop.compile_time_value(denv) try: return base[start:stop] except Exception as e: self.compile_time_value_error(e) def analyse_target_declaration(self, env): pass def analyse_target_types(self, env): node = self.analyse_types(env, getting=False) # when assigning, we must accept any Python type if node.type.is_pyobject: node.type = py_object_type return node def analyse_types(self, env, getting=True): self.base = self.base.analyse_types(env) if self.base.type.is_buffer or self.base.type.is_pythran_expr or self.base.type.is_memoryviewslice: none_node = NoneNode(self.pos) index = SliceNode(self.pos, start=self.start or none_node, stop=self.stop or none_node, step=none_node) index_node = IndexNode(self.pos, index, base=self.base) return 
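# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# MemoryCopyScalar above implements "memslice[...] = value" by looping over
# every element of the destination slice and storing the (coerced) scalar.
# A rough pure-Python analogue over nested lists, standing in for the nested
# loops produced by MemoryView.slice_iter(); the helper name is hypothetical.
def _sketch_fill_scalar(dst, value):
    for i, item in enumerate(dst):
        if isinstance(item, list):
            _sketch_fill_scalar(item, value)
        else:
            dst[i] = value

m = [[1, 2], [3, 4]]
_sketch_fill_scalar(m, 0.0)
assert m == [[0.0, 0.0], [0.0, 0.0]]
# ----------------------------------------------------------------------------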
index_node.analyse_base_and_index_types( env, getting=getting, setting=not getting, analyse_base=False) if self.start: self.start = self.start.analyse_types(env) if self.stop: self.stop = self.stop.analyse_types(env) if not env.directives['wraparound']: check_negative_indices(self.start, self.stop) base_type = self.base.type if base_type.is_array and not getting: # cannot assign directly to C array => try to assign by making a copy if not self.start and not self.stop: self.type = base_type else: self.type = PyrexTypes.CPtrType(base_type.base_type) elif base_type.is_string or base_type.is_cpp_string: self.type = default_str_type(env) elif base_type.is_pyunicode_ptr: self.type = unicode_type elif base_type.is_ptr: self.type = base_type elif base_type.is_array: # we need a ptr type here instead of an array type, as # array types can result in invalid type casts in the C # code self.type = PyrexTypes.CPtrType(base_type.base_type) else: self.base = self.base.coerce_to_pyobject(env) self.type = py_object_type if base_type.is_builtin_type: # slicing builtin types returns something of the same type self.type = base_type self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") if self.type is py_object_type: if (not self.start or self.start.is_literal) and \ (not self.stop or self.stop.is_literal): # cache the constant slice object, in case we need it none_node = NoneNode(self.pos) self.slice = SliceNode( self.pos, start=copy.deepcopy(self.start or none_node), stop=copy.deepcopy(self.stop or none_node), step=none_node ).analyse_types(env) else: c_int = PyrexTypes.c_py_ssize_t_type if self.start: self.start = self.start.coerce_to(c_int, env) if self.stop: self.stop = self.stop.coerce_to(c_int, env) self.is_temp = 1 return self nogil_check = Node.gil_error gil_message = "Slicing Python object" get_slice_utility_code = TempitaUtilityCode.load( "SliceObject", "ObjectHandling.c", context={'access': 'Get'}) set_slice_utility_code = TempitaUtilityCode.load( "SliceObject", "ObjectHandling.c", context={'access': 'Set'}) def coerce_to(self, dst_type, env): if ((self.base.type.is_string or self.base.type.is_cpp_string) and dst_type in (bytes_type, bytearray_type, str_type, unicode_type)): if (dst_type not in (bytes_type, bytearray_type) and not env.directives['c_string_encoding']): error(self.pos, "default encoding required for conversion from '%s' to '%s'" % (self.base.type, dst_type)) self.type = dst_type if dst_type.is_array and self.base.type.is_array: if not self.start and not self.stop: # redundant slice building, copy C arrays directly return self.base.coerce_to(dst_type, env) # else: check array size if possible return super(SliceIndexNode, self).coerce_to(dst_type, env) def generate_result_code(self, code): if not self.type.is_pyobject: error(self.pos, "Slicing is not currently supported for '%s'." 
% self.type) return base_result = self.base.result() result = self.result() start_code = self.start_code() stop_code = self.stop_code() if self.base.type.is_string: base_result = self.base.result() if self.base.type not in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_const_char_ptr_type): base_result = '((const char*)%s)' % base_result if self.type is bytearray_type: type_name = 'ByteArray' else: type_name = self.type.name.title() if self.stop is None: code.putln( "%s = __Pyx_Py%s_FromString(%s + %s); %s" % ( result, type_name, base_result, start_code, code.error_goto_if_null(result, self.pos))) else: code.putln( "%s = __Pyx_Py%s_FromStringAndSize(%s + %s, %s - %s); %s" % ( result, type_name, base_result, start_code, stop_code, start_code, code.error_goto_if_null(result, self.pos))) elif self.base.type.is_pyunicode_ptr: base_result = self.base.result() if self.base.type != PyrexTypes.c_py_unicode_ptr_type: base_result = '((const Py_UNICODE*)%s)' % base_result if self.stop is None: code.putln( "%s = __Pyx_PyUnicode_FromUnicode(%s + %s); %s" % ( result, base_result, start_code, code.error_goto_if_null(result, self.pos))) else: code.putln( "%s = __Pyx_PyUnicode_FromUnicodeAndLength(%s + %s, %s - %s); %s" % ( result, base_result, start_code, stop_code, start_code, code.error_goto_if_null(result, self.pos))) elif self.base.type is unicode_type: code.globalstate.use_utility_code( UtilityCode.load_cached("PyUnicode_Substring", "StringTools.c")) code.putln( "%s = __Pyx_PyUnicode_Substring(%s, %s, %s); %s" % ( result, base_result, start_code, stop_code, code.error_goto_if_null(result, self.pos))) elif self.type is py_object_type: code.globalstate.use_utility_code(self.get_slice_utility_code) (has_c_start, has_c_stop, c_start, c_stop, py_start, py_stop, py_slice) = self.get_slice_config() code.putln( "%s = __Pyx_PyObject_GetSlice(%s, %s, %s, %s, %s, %s, %d, %d, %d); %s" % ( result, self.base.py_result(), c_start, c_stop, py_start, py_stop, py_slice, has_c_start, has_c_stop, bool(code.globalstate.directives['wraparound']), code.error_goto_if_null(result, self.pos))) else: if self.base.type is list_type: code.globalstate.use_utility_code( TempitaUtilityCode.load_cached("SliceTupleAndList", "ObjectHandling.c")) cfunc = '__Pyx_PyList_GetSlice' elif self.base.type is tuple_type: code.globalstate.use_utility_code( TempitaUtilityCode.load_cached("SliceTupleAndList", "ObjectHandling.c")) cfunc = '__Pyx_PyTuple_GetSlice' else: cfunc = 'PySequence_GetSlice' code.putln( "%s = %s(%s, %s, %s); %s" % ( result, cfunc, self.base.py_result(), start_code, stop_code, code.error_goto_if_null(result, self.pos))) code.put_gotref(self.py_result()) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): self.generate_subexpr_evaluation_code(code) if self.type.is_pyobject: code.globalstate.use_utility_code(self.set_slice_utility_code) (has_c_start, has_c_stop, c_start, c_stop, py_start, py_stop, py_slice) = self.get_slice_config() code.put_error_if_neg(self.pos, "__Pyx_PyObject_SetSlice(%s, %s, %s, %s, %s, %s, %s, %d, %d, %d)" % ( self.base.py_result(), rhs.py_result(), c_start, c_stop, py_start, py_stop, py_slice, has_c_start, has_c_stop, bool(code.globalstate.directives['wraparound']))) else: start_offset = self.start_code() if self.start else '0' if rhs.type.is_array: array_length = rhs.type.size self.generate_slice_guard_code(code, array_length) else: array_length = '%s - %s' % (self.stop_code(), start_offset) 
code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c")) code.putln("memcpy(&(%s[%s]), %s, sizeof(%s[0]) * (%s));" % ( self.base.result(), start_offset, rhs.result(), self.base.result(), array_length )) self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) rhs.generate_disposal_code(code) rhs.free_temps(code) def generate_deletion_code(self, code, ignore_nonexisting=False): if not self.base.type.is_pyobject: error(self.pos, "Deleting slices is only supported for Python types, not '%s'." % self.type) return self.generate_subexpr_evaluation_code(code) code.globalstate.use_utility_code(self.set_slice_utility_code) (has_c_start, has_c_stop, c_start, c_stop, py_start, py_stop, py_slice) = self.get_slice_config() code.put_error_if_neg(self.pos, "__Pyx_PyObject_DelSlice(%s, %s, %s, %s, %s, %s, %d, %d, %d)" % ( self.base.py_result(), c_start, c_stop, py_start, py_stop, py_slice, has_c_start, has_c_stop, bool(code.globalstate.directives['wraparound']))) self.generate_subexpr_disposal_code(code) self.free_subexpr_temps(code) def get_slice_config(self): has_c_start, c_start, py_start = False, '0', 'NULL' if self.start: has_c_start = not self.start.type.is_pyobject if has_c_start: c_start = self.start.result() else: py_start = '&%s' % self.start.py_result() has_c_stop, c_stop, py_stop = False, '0', 'NULL' if self.stop: has_c_stop = not self.stop.type.is_pyobject if has_c_stop: c_stop = self.stop.result() else: py_stop = '&%s' % self.stop.py_result() py_slice = self.slice and '&%s' % self.slice.py_result() or 'NULL' return (has_c_start, has_c_stop, c_start, c_stop, py_start, py_stop, py_slice) def generate_slice_guard_code(self, code, target_size): if not self.base.type.is_array: return slice_size = self.base.type.size try: total_length = slice_size = int(slice_size) except ValueError: total_length = None start = stop = None if self.stop: stop = self.stop.result() try: stop = int(stop) if stop < 0: if total_length is None: slice_size = '%s + %d' % (slice_size, stop) else: slice_size += stop else: slice_size = stop stop = None except ValueError: pass if self.start: start = self.start.result() try: start = int(start) if start < 0: if total_length is None: start = '%s + %d' % (self.base.type.size, start) else: start += total_length if isinstance(slice_size, _py_int_types): slice_size -= start else: slice_size = '%s - (%s)' % (slice_size, start) start = None except ValueError: pass runtime_check = None compile_time_check = False try: int_target_size = int(target_size) except ValueError: int_target_size = None else: compile_time_check = isinstance(slice_size, _py_int_types) if compile_time_check and slice_size < 0: if int_target_size > 0: error(self.pos, "Assignment to empty slice.") elif compile_time_check and start is None and stop is None: # we know the exact slice length if int_target_size != slice_size: error(self.pos, "Assignment to slice of wrong length, expected %s, got %s" % ( slice_size, target_size)) elif start is not None: if stop is None: stop = slice_size runtime_check = "(%s)-(%s)" % (stop, start) elif stop is not None: runtime_check = stop else: runtime_check = slice_size if runtime_check: code.putln("if (unlikely((%s) != (%s))) {" % (runtime_check, target_size)) code.putln( 'PyErr_Format(PyExc_ValueError, "Assignment to slice of wrong length,' ' expected %%" CYTHON_FORMAT_SSIZE_T "d, got %%" CYTHON_FORMAT_SSIZE_T "d",' ' (Py_ssize_t)(%s), (Py_ssize_t)(%s));' % ( target_size, runtime_check)) code.putln(code.error_goto(self.pos)) 
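# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# generate_slice_guard_code() above checks that the number of elements covered
# by base[start:stop] matches the length of the C array being assigned.  A
# simplified pure-Python version of the same arithmetic for constant bounds
# (hypothetical helper name; the real code also emits a runtime check when
# the bounds are not compile-time constants):
def _sketch_check_slice_length(base_length, start, stop, target_size):
    start = 0 if start is None else (start + base_length if start < 0 else start)
    stop = base_length if stop is None else (stop + base_length if stop < 0 else stop)
    slice_size = stop - start
    if slice_size != target_size:
        raise ValueError(
            "Assignment to slice of wrong length, expected %d, got %d"
            % (slice_size, target_size))

_sketch_check_slice_length(10, 2, 5, 3)        # ok: three elements on each side
try:
    _sketch_check_slice_length(10, 2, 5, 4)    # length mismatch -> error
except ValueError:
    pass
# ----------------------------------------------------------------------------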
code.putln("}") def start_code(self): if self.start: return self.start.result() else: return "0" def stop_code(self): if self.stop: return self.stop.result() elif self.base.type.is_array: return self.base.type.size else: return "PY_SSIZE_T_MAX" def calculate_result_code(self): # self.result() is not used, but this method must exist return "" class SliceNode(ExprNode): # start:stop:step in subscript list # # start ExprNode # stop ExprNode # step ExprNode subexprs = ['start', 'stop', 'step'] is_slice = True type = slice_type is_temp = 1 def calculate_constant_result(self): self.constant_result = slice( self.start.constant_result, self.stop.constant_result, self.step.constant_result) def compile_time_value(self, denv): start = self.start.compile_time_value(denv) stop = self.stop.compile_time_value(denv) step = self.step.compile_time_value(denv) try: return slice(start, stop, step) except Exception as e: self.compile_time_value_error(e) def may_be_none(self): return False def analyse_types(self, env): start = self.start.analyse_types(env) stop = self.stop.analyse_types(env) step = self.step.analyse_types(env) self.start = start.coerce_to_pyobject(env) self.stop = stop.coerce_to_pyobject(env) self.step = step.coerce_to_pyobject(env) if self.start.is_literal and self.stop.is_literal and self.step.is_literal: self.is_literal = True self.is_temp = False return self gil_message = "Constructing Python slice object" def calculate_result_code(self): return self.result_code def generate_result_code(self, code): if self.is_literal: self.result_code = code.get_py_const(py_object_type, 'slice', cleanup_level=2) code = code.get_cached_constants_writer() code.mark_pos(self.pos) code.putln( "%s = PySlice_New(%s, %s, %s); %s" % ( self.result(), self.start.py_result(), self.stop.py_result(), self.step.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) if self.is_literal: code.put_giveref(self.py_result()) class SliceIntNode(SliceNode): # start:stop:step in subscript list # This is just a node to hold start,stop and step nodes that can be # converted to integers. This does not generate a slice python object. # # start ExprNode # stop ExprNode # step ExprNode is_temp = 0 def calculate_constant_result(self): self.constant_result = slice( self.start.constant_result, self.stop.constant_result, self.step.constant_result) def compile_time_value(self, denv): start = self.start.compile_time_value(denv) stop = self.stop.compile_time_value(denv) step = self.step.compile_time_value(denv) try: return slice(start, stop, step) except Exception as e: self.compile_time_value_error(e) def may_be_none(self): return False def analyse_types(self, env): self.start = self.start.analyse_types(env) self.stop = self.stop.analyse_types(env) self.step = self.step.analyse_types(env) if not self.start.is_none: self.start = self.start.coerce_to_integer(env) if not self.stop.is_none: self.stop = self.stop.coerce_to_integer(env) if not self.step.is_none: self.step = self.step.coerce_to_integer(env) if self.start.is_literal and self.stop.is_literal and self.step.is_literal: self.is_literal = True self.is_temp = False return self def calculate_result_code(self): pass def generate_result_code(self, code): for a in self.start,self.stop,self.step: if isinstance(a, CloneNode): a.arg.result() class CallNode(ExprNode): # allow overriding the default 'may_be_none' behaviour may_return_none = None def infer_type(self, env): # TODO(robertwb): Reduce redundancy with analyse_types. 
function = self.function func_type = function.infer_type(env) if isinstance(function, NewExprNode): # note: needs call to infer_type() above return PyrexTypes.CPtrType(function.class_type) if func_type is py_object_type: # function might have lied for safety => try to find better type entry = getattr(function, 'entry', None) if entry is not None: func_type = entry.type or func_type if func_type.is_ptr: func_type = func_type.base_type if func_type.is_cfunction: if getattr(self.function, 'entry', None) and hasattr(self, 'args'): alternatives = self.function.entry.all_alternatives() arg_types = [arg.infer_type(env) for arg in self.args] func_entry = PyrexTypes.best_match(arg_types, alternatives) if func_entry: func_type = func_entry.type if func_type.is_ptr: func_type = func_type.base_type return func_type.return_type return func_type.return_type elif func_type is type_type: if function.is_name and function.entry and function.entry.type: result_type = function.entry.type if result_type.is_extension_type: return result_type elif result_type.is_builtin_type: if function.entry.name == 'float': return PyrexTypes.c_double_type elif function.entry.name in Builtin.types_that_construct_their_instance: return result_type return py_object_type def type_dependencies(self, env): # TODO: Update when Danilo's C++ code merged in to handle the # the case of function overloading. return self.function.type_dependencies(env) def is_simple(self): # C function calls could be considered simple, but they may # have side-effects that may hit when multiple operations must # be effected in order, e.g. when constructing the argument # sequence for a function call or comparing values. return False def may_be_none(self): if self.may_return_none is not None: return self.may_return_none func_type = self.function.type if func_type is type_type and self.function.is_name: entry = self.function.entry if entry.type.is_extension_type: return False if (entry.type.is_builtin_type and entry.name in Builtin.types_that_construct_their_instance): return False return ExprNode.may_be_none(self) def analyse_as_type_constructor(self, env): type = self.function.analyse_as_type(env) if type and type.is_struct_or_union: args, kwds = self.explicit_args_kwds() items = [] for arg, member in zip(args, type.scope.var_entries): items.append(DictItemNode(pos=arg.pos, key=StringNode(pos=arg.pos, value=member.name), value=arg)) if kwds: items += kwds.key_value_pairs self.key_value_pairs = items self.__class__ = DictNode self.analyse_types(env) # FIXME self.coerce_to(type, env) return True elif type and type.is_cpp_class: self.args = [ arg.analyse_types(env) for arg in self.args ] constructor = type.scope.lookup("") if not constructor: error(self.function.pos, "no constructor found for C++ type '%s'" % self.function.name) self.type = error_type return self self.function = RawCNameExprNode(self.function.pos, constructor.type) self.function.entry = constructor self.function.set_cname(type.empty_declaration_code()) self.analyse_c_function_call(env) self.type = type return True def is_lvalue(self): return self.type.is_reference def nogil_check(self, env): func_type = self.function_type() if func_type.is_pyobject: self.gil_error() elif not getattr(func_type, 'nogil', False): self.gil_error() gil_message = "Calling gil-requiring function" class SimpleCallNode(CallNode): # Function call without keyword, * or ** args. 
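# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# analyse_as_type_constructor() above turns a call like MyStruct(1, y=2) into
# a dict literal keyed by field name, which is then coerced to the struct
# type.  A simplified pure-Python sketch of that mapping (hypothetical helper
# name; error handling is omitted):
def _sketch_struct_from_call(field_names, *args, **kwargs):
    values = dict(zip(field_names, args))    # positional args, in field order
    values.update(kwargs)                    # keyword args by field name
    return values

assert _sketch_struct_from_call(('x', 'y'), 1, y=2) == {'x': 1, 'y': 2}
# ----------------------------------------------------------------------------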
# # function ExprNode # args [ExprNode] # arg_tuple ExprNode or None used internally # self ExprNode or None used internally # coerced_self ExprNode or None used internally # wrapper_call bool used internally # has_optional_args bool used internally # nogil bool used internally subexprs = ['self', 'coerced_self', 'function', 'args', 'arg_tuple'] self = None coerced_self = None arg_tuple = None wrapper_call = False has_optional_args = False nogil = False analysed = False def compile_time_value(self, denv): function = self.function.compile_time_value(denv) args = [arg.compile_time_value(denv) for arg in self.args] try: return function(*args) except Exception as e: self.compile_time_value_error(e) def analyse_as_type(self, env): attr = self.function.as_cython_attribute() if attr == 'pointer': if len(self.args) != 1: error(self.args.pos, "only one type allowed.") else: type = self.args[0].analyse_as_type(env) if not type: error(self.args[0].pos, "Unknown type") else: return PyrexTypes.CPtrType(type) def explicit_args_kwds(self): return self.args, None def analyse_types(self, env): if self.analyse_as_type_constructor(env): return self if self.analysed: return self self.analysed = True self.function.is_called = 1 self.function = self.function.analyse_types(env) function = self.function if function.is_attribute and function.entry and function.entry.is_cmethod: # Take ownership of the object from which the attribute # was obtained, because we need to pass it as 'self'. self.self = function.obj function.obj = CloneNode(self.self) func_type = self.function_type() self.is_numpy_call_with_exprs = False if has_np_pythran(env) and self.function.is_numpy_attribute: has_pythran_args = True self.arg_tuple = TupleNode(self.pos, args = self.args) self.arg_tuple = self.arg_tuple.analyse_types(env) for arg in self.arg_tuple.args: has_pythran_args &= is_pythran_supported_node_or_none(arg) self.is_numpy_call_with_exprs = bool(has_pythran_args) if self.is_numpy_call_with_exprs: self.args = None env.add_include_file("pythonic/numpy/%s.hpp" % self.function.attribute) self.type = PythranExpr(pythran_func_type(self.function.attribute, self.arg_tuple.args)) self.may_return_none = True self.is_temp = 1 elif func_type.is_pyobject: self.arg_tuple = TupleNode(self.pos, args = self.args) self.arg_tuple = self.arg_tuple.analyse_types(env).coerce_to_pyobject(env) self.args = None if func_type is Builtin.type_type and function.is_name and \ function.entry and \ function.entry.is_builtin and \ function.entry.name in Builtin.types_that_construct_their_instance: # calling a builtin type that returns a specific object type if function.entry.name == 'float': # the following will come true later on in a transform self.type = PyrexTypes.c_double_type self.result_ctype = PyrexTypes.c_double_type else: self.type = Builtin.builtin_types[function.entry.name] self.result_ctype = py_object_type self.may_return_none = False elif function.is_name and function.type_entry: # We are calling an extension type constructor. As # long as we do not support __new__(), the result type # is clear self.type = function.type_entry.type self.result_ctype = py_object_type self.may_return_none = False else: self.type = py_object_type self.is_temp = 1 else: self.args = [ arg.analyse_types(env) for arg in self.args ] self.analyse_c_function_call(env) if func_type.exception_check == '+': self.is_temp = True return self def function_type(self): # Return the type of the function being called, coercing a function # pointer to a function if necessary. 
If the function has fused # arguments, return the specific type. func_type = self.function.type if func_type.is_ptr: func_type = func_type.base_type return func_type def analyse_c_function_call(self, env): func_type = self.function.type if func_type is error_type: self.type = error_type return if func_type.is_cfunction and func_type.is_static_method: if self.self and self.self.type.is_extension_type: # To support this we'd need to pass self to determine whether # it was overloaded in Python space (possibly via a Cython # superclass turning a cdef method into a cpdef one). error(self.pos, "Cannot call a static method on an instance variable.") args = self.args elif self.self: args = [self.self] + self.args else: args = self.args if func_type.is_cpp_class: overloaded_entry = self.function.type.scope.lookup("operator()") if overloaded_entry is None: self.type = PyrexTypes.error_type self.result_code = "" return elif hasattr(self.function, 'entry'): overloaded_entry = self.function.entry elif self.function.is_subscript and self.function.is_fused_index: overloaded_entry = self.function.type.entry else: overloaded_entry = None if overloaded_entry: if self.function.type.is_fused: functypes = self.function.type.get_all_specialized_function_types() alternatives = [f.entry for f in functypes] else: alternatives = overloaded_entry.all_alternatives() entry = PyrexTypes.best_match( [arg.type for arg in args], alternatives, self.pos, env, args) if not entry: self.type = PyrexTypes.error_type self.result_code = "" return entry.used = True if not func_type.is_cpp_class: self.function.entry = entry self.function.type = entry.type func_type = self.function_type() else: entry = None func_type = self.function_type() if not func_type.is_cfunction: error(self.pos, "Calling non-function type '%s'" % func_type) self.type = PyrexTypes.error_type self.result_code = "" return # Check no. 
of args max_nargs = len(func_type.args) expected_nargs = max_nargs - func_type.optional_arg_count actual_nargs = len(args) if func_type.optional_arg_count and expected_nargs != actual_nargs: self.has_optional_args = 1 self.is_temp = 1 # check 'self' argument if entry and entry.is_cmethod and func_type.args and not func_type.is_static_method: formal_arg = func_type.args[0] arg = args[0] if formal_arg.not_none: if self.self: self.self = self.self.as_none_safe_node( "'NoneType' object has no attribute '%s'", error='PyExc_AttributeError', format_args=[entry.name]) else: # unbound method arg = arg.as_none_safe_node( "descriptor '%s' requires a '%s' object but received a 'NoneType'", format_args=[entry.name, formal_arg.type.name]) if self.self: if formal_arg.accept_builtin_subtypes: arg = CMethodSelfCloneNode(self.self) else: arg = CloneNode(self.self) arg = self.coerced_self = arg.coerce_to(formal_arg.type, env) elif formal_arg.type.is_builtin_type: # special case: unbound methods of builtins accept subtypes arg = arg.coerce_to(formal_arg.type, env) if arg.type.is_builtin_type and isinstance(arg, PyTypeTestNode): arg.exact_builtin_type = False args[0] = arg # Coerce arguments some_args_in_temps = False for i in range(min(max_nargs, actual_nargs)): formal_arg = func_type.args[i] formal_type = formal_arg.type if formal_type.is_const: formal_type = formal_type.const_base_type arg = args[i].coerce_to(formal_type, env) if formal_arg.not_none: # C methods must do the None checks at *call* time arg = arg.as_none_safe_node( "cannot pass None into a C function argument that is declared 'not None'") if arg.is_temp: if i > 0: # first argument in temp doesn't impact subsequent arguments some_args_in_temps = True elif arg.type.is_pyobject and not env.nogil: if i == 0 and self.self is not None: # a method's cloned "self" argument is ok pass elif arg.nonlocally_immutable(): # plain local variables are ok pass else: # we do not safely own the argument's reference, # but we must make sure it cannot be collected # before we return from the function, so we create # an owned temp reference to it if i > 0: # first argument doesn't matter some_args_in_temps = True arg = arg.coerce_to_temp(env) args[i] = arg # handle additional varargs parameters for i in range(max_nargs, actual_nargs): arg = args[i] if arg.type.is_pyobject: if arg.type is str_type: arg_ctype = PyrexTypes.c_char_ptr_type else: arg_ctype = arg.type.default_coerced_ctype() if arg_ctype is None: error(self.args[i].pos, "Python object cannot be passed as a varargs parameter") else: args[i] = arg = arg.coerce_to(arg_ctype, env) if arg.is_temp and i > 0: some_args_in_temps = True if some_args_in_temps: # if some args are temps and others are not, they may get # constructed in the wrong order (temps first) => make # sure they are either all temps or all not temps (except # for the last argument, which is evaluated last in any # case) for i in range(actual_nargs-1): if i == 0 and self.self is not None: continue # self is ok arg = args[i] if arg.nonlocally_immutable(): # locals, C functions, unassignable types are safe. pass elif arg.type.is_cpp_class: # Assignment has side effects, avoid. 
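# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# The coercion loop above wraps arguments of parameters declared 'not None'
# in a check that runs at call time.  A pure-Python sketch of the check the
# generated code performs (hypothetical helper name, message taken from the
# source above):
def _sketch_check_not_none(value):
    if value is None:
        raise TypeError(
            "cannot pass None into a C function argument that is declared 'not None'")
    return value

assert _sketch_check_not_none([1, 2]) == [1, 2]
try:
    _sketch_check_not_none(None)
except TypeError:
    pass
# ----------------------------------------------------------------------------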
pass elif env.nogil and arg.type.is_pyobject: # can't copy a Python reference into a temp in nogil # env (this is safe: a construction would fail in # nogil anyway) pass else: #self.args[i] = arg.coerce_to_temp(env) # instead: issue a warning if i > 0 or i == 1 and self.self is not None: # skip first arg warning(arg.pos, "Argument evaluation order in C function call is undefined and may not be as expected", 0) break self.args[:] = args # Calc result type and code fragment if isinstance(self.function, NewExprNode): self.type = PyrexTypes.CPtrType(self.function.class_type) else: self.type = func_type.return_type if self.function.is_name or self.function.is_attribute: func_entry = self.function.entry if func_entry and (func_entry.utility_code or func_entry.utility_code_definition): self.is_temp = 1 # currently doesn't work for self.calculate_result_code() if self.type.is_pyobject: self.result_ctype = py_object_type self.is_temp = 1 elif func_type.exception_value is not None or func_type.exception_check: self.is_temp = 1 elif self.type.is_memoryviewslice: self.is_temp = 1 # func_type.exception_check = True if self.is_temp and self.type.is_reference: self.type = PyrexTypes.CFakeReferenceType(self.type.ref_base_type) # Called in 'nogil' context? self.nogil = env.nogil if (self.nogil and func_type.exception_check and func_type.exception_check != '+'): env.use_utility_code(pyerr_occurred_withgil_utility_code) # C++ exception handler if func_type.exception_check == '+': if func_type.exception_value is None: env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) def calculate_result_code(self): return self.c_call_code() def c_call_code(self): func_type = self.function_type() if self.type is PyrexTypes.error_type or not func_type.is_cfunction: return "" formal_args = func_type.args arg_list_code = [] args = list(zip(formal_args, self.args)) max_nargs = len(func_type.args) expected_nargs = max_nargs - func_type.optional_arg_count actual_nargs = len(self.args) for formal_arg, actual_arg in args[:expected_nargs]: arg_code = actual_arg.result_as(formal_arg.type) arg_list_code.append(arg_code) if func_type.is_overridable: arg_list_code.append(str(int(self.wrapper_call or self.function.entry.is_unbound_cmethod))) if func_type.optional_arg_count: if expected_nargs == actual_nargs: optional_args = 'NULL' else: optional_args = "&%s" % self.opt_arg_struct arg_list_code.append(optional_args) for actual_arg in self.args[len(formal_args):]: arg_list_code.append(actual_arg.result()) result = "%s(%s)" % (self.function.result(), ', '.join(arg_list_code)) return result def is_c_result_required(self): func_type = self.function_type() if not func_type.exception_value or func_type.exception_check == '+': return False # skip allocation of unused result temp return True def generate_result_code(self, code): func_type = self.function_type() if self.function.is_name or self.function.is_attribute: code.globalstate.use_entry_utility_code(self.function.entry) if func_type.is_pyobject: if func_type is not type_type and not self.arg_tuple.args and self.arg_tuple.is_literal: code.globalstate.use_utility_code(UtilityCode.load_cached( "PyObjectCallNoArg", "ObjectHandling.c")) code.putln( "%s = __Pyx_PyObject_CallNoArg(%s); %s" % ( self.result(), self.function.py_result(), code.error_goto_if_null(self.result(), self.pos))) else: arg_code = self.arg_tuple.py_result() code.globalstate.use_utility_code(UtilityCode.load_cached( "PyObjectCall", "ObjectHandling.c")) code.putln( "%s = 
__Pyx_PyObject_Call(%s, %s, NULL); %s" % ( self.result(), self.function.py_result(), arg_code, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) elif func_type.is_cfunction: if self.has_optional_args: actual_nargs = len(self.args) expected_nargs = len(func_type.args) - func_type.optional_arg_count self.opt_arg_struct = code.funcstate.allocate_temp( func_type.op_arg_struct.base_type, manage_ref=True) code.putln("%s.%s = %s;" % ( self.opt_arg_struct, Naming.pyrex_prefix + "n", len(self.args) - expected_nargs)) args = list(zip(func_type.args, self.args)) for formal_arg, actual_arg in args[expected_nargs:actual_nargs]: code.putln("%s.%s = %s;" % ( self.opt_arg_struct, func_type.opt_arg_cname(formal_arg.name), actual_arg.result_as(formal_arg.type))) exc_checks = [] if self.type.is_pyobject and self.is_temp: exc_checks.append("!%s" % self.result()) elif self.type.is_memoryviewslice: assert self.is_temp exc_checks.append(self.type.error_condition(self.result())) else: exc_val = func_type.exception_value exc_check = func_type.exception_check if exc_val is not None: exc_checks.append("%s == %s" % (self.result(), exc_val)) if exc_check: if self.nogil: exc_checks.append("__Pyx_ErrOccurredWithGIL()") else: exc_checks.append("PyErr_Occurred()") if self.is_temp or exc_checks: rhs = self.c_call_code() if self.result(): lhs = "%s = " % self.result() if self.is_temp and self.type.is_pyobject: #return_type = self.type # func_type.return_type #print "SimpleCallNode.generate_result_code: casting", rhs, \ # "from", return_type, "to pyobject" ### rhs = typecast(py_object_type, self.type, rhs) else: lhs = "" if func_type.exception_check == '+': translate_cpp_exception(code, self.pos, '%s%s;' % (lhs, rhs), func_type.exception_value, self.nogil) else: if exc_checks: goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos) else: goto_error = "" code.putln("%s%s; %s" % (lhs, rhs, goto_error)) if self.type.is_pyobject and self.result(): code.put_gotref(self.py_result()) if self.has_optional_args: code.funcstate.release_temp(self.opt_arg_struct) @classmethod def from_node(cls, node, **kwargs): ret = super(SimpleCallNode, cls).from_node(node, **kwargs) ret.is_numpy_call_with_exprs = node.is_numpy_call_with_exprs return ret class PyMethodCallNode(SimpleCallNode): # Specialised call to a (potential) PyMethodObject with non-constant argument tuple. # Allows the self argument to be injected directly instead of repacking a tuple for it. 
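# ----------------------------------------------------------------------------
# [Editor's illustrative aside -- not part of the original ExprNodes.py.]
# PyMethodCallNode (declared just above) relies on the fact that calling a
# bound method is equivalent to calling the underlying function with the
# bound instance prepended to the arguments -- this is what the generated
# code extracts via PyMethod_GET_FUNCTION/PyMethod_GET_SELF.  A pure-Python
# demonstration with a hypothetical class name:
class _SketchGreeter(object):
    def greet(self, name):
        return "hello " + name

_bound = _SketchGreeter().greet
# __func__/__self__ are the Python-level views of the unpacked method.
assert _bound.__func__(_bound.__self__, "world") == _bound("world")
# ----------------------------------------------------------------------------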
# # function ExprNode the function/method object to call # arg_tuple TupleNode the arguments for the args tuple subexprs = ['function', 'arg_tuple'] is_temp = True def generate_evaluation_code(self, code): code.mark_pos(self.pos) self.allocate_temp_result(code) self.function.generate_evaluation_code(code) assert self.arg_tuple.mult_factor is None args = self.arg_tuple.args for arg in args: arg.generate_evaluation_code(code) if self.is_numpy_call_with_exprs: code.putln("// function evaluation code for numpy function") code.putln("__Pyx_call_destructor(%s);" % self.result()) code.putln("new (&%s) decltype(%s){pythonic::numpy::functor::%s{}(%s)};" % ( self.result(), self.result(), self.function.attribute, ", ".join(a.pythran_result() for a in self.arg_tuple.args))) return # make sure function is in temp so that we can replace the reference below if it's a method reuse_function_temp = self.function.is_temp if reuse_function_temp: function = self.function.result() else: function = code.funcstate.allocate_temp(py_object_type, manage_ref=True) self.function.make_owned_reference(code) code.put("%s = %s; " % (function, self.function.py_result())) self.function.generate_disposal_code(code) self.function.free_temps(code) self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True) code.putln("%s = NULL;" % self_arg) arg_offset_cname = None if len(args) > 1: arg_offset_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) code.putln("%s = 0;" % arg_offset_cname) def attribute_is_likely_method(attr): obj = attr.obj if obj.is_name and obj.entry.is_pyglobal: return False # more likely to be a function return True if self.function.is_attribute: likely_method = 'likely' if attribute_is_likely_method(self.function) else 'unlikely' elif self.function.is_name and self.function.cf_state: # not an attribute itself, but might have been assigned from one (e.g. 
bound method) for assignment in self.function.cf_state: value = assignment.rhs if value and value.is_attribute and value.obj.type.is_pyobject: if attribute_is_likely_method(value): likely_method = 'likely' break else: likely_method = 'unlikely' else: likely_method = 'unlikely' code.putln("if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {" % (likely_method, function)) code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function)) # the following is always true in Py3 (kept only for safety), # but is false for unbound methods in Py2 code.putln("if (likely(%s)) {" % self_arg) code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function) code.put_incref(self_arg, py_object_type) code.put_incref("function", py_object_type) # free method object as early to possible to enable reuse from CPython's freelist code.put_decref_set(function, "function") if len(args) > 1: code.putln("%s = 1;" % arg_offset_cname) code.putln("}") code.putln("}") if not args: # fastest special case: try to avoid tuple creation code.putln("if (%s) {" % self_arg) code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")) code.putln( "%s = __Pyx_PyObject_CallOneArg(%s, %s); %s" % ( self.result(), function, self_arg, code.error_goto_if_null(self.result(), self.pos))) code.put_decref_clear(self_arg, py_object_type) code.funcstate.release_temp(self_arg) code.putln("} else {") code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c")) code.putln( "%s = __Pyx_PyObject_CallNoArg(%s); %s" % ( self.result(), function, code.error_goto_if_null(self.result(), self.pos))) code.putln("}") code.put_gotref(self.py_result()) else: if len(args) == 1: code.putln("if (!%s) {" % self_arg) code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c")) arg = args[0] code.putln( "%s = __Pyx_PyObject_CallOneArg(%s, %s); %s" % ( self.result(), function, arg.py_result(), code.error_goto_if_null(self.result(), self.pos))) arg.generate_disposal_code(code) code.put_gotref(self.py_result()) code.putln("} else {") arg_offset = 1 else: arg_offset = arg_offset_cname code.globalstate.use_utility_code( UtilityCode.load_cached("PyFunctionFastCall", "ObjectHandling.c")) code.globalstate.use_utility_code( UtilityCode.load_cached("PyCFunctionFastCall", "ObjectHandling.c")) for test_func, call_prefix in [('PyFunction_Check', 'Py'), ('__Pyx_PyFastCFunction_Check', 'PyC')]: code.putln("#if CYTHON_FAST_%sCALL" % call_prefix.upper()) code.putln("if (%s(%s)) {" % (test_func, function)) code.putln("PyObject *%s[%d] = {%s, %s};" % ( Naming.quick_temp_cname, len(args)+1, self_arg, ', '.join(arg.py_result() for arg in args))) code.putln("%s = __Pyx_%sFunction_FastCall(%s, %s+1-%s, %d+%s); %s" % ( self.result(), call_prefix, function, Naming.quick_temp_cname, arg_offset, len(args), arg_offset, code.error_goto_if_null(self.result(), self.pos))) code.put_xdecref_clear(self_arg, py_object_type) code.put_gotref(self.py_result()) for arg in args: arg.generate_disposal_code(code) code.putln("} else") code.putln("#endif") code.putln("{") args_tuple = code.funcstate.allocate_temp(py_object_type, manage_ref=True) code.putln("%s = PyTuple_New(%d+%s); %s" % ( args_tuple, len(args), arg_offset, code.error_goto_if_null(args_tuple, self.pos))) code.put_gotref(args_tuple) if len(args) > 1: code.putln("if (%s) {" % self_arg) code.putln("__Pyx_GIVEREF(%s); PyTuple_SET_ITEM(%s, 0, %s); %s = NULL;" % ( self_arg, args_tuple, self_arg, self_arg)) # stealing 
owned ref in this case code.funcstate.release_temp(self_arg) if len(args) > 1: code.putln("}") for i, arg in enumerate(args): arg.make_owned_reference(code) code.put_giveref(arg.py_result()) code.putln("PyTuple_SET_ITEM(%s, %d+%s, %s);" % ( args_tuple, i, arg_offset, arg.py_result())) if len(args) > 1: code.funcstate.release_temp(arg_offset_cname) for arg in args: arg.generate_post_assignment_code(code) arg.free_temps(code) code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")) code.putln( "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % ( self.result(), function, args_tuple, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) code.put_decref_clear(args_tuple, py_object_type) code.funcstate.release_temp(args_tuple) if len(args) == 1: code.putln("}") code.putln("}") # !CYTHON_FAST_PYCALL if reuse_function_temp: self.function.generate_disposal_code(code) self.function.free_temps(code) else: code.put_decref_clear(function, py_object_type) code.funcstate.release_temp(function) class InlinedDefNodeCallNode(CallNode): # Inline call to defnode # # function PyCFunctionNode # function_name NameNode # args [ExprNode] subexprs = ['args', 'function_name'] is_temp = 1 type = py_object_type function = None function_name = None def can_be_inlined(self): func_type= self.function.def_node if func_type.star_arg or func_type.starstar_arg: return False if len(func_type.args) != len(self.args): return False if func_type.num_kwonly_args: return False # actually wrong number of arguments return True def analyse_types(self, env): self.function_name = self.function_name.analyse_types(env) self.args = [ arg.analyse_types(env) for arg in self.args ] func_type = self.function.def_node actual_nargs = len(self.args) # Coerce arguments some_args_in_temps = False for i in range(actual_nargs): formal_type = func_type.args[i].type arg = self.args[i].coerce_to(formal_type, env) if arg.is_temp: if i > 0: # first argument in temp doesn't impact subsequent arguments some_args_in_temps = True elif arg.type.is_pyobject and not env.nogil: if arg.nonlocally_immutable(): # plain local variables are ok pass else: # we do not safely own the argument's reference, # but we must make sure it cannot be collected # before we return from the function, so we create # an owned temp reference to it if i > 0: # first argument doesn't matter some_args_in_temps = True arg = arg.coerce_to_temp(env) self.args[i] = arg if some_args_in_temps: # if some args are temps and others are not, they may get # constructed in the wrong order (temps first) => make # sure they are either all temps or all not temps (except # for the last argument, which is evaluated last in any # case) for i in range(actual_nargs-1): arg = self.args[i] if arg.nonlocally_immutable(): # locals, C functions, unassignable types are safe. pass elif arg.type.is_cpp_class: # Assignment has side effects, avoid. 
pass elif env.nogil and arg.type.is_pyobject: # can't copy a Python reference into a temp in nogil # env (this is safe: a construction would fail in # nogil anyway) pass else: #self.args[i] = arg.coerce_to_temp(env) # instead: issue a warning if i > 0: warning(arg.pos, "Argument evaluation order in C function call is undefined and may not be as expected", 0) break return self def generate_result_code(self, code): arg_code = [self.function_name.py_result()] func_type = self.function.def_node for arg, proto_arg in zip(self.args, func_type.args): if arg.type.is_pyobject: arg_code.append(arg.result_as(proto_arg.type)) else: arg_code.append(arg.result()) arg_code = ', '.join(arg_code) code.putln( "%s = %s(%s); %s" % ( self.result(), self.function.def_node.entry.pyfunc_cname, arg_code, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class PythonCapiFunctionNode(ExprNode): subexprs = [] def __init__(self, pos, py_name, cname, func_type, utility_code = None): ExprNode.__init__(self, pos, name=py_name, cname=cname, type=func_type, utility_code=utility_code) def analyse_types(self, env): return self def generate_result_code(self, code): if self.utility_code: code.globalstate.use_utility_code(self.utility_code) def calculate_result_code(self): return self.cname class PythonCapiCallNode(SimpleCallNode): # Python C-API Function call (only created in transforms) # By default, we assume that the call never returns None, as this # is true for most C-API functions in CPython. If this does not # apply to a call, set the following to True (or None to inherit # the default behaviour). may_return_none = False def __init__(self, pos, function_name, func_type, utility_code = None, py_name=None, **kwargs): self.type = func_type.return_type self.result_ctype = self.type self.function = PythonCapiFunctionNode( pos, py_name, function_name, func_type, utility_code = utility_code) # call this last so that we can override the constructed # attributes above with explicit keyword arguments if required SimpleCallNode.__init__(self, pos, **kwargs) class GeneralCallNode(CallNode): # General Python function call, including keyword, # * and ** arguments. 
# # function ExprNode # positional_args ExprNode Tuple of positional arguments # keyword_args ExprNode or None Dict of keyword arguments type = py_object_type subexprs = ['function', 'positional_args', 'keyword_args'] nogil_check = Node.gil_error def compile_time_value(self, denv): function = self.function.compile_time_value(denv) positional_args = self.positional_args.compile_time_value(denv) keyword_args = self.keyword_args.compile_time_value(denv) try: return function(*positional_args, **keyword_args) except Exception as e: self.compile_time_value_error(e) def explicit_args_kwds(self): if (self.keyword_args and not self.keyword_args.is_dict_literal or not self.positional_args.is_sequence_constructor): raise CompileError(self.pos, 'Compile-time keyword arguments must be explicit.') return self.positional_args.args, self.keyword_args def analyse_types(self, env): if self.analyse_as_type_constructor(env): return self self.function = self.function.analyse_types(env) if not self.function.type.is_pyobject: if self.function.type.is_error: self.type = error_type return self if hasattr(self.function, 'entry'): node = self.map_to_simple_call_node() if node is not None and node is not self: return node.analyse_types(env) elif self.function.entry.as_variable: self.function = self.function.coerce_to_pyobject(env) elif node is self: error(self.pos, "Non-trivial keyword arguments and starred " "arguments not allowed in cdef functions.") else: # error was already reported pass else: self.function = self.function.coerce_to_pyobject(env) if self.keyword_args: self.keyword_args = self.keyword_args.analyse_types(env) self.positional_args = self.positional_args.analyse_types(env) self.positional_args = \ self.positional_args.coerce_to_pyobject(env) function = self.function if function.is_name and function.type_entry: # We are calling an extension type constructor. As long # as we do not support __new__(), the result type is clear self.type = function.type_entry.type self.result_ctype = py_object_type self.may_return_none = False else: self.type = py_object_type self.is_temp = 1 return self def map_to_simple_call_node(self): """ Tries to map keyword arguments to declared positional arguments. Returns self to try a Python call, None to report an error or a SimpleCallNode if the mapping succeeds. 
""" if not isinstance(self.positional_args, TupleNode): # has starred argument return self if not self.keyword_args.is_dict_literal: # keywords come from arbitrary expression => nothing to do here return self function = self.function entry = getattr(function, 'entry', None) if not entry: return self function_type = entry.type if function_type.is_ptr: function_type = function_type.base_type if not function_type.is_cfunction: return self pos_args = self.positional_args.args kwargs = self.keyword_args declared_args = function_type.args if entry.is_cmethod: declared_args = declared_args[1:] # skip 'self' if len(pos_args) > len(declared_args): error(self.pos, "function call got too many positional arguments, " "expected %d, got %s" % (len(declared_args), len(pos_args))) return None matched_args = set([ arg.name for arg in declared_args[:len(pos_args)] if arg.name ]) unmatched_args = declared_args[len(pos_args):] matched_kwargs_count = 0 args = list(pos_args) # check for duplicate keywords seen = set(matched_args) has_errors = False for arg in kwargs.key_value_pairs: name = arg.key.value if name in seen: error(arg.pos, "argument '%s' passed twice" % name) has_errors = True # continue to report more errors if there are any seen.add(name) # match keywords that are passed in order for decl_arg, arg in zip(unmatched_args, kwargs.key_value_pairs): name = arg.key.value if decl_arg.name == name: matched_args.add(name) matched_kwargs_count += 1 args.append(arg.value) else: break # match keyword arguments that are passed out-of-order, but keep # the evaluation of non-simple arguments in order by moving them # into temps from .UtilNodes import EvalWithTempExprNode, LetRefNode temps = [] if len(kwargs.key_value_pairs) > matched_kwargs_count: unmatched_args = declared_args[len(args):] keywords = dict([ (arg.key.value, (i+len(pos_args), arg)) for i, arg in enumerate(kwargs.key_value_pairs) ]) first_missing_keyword = None for decl_arg in unmatched_args: name = decl_arg.name if name not in keywords: # missing keyword argument => either done or error if not first_missing_keyword: first_missing_keyword = name continue elif first_missing_keyword: if entry.as_variable: # we might be able to convert the function to a Python # object, which then allows full calling semantics # with default values in gaps - currently, we only # support optional arguments at the end return self # wasn't the last keyword => gaps are not supported error(self.pos, "C function call is missing " "argument '%s'" % first_missing_keyword) return None pos, arg = keywords[name] matched_args.add(name) matched_kwargs_count += 1 if arg.value.is_simple(): args.append(arg.value) else: temp = LetRefNode(arg.value) assert temp.is_simple() args.append(temp) temps.append((pos, temp)) if temps: # may have to move preceding non-simple args into temps final_args = [] new_temps = [] first_temp_arg = temps[0][-1] for arg_value in args: if arg_value is first_temp_arg: break # done if arg_value.is_simple(): final_args.append(arg_value) else: temp = LetRefNode(arg_value) new_temps.append(temp) final_args.append(temp) if new_temps: args = final_args temps = new_temps + [ arg for i,arg in sorted(temps) ] # check for unexpected keywords for arg in kwargs.key_value_pairs: name = arg.key.value if name not in matched_args: has_errors = True error(arg.pos, "C function got unexpected keyword argument '%s'" % name) if has_errors: # error was reported already return None # all keywords mapped to positional arguments # if we are missing arguments, SimpleCallNode will 
figure it out node = SimpleCallNode(self.pos, function=function, args=args) for temp in temps[::-1]: node = EvalWithTempExprNode(temp, node) return node def generate_result_code(self, code): if self.type.is_error: return if self.keyword_args: kwargs = self.keyword_args.py_result() else: kwargs = 'NULL' code.globalstate.use_utility_code(UtilityCode.load_cached( "PyObjectCall", "ObjectHandling.c")) code.putln( "%s = __Pyx_PyObject_Call(%s, %s, %s); %s" % ( self.result(), self.function.py_result(), self.positional_args.py_result(), kwargs, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class AsTupleNode(ExprNode): # Convert argument to tuple. Used for normalising # the * argument of a function call. # # arg ExprNode subexprs = ['arg'] def calculate_constant_result(self): self.constant_result = tuple(self.arg.constant_result) def compile_time_value(self, denv): arg = self.arg.compile_time_value(denv) try: return tuple(arg) except Exception as e: self.compile_time_value_error(e) def analyse_types(self, env): self.arg = self.arg.analyse_types(env).coerce_to_pyobject(env) if self.arg.type is tuple_type: return self.arg.as_none_safe_node("'NoneType' object is not iterable") self.type = tuple_type self.is_temp = 1 return self def may_be_none(self): return False nogil_check = Node.gil_error gil_message = "Constructing Python tuple" def generate_result_code(self, code): code.putln( "%s = PySequence_Tuple(%s); %s" % ( self.result(), self.arg.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class MergedDictNode(ExprNode): # Helper class for keyword arguments and other merged dicts. # # keyword_args [DictNode or other ExprNode] subexprs = ['keyword_args'] is_temp = 1 type = dict_type reject_duplicates = True def calculate_constant_result(self): result = {} reject_duplicates = self.reject_duplicates for item in self.keyword_args: if item.is_dict_literal: # process items in order items = ((key.constant_result, value.constant_result) for key, value in item.key_value_pairs) else: items = item.constant_result.iteritems() for key, value in items: if reject_duplicates and key in result: raise ValueError("duplicate keyword argument found: %s" % key) result[key] = value self.constant_result = result def compile_time_value(self, denv): result = {} reject_duplicates = self.reject_duplicates for item in self.keyword_args: if item.is_dict_literal: # process items in order items = [(key.compile_time_value(denv), value.compile_time_value(denv)) for key, value in item.key_value_pairs] else: items = item.compile_time_value(denv).iteritems() try: for key, value in items: if reject_duplicates and key in result: raise ValueError("duplicate keyword argument found: %s" % key) result[key] = value except Exception as e: self.compile_time_value_error(e) return result def type_dependencies(self, env): return () def infer_type(self, env): return dict_type def analyse_types(self, env): args = [ arg.analyse_types(env).coerce_to_pyobject(env).as_none_safe_node( # FIXME: CPython's error message starts with the runtime function name 'argument after ** must be a mapping, not NoneType') for arg in self.keyword_args ] if len(args) == 1 and args[0].type is dict_type: # strip this intermediate node and use the bare dict arg = args[0] if arg.is_name and arg.entry.is_arg and len(arg.entry.cf_assignments) == 1: # passing **kwargs through to function call => allow NULL arg.allow_null = True return arg self.keyword_args = args return self def 
may_be_none(self): return False gil_message = "Constructing Python dict" def generate_evaluation_code(self, code): code.mark_pos(self.pos) self.allocate_temp_result(code) args = iter(self.keyword_args) item = next(args) item.generate_evaluation_code(code) if item.type is not dict_type: # CPython supports calling functions with non-dicts, so do we code.putln('if (likely(PyDict_CheckExact(%s))) {' % item.py_result()) if item.is_dict_literal: item.make_owned_reference(code) code.putln("%s = %s;" % (self.result(), item.py_result())) item.generate_post_assignment_code(code) else: code.putln("%s = PyDict_Copy(%s); %s" % ( self.result(), item.py_result(), code.error_goto_if_null(self.result(), item.pos))) code.put_gotref(self.result()) item.generate_disposal_code(code) if item.type is not dict_type: code.putln('} else {') code.putln("%s = PyObject_CallFunctionObjArgs((PyObject*)&PyDict_Type, %s, NULL); %s" % ( self.result(), item.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) item.generate_disposal_code(code) code.putln('}') item.free_temps(code) helpers = set() for item in args: if item.is_dict_literal: # inline update instead of creating an intermediate dict for arg in item.key_value_pairs: arg.generate_evaluation_code(code) if self.reject_duplicates: code.putln("if (unlikely(PyDict_Contains(%s, %s))) {" % ( self.result(), arg.key.py_result())) helpers.add("RaiseDoubleKeywords") # FIXME: find out function name at runtime! code.putln('__Pyx_RaiseDoubleKeywordsError("function", %s); %s' % ( arg.key.py_result(), code.error_goto(self.pos))) code.putln("}") code.put_error_if_neg(arg.key.pos, "PyDict_SetItem(%s, %s, %s)" % ( self.result(), arg.key.py_result(), arg.value.py_result())) arg.generate_disposal_code(code) arg.free_temps(code) else: item.generate_evaluation_code(code) if self.reject_duplicates: # merge mapping into kwdict one by one as we need to check for duplicates helpers.add("MergeKeywords") code.put_error_if_neg(item.pos, "__Pyx_MergeKeywords(%s, %s)" % ( self.result(), item.py_result())) else: # simple case, just add all entries helpers.add("RaiseMappingExpected") code.putln("if (unlikely(PyDict_Update(%s, %s) < 0)) {" % ( self.result(), item.py_result())) code.putln("if (PyErr_ExceptionMatches(PyExc_AttributeError)) " "__Pyx_RaiseMappingExpectedError(%s);" % item.py_result()) code.putln(code.error_goto(item.pos)) code.putln("}") item.generate_disposal_code(code) item.free_temps(code) for helper in sorted(helpers): code.globalstate.use_utility_code(UtilityCode.load_cached(helper, "FunctionArguments.c")) def annotate(self, code): for item in self.keyword_args: item.annotate(code) class AttributeNode(ExprNode): # obj.attribute # # obj ExprNode # attribute string # needs_none_check boolean Used if obj is an extension type. # If set to True, it is known that the type is not None. 
# # Used internally: # # is_py_attr boolean Is a Python getattr operation # member string C name of struct member # is_called boolean Function call is being done on result # entry Entry Symbol table entry of attribute is_attribute = 1 subexprs = ['obj'] type = PyrexTypes.error_type entry = None is_called = 0 needs_none_check = True is_memslice_transpose = False is_special_lookup = False is_py_attr = 0 def as_cython_attribute(self): if (isinstance(self.obj, NameNode) and self.obj.is_cython_module and not self.attribute == u"parallel"): return self.attribute cy = self.obj.as_cython_attribute() if cy: return "%s.%s" % (cy, self.attribute) return None def coerce_to(self, dst_type, env): # If coercing to a generic pyobject and this is a cpdef function # we can create the corresponding attribute if dst_type is py_object_type: entry = self.entry if entry and entry.is_cfunction and entry.as_variable: # must be a cpdef function self.is_temp = 1 self.entry = entry.as_variable self.analyse_as_python_attribute(env) return self return ExprNode.coerce_to(self, dst_type, env) def calculate_constant_result(self): attr = self.attribute if attr.startswith("__") and attr.endswith("__"): return self.constant_result = getattr(self.obj.constant_result, attr) def compile_time_value(self, denv): attr = self.attribute if attr.startswith("__") and attr.endswith("__"): error(self.pos, "Invalid attribute name '%s' in compile-time expression" % attr) return None obj = self.obj.compile_time_value(denv) try: return getattr(obj, attr) except Exception as e: self.compile_time_value_error(e) def type_dependencies(self, env): return self.obj.type_dependencies(env) def infer_type(self, env): # FIXME: this is way too redundant with analyse_types() node = self.analyse_as_cimported_attribute_node(env, target=False) if node is not None: return node.entry.type node = self.analyse_as_type_attribute(env) if node is not None: return node.entry.type obj_type = self.obj.infer_type(env) self.analyse_attribute(env, obj_type=obj_type) if obj_type.is_builtin_type and self.type.is_cfunction: # special case: C-API replacements for C methods of # builtin types cannot be inferred as C functions as # that would prevent their use as bound methods return py_object_type elif self.entry and self.entry.is_cmethod: # special case: bound methods should not be inferred # as their unbound method types return py_object_type return self.type def analyse_target_declaration(self, env): pass def analyse_target_types(self, env): node = self.analyse_types(env, target = 1) if node.type.is_const: error(self.pos, "Assignment to const attribute '%s'" % self.attribute) if not node.is_lvalue(): error(self.pos, "Assignment to non-lvalue of type '%s'" % self.type) return node def analyse_types(self, env, target = 0): self.initialized_check = env.directives['initializedcheck'] node = self.analyse_as_cimported_attribute_node(env, target) if node is None and not target: node = self.analyse_as_type_attribute(env) if node is None: node = self.analyse_as_ordinary_attribute_node(env, target) assert node is not None if node.entry: node.entry.used = True if node.is_attribute: node.wrap_obj_in_nonecheck(env) return node def analyse_as_cimported_attribute_node(self, env, target): # Try to interpret this as a reference to an imported # C const, type, var or function. If successful, mutates # this node into a NameNode and returns 1, otherwise # returns 0. 
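        # For illustration (editorial sketch, not from the original comments):
        # given
        #
        #     cimport libc.math
        #     y = libc.math.sin(x)
        #
        # the "libc.math" part analyses as a module scope below, and the "sin"
        # attribute is rewritten into a NameNode bound to the C function entry,
        # so the call compiles to a direct C call instead of a Python
        # attribute lookup.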
module_scope = self.obj.analyse_as_module(env) if module_scope: entry = module_scope.lookup_here(self.attribute) if entry and ( entry.is_cglobal or entry.is_cfunction or entry.is_type or entry.is_const): return self.as_name_node(env, entry, target) if self.is_cimported_module_without_shadow(env): error(self.pos, "cimported module has no attribute '%s'" % self.attribute) return self return None def analyse_as_type_attribute(self, env): # Try to interpret this as a reference to an unbound # C method of an extension type or builtin type. If successful, # creates a corresponding NameNode and returns it, otherwise # returns None. if self.obj.is_string_literal: return type = self.obj.analyse_as_type(env) if type: if type.is_extension_type or type.is_builtin_type or type.is_cpp_class: entry = type.scope.lookup_here(self.attribute) if entry and (entry.is_cmethod or type.is_cpp_class and entry.type.is_cfunction): if type.is_builtin_type: if not self.is_called: # must handle this as Python object return None ubcm_entry = entry else: # Create a temporary entry describing the C method # as an ordinary function. if entry.func_cname and not hasattr(entry.type, 'op_arg_struct'): cname = entry.func_cname if entry.type.is_static_method or ( env.parent_scope and env.parent_scope.is_cpp_class_scope): ctype = entry.type elif type.is_cpp_class: error(self.pos, "%s not a static member of %s" % (entry.name, type)) ctype = PyrexTypes.error_type else: # Fix self type. ctype = copy.copy(entry.type) ctype.args = ctype.args[:] ctype.args[0] = PyrexTypes.CFuncTypeArg('self', type, 'self', None) else: cname = "%s->%s" % (type.vtabptr_cname, entry.cname) ctype = entry.type ubcm_entry = Symtab.Entry(entry.name, cname, ctype) ubcm_entry.is_cfunction = 1 ubcm_entry.func_cname = entry.func_cname ubcm_entry.is_unbound_cmethod = 1 ubcm_entry.scope = entry.scope return self.as_name_node(env, ubcm_entry, target=False) elif type.is_enum: if self.attribute in type.values: for entry in type.entry.enum_values: if entry.name == self.attribute: return self.as_name_node(env, entry, target=False) else: error(self.pos, "%s not a known value of %s" % (self.attribute, type)) else: error(self.pos, "%s not a known value of %s" % (self.attribute, type)) return None def analyse_as_type(self, env): module_scope = self.obj.analyse_as_module(env) if module_scope: return module_scope.lookup_type(self.attribute) if not self.obj.is_string_literal: base_type = self.obj.analyse_as_type(env) if base_type and hasattr(base_type, 'scope') and base_type.scope is not None: return base_type.scope.lookup_type(self.attribute) return None def analyse_as_extension_type(self, env): # Try to interpret this as a reference to an extension type # in a cimported module. Returns the extension type, or None. module_scope = self.obj.analyse_as_module(env) if module_scope: entry = module_scope.lookup_here(self.attribute) if entry and entry.is_type: if entry.type.is_extension_type or entry.type.is_builtin_type: return entry.type return None def analyse_as_module(self, env): # Try to interpret this as a reference to a cimported module # in another cimported module. Returns the module scope, or None. module_scope = self.obj.analyse_as_module(env) if module_scope: entry = module_scope.lookup_here(self.attribute) if entry and entry.as_module: return entry.as_module return None def as_name_node(self, env, entry, target): # Create a corresponding NameNode from this node and complete the # analyse_types phase. 
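        # For illustration (a sketch of the typical case, not from the original
        # source): this is the common exit path of the analyse_as_*() helpers
        # above.  E.g. an unbound C method access such as
        #
        #     cdef class Point:
        #         cdef double norm(self): ...
        #
        #     Point.norm(p)    # calls the C implementation directly
        #
        # arrives here with a synthesised Entry whose cname points at the
        # vtable slot (or the plain C function), wrapped into a NameNode.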
node = NameNode.from_node(self, name=self.attribute, entry=entry) if target: node = node.analyse_target_types(env) else: node = node.analyse_rvalue_entry(env) node.entry.used = 1 return node def analyse_as_ordinary_attribute_node(self, env, target): self.obj = self.obj.analyse_types(env) self.analyse_attribute(env) if self.entry and self.entry.is_cmethod and not self.is_called: # error(self.pos, "C method can only be called") pass ## Reference to C array turns into pointer to first element. #while self.type.is_array: # self.type = self.type.element_ptr_type() if self.is_py_attr: if not target: self.is_temp = 1 self.result_ctype = py_object_type elif target and self.obj.type.is_builtin_type: error(self.pos, "Assignment to an immutable object field") #elif self.type.is_memoryviewslice and not target: # self.is_temp = True return self def analyse_attribute(self, env, obj_type = None): # Look up attribute and set self.type and self.member. immutable_obj = obj_type is not None # used during type inference self.is_py_attr = 0 self.member = self.attribute if obj_type is None: if self.obj.type.is_string or self.obj.type.is_pyunicode_ptr: self.obj = self.obj.coerce_to_pyobject(env) obj_type = self.obj.type else: if obj_type.is_string or obj_type.is_pyunicode_ptr: obj_type = py_object_type if obj_type.is_ptr or obj_type.is_array: obj_type = obj_type.base_type self.op = "->" elif obj_type.is_extension_type or obj_type.is_builtin_type: self.op = "->" elif obj_type.is_reference and obj_type.is_fake_reference: self.op = "->" else: self.op = "." if obj_type.has_attributes: if obj_type.attributes_known(): entry = obj_type.scope.lookup_here(self.attribute) if obj_type.is_memoryviewslice and not entry: if self.attribute == 'T': self.is_memslice_transpose = True self.is_temp = True self.use_managed_ref = True self.type = self.obj.type.transpose(self.pos) return else: obj_type.declare_attribute(self.attribute, env, self.pos) entry = obj_type.scope.lookup_here(self.attribute) if entry and entry.is_member: entry = None else: error(self.pos, "Cannot select attribute of incomplete type '%s'" % obj_type) self.type = PyrexTypes.error_type return self.entry = entry if entry: if obj_type.is_extension_type and entry.name == "__weakref__": error(self.pos, "Illegal use of special attribute __weakref__") # def methods need the normal attribute lookup # because they do not have struct entries # fused function go through assignment synthesis # (foo = pycfunction(foo_func_obj)) and need to go through # regular Python lookup as well if (entry.is_variable and not entry.fused_cfunction) or entry.is_cmethod: self.type = entry.type self.member = entry.cname return else: # If it's not a variable or C method, it must be a Python # method of an extension type, so we treat it like a Python # attribute. pass # If we get here, the base object is not a struct/union/extension # type, or it is an extension type and the attribute is either not # declared or is declared as a Python method. Treat it as a Python # attribute reference. self.analyse_as_python_attribute(env, obj_type, immutable_obj) def analyse_as_python_attribute(self, env, obj_type=None, immutable_obj=False): if obj_type is None: obj_type = self.obj.type # mangle private '__*' Python attributes used inside of a class self.attribute = env.mangle_class_private_name(self.attribute) self.member = self.attribute self.type = py_object_type self.is_py_attr = 1 if not obj_type.is_pyobject and not obj_type.is_error: # Expose python methods for immutable objects. 
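            # For illustration (sketch): a Python-level method on a C value,
            # e.g.
            #
            #     cdef double x = 1.5
            #     s = x.hex()
            #
            # has no C struct member, so the object is coerced to a Python
            # float first and the access proceeds as an ordinary Python
            # attribute lookup on that temporary.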
if (obj_type.is_string or obj_type.is_cpp_string or obj_type.is_buffer or obj_type.is_memoryviewslice or obj_type.is_numeric or (obj_type.is_ctuple and obj_type.can_coerce_to_pyobject(env)) or (obj_type.is_struct and obj_type.can_coerce_to_pyobject(env))): if not immutable_obj: self.obj = self.obj.coerce_to_pyobject(env) elif (obj_type.is_cfunction and (self.obj.is_name or self.obj.is_attribute) and self.obj.entry.as_variable and self.obj.entry.as_variable.type.is_pyobject): # might be an optimised builtin function => unpack it if not immutable_obj: self.obj = self.obj.coerce_to_pyobject(env) else: error(self.pos, "Object of type '%s' has no attribute '%s'" % (obj_type, self.attribute)) def wrap_obj_in_nonecheck(self, env): if not env.directives['nonecheck']: return msg = None format_args = () if (self.obj.type.is_extension_type and self.needs_none_check and not self.is_py_attr): msg = "'NoneType' object has no attribute '%s'" format_args = (self.attribute,) elif self.obj.type.is_memoryviewslice: if self.is_memslice_transpose: msg = "Cannot transpose None memoryview slice" else: entry = self.obj.type.scope.lookup_here(self.attribute) if entry: # copy/is_c_contig/shape/strides etc msg = "Cannot access '%s' attribute of None memoryview slice" format_args = (entry.name,) if msg: self.obj = self.obj.as_none_safe_node(msg, 'PyExc_AttributeError', format_args=format_args) def nogil_check(self, env): if self.is_py_attr: self.gil_error() gil_message = "Accessing Python attribute" def is_cimported_module_without_shadow(self, env): return self.obj.is_cimported_module_without_shadow(env) def is_simple(self): if self.obj: return self.result_in_temp() or self.obj.is_simple() else: return NameNode.is_simple(self) def is_lvalue(self): if self.obj: return True else: return NameNode.is_lvalue(self) def is_ephemeral(self): if self.obj: return self.obj.is_ephemeral() else: return NameNode.is_ephemeral(self) def calculate_result_code(self): #print "AttributeNode.calculate_result_code:", self.member ### #print "...obj node =", self.obj, "code", self.obj.result() ### #print "...obj type", self.obj.type, "ctype", self.obj.ctype() ### obj = self.obj obj_code = obj.result_as(obj.type) #print "...obj_code =", obj_code ### if self.entry and self.entry.is_cmethod: if obj.type.is_extension_type and not self.entry.is_builtin_cmethod: if self.entry.final_func_cname: return self.entry.final_func_cname if self.type.from_fused: # If the attribute was specialized through indexing, make # sure to get the right fused name, as our entry was # replaced by our parent index node # (AnalyseExpressionsTransform) self.member = self.entry.cname return "((struct %s *)%s%s%s)->%s" % ( obj.type.vtabstruct_cname, obj_code, self.op, obj.type.vtabslot_cname, self.member) elif self.result_is_used: return self.member # Generating no code at all for unused access to optimised builtin # methods fixes the problem that some optimisations only exist as # macros, i.e. there is no function pointer to them, so we would # generate invalid C code here. 
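            # For illustration (sketch; the concrete identifiers depend on
            # Naming and the type and are assumed here): the vtable branch
            # earlier in this method expands to C roughly like
            #
            #     ((struct __pyx_vtabstruct_Point *)p->__pyx_vtab)->norm
            #
            # i.e. a cast of the object's vtab pointer plus a member selection,
            # while this bare return deliberately emits nothing for unused
            # accesses to macro-only builtin methods, as explained above.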
return elif obj.type.is_complex: return "__Pyx_C%s(%s)" % (self.member.upper(), obj_code) else: if obj.type.is_builtin_type and self.entry and self.entry.is_variable: # accessing a field of a builtin type, need to cast better than result_as() does obj_code = obj.type.cast_code(obj.result(), to_object_struct = True) return "%s%s%s" % (obj_code, self.op, self.member) def generate_result_code(self, code): if self.is_py_attr: if self.is_special_lookup: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectLookupSpecial", "ObjectHandling.c")) lookup_func_name = '__Pyx_PyObject_LookupSpecial' else: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectGetAttrStr", "ObjectHandling.c")) lookup_func_name = '__Pyx_PyObject_GetAttrStr' code.putln( '%s = %s(%s, %s); %s' % ( self.result(), lookup_func_name, self.obj.py_result(), code.intern_identifier(self.attribute), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) elif self.type.is_memoryviewslice: if self.is_memslice_transpose: # transpose the slice for access, packing in self.type.axes: if access == 'ptr': error(self.pos, "Transposing not supported for slices " "with indirect dimensions") return code.putln("%s = %s;" % (self.result(), self.obj.result())) code.put_incref_memoryviewslice(self.result(), have_gil=True) T = "__pyx_memslice_transpose(&%s) == 0" code.putln(code.error_goto_if(T % self.result(), self.pos)) elif self.initialized_check: code.putln( 'if (unlikely(!%s.memview)) {' 'PyErr_SetString(PyExc_AttributeError,' '"Memoryview is not initialized");' '%s' '}' % (self.result(), code.error_goto(self.pos))) else: # result_code contains what is needed, but we may need to insert # a check and raise an exception if self.obj.type and self.obj.type.is_extension_type: pass elif self.entry and self.entry.is_cmethod: # C method implemented as function call with utility code code.globalstate.use_entry_utility_code(self.entry) def generate_disposal_code(self, code): if self.is_temp and self.type.is_memoryviewslice and self.is_memslice_transpose: # mirror condition for putting the memview incref here: code.put_xdecref_memoryviewslice( self.result(), have_gil=True) code.putln("%s.memview = NULL;" % self.result()) code.putln("%s.data = NULL;" % self.result()) else: ExprNode.generate_disposal_code(self, code) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): self.obj.generate_evaluation_code(code) if self.is_py_attr: code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) code.put_error_if_neg(self.pos, '__Pyx_PyObject_SetAttrStr(%s, %s, %s)' % ( self.obj.py_result(), code.intern_identifier(self.attribute), rhs.py_result())) rhs.generate_disposal_code(code) rhs.free_temps(code) elif self.obj.type.is_complex: code.putln("__Pyx_SET_C%s(%s, %s);" % ( self.member.upper(), self.obj.result_as(self.obj.type), rhs.result_as(self.ctype()))) else: select_code = self.result() if self.type.is_pyobject and self.use_managed_ref: rhs.make_owned_reference(code) code.put_giveref(rhs.py_result()) code.put_gotref(select_code) code.put_decref(select_code, self.ctype()) elif self.type.is_memoryviewslice: from . 
import MemoryView MemoryView.put_assign_to_memviewslice( select_code, rhs, rhs.result(), self.type, code) if not self.type.is_memoryviewslice: code.putln( "%s = %s;" % ( select_code, rhs.result_as(self.ctype()))) #rhs.result())) rhs.generate_post_assignment_code(code) rhs.free_temps(code) self.obj.generate_disposal_code(code) self.obj.free_temps(code) def generate_deletion_code(self, code, ignore_nonexisting=False): self.obj.generate_evaluation_code(code) if self.is_py_attr or (self.entry.scope.is_property_scope and u'__del__' in self.entry.scope.entries): code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) code.put_error_if_neg(self.pos, '__Pyx_PyObject_DelAttrStr(%s, %s)' % ( self.obj.py_result(), code.intern_identifier(self.attribute))) else: error(self.pos, "Cannot delete C attribute of extension type") self.obj.generate_disposal_code(code) self.obj.free_temps(code) def annotate(self, code): if self.is_py_attr: style, text = 'py_attr', 'python attribute (%s)' else: style, text = 'c_attr', 'c attribute (%s)' code.annotate(self.pos, AnnotationItem(style, text % self.type, size=len(self.attribute))) #------------------------------------------------------------------- # # Constructor nodes # #------------------------------------------------------------------- class StarredUnpackingNode(ExprNode): # A starred expression like "*a" # # This is only allowed in sequence assignment or construction such as # # a, *b = (1,2,3,4) => a = 1 ; b = [2,3,4] # # and will be special cased during type analysis (or generate an error # if it's found at unexpected places). # # target ExprNode subexprs = ['target'] is_starred = 1 type = py_object_type is_temp = 1 starred_expr_allowed_here = False def __init__(self, pos, target): ExprNode.__init__(self, pos, target=target) def analyse_declarations(self, env): if not self.starred_expr_allowed_here: error(self.pos, "starred expression is not allowed here") self.target.analyse_declarations(env) def infer_type(self, env): return self.target.infer_type(env) def analyse_types(self, env): if not self.starred_expr_allowed_here: error(self.pos, "starred expression is not allowed here") self.target = self.target.analyse_types(env) self.type = self.target.type return self def analyse_target_declaration(self, env): self.target.analyse_target_declaration(env) def analyse_target_types(self, env): self.target = self.target.analyse_target_types(env) self.type = self.target.type return self def calculate_result_code(self): return "" def generate_result_code(self, code): pass class SequenceNode(ExprNode): # Base class for list and tuple constructor nodes. # Contains common code for performing sequence unpacking. # # args [ExprNode] # unpacked_items [ExprNode] or None # coerced_unpacked_items [ExprNode] or None # mult_factor ExprNode the integer number of content repetitions ([1,2]*3) subexprs = ['args', 'mult_factor'] is_sequence_constructor = 1 unpacked_items = None mult_factor = None slow = False # trade speed for code size (e.g. 
use PyTuple_Pack()) def compile_time_value_list(self, denv): return [arg.compile_time_value(denv) for arg in self.args] def replace_starred_target_node(self): # replace a starred node in the targets by the contained expression self.starred_assignment = False args = [] for arg in self.args: if arg.is_starred: if self.starred_assignment: error(arg.pos, "more than 1 starred expression in assignment") self.starred_assignment = True arg = arg.target arg.is_starred = True args.append(arg) self.args = args def analyse_target_declaration(self, env): self.replace_starred_target_node() for arg in self.args: arg.analyse_target_declaration(env) def analyse_types(self, env, skip_children=False): for i, arg in enumerate(self.args): if not skip_children: arg = arg.analyse_types(env) self.args[i] = arg.coerce_to_pyobject(env) if self.mult_factor: self.mult_factor = self.mult_factor.analyse_types(env) if not self.mult_factor.type.is_int: self.mult_factor = self.mult_factor.coerce_to_pyobject(env) self.is_temp = 1 # not setting self.type here, subtypes do this return self def coerce_to_ctuple(self, dst_type, env): if self.type == dst_type: return self assert not self.mult_factor if len(self.args) != dst_type.size: error(self.pos, "trying to coerce sequence to ctuple of wrong length, expected %d, got %d" % ( dst_type.size, len(self.args))) coerced_args = [arg.coerce_to(type, env) for arg, type in zip(self.args, dst_type.components)] return TupleNode(self.pos, args=coerced_args, type=dst_type, is_temp=True) def _create_merge_node_if_necessary(self, env): self._flatten_starred_args() if not any(arg.is_starred for arg in self.args): return self # convert into MergedSequenceNode by building partial sequences args = [] values = [] for arg in self.args: if arg.is_starred: if values: args.append(TupleNode(values[0].pos, args=values).analyse_types(env, skip_children=True)) values = [] args.append(arg.target) else: values.append(arg) if values: args.append(TupleNode(values[0].pos, args=values).analyse_types(env, skip_children=True)) node = MergedSequenceNode(self.pos, args, self.type) if self.mult_factor: node = binop_node( self.pos, '*', node, self.mult_factor.coerce_to_pyobject(env), inplace=True, type=self.type, is_temp=True) return node def _flatten_starred_args(self): args = [] for arg in self.args: if arg.is_starred and arg.target.is_sequence_constructor and not arg.target.mult_factor: args.extend(arg.target.args) else: args.append(arg) self.args[:] = args def may_be_none(self): return False def analyse_target_types(self, env): if self.mult_factor: error(self.pos, "can't assign to multiplied sequence") self.unpacked_items = [] self.coerced_unpacked_items = [] self.any_coerced_items = False for i, arg in enumerate(self.args): arg = self.args[i] = arg.analyse_target_types(env) if arg.is_starred: if not arg.type.assignable_from(list_type): error(arg.pos, "starred target must have Python object (list) type") if arg.type is py_object_type: arg.type = list_type unpacked_item = PyTempNode(self.pos, env) coerced_unpacked_item = unpacked_item.coerce_to(arg.type, env) if unpacked_item is not coerced_unpacked_item: self.any_coerced_items = True self.unpacked_items.append(unpacked_item) self.coerced_unpacked_items.append(coerced_unpacked_item) self.type = py_object_type return self def generate_result_code(self, code): self.generate_operation_code(code) def generate_sequence_packing_code(self, code, target=None, plain=False): if target is None: target = self.result() size_factor = c_mult = '' mult_factor = None if 
self.mult_factor and not plain: mult_factor = self.mult_factor if mult_factor.type.is_int: c_mult = mult_factor.result() if (isinstance(mult_factor.constant_result, _py_int_types) and mult_factor.constant_result > 0): size_factor = ' * %s' % mult_factor.constant_result elif mult_factor.type.signed: size_factor = ' * ((%s<0) ? 0:%s)' % (c_mult, c_mult) else: size_factor = ' * (%s)' % (c_mult,) if self.type is tuple_type and (self.is_literal or self.slow) and not c_mult: # use PyTuple_Pack() to avoid generating huge amounts of one-time code code.putln('%s = PyTuple_Pack(%d, %s); %s' % ( target, len(self.args), ', '.join(arg.py_result() for arg in self.args), code.error_goto_if_null(target, self.pos))) code.put_gotref(target) elif self.type.is_ctuple: for i, arg in enumerate(self.args): code.putln("%s.f%s = %s;" % ( target, i, arg.result())) else: # build the tuple/list step by step, potentially multiplying it as we go if self.type is list_type: create_func, set_item_func = 'PyList_New', 'PyList_SET_ITEM' elif self.type is tuple_type: create_func, set_item_func = 'PyTuple_New', 'PyTuple_SET_ITEM' else: raise InternalError("sequence packing for unexpected type %s" % self.type) arg_count = len(self.args) code.putln("%s = %s(%s%s); %s" % ( target, create_func, arg_count, size_factor, code.error_goto_if_null(target, self.pos))) code.put_gotref(target) if c_mult: # FIXME: can't use a temp variable here as the code may # end up in the constant building function. Temps # currently don't work there. #counter = code.funcstate.allocate_temp(mult_factor.type, manage_ref=False) counter = Naming.quick_temp_cname code.putln('{ Py_ssize_t %s;' % counter) if arg_count == 1: offset = counter else: offset = '%s * %s' % (counter, arg_count) code.putln('for (%s=0; %s < %s; %s++) {' % ( counter, counter, c_mult, counter )) else: offset = '' for i in range(arg_count): arg = self.args[i] if c_mult or not arg.result_in_temp(): code.put_incref(arg.result(), arg.ctype()) code.put_giveref(arg.py_result()) code.putln("%s(%s, %s, %s);" % ( set_item_func, target, (offset and i) and ('%s + %s' % (offset, i)) or (offset or i), arg.py_result())) if c_mult: code.putln('}') #code.funcstate.release_temp(counter) code.putln('}') if mult_factor is not None and mult_factor.type.is_pyobject: code.putln('{ PyObject* %s = PyNumber_InPlaceMultiply(%s, %s); %s' % ( Naming.quick_temp_cname, target, mult_factor.py_result(), code.error_goto_if_null(Naming.quick_temp_cname, self.pos) )) code.put_gotref(Naming.quick_temp_cname) code.put_decref(target, py_object_type) code.putln('%s = %s;' % (target, Naming.quick_temp_cname)) code.putln('}') def generate_subexpr_disposal_code(self, code): if self.mult_factor and self.mult_factor.type.is_int: super(SequenceNode, self).generate_subexpr_disposal_code(code) elif self.type is tuple_type and (self.is_literal or self.slow): super(SequenceNode, self).generate_subexpr_disposal_code(code) else: # We call generate_post_assignment_code here instead # of generate_disposal_code, because values were stored # in the tuple using a reference-stealing operation. for arg in self.args: arg.generate_post_assignment_code(code) # Should NOT call free_temps -- this is invoked by the default # generate_evaluation_code which will do that. 
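        # For illustration: PyTuple_SET_ITEM()/PyList_SET_ITEM() steal the
        # reference to the stored value, so once an argument has been packed
        # the container owns it; the post-assignment path above therefore only
        # clears the temps and must not decref the stored values again.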
if self.mult_factor: self.mult_factor.generate_disposal_code(code) def generate_assignment_code(self, rhs, code, overloaded_assignment=False, exception_check=None, exception_value=None): if self.starred_assignment: self.generate_starred_assignment_code(rhs, code) else: self.generate_parallel_assignment_code(rhs, code) for item in self.unpacked_items: item.release(code) rhs.free_temps(code) _func_iternext_type = PyrexTypes.CPtrType(PyrexTypes.CFuncType( PyrexTypes.py_object_type, [ PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None), ])) def generate_parallel_assignment_code(self, rhs, code): # Need to work around the fact that generate_evaluation_code # allocates the temps in a rather hacky way -- the assignment # is evaluated twice, within each if-block. for item in self.unpacked_items: item.allocate(code) special_unpack = (rhs.type is py_object_type or rhs.type in (tuple_type, list_type) or not rhs.type.is_builtin_type) long_enough_for_a_loop = len(self.unpacked_items) > 3 if special_unpack: self.generate_special_parallel_unpacking_code( code, rhs, use_loop=long_enough_for_a_loop) else: code.putln("{") self.generate_generic_parallel_unpacking_code( code, rhs, self.unpacked_items, use_loop=long_enough_for_a_loop) code.putln("}") for value_node in self.coerced_unpacked_items: value_node.generate_evaluation_code(code) for i in range(len(self.args)): self.args[i].generate_assignment_code( self.coerced_unpacked_items[i], code) def generate_special_parallel_unpacking_code(self, code, rhs, use_loop): sequence_type_test = '1' none_check = "likely(%s != Py_None)" % rhs.py_result() if rhs.type is list_type: sequence_types = ['List'] if rhs.may_be_none(): sequence_type_test = none_check elif rhs.type is tuple_type: sequence_types = ['Tuple'] if rhs.may_be_none(): sequence_type_test = none_check else: sequence_types = ['Tuple', 'List'] tuple_check = 'likely(PyTuple_CheckExact(%s))' % rhs.py_result() list_check = 'PyList_CheckExact(%s)' % rhs.py_result() sequence_type_test = "(%s) || (%s)" % (tuple_check, list_check) code.putln("if (%s) {" % sequence_type_test) code.putln("PyObject* sequence = %s;" % rhs.py_result()) # list/tuple => check size code.putln("#if !CYTHON_COMPILING_IN_PYPY") code.putln("Py_ssize_t size = Py_SIZE(sequence);") code.putln("#else") code.putln("Py_ssize_t size = PySequence_Size(sequence);") # < 0 => exception code.putln("#endif") code.putln("if (unlikely(size != %d)) {" % len(self.args)) code.globalstate.use_utility_code(raise_too_many_values_to_unpack) code.putln("if (size > %d) __Pyx_RaiseTooManyValuesError(%d);" % ( len(self.args), len(self.args))) code.globalstate.use_utility_code(raise_need_more_values_to_unpack) code.putln("else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);") code.putln(code.error_goto(self.pos)) code.putln("}") code.putln("#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS") # unpack items from list/tuple in unrolled loop (can't fail) if len(sequence_types) == 2: code.putln("if (likely(Py%s_CheckExact(sequence))) {" % sequence_types[0]) for i, item in enumerate(self.unpacked_items): code.putln("%s = Py%s_GET_ITEM(sequence, %d); " % ( item.result(), sequence_types[0], i)) if len(sequence_types) == 2: code.putln("} else {") for i, item in enumerate(self.unpacked_items): code.putln("%s = Py%s_GET_ITEM(sequence, %d); " % ( item.result(), sequence_types[1], i)) code.putln("}") for item in self.unpacked_items: code.put_incref(item.result(), item.ctype()) code.putln("#else") # in non-CPython, use the PySequence protocol (which can 
fail) if not use_loop: for i, item in enumerate(self.unpacked_items): code.putln("%s = PySequence_ITEM(sequence, %d); %s" % ( item.result(), i, code.error_goto_if_null(item.result(), self.pos))) code.put_gotref(item.result()) else: code.putln("{") code.putln("Py_ssize_t i;") code.putln("PyObject** temps[%s] = {%s};" % ( len(self.unpacked_items), ','.join(['&%s' % item.result() for item in self.unpacked_items]))) code.putln("for (i=0; i < %s; i++) {" % len(self.unpacked_items)) code.putln("PyObject* item = PySequence_ITEM(sequence, i); %s" % ( code.error_goto_if_null('item', self.pos))) code.put_gotref('item') code.putln("*(temps[i]) = item;") code.putln("}") code.putln("}") code.putln("#endif") rhs.generate_disposal_code(code) if sequence_type_test == '1': code.putln("}") # all done elif sequence_type_test == none_check: # either tuple/list or None => save some code by generating the error directly code.putln("} else {") code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseNoneIterError", "ObjectHandling.c")) code.putln("__Pyx_RaiseNoneNotIterableError(); %s" % code.error_goto(self.pos)) code.putln("}") # all done else: code.putln("} else {") # needs iteration fallback code self.generate_generic_parallel_unpacking_code( code, rhs, self.unpacked_items, use_loop=use_loop) code.putln("}") def generate_generic_parallel_unpacking_code(self, code, rhs, unpacked_items, use_loop, terminate=True): code.globalstate.use_utility_code(raise_need_more_values_to_unpack) code.globalstate.use_utility_code(UtilityCode.load_cached("IterFinish", "ObjectHandling.c")) code.putln("Py_ssize_t index = -1;") # must be at the start of a C block! if use_loop: code.putln("PyObject** temps[%s] = {%s};" % ( len(self.unpacked_items), ','.join(['&%s' % item.result() for item in unpacked_items]))) iterator_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) code.putln( "%s = PyObject_GetIter(%s); %s" % ( iterator_temp, rhs.py_result(), code.error_goto_if_null(iterator_temp, self.pos))) code.put_gotref(iterator_temp) rhs.generate_disposal_code(code) iternext_func = code.funcstate.allocate_temp(self._func_iternext_type, manage_ref=False) code.putln("%s = Py_TYPE(%s)->tp_iternext;" % ( iternext_func, iterator_temp)) unpacking_error_label = code.new_label('unpacking_failed') unpack_code = "%s(%s)" % (iternext_func, iterator_temp) if use_loop: code.putln("for (index=0; index < %s; index++) {" % len(unpacked_items)) code.put("PyObject* item = %s; if (unlikely(!item)) " % unpack_code) code.put_goto(unpacking_error_label) code.put_gotref("item") code.putln("*(temps[index]) = item;") code.putln("}") else: for i, item in enumerate(unpacked_items): code.put( "index = %d; %s = %s; if (unlikely(!%s)) " % ( i, item.result(), unpack_code, item.result())) code.put_goto(unpacking_error_label) code.put_gotref(item.py_result()) if terminate: code.globalstate.use_utility_code( UtilityCode.load_cached("UnpackItemEndCheck", "ObjectHandling.c")) code.put_error_if_neg(self.pos, "__Pyx_IternextUnpackEndCheck(%s, %d)" % ( unpack_code, len(unpacked_items))) code.putln("%s = NULL;" % iternext_func) code.put_decref_clear(iterator_temp, py_object_type) unpacking_done_label = code.new_label('unpacking_done') code.put_goto(unpacking_done_label) code.put_label(unpacking_error_label) code.put_decref_clear(iterator_temp, py_object_type) code.putln("%s = NULL;" % iternext_func) code.putln("if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);") code.putln(code.error_goto(self.pos)) 
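        # For illustration (sketch): an assignment like
        #
        #     a, b, c = make_values()
        #
        # whose right-hand side is not known to be a list or tuple goes
        # through this generic path: PyObject_GetIter() plus one tp_iternext
        # call per target, with the unpacking_failed label above turning an
        # exhausted iterator into the usual "need more than N values to
        # unpack" error.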
code.put_label(unpacking_done_label) code.funcstate.release_temp(iternext_func) if terminate: code.funcstate.release_temp(iterator_temp) iterator_temp = None return iterator_temp def generate_starred_assignment_code(self, rhs, code): for i, arg in enumerate(self.args): if arg.is_starred: starred_target = self.unpacked_items[i] unpacked_fixed_items_left = self.unpacked_items[:i] unpacked_fixed_items_right = self.unpacked_items[i+1:] break else: assert False iterator_temp = None if unpacked_fixed_items_left: for item in unpacked_fixed_items_left: item.allocate(code) code.putln('{') iterator_temp = self.generate_generic_parallel_unpacking_code( code, rhs, unpacked_fixed_items_left, use_loop=True, terminate=False) for i, item in enumerate(unpacked_fixed_items_left): value_node = self.coerced_unpacked_items[i] value_node.generate_evaluation_code(code) code.putln('}') starred_target.allocate(code) target_list = starred_target.result() code.putln("%s = PySequence_List(%s); %s" % ( target_list, iterator_temp or rhs.py_result(), code.error_goto_if_null(target_list, self.pos))) code.put_gotref(target_list) if iterator_temp: code.put_decref_clear(iterator_temp, py_object_type) code.funcstate.release_temp(iterator_temp) else: rhs.generate_disposal_code(code) if unpacked_fixed_items_right: code.globalstate.use_utility_code(raise_need_more_values_to_unpack) length_temp = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False) code.putln('%s = PyList_GET_SIZE(%s);' % (length_temp, target_list)) code.putln("if (unlikely(%s < %d)) {" % (length_temp, len(unpacked_fixed_items_right))) code.putln("__Pyx_RaiseNeedMoreValuesError(%d+%s); %s" % ( len(unpacked_fixed_items_left), length_temp, code.error_goto(self.pos))) code.putln('}') for item in unpacked_fixed_items_right[::-1]: item.allocate(code) for i, (item, coerced_arg) in enumerate(zip(unpacked_fixed_items_right[::-1], self.coerced_unpacked_items[::-1])): code.putln('#if CYTHON_COMPILING_IN_CPYTHON') code.putln("%s = PyList_GET_ITEM(%s, %s-%d); " % ( item.py_result(), target_list, length_temp, i+1)) # resize the list the hard way code.putln("((PyVarObject*)%s)->ob_size--;" % target_list) code.putln('#else') code.putln("%s = PySequence_ITEM(%s, %s-%d); " % ( item.py_result(), target_list, length_temp, i+1)) code.putln('#endif') code.put_gotref(item.py_result()) coerced_arg.generate_evaluation_code(code) code.putln('#if !CYTHON_COMPILING_IN_CPYTHON') sublist_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) code.putln('%s = PySequence_GetSlice(%s, 0, %s-%d); %s' % ( sublist_temp, target_list, length_temp, len(unpacked_fixed_items_right), code.error_goto_if_null(sublist_temp, self.pos))) code.put_gotref(sublist_temp) code.funcstate.release_temp(length_temp) code.put_decref(target_list, py_object_type) code.putln('%s = %s; %s = NULL;' % (target_list, sublist_temp, sublist_temp)) code.putln('#else') code.putln('%s = %s;' % (sublist_temp, sublist_temp)) # avoid warning about unused variable code.funcstate.release_temp(sublist_temp) code.putln('#endif') for i, arg in enumerate(self.args): arg.generate_assignment_code(self.coerced_unpacked_items[i], code) def annotate(self, code): for arg in self.args: arg.annotate(code) if self.unpacked_items: for arg in self.unpacked_items: arg.annotate(code) for arg in self.coerced_unpacked_items: arg.annotate(code) class TupleNode(SequenceNode): # Tuple constructor. 
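    # For illustration (sketch): when every element has a non-Python C type,
    # e.g.
    #
    #     cdef (int, double) pair = (1, 2.0)
    #
    # analyse_types() below keeps the value as a C tuple (ctuple); anything
    # containing Python objects, starred arguments or a runtime multiplier
    # falls back to building a real Python tuple.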
type = tuple_type is_partly_literal = False gil_message = "Constructing Python tuple" def infer_type(self, env): if self.mult_factor or not self.args: return tuple_type arg_types = [arg.infer_type(env) for arg in self.args] if any(type.is_pyobject or type.is_unspecified or type.is_fused for type in arg_types): return tuple_type else: return env.declare_tuple_type(self.pos, arg_types).type def analyse_types(self, env, skip_children=False): if len(self.args) == 0: self.is_temp = False self.is_literal = True return self if not skip_children: for i, arg in enumerate(self.args): if arg.is_starred: arg.starred_expr_allowed_here = True self.args[i] = arg.analyse_types(env) if (not self.mult_factor and not any((arg.is_starred or arg.type.is_pyobject or arg.type.is_fused) for arg in self.args)): self.type = env.declare_tuple_type(self.pos, (arg.type for arg in self.args)).type self.is_temp = 1 return self node = SequenceNode.analyse_types(self, env, skip_children=True) node = node._create_merge_node_if_necessary(env) if not node.is_sequence_constructor: return node if not all(child.is_literal for child in node.args): return node if not node.mult_factor or ( node.mult_factor.is_literal and isinstance(node.mult_factor.constant_result, _py_int_types)): node.is_temp = False node.is_literal = True else: if not node.mult_factor.type.is_pyobject: node.mult_factor = node.mult_factor.coerce_to_pyobject(env) node.is_temp = True node.is_partly_literal = True return node def analyse_as_type(self, env): # ctuple type if not self.args: return None item_types = [arg.analyse_as_type(env) for arg in self.args] if any(t is None for t in item_types): return None entry = env.declare_tuple_type(self.pos, item_types) return entry.type def coerce_to(self, dst_type, env): if self.type.is_ctuple: if dst_type.is_ctuple and self.type.size == dst_type.size: return self.coerce_to_ctuple(dst_type, env) elif dst_type is tuple_type or dst_type is py_object_type: coerced_args = [arg.coerce_to_pyobject(env) for arg in self.args] return TupleNode(self.pos, args=coerced_args, type=tuple_type, is_temp=1).analyse_types(env, skip_children=True) else: return self.coerce_to_pyobject(env).coerce_to(dst_type, env) elif dst_type.is_ctuple and not self.mult_factor: return self.coerce_to_ctuple(dst_type, env) else: return SequenceNode.coerce_to(self, dst_type, env) def as_list(self): t = ListNode(self.pos, args=self.args, mult_factor=self.mult_factor) if isinstance(self.constant_result, tuple): t.constant_result = list(self.constant_result) return t def is_simple(self): # either temp or constant => always simple return True def nonlocally_immutable(self): # either temp or constant => always safe return True def calculate_result_code(self): if len(self.args) > 0: return self.result_code else: return Naming.empty_tuple def calculate_constant_result(self): self.constant_result = tuple([ arg.constant_result for arg in self.args]) def compile_time_value(self, denv): values = self.compile_time_value_list(denv) try: return tuple(values) except Exception as e: self.compile_time_value_error(e) def generate_operation_code(self, code): if len(self.args) == 0: # result_code is Naming.empty_tuple return if self.is_partly_literal: # underlying tuple is const, but factor is not tuple_target = code.get_py_const(py_object_type, 'tuple', cleanup_level=2) const_code = code.get_cached_constants_writer() const_code.mark_pos(self.pos) self.generate_sequence_packing_code(const_code, tuple_target, plain=True) const_code.put_giveref(tuple_target) code.putln('%s = 
PyNumber_Multiply(%s, %s); %s' % ( self.result(), tuple_target, self.mult_factor.py_result(), code.error_goto_if_null(self.result(), self.pos) )) code.put_gotref(self.py_result()) elif self.is_literal: # non-empty cached tuple => result is global constant, # creation code goes into separate code writer self.result_code = code.get_py_const(py_object_type, 'tuple', cleanup_level=2) code = code.get_cached_constants_writer() code.mark_pos(self.pos) self.generate_sequence_packing_code(code) code.put_giveref(self.py_result()) else: self.type.entry.used = True self.generate_sequence_packing_code(code) class ListNode(SequenceNode): # List constructor. # obj_conversion_errors [PyrexError] used internally # orignial_args [ExprNode] used internally obj_conversion_errors = [] type = list_type in_module_scope = False gil_message = "Constructing Python list" def type_dependencies(self, env): return () def infer_type(self, env): # TOOD: Infer non-object list arrays. return list_type def analyse_expressions(self, env): for arg in self.args: if arg.is_starred: arg.starred_expr_allowed_here = True node = SequenceNode.analyse_expressions(self, env) return node.coerce_to_pyobject(env) def analyse_types(self, env): hold_errors() self.original_args = list(self.args) node = SequenceNode.analyse_types(self, env) node.obj_conversion_errors = held_errors() release_errors(ignore=True) if env.is_module_scope: self.in_module_scope = True node = node._create_merge_node_if_necessary(env) return node def coerce_to(self, dst_type, env): if dst_type.is_pyobject: for err in self.obj_conversion_errors: report_error(err) self.obj_conversion_errors = [] if not self.type.subtype_of(dst_type): error(self.pos, "Cannot coerce list to type '%s'" % dst_type) elif (dst_type.is_array or dst_type.is_ptr) and dst_type.base_type is not PyrexTypes.c_void_type: array_length = len(self.args) if self.mult_factor: if isinstance(self.mult_factor.constant_result, _py_int_types): if self.mult_factor.constant_result <= 0: error(self.pos, "Cannot coerce non-positively multiplied list to '%s'" % dst_type) else: array_length *= self.mult_factor.constant_result else: error(self.pos, "Cannot coerce dynamically multiplied list to '%s'" % dst_type) base_type = dst_type.base_type self.type = PyrexTypes.CArrayType(base_type, array_length) for i in range(len(self.original_args)): arg = self.args[i] if isinstance(arg, CoerceToPyTypeNode): arg = arg.arg self.args[i] = arg.coerce_to(base_type, env) elif dst_type.is_cpp_class: # TODO(robertwb): Avoid object conversion for vector/list/set. 
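            # For illustration (sketch, assuming the usual declarations): list
            # literals can target non-Python types, e.g. assigning [1, 2, 3]
            # to a "cdef int arr[3]" is handled by the array branch above,
            # while assigning it to a "cdef vector[int] v" goes through the
            # C++ branch below, which currently round-trips through a
            # temporary Python object (the TODO above).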
return TypecastNode(self.pos, operand=self, type=PyrexTypes.py_object_type).coerce_to(dst_type, env) elif self.mult_factor: error(self.pos, "Cannot coerce multiplied list to '%s'" % dst_type) elif dst_type.is_struct: if len(self.args) > len(dst_type.scope.var_entries): error(self.pos, "Too many members for '%s'" % dst_type) else: if len(self.args) < len(dst_type.scope.var_entries): warning(self.pos, "Too few members for '%s'" % dst_type, 1) for i, (arg, member) in enumerate(zip(self.original_args, dst_type.scope.var_entries)): if isinstance(arg, CoerceToPyTypeNode): arg = arg.arg self.args[i] = arg.coerce_to(member.type, env) self.type = dst_type elif dst_type.is_ctuple: return self.coerce_to_ctuple(dst_type, env) else: self.type = error_type error(self.pos, "Cannot coerce list to type '%s'" % dst_type) return self def as_list(self): # dummy for compatibility with TupleNode return self def as_tuple(self): t = TupleNode(self.pos, args=self.args, mult_factor=self.mult_factor) if isinstance(self.constant_result, list): t.constant_result = tuple(self.constant_result) return t def allocate_temp_result(self, code): if self.type.is_array and self.in_module_scope: self.temp_code = code.funcstate.allocate_temp( self.type, manage_ref=False, static=True) else: SequenceNode.allocate_temp_result(self, code) def release_temp_result(self, env): if self.type.is_array: # To be valid C++, we must allocate the memory on the stack # manually and be sure not to reuse it for something else. # Yes, this means that we leak a temp array variable. pass else: SequenceNode.release_temp_result(self, env) def calculate_constant_result(self): if self.mult_factor: raise ValueError() # may exceed the compile time memory self.constant_result = [ arg.constant_result for arg in self.args] def compile_time_value(self, denv): l = self.compile_time_value_list(denv) if self.mult_factor: l *= self.mult_factor.compile_time_value(denv) return l def generate_operation_code(self, code): if self.type.is_pyobject: for err in self.obj_conversion_errors: report_error(err) self.generate_sequence_packing_code(code) elif self.type.is_array: if self.mult_factor: code.putln("{") code.putln("Py_ssize_t %s;" % Naming.quick_temp_cname) code.putln("for ({i} = 0; {i} < {count}; {i}++) {{".format( i=Naming.quick_temp_cname, count=self.mult_factor.result())) offset = '+ (%d * %s)' % (len(self.args), Naming.quick_temp_cname) else: offset = '' for i, arg in enumerate(self.args): if arg.type.is_array: code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c")) code.putln("memcpy(&(%s[%s%s]), %s, sizeof(%s[0]));" % ( self.result(), i, offset, arg.result(), self.result() )) else: code.putln("%s[%s%s] = %s;" % ( self.result(), i, offset, arg.result())) if self.mult_factor: code.putln("}") code.putln("}") elif self.type.is_struct: for arg, member in zip(self.args, self.type.scope.var_entries): code.putln("%s.%s = %s;" % ( self.result(), member.cname, arg.result())) else: raise InternalError("List type never specified") class ScopedExprNode(ExprNode): # Abstract base class for ExprNodes that have their own local # scope, such as generator expressions. # # expr_scope Scope the inner scope of the expression subexprs = [] expr_scope = None # does this node really have a local scope, e.g. does it leak loop # variables or not? 
non-leaking Py3 behaviour is default, except # for list comprehensions where the behaviour differs in Py2 and # Py3 (set in Parsing.py based on parser context) has_local_scope = True def init_scope(self, outer_scope, expr_scope=None): if expr_scope is not None: self.expr_scope = expr_scope elif self.has_local_scope: self.expr_scope = Symtab.GeneratorExpressionScope(outer_scope) else: self.expr_scope = None def analyse_declarations(self, env): self.init_scope(env) def analyse_scoped_declarations(self, env): # this is called with the expr_scope as env pass def analyse_types(self, env): # no recursion here, the children will be analysed separately below return self def analyse_scoped_expressions(self, env): # this is called with the expr_scope as env return self def generate_evaluation_code(self, code): # set up local variables and free their references on exit generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code if not self.has_local_scope or not self.expr_scope.var_entries: # no local variables => delegate, done generate_inner_evaluation_code(code) return code.putln('{ /* enter inner scope */') py_entries = [] for entry in self.expr_scope.var_entries: if not entry.in_closure: code.put_var_declaration(entry) if entry.type.is_pyobject and entry.used: py_entries.append(entry) if not py_entries: # no local Python references => no cleanup required generate_inner_evaluation_code(code) code.putln('} /* exit inner scope */') return # must free all local Python references at each exit point old_loop_labels = tuple(code.new_loop_labels()) old_error_label = code.new_error_label() generate_inner_evaluation_code(code) # normal (non-error) exit for entry in py_entries: code.put_var_decref(entry) # error/loop body exit points exit_scope = code.new_label('exit_scope') code.put_goto(exit_scope) for label, old_label in ([(code.error_label, old_error_label)] + list(zip(code.get_loop_labels(), old_loop_labels))): if code.label_used(label): code.put_label(label) for entry in py_entries: code.put_var_decref(entry) code.put_goto(old_label) code.put_label(exit_scope) code.putln('} /* exit inner scope */') code.set_loop_labels(old_loop_labels) code.error_label = old_error_label class ComprehensionNode(ScopedExprNode): # A list/set/dict comprehension child_attrs = ["loop"] is_temp = True def infer_type(self, env): return self.type def analyse_declarations(self, env): self.append.target = self # this is used in the PyList_Append of the inner loop self.init_scope(env) def analyse_scoped_declarations(self, env): self.loop.analyse_declarations(env) def analyse_types(self, env): if not self.has_local_scope: self.loop = self.loop.analyse_expressions(env) return self def analyse_scoped_expressions(self, env): if self.has_local_scope: self.loop = self.loop.analyse_expressions(env) return self def may_be_none(self): return False def generate_result_code(self, code): self.generate_operation_code(code) def generate_operation_code(self, code): if self.type is Builtin.list_type: create_code = 'PyList_New(0)' elif self.type is Builtin.set_type: create_code = 'PySet_New(NULL)' elif self.type is Builtin.dict_type: create_code = 'PyDict_New()' else: raise InternalError("illegal type for comprehension: %s" % self.type) code.putln('%s = %s; %s' % ( self.result(), create_code, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) self.loop.generate_execution_code(code) def annotate(self, code): self.loop.annotate(code) class ComprehensionAppendNode(Node): # Need to be careful 
to avoid infinite recursion: # target must not be in child_attrs/subexprs child_attrs = ['expr'] target = None type = PyrexTypes.c_int_type def analyse_expressions(self, env): self.expr = self.expr.analyse_expressions(env) if not self.expr.type.is_pyobject: self.expr = self.expr.coerce_to_pyobject(env) return self def generate_execution_code(self, code): if self.target.type is list_type: code.globalstate.use_utility_code( UtilityCode.load_cached("ListCompAppend", "Optimize.c")) function = "__Pyx_ListComp_Append" elif self.target.type is set_type: function = "PySet_Add" else: raise InternalError( "Invalid type for comprehension node: %s" % self.target.type) self.expr.generate_evaluation_code(code) code.putln(code.error_goto_if("%s(%s, (PyObject*)%s)" % ( function, self.target.result(), self.expr.result() ), self.pos)) self.expr.generate_disposal_code(code) self.expr.free_temps(code) def generate_function_definitions(self, env, code): self.expr.generate_function_definitions(env, code) def annotate(self, code): self.expr.annotate(code) class DictComprehensionAppendNode(ComprehensionAppendNode): child_attrs = ['key_expr', 'value_expr'] def analyse_expressions(self, env): self.key_expr = self.key_expr.analyse_expressions(env) if not self.key_expr.type.is_pyobject: self.key_expr = self.key_expr.coerce_to_pyobject(env) self.value_expr = self.value_expr.analyse_expressions(env) if not self.value_expr.type.is_pyobject: self.value_expr = self.value_expr.coerce_to_pyobject(env) return self def generate_execution_code(self, code): self.key_expr.generate_evaluation_code(code) self.value_expr.generate_evaluation_code(code) code.putln(code.error_goto_if("PyDict_SetItem(%s, (PyObject*)%s, (PyObject*)%s)" % ( self.target.result(), self.key_expr.result(), self.value_expr.result() ), self.pos)) self.key_expr.generate_disposal_code(code) self.key_expr.free_temps(code) self.value_expr.generate_disposal_code(code) self.value_expr.free_temps(code) def generate_function_definitions(self, env, code): self.key_expr.generate_function_definitions(env, code) self.value_expr.generate_function_definitions(env, code) def annotate(self, code): self.key_expr.annotate(code) self.value_expr.annotate(code) class InlinedGeneratorExpressionNode(ExprNode): # An inlined generator expression for which the result is calculated # inside of the loop and returned as a single, first and only Generator # return value. # This will only be created by transforms when replacing safe builtin # calls on generator expressions. 
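    # For illustration (sketch): a call like
    #
    #     found = any(x == target for x in items)
    #
    # can be rewritten by the optimiser so the reduction runs inside the
    # generator body and only the final result is returned, which is why
    # may_be_none() below special-cases 'any', 'all' and 'sorted'.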
# # gen GeneratorExpressionNode the generator, not containing any YieldExprNodes # orig_func String the name of the builtin function this node replaces # target ExprNode or None a 'target' for a ComprehensionAppend node subexprs = ["gen"] orig_func = None target = None is_temp = True type = py_object_type def __init__(self, pos, gen, comprehension_type=None, **kwargs): gbody = gen.def_node.gbody gbody.is_inlined = True if comprehension_type is not None: assert comprehension_type in (list_type, set_type, dict_type), comprehension_type gbody.inlined_comprehension_type = comprehension_type kwargs.update( target=RawCNameExprNode(pos, comprehension_type, Naming.retval_cname), type=comprehension_type, ) super(InlinedGeneratorExpressionNode, self).__init__(pos, gen=gen, **kwargs) def may_be_none(self): return self.orig_func not in ('any', 'all', 'sorted') def infer_type(self, env): return self.type def analyse_types(self, env): self.gen = self.gen.analyse_expressions(env) return self def generate_result_code(self, code): code.putln("%s = __Pyx_Generator_Next(%s); %s" % ( self.result(), self.gen.result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) class MergedSequenceNode(ExprNode): """ Merge a sequence of iterables into a set/list/tuple. The target collection is determined by self.type, which must be set externally. args [ExprNode] """ subexprs = ['args'] is_temp = True gil_message = "Constructing Python collection" def __init__(self, pos, args, type): if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor: # construct a list directly from the first argument that we can then extend if args[0].type is not list_type: args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True) ExprNode.__init__(self, pos, args=args, type=type) def calculate_constant_result(self): result = [] for item in self.args: if item.is_sequence_constructor and item.mult_factor: if item.mult_factor.constant_result <= 0: continue # otherwise, adding each item once should be enough if item.is_set_literal or item.is_sequence_constructor: # process items in order items = (arg.constant_result for arg in item.args) else: items = item.constant_result result.extend(items) if self.type is set_type: result = set(result) elif self.type is tuple_type: result = tuple(result) else: assert self.type is list_type self.constant_result = result def compile_time_value(self, denv): result = [] for item in self.args: if item.is_sequence_constructor and item.mult_factor: if item.mult_factor.compile_time_value(denv) <= 0: continue if item.is_set_literal or item.is_sequence_constructor: # process items in order items = (arg.compile_time_value(denv) for arg in item.args) else: items = item.compile_time_value(denv) result.extend(items) if self.type is set_type: try: result = set(result) except Exception as e: self.compile_time_value_error(e) elif self.type is tuple_type: result = tuple(result) else: assert self.type is list_type return result def type_dependencies(self, env): return () def infer_type(self, env): return self.type def analyse_types(self, env): args = [ arg.analyse_types(env).coerce_to_pyobject(env).as_none_safe_node( # FIXME: CPython's error message starts with the runtime function name 'argument after * must be an iterable, not NoneType') for arg in self.args ] if len(args) == 1 and args[0].type is self.type: # strip this intermediate node and use the bare collection return args[0] assert self.type in (set_type, list_type, tuple_type) self.args = args return self def 
may_be_none(self): return False def generate_evaluation_code(self, code): code.mark_pos(self.pos) self.allocate_temp_result(code) is_set = self.type is set_type args = iter(self.args) item = next(args) item.generate_evaluation_code(code) if (is_set and item.is_set_literal or not is_set and item.is_sequence_constructor and item.type is list_type): code.putln("%s = %s;" % (self.result(), item.py_result())) item.generate_post_assignment_code(code) else: code.putln("%s = %s(%s); %s" % ( self.result(), 'PySet_New' if is_set else 'PySequence_List', item.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) item.generate_disposal_code(code) item.free_temps(code) helpers = set() if is_set: add_func = "PySet_Add" extend_func = "__Pyx_PySet_Update" else: add_func = "__Pyx_ListComp_Append" extend_func = "__Pyx_PyList_Extend" for item in args: if (is_set and (item.is_set_literal or item.is_sequence_constructor) or (item.is_sequence_constructor and not item.mult_factor)): if not is_set and item.args: helpers.add(("ListCompAppend", "Optimize.c")) for arg in item.args: arg.generate_evaluation_code(code) code.put_error_if_neg(arg.pos, "%s(%s, %s)" % ( add_func, self.result(), arg.py_result())) arg.generate_disposal_code(code) arg.free_temps(code) continue if is_set: helpers.add(("PySet_Update", "Builtins.c")) else: helpers.add(("ListExtend", "Optimize.c")) item.generate_evaluation_code(code) code.put_error_if_neg(item.pos, "%s(%s, %s)" % ( extend_func, self.result(), item.py_result())) item.generate_disposal_code(code) item.free_temps(code) if self.type is tuple_type: code.putln("{") code.putln("PyObject *%s = PyList_AsTuple(%s);" % ( Naming.quick_temp_cname, self.result())) code.put_decref(self.result(), py_object_type) code.putln("%s = %s; %s" % ( self.result(), Naming.quick_temp_cname, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) code.putln("}") for helper in sorted(helpers): code.globalstate.use_utility_code(UtilityCode.load_cached(*helper)) def annotate(self, code): for item in self.args: item.annotate(code) class SetNode(ExprNode): """ Set constructor. """ subexprs = ['args'] type = set_type is_set_literal = True gil_message = "Constructing Python set" def analyse_types(self, env): for i in range(len(self.args)): arg = self.args[i] arg = arg.analyse_types(env) self.args[i] = arg.coerce_to_pyobject(env) self.type = set_type self.is_temp = 1 return self def may_be_none(self): return False def calculate_constant_result(self): self.constant_result = set([arg.constant_result for arg in self.args]) def compile_time_value(self, denv): values = [arg.compile_time_value(denv) for arg in self.args] try: return set(values) except Exception as e: self.compile_time_value_error(e) def generate_evaluation_code(self, code): for arg in self.args: arg.generate_evaluation_code(code) self.allocate_temp_result(code) code.putln( "%s = PySet_New(0); %s" % ( self.result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) for arg in self.args: code.put_error_if_neg( self.pos, "PySet_Add(%s, %s)" % (self.result(), arg.py_result())) arg.generate_disposal_code(code) arg.free_temps(code) class DictNode(ExprNode): # Dictionary constructor. 
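    #
    # Handles plain dict literals like ``{'a': 1, 'b': 2}`` (illustrative
    # keys) as well as the coercion of a dict literal to a C struct or
    # union target value, see coerce_to() below.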
    #
    #  key_value_pairs     [DictItemNode]
    #  exclude_null_values [boolean]        Do not add NULL values to dict
    #
    #  obj_conversion_errors [PyrexError]   used internally

    subexprs = ['key_value_pairs']
    is_temp = 1
    exclude_null_values = False
    type = dict_type
    is_dict_literal = True
    reject_duplicates = False

    obj_conversion_errors = []

    @classmethod
    def from_pairs(cls, pos, pairs):
        return cls(pos, key_value_pairs=[
            DictItemNode(pos, key=k, value=v) for k, v in pairs])

    def calculate_constant_result(self):
        self.constant_result = dict([
            item.constant_result for item in self.key_value_pairs])

    def compile_time_value(self, denv):
        pairs = [(item.key.compile_time_value(denv), item.value.compile_time_value(denv))
                 for item in self.key_value_pairs]
        try:
            return dict(pairs)
        except Exception as e:
            self.compile_time_value_error(e)

    def type_dependencies(self, env):
        return ()

    def infer_type(self, env):
        # TODO: Infer struct constructors.
        return dict_type

    def analyse_types(self, env):
        hold_errors()
        self.key_value_pairs = [
            item.analyse_types(env)
            for item in self.key_value_pairs
        ]
        self.obj_conversion_errors = held_errors()
        release_errors(ignore=True)
        return self

    def may_be_none(self):
        return False

    def coerce_to(self, dst_type, env):
        if dst_type.is_pyobject:
            self.release_errors()
            if self.type.is_struct_or_union:
                if not dict_type.subtype_of(dst_type):
                    error(self.pos, "Cannot interpret struct as non-dict type '%s'" % dst_type)
                return DictNode(self.pos, key_value_pairs=[
                    DictItemNode(item.pos, key=item.key.coerce_to_pyobject(env),
                                 value=item.value.coerce_to_pyobject(env))
                    for item in self.key_value_pairs])
            if not self.type.subtype_of(dst_type):
                error(self.pos, "Cannot interpret dict as type '%s'" % dst_type)
        elif dst_type.is_struct_or_union:
            self.type = dst_type
            if not dst_type.is_struct and len(self.key_value_pairs) != 1:
                error(self.pos, "Exactly one field must be specified to convert to union '%s'" % dst_type)
            elif dst_type.is_struct and len(self.key_value_pairs) < len(dst_type.scope.var_entries):
                warning(self.pos, "Not all members given for struct '%s'" % dst_type, 1)
            for item in self.key_value_pairs:
                if isinstance(item.key, CoerceToPyTypeNode):
                    item.key = item.key.arg
                if not item.key.is_string_literal:
                    error(item.key.pos, "Invalid struct field identifier")
                    item.key = StringNode(item.key.pos, value="<error>")
                else:
                    key = str(item.key.value)  # converts string literals to unicode in Py3
                    member = dst_type.scope.lookup_here(key)
                    if not member:
                        error(item.key.pos, "struct '%s' has no field '%s'" % (dst_type, key))
                    else:
                        value = item.value
                        if isinstance(value, CoerceToPyTypeNode):
                            value = value.arg
                        item.value = value.coerce_to(member.type, env)
        else:
            self.type = error_type
            error(self.pos, "Cannot interpret dict as type '%s'" % dst_type)
        return self

    def release_errors(self):
        for err in self.obj_conversion_errors:
            report_error(err)
        self.obj_conversion_errors = []

    gil_message = "Constructing Python dict"

    def generate_evaluation_code(self, code):
        # Custom method used here because key-value
        # pairs are evaluated and used one at a time.
code.mark_pos(self.pos) self.allocate_temp_result(code) is_dict = self.type.is_pyobject if is_dict: self.release_errors() code.putln( "%s = PyDict_New(); %s" % ( self.result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) keys_seen = set() key_type = None needs_error_helper = False for item in self.key_value_pairs: item.generate_evaluation_code(code) if is_dict: if self.exclude_null_values: code.putln('if (%s) {' % item.value.py_result()) key = item.key if self.reject_duplicates: if keys_seen is not None: # avoid runtime 'in' checks for literals that we can do at compile time if not key.is_string_literal: keys_seen = None elif key.value in keys_seen: # FIXME: this could be a compile time error, at least in Cython code keys_seen = None elif key_type is not type(key.value): if key_type is None: key_type = type(key.value) keys_seen.add(key.value) else: # different types => may not be able to compare at compile time keys_seen = None else: keys_seen.add(key.value) if keys_seen is None: code.putln('if (unlikely(PyDict_Contains(%s, %s))) {' % ( self.result(), key.py_result())) # currently only used in function calls needs_error_helper = True code.putln('__Pyx_RaiseDoubleKeywordsError("function", %s); %s' % ( key.py_result(), code.error_goto(item.pos))) code.putln("} else {") code.put_error_if_neg(self.pos, "PyDict_SetItem(%s, %s, %s)" % ( self.result(), item.key.py_result(), item.value.py_result())) if self.reject_duplicates and keys_seen is None: code.putln('}') if self.exclude_null_values: code.putln('}') else: code.putln("%s.%s = %s;" % ( self.result(), item.key.value, item.value.result())) item.generate_disposal_code(code) item.free_temps(code) if needs_error_helper: code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseDoubleKeywords", "FunctionArguments.c")) def annotate(self, code): for item in self.key_value_pairs: item.annotate(code) class DictItemNode(ExprNode): # Represents a single item in a DictNode # # key ExprNode # value ExprNode subexprs = ['key', 'value'] nogil_check = None # Parent DictNode takes care of it def calculate_constant_result(self): self.constant_result = ( self.key.constant_result, self.value.constant_result) def analyse_types(self, env): self.key = self.key.analyse_types(env) self.value = self.value.analyse_types(env) self.key = self.key.coerce_to_pyobject(env) self.value = self.value.coerce_to_pyobject(env) return self def generate_evaluation_code(self, code): self.key.generate_evaluation_code(code) self.value.generate_evaluation_code(code) def generate_disposal_code(self, code): self.key.generate_disposal_code(code) self.value.generate_disposal_code(code) def free_temps(self, code): self.key.free_temps(code) self.value.free_temps(code) def __iter__(self): return iter([self.key, self.value]) class SortedDictKeysNode(ExprNode): # build sorted list of dict keys, e.g. 
for dir() subexprs = ['arg'] is_temp = True def __init__(self, arg): ExprNode.__init__(self, arg.pos, arg=arg) self.type = Builtin.list_type def analyse_types(self, env): arg = self.arg.analyse_types(env) if arg.type is Builtin.dict_type: arg = arg.as_none_safe_node( "'NoneType' object is not iterable") self.arg = arg return self def may_be_none(self): return False def generate_result_code(self, code): dict_result = self.arg.py_result() if self.arg.type is Builtin.dict_type: code.putln('%s = PyDict_Keys(%s); %s' % ( self.result(), dict_result, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) else: # originally used PyMapping_Keys() here, but that may return a tuple code.globalstate.use_utility_code(UtilityCode.load_cached( 'PyObjectCallMethod0', 'ObjectHandling.c')) keys_cname = code.intern_identifier(StringEncoding.EncodedString("keys")) code.putln('%s = __Pyx_PyObject_CallMethod0(%s, %s); %s' % ( self.result(), dict_result, keys_cname, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) code.putln("if (unlikely(!PyList_Check(%s))) {" % self.result()) code.put_decref_set(self.result(), "PySequence_List(%s)" % self.result()) code.putln(code.error_goto_if_null(self.result(), self.pos)) code.put_gotref(self.py_result()) code.putln("}") code.put_error_if_neg( self.pos, 'PyList_Sort(%s)' % self.py_result()) class ModuleNameMixin(object): def get_py_mod_name(self, code): return code.get_py_string_const( self.module_name, identifier=True) def get_py_qualified_name(self, code): return code.get_py_string_const( self.qualname, identifier=True) class ClassNode(ExprNode, ModuleNameMixin): # Helper class used in the implementation of Python # class definitions. Constructs a class object given # a name, tuple of bases and class dictionary. # # name EncodedString Name of the class # bases ExprNode Base class tuple # dict ExprNode Class dict (not owned by this node) # doc ExprNode or None Doc string # module_name EncodedString Name of defining module subexprs = ['bases', 'doc'] type = py_object_type is_temp = True def infer_type(self, env): # TODO: could return 'type' in some cases return py_object_type def analyse_types(self, env): self.bases = self.bases.analyse_types(env) if self.doc: self.doc = self.doc.analyse_types(env) self.doc = self.doc.coerce_to_pyobject(env) env.use_utility_code(UtilityCode.load_cached("CreateClass", "ObjectHandling.c")) return self def may_be_none(self): return True gil_message = "Constructing Python class" def generate_result_code(self, code): cname = code.intern_identifier(self.name) if self.doc: code.put_error_if_neg(self.pos, 'PyDict_SetItem(%s, %s, %s)' % ( self.dict.py_result(), code.intern_identifier( StringEncoding.EncodedString("__doc__")), self.doc.py_result())) py_mod_name = self.get_py_mod_name(code) qualname = self.get_py_qualified_name(code) code.putln( '%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s' % ( self.result(), self.bases.py_result(), self.dict.py_result(), cname, qualname, py_mod_name, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class Py3ClassNode(ExprNode): # Helper class used in the implementation of Python3+ # class definitions. Constructs a class object given # a name, tuple of bases and class dictionary. 
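    #
    # Implements Py3-style class statements such as
    # ``class C(Base, metaclass=Meta, **kwds): ...`` (names are only
    # illustrative); the metaclass and keyword arguments are resolved by the
    # PyClassMetaclassNode / PyClassNamespaceNode helpers further below.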
# # name EncodedString Name of the class # dict ExprNode Class dict (not owned by this node) # module_name EncodedString Name of defining module # calculate_metaclass bool should call CalculateMetaclass() # allow_py2_metaclass bool should look for Py2 metaclass subexprs = [] type = py_object_type is_temp = True def infer_type(self, env): # TODO: could return 'type' in some cases return py_object_type def analyse_types(self, env): return self def may_be_none(self): return True gil_message = "Constructing Python class" def generate_result_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c")) cname = code.intern_identifier(self.name) if self.mkw: mkw = self.mkw.py_result() else: mkw = 'NULL' if self.metaclass: metaclass = self.metaclass.py_result() else: metaclass = "((PyObject*)&__Pyx_DefaultClassType)" code.putln( '%s = __Pyx_Py3ClassCreate(%s, %s, %s, %s, %s, %d, %d); %s' % ( self.result(), metaclass, cname, self.bases.py_result(), self.dict.py_result(), mkw, self.calculate_metaclass, self.allow_py2_metaclass, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class PyClassMetaclassNode(ExprNode): # Helper class holds Python3 metaclass object # # bases ExprNode Base class tuple (not owned by this node) # mkw ExprNode Class keyword arguments (not owned by this node) subexprs = [] def analyse_types(self, env): self.type = py_object_type self.is_temp = True return self def may_be_none(self): return True def generate_result_code(self, code): if self.mkw: code.globalstate.use_utility_code( UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c")) call = "__Pyx_Py3MetaclassGet(%s, %s)" % ( self.bases.result(), self.mkw.result()) else: code.globalstate.use_utility_code( UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c")) call = "__Pyx_CalculateMetaclass(NULL, %s)" % ( self.bases.result()) code.putln( "%s = %s; %s" % ( self.result(), call, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class PyClassNamespaceNode(ExprNode, ModuleNameMixin): # Helper class holds Python3 namespace object # # All this are not owned by this node # metaclass ExprNode Metaclass object # bases ExprNode Base class tuple # mkw ExprNode Class keyword arguments # doc ExprNode or None Doc string (owned) subexprs = ['doc'] def analyse_types(self, env): if self.doc: self.doc = self.doc.analyse_types(env) self.doc = self.doc.coerce_to_pyobject(env) self.type = py_object_type self.is_temp = 1 return self def may_be_none(self): return True def generate_result_code(self, code): cname = code.intern_identifier(self.name) py_mod_name = self.get_py_mod_name(code) qualname = self.get_py_qualified_name(code) if self.doc: doc_code = self.doc.result() else: doc_code = '(PyObject *) NULL' if self.mkw: mkw = self.mkw.py_result() else: mkw = '(PyObject *) NULL' if self.metaclass: metaclass = self.metaclass.py_result() else: metaclass = "(PyObject *) NULL" code.putln( "%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s" % ( self.result(), metaclass, self.bases.result(), cname, qualname, mkw, py_mod_name, doc_code, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class ClassCellInjectorNode(ExprNode): # Initialize CyFunction.func_classobj is_temp = True type = py_object_type subexprs = [] is_active = False def analyse_expressions(self, env): if self.is_active: env.use_utility_code( UtilityCode.load_cached("CyFunctionClassCell", 
"CythonFunction.c")) return self def generate_evaluation_code(self, code): if self.is_active: self.allocate_temp_result(code) code.putln( '%s = PyList_New(0); %s' % ( self.result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) def generate_injection_code(self, code, classobj_cname): if self.is_active: code.put_error_if_neg(self.pos, '__Pyx_CyFunction_InitClassCell(%s, %s)' % ( self.result(), classobj_cname)) class ClassCellNode(ExprNode): # Class Cell for noargs super() subexprs = [] is_temp = True is_generator = False type = py_object_type def analyse_types(self, env): return self def generate_result_code(self, code): if not self.is_generator: code.putln('%s = __Pyx_CyFunction_GetClassObj(%s);' % ( self.result(), Naming.self_cname)) else: code.putln('%s = %s->classobj;' % ( self.result(), Naming.generator_cname)) code.putln( 'if (!%s) { PyErr_SetString(PyExc_SystemError, ' '"super(): empty __class__ cell"); %s }' % ( self.result(), code.error_goto(self.pos))) code.put_incref(self.result(), py_object_type) class BoundMethodNode(ExprNode): # Helper class used in the implementation of Python # class definitions. Constructs an bound method # object from a class and a function. # # function ExprNode Function object # self_object ExprNode self object subexprs = ['function'] def analyse_types(self, env): self.function = self.function.analyse_types(env) self.type = py_object_type self.is_temp = 1 return self gil_message = "Constructing a bound method" def generate_result_code(self, code): code.putln( "%s = __Pyx_PyMethod_New(%s, %s, (PyObject*)%s->ob_type); %s" % ( self.result(), self.function.py_result(), self.self_object.py_result(), self.self_object.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class UnboundMethodNode(ExprNode): # Helper class used in the implementation of Python # class definitions. Constructs an unbound method # object from a class and a function. # # function ExprNode Function object type = py_object_type is_temp = 1 subexprs = ['function'] def analyse_types(self, env): self.function = self.function.analyse_types(env) return self def may_be_none(self): return False gil_message = "Constructing an unbound method" def generate_result_code(self, code): class_cname = code.pyclass_stack[-1].classobj.result() code.putln( "%s = __Pyx_PyMethod_New(%s, 0, %s); %s" % ( self.result(), self.function.py_result(), class_cname, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class PyCFunctionNode(ExprNode, ModuleNameMixin): # Helper class used in the implementation of Python # functions. Constructs a PyCFunction object # from a PyMethodDef struct. 
# # pymethdef_cname string PyMethodDef structure # self_object ExprNode or None # binding bool # def_node DefNode the Python function node # module_name EncodedString Name of defining module # code_object CodeObjectNode the PyCodeObject creator node subexprs = ['code_object', 'defaults_tuple', 'defaults_kwdict', 'annotations_dict'] self_object = None code_object = None binding = False def_node = None defaults = None defaults_struct = None defaults_pyobjects = 0 defaults_tuple = None defaults_kwdict = None annotations_dict = None type = py_object_type is_temp = 1 specialized_cpdefs = None is_specialization = False @classmethod def from_defnode(cls, node, binding): return cls(node.pos, def_node=node, pymethdef_cname=node.entry.pymethdef_cname, binding=binding or node.specialized_cpdefs, specialized_cpdefs=node.specialized_cpdefs, code_object=CodeObjectNode(node)) def analyse_types(self, env): if self.binding: self.analyse_default_args(env) return self def analyse_default_args(self, env): """ Handle non-literal function's default arguments. """ nonliteral_objects = [] nonliteral_other = [] default_args = [] default_kwargs = [] annotations = [] # For global cpdef functions and def/cpdef methods in cdef classes, we must use global constants # for default arguments to avoid the dependency on the CyFunction object as 'self' argument # in the underlying C function. Basically, cpdef functions/methods are static C functions, # so their optional arguments must be static, too. # TODO: change CyFunction implementation to pass both function object and owning object for method calls must_use_constants = env.is_c_class_scope or (self.def_node.is_wrapper and env.is_module_scope) for arg in self.def_node.args: if arg.default and not must_use_constants: if not arg.default.is_literal: arg.is_dynamic = True if arg.type.is_pyobject: nonliteral_objects.append(arg) else: nonliteral_other.append(arg) else: arg.default = DefaultLiteralArgNode(arg.pos, arg.default) if arg.kw_only: default_kwargs.append(arg) else: default_args.append(arg) if arg.annotation: arg.annotation = arg.annotation.analyse_types(env) if not arg.annotation.type.is_pyobject: arg.annotation = arg.annotation.coerce_to_pyobject(env) annotations.append((arg.pos, arg.name, arg.annotation)) for arg in (self.def_node.star_arg, self.def_node.starstar_arg): if arg and arg.annotation: arg.annotation = arg.annotation.analyse_types(env) if not arg.annotation.type.is_pyobject: arg.annotation = arg.annotation.coerce_to_pyobject(env) annotations.append((arg.pos, arg.name, arg.annotation)) if self.def_node.return_type_annotation: annotations.append((self.def_node.return_type_annotation.pos, StringEncoding.EncodedString("return"), self.def_node.return_type_annotation)) if nonliteral_objects or nonliteral_other: module_scope = env.global_scope() cname = module_scope.next_id(Naming.defaults_struct_prefix) scope = Symtab.StructOrUnionScope(cname) self.defaults = [] for arg in nonliteral_objects: entry = scope.declare_var(arg.name, arg.type, None, Naming.arg_prefix + arg.name, allow_pyobject=True) self.defaults.append((arg, entry)) for arg in nonliteral_other: entry = scope.declare_var(arg.name, arg.type, None, Naming.arg_prefix + arg.name, allow_pyobject=False) self.defaults.append((arg, entry)) entry = module_scope.declare_struct_or_union( None, 'struct', scope, 1, None, cname=cname) self.defaults_struct = scope self.defaults_pyobjects = len(nonliteral_objects) for arg, entry in self.defaults: arg.default_value = '%s->%s' % ( Naming.dynamic_args_cname, 
entry.cname) self.def_node.defaults_struct = self.defaults_struct.name if default_args or default_kwargs: if self.defaults_struct is None: if default_args: defaults_tuple = TupleNode(self.pos, args=[ arg.default for arg in default_args]) self.defaults_tuple = defaults_tuple.analyse_types(env).coerce_to_pyobject(env) if default_kwargs: defaults_kwdict = DictNode(self.pos, key_value_pairs=[ DictItemNode( arg.pos, key=IdentifierStringNode(arg.pos, value=arg.name), value=arg.default) for arg in default_kwargs]) self.defaults_kwdict = defaults_kwdict.analyse_types(env) else: if default_args: defaults_tuple = DefaultsTupleNode( self.pos, default_args, self.defaults_struct) else: defaults_tuple = NoneNode(self.pos) if default_kwargs: defaults_kwdict = DefaultsKwDictNode( self.pos, default_kwargs, self.defaults_struct) else: defaults_kwdict = NoneNode(self.pos) defaults_getter = Nodes.DefNode( self.pos, args=[], star_arg=None, starstar_arg=None, body=Nodes.ReturnStatNode( self.pos, return_type=py_object_type, value=TupleNode( self.pos, args=[defaults_tuple, defaults_kwdict])), decorators=None, name=StringEncoding.EncodedString("__defaults__")) # defaults getter must never live in class scopes, it's always a module function module_scope = env.global_scope() defaults_getter.analyse_declarations(module_scope) defaults_getter = defaults_getter.analyse_expressions(module_scope) defaults_getter.body = defaults_getter.body.analyse_expressions( defaults_getter.local_scope) defaults_getter.py_wrapper_required = False defaults_getter.pymethdef_required = False self.def_node.defaults_getter = defaults_getter if annotations: annotations_dict = DictNode(self.pos, key_value_pairs=[ DictItemNode( pos, key=IdentifierStringNode(pos, value=name), value=value) for pos, name, value in annotations]) self.annotations_dict = annotations_dict.analyse_types(env) def may_be_none(self): return False gil_message = "Constructing Python function" def self_result_code(self): if self.self_object is None: self_result = "NULL" else: self_result = self.self_object.py_result() return self_result def generate_result_code(self, code): if self.binding: self.generate_cyfunction_code(code) else: self.generate_pycfunction_code(code) def generate_pycfunction_code(self, code): py_mod_name = self.get_py_mod_name(code) code.putln( '%s = PyCFunction_NewEx(&%s, %s, %s); %s' % ( self.result(), self.pymethdef_cname, self.self_result_code(), py_mod_name, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) def generate_cyfunction_code(self, code): if self.specialized_cpdefs: def_node = self.specialized_cpdefs[0] else: def_node = self.def_node if self.specialized_cpdefs or self.is_specialization: code.globalstate.use_utility_code( UtilityCode.load_cached("FusedFunction", "CythonFunction.c")) constructor = "__pyx_FusedFunction_NewEx" else: code.globalstate.use_utility_code( UtilityCode.load_cached("CythonFunction", "CythonFunction.c")) constructor = "__Pyx_CyFunction_NewEx" if self.code_object: code_object_result = self.code_object.py_result() else: code_object_result = 'NULL' flags = [] if def_node.is_staticmethod: flags.append('__Pyx_CYFUNCTION_STATICMETHOD') elif def_node.is_classmethod: flags.append('__Pyx_CYFUNCTION_CLASSMETHOD') if def_node.local_scope.parent_scope.is_c_class_scope and not def_node.entry.is_anonymous: flags.append('__Pyx_CYFUNCTION_CCLASS') if flags: flags = ' | '.join(flags) else: flags = '0' code.putln( '%s = %s(&%s, %s, %s, %s, %s, %s, %s); %s' % ( self.result(), constructor, 
self.pymethdef_cname, flags, self.get_py_qualified_name(code), self.self_result_code(), self.get_py_mod_name(code), Naming.moddict_cname, code_object_result, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) if def_node.requires_classobj: assert code.pyclass_stack, "pyclass_stack is empty" class_node = code.pyclass_stack[-1] code.put_incref(self.py_result(), py_object_type) code.putln( 'PyList_Append(%s, %s);' % ( class_node.class_cell.result(), self.result())) code.put_giveref(self.py_result()) if self.defaults: code.putln( 'if (!__Pyx_CyFunction_InitDefaults(%s, sizeof(%s), %d)) %s' % ( self.result(), self.defaults_struct.name, self.defaults_pyobjects, code.error_goto(self.pos))) defaults = '__Pyx_CyFunction_Defaults(%s, %s)' % ( self.defaults_struct.name, self.result()) for arg, entry in self.defaults: arg.generate_assignment_code(code, target='%s->%s' % ( defaults, entry.cname)) if self.defaults_tuple: code.putln('__Pyx_CyFunction_SetDefaultsTuple(%s, %s);' % ( self.result(), self.defaults_tuple.py_result())) if self.defaults_kwdict: code.putln('__Pyx_CyFunction_SetDefaultsKwDict(%s, %s);' % ( self.result(), self.defaults_kwdict.py_result())) if def_node.defaults_getter: code.putln('__Pyx_CyFunction_SetDefaultsGetter(%s, %s);' % ( self.result(), def_node.defaults_getter.entry.pyfunc_cname)) if self.annotations_dict: code.putln('__Pyx_CyFunction_SetAnnotationsDict(%s, %s);' % ( self.result(), self.annotations_dict.py_result())) class InnerFunctionNode(PyCFunctionNode): # Special PyCFunctionNode that depends on a closure class # binding = True needs_self_code = True def self_result_code(self): if self.needs_self_code: return "((PyObject*)%s)" % Naming.cur_scope_cname return "NULL" class CodeObjectNode(ExprNode): # Create a PyCodeObject for a CyFunction instance. # # def_node DefNode the Python function node # varnames TupleNode a tuple with all local variable names subexprs = ['varnames'] is_temp = False result_code = None def __init__(self, def_node): ExprNode.__init__(self, def_node.pos, def_node=def_node) args = list(def_node.args) # if we have args/kwargs, then the first two in var_entries are those local_vars = [arg for arg in def_node.local_scope.var_entries if arg.name] self.varnames = TupleNode( def_node.pos, args=[IdentifierStringNode(arg.pos, value=arg.name) for arg in args + local_vars], is_temp=0, is_literal=1) def may_be_none(self): return False def calculate_result_code(self, code=None): if self.result_code is None: self.result_code = code.get_py_const(py_object_type, 'codeobj', cleanup_level=2) return self.result_code def generate_result_code(self, code): if self.result_code is None: self.result_code = code.get_py_const(py_object_type, 'codeobj', cleanup_level=2) code = code.get_cached_constants_writer() code.mark_pos(self.pos) func = self.def_node func_name = code.get_py_string_const( func.name, identifier=True, is_str=False, unicode_value=func.name) # FIXME: better way to get the module file path at module init time? Encoding to use? 
file_path = StringEncoding.bytes_literal(func.pos[0].get_filenametable_entry().encode('utf8'), 'utf8') file_path_const = code.get_py_string_const(file_path, identifier=False, is_str=True) flags = [] if self.def_node.star_arg: flags.append('CO_VARARGS') if self.def_node.starstar_arg: flags.append('CO_VARKEYWORDS') code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % ( self.result_code, len(func.args) - func.num_kwonly_args, # argcount func.num_kwonly_args, # kwonlyargcount (Py3 only) len(self.varnames.args), # nlocals '|'.join(flags) or '0', # flags Naming.empty_bytes, # code Naming.empty_tuple, # consts Naming.empty_tuple, # names (FIXME) self.varnames.result(), # varnames Naming.empty_tuple, # freevars (FIXME) Naming.empty_tuple, # cellvars (FIXME) file_path_const, # filename func_name, # name self.pos[1], # firstlineno Naming.empty_bytes, # lnotab code.error_goto_if_null(self.result_code, self.pos), )) class DefaultLiteralArgNode(ExprNode): # CyFunction's literal argument default value # # Evaluate literal only once. subexprs = [] is_literal = True is_temp = False def __init__(self, pos, arg): super(DefaultLiteralArgNode, self).__init__(pos) self.arg = arg self.type = self.arg.type self.evaluated = False def analyse_types(self, env): return self def generate_result_code(self, code): pass def generate_evaluation_code(self, code): if not self.evaluated: self.arg.generate_evaluation_code(code) self.evaluated = True def result(self): return self.type.cast_code(self.arg.result()) class DefaultNonLiteralArgNode(ExprNode): # CyFunction's non-literal argument default value subexprs = [] def __init__(self, pos, arg, defaults_struct): super(DefaultNonLiteralArgNode, self).__init__(pos) self.arg = arg self.defaults_struct = defaults_struct def analyse_types(self, env): self.type = self.arg.type self.is_temp = False return self def generate_result_code(self, code): pass def result(self): return '__Pyx_CyFunction_Defaults(%s, %s)->%s' % ( self.defaults_struct.name, Naming.self_cname, self.defaults_struct.lookup(self.arg.name).cname) class DefaultsTupleNode(TupleNode): # CyFunction's __defaults__ tuple def __init__(self, pos, defaults, defaults_struct): args = [] for arg in defaults: if not arg.default.is_literal: arg = DefaultNonLiteralArgNode(pos, arg, defaults_struct) else: arg = arg.default args.append(arg) super(DefaultsTupleNode, self).__init__(pos, args=args) def analyse_types(self, env, skip_children=False): return super(DefaultsTupleNode, self).analyse_types(env, skip_children).coerce_to_pyobject(env) class DefaultsKwDictNode(DictNode): # CyFunction's __kwdefaults__ dict def __init__(self, pos, defaults, defaults_struct): items = [] for arg in defaults: name = IdentifierStringNode(arg.pos, value=arg.name) if not arg.default.is_literal: arg = DefaultNonLiteralArgNode(pos, arg, defaults_struct) else: arg = arg.default items.append(DictItemNode(arg.pos, key=name, value=arg)) super(DefaultsKwDictNode, self).__init__(pos, key_value_pairs=items) class LambdaNode(InnerFunctionNode): # Lambda expression node (only used as a function reference) # # args [CArgDeclNode] formal arguments # star_arg PyArgDeclNode or None * argument # starstar_arg PyArgDeclNode or None ** argument # lambda_name string a module-globally unique lambda name # result_expr ExprNode # def_node DefNode the underlying function 'def' node child_attrs = ['def_node'] name = StringEncoding.EncodedString('') def analyse_declarations(self, env): self.lambda_name = 
self.def_node.lambda_name = env.next_id('lambda') self.def_node.no_assignment_synthesis = True self.def_node.pymethdef_required = True self.def_node.analyse_declarations(env) self.def_node.is_cyfunction = True self.pymethdef_cname = self.def_node.entry.pymethdef_cname env.add_lambda_def(self.def_node) def analyse_types(self, env): self.def_node = self.def_node.analyse_expressions(env) return super(LambdaNode, self).analyse_types(env) def generate_result_code(self, code): self.def_node.generate_execution_code(code) super(LambdaNode, self).generate_result_code(code) class GeneratorExpressionNode(LambdaNode): # A generator expression, e.g. (i for i in range(10)) # # Result is a generator. # # loop ForStatNode the for-loop, containing a YieldExprNode # def_node DefNode the underlying generator 'def' node name = StringEncoding.EncodedString('genexpr') binding = False def analyse_declarations(self, env): self.genexpr_name = env.next_id('genexpr') super(GeneratorExpressionNode, self).analyse_declarations(env) # No pymethdef required self.def_node.pymethdef_required = False self.def_node.py_wrapper_required = False self.def_node.is_cyfunction = False # Force genexpr signature self.def_node.entry.signature = TypeSlots.pyfunction_noargs def generate_result_code(self, code): code.putln( '%s = %s(%s); %s' % ( self.result(), self.def_node.entry.pyfunc_cname, self.self_result_code(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class YieldExprNode(ExprNode): # Yield expression node # # arg ExprNode the value to return from the generator # label_num integer yield label number # is_yield_from boolean is a YieldFromExprNode to delegate to another generator subexprs = ['arg'] type = py_object_type label_num = 0 is_yield_from = False is_await = False expr_keyword = 'yield' def analyse_types(self, env): if not self.label_num: error(self.pos, "'%s' not supported here" % self.expr_keyword) self.is_temp = 1 if self.arg is not None: self.arg = self.arg.analyse_types(env) if not self.arg.type.is_pyobject: self.coerce_yield_argument(env) return self def coerce_yield_argument(self, env): self.arg = self.arg.coerce_to_pyobject(env) def generate_evaluation_code(self, code): if self.arg: self.arg.generate_evaluation_code(code) self.arg.make_owned_reference(code) code.putln( "%s = %s;" % ( Naming.retval_cname, self.arg.result_as(py_object_type))) self.arg.generate_post_assignment_code(code) self.arg.free_temps(code) else: code.put_init_to_py_none(Naming.retval_cname, py_object_type) self.generate_yield_code(code) def generate_yield_code(self, code): """ Generate the code to return the argument in 'Naming.retval_cname' and to continue at the yield label. 
""" label_num, label_name = code.new_yield_label() code.use_label(label_name) saved = [] code.funcstate.closure_temps.reset() for cname, type, manage_ref in code.funcstate.temps_in_use(): save_cname = code.funcstate.closure_temps.allocate_temp(type) saved.append((cname, save_cname, type)) if type.is_pyobject: code.put_xgiveref(cname) code.putln('%s->%s = %s;' % (Naming.cur_scope_cname, save_cname, cname)) code.put_xgiveref(Naming.retval_cname) profile = code.globalstate.directives['profile'] linetrace = code.globalstate.directives['linetrace'] if profile or linetrace: code.put_trace_return(Naming.retval_cname, nogil=not code.funcstate.gil_owned) code.put_finish_refcount_context() code.putln("/* return from generator, yielding value */") code.putln("%s->resume_label = %d;" % ( Naming.generator_cname, label_num)) code.putln("return %s;" % Naming.retval_cname) code.put_label(label_name) for cname, save_cname, type in saved: code.putln('%s = %s->%s;' % (cname, Naming.cur_scope_cname, save_cname)) if type.is_pyobject: code.putln('%s->%s = 0;' % (Naming.cur_scope_cname, save_cname)) code.put_xgotref(cname) code.putln(code.error_goto_if_null(Naming.sent_value_cname, self.pos)) if self.result_is_used: self.allocate_temp_result(code) code.put('%s = %s; ' % (self.result(), Naming.sent_value_cname)) code.put_incref(self.result(), py_object_type) class YieldFromExprNode(YieldExprNode): # "yield from GEN" expression is_yield_from = True expr_keyword = 'yield from' def coerce_yield_argument(self, env): if not self.arg.type.is_string: # FIXME: support C arrays and C++ iterators? error(self.pos, "yielding from non-Python object not supported") self.arg = self.arg.coerce_to_pyobject(env) def yield_from_func(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c")) return "__Pyx_Generator_Yield_From" def generate_evaluation_code(self, code, source_cname=None, decref_source=False): if source_cname is None: self.arg.generate_evaluation_code(code) code.putln("%s = %s(%s, %s);" % ( Naming.retval_cname, self.yield_from_func(code), Naming.generator_cname, self.arg.py_result() if source_cname is None else source_cname)) if source_cname is None: self.arg.generate_disposal_code(code) self.arg.free_temps(code) elif decref_source: code.put_decref_clear(source_cname, py_object_type) code.put_xgotref(Naming.retval_cname) code.putln("if (likely(%s)) {" % Naming.retval_cname) self.generate_yield_code(code) code.putln("} else {") # either error or sub-generator has normally terminated: return value => node result if self.result_is_used: self.fetch_iteration_result(code) else: self.handle_iteration_exception(code) code.putln("}") def fetch_iteration_result(self, code): # YieldExprNode has allocated the result temp for us code.putln("%s = NULL;" % self.result()) code.put_error_if_neg(self.pos, "__Pyx_PyGen_FetchStopIterationValue(&%s)" % self.result()) code.put_gotref(self.result()) def handle_iteration_exception(self, code): code.putln("PyObject* exc_type = PyErr_Occurred();") code.putln("if (exc_type) {") code.putln("if (likely(exc_type == PyExc_StopIteration ||" " PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();") code.putln("else %s" % code.error_goto(self.pos)) code.putln("}") class AwaitExprNode(YieldFromExprNode): # 'await' expression node # # arg ExprNode the Awaitable value to await # label_num integer yield label number is_await = True expr_keyword = 'await' def coerce_yield_argument(self, env): if self.arg is not None: # FIXME: use same 
check as in YieldFromExprNode.coerce_yield_argument() ? self.arg = self.arg.coerce_to_pyobject(env) def yield_from_func(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("CoroutineYieldFrom", "Coroutine.c")) return "__Pyx_Coroutine_Yield_From" class AIterAwaitExprNode(AwaitExprNode): # 'await' expression node used in async-for loops to support the pre-Py3.5.2 'aiter' protocol def yield_from_func(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("CoroutineAIterYieldFrom", "Coroutine.c")) return "__Pyx_Coroutine_AIter_Yield_From" class AwaitIterNextExprNode(AwaitExprNode): # 'await' expression node as part of 'async for' iteration # # Breaks out of loop on StopAsyncIteration exception. def fetch_iteration_result(self, code): assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop" code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c")) code.putln("PyObject* exc_type = PyErr_Occurred();") code.putln("if (exc_type && likely(exc_type == __Pyx_PyExc_StopAsyncIteration ||" " PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))) {") code.putln("PyErr_Clear();") code.putln("break;") code.putln("}") super(AwaitIterNextExprNode, self).fetch_iteration_result(code) class GlobalsExprNode(AtomicExprNode): type = dict_type is_temp = 1 def analyse_types(self, env): env.use_utility_code(Builtin.globals_utility_code) return self gil_message = "Constructing globals dict" def may_be_none(self): return False def generate_result_code(self, code): code.putln('%s = __Pyx_Globals(); %s' % ( self.result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) class LocalsDictItemNode(DictItemNode): def analyse_types(self, env): self.key = self.key.analyse_types(env) self.value = self.value.analyse_types(env) self.key = self.key.coerce_to_pyobject(env) if self.value.type.can_coerce_to_pyobject(env): self.value = self.value.coerce_to_pyobject(env) else: self.value = None return self class FuncLocalsExprNode(DictNode): def __init__(self, pos, env): local_vars = sorted([ entry.name for entry in env.entries.values() if entry.name]) items = [LocalsDictItemNode( pos, key=IdentifierStringNode(pos, value=var), value=NameNode(pos, name=var, allow_null=True)) for var in local_vars] DictNode.__init__(self, pos, key_value_pairs=items, exclude_null_values=True) def analyse_types(self, env): node = super(FuncLocalsExprNode, self).analyse_types(env) node.key_value_pairs = [ i for i in node.key_value_pairs if i.value is not None ] return node class PyClassLocalsExprNode(AtomicExprNode): def __init__(self, pos, pyclass_dict): AtomicExprNode.__init__(self, pos) self.pyclass_dict = pyclass_dict def analyse_types(self, env): self.type = self.pyclass_dict.type self.is_temp = False return self def may_be_none(self): return False def result(self): return self.pyclass_dict.result() def generate_result_code(self, code): pass def LocalsExprNode(pos, scope_node, env): if env.is_module_scope: return GlobalsExprNode(pos) if env.is_py_class_scope: return PyClassLocalsExprNode(pos, scope_node.dict) return FuncLocalsExprNode(pos, env) #------------------------------------------------------------------- # # Unary operator nodes # #------------------------------------------------------------------- compile_time_unary_operators = { 'not': operator.not_, '~': operator.inv, '-': operator.neg, '+': operator.pos, } class UnopNode(ExprNode): # operator string # operand ExprNode # # Processing during 
analyse_expressions phase: # # analyse_c_operation # Called when the operand is not a pyobject. # - Check operand type and coerce if needed. # - Determine result type and result code fragment. # - Allocate temporary for result if needed. subexprs = ['operand'] infix = True def calculate_constant_result(self): func = compile_time_unary_operators[self.operator] self.constant_result = func(self.operand.constant_result) def compile_time_value(self, denv): func = compile_time_unary_operators.get(self.operator) if not func: error(self.pos, "Unary '%s' not supported in compile-time expression" % self.operator) operand = self.operand.compile_time_value(denv) try: return func(operand) except Exception as e: self.compile_time_value_error(e) def infer_type(self, env): operand_type = self.operand.infer_type(env) if operand_type.is_cpp_class or operand_type.is_ptr: cpp_type = operand_type.find_cpp_operation_type(self.operator) if cpp_type is not None: return cpp_type return self.infer_unop_type(env, operand_type) def infer_unop_type(self, env, operand_type): if operand_type.is_pyobject: return py_object_type else: return operand_type def may_be_none(self): if self.operand.type and self.operand.type.is_builtin_type: if self.operand.type is not type_type: return False return ExprNode.may_be_none(self) def analyse_types(self, env): self.operand = self.operand.analyse_types(env) if self.is_pythran_operation(env): self.type = PythranExpr(pythran_unaryop_type(self.operator, self.operand.type)) self.is_temp = 1 elif self.is_py_operation(): self.coerce_operand_to_pyobject(env) self.type = py_object_type self.is_temp = 1 elif self.is_cpp_operation(): self.analyse_cpp_operation(env) else: self.analyse_c_operation(env) return self def check_const(self): return self.operand.check_const() def is_py_operation(self): return self.operand.type.is_pyobject or self.operand.type.is_ctuple def is_pythran_operation(self, env): np_pythran = has_np_pythran(env) op_type = self.operand.type return np_pythran and (op_type.is_buffer or op_type.is_pythran_expr) def nogil_check(self, env): if self.is_py_operation(): self.gil_error() def is_cpp_operation(self): type = self.operand.type return type.is_cpp_class def coerce_operand_to_pyobject(self, env): self.operand = self.operand.coerce_to_pyobject(env) def generate_result_code(self, code): if self.type.is_pythran_expr: code.putln("// Pythran unaryop") code.putln("__Pyx_call_destructor(%s);" % self.result()) code.putln("new (&%s) decltype(%s){%s%s};" % ( self.result(), self.result(), self.operator, self.operand.pythran_result())) elif self.operand.type.is_pyobject: self.generate_py_operation_code(code) elif self.is_temp: if self.is_cpp_operation() and self.exception_check == '+': translate_cpp_exception(code, self.pos, "%s = %s %s;" % (self.result(), self.operator, self.operand.result()), self.exception_value, self.in_nogil_context) else: code.putln("%s = %s %s;" % (self.result(), self.operator, self.operand.result())) def generate_py_operation_code(self, code): function = self.py_operation_function(code) code.putln( "%s = %s(%s); %s" % ( self.result(), function, self.operand.py_result(), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) def type_error(self): if not self.operand.type.is_error: error(self.pos, "Invalid operand type for '%s' (%s)" % (self.operator, self.operand.type)) self.type = PyrexTypes.error_type def analyse_cpp_operation(self, env, overload_check=True): entry = env.lookup_operator(self.operator, [self.operand]) if 
overload_check and not entry: self.type_error() return if entry: self.exception_check = entry.type.exception_check self.exception_value = entry.type.exception_value if self.exception_check == '+': self.is_temp = True if self.exception_value is None: env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) else: self.exception_check = '' self.exception_value = '' cpp_type = self.operand.type.find_cpp_operation_type(self.operator) if overload_check and cpp_type is None: error(self.pos, "'%s' operator not defined for %s" % ( self.operator, type)) self.type_error() return self.type = cpp_type class NotNode(UnopNode): # 'not' operator # # operand ExprNode operator = '!' type = PyrexTypes.c_bint_type def calculate_constant_result(self): self.constant_result = not self.operand.constant_result def compile_time_value(self, denv): operand = self.operand.compile_time_value(denv) try: return not operand except Exception as e: self.compile_time_value_error(e) def infer_unop_type(self, env, operand_type): return PyrexTypes.c_bint_type def analyse_types(self, env): self.operand = self.operand.analyse_types(env) operand_type = self.operand.type if operand_type.is_cpp_class: self.analyse_cpp_operation(env) else: self.operand = self.operand.coerce_to_boolean(env) return self def calculate_result_code(self): return "(!%s)" % self.operand.result() class UnaryPlusNode(UnopNode): # unary '+' operator operator = '+' def analyse_c_operation(self, env): self.type = PyrexTypes.widest_numeric_type( self.operand.type, PyrexTypes.c_int_type) def py_operation_function(self, code): return "PyNumber_Positive" def calculate_result_code(self): if self.is_cpp_operation(): return "(+%s)" % self.operand.result() else: return self.operand.result() class UnaryMinusNode(UnopNode): # unary '-' operator operator = '-' def analyse_c_operation(self, env): if self.operand.type.is_numeric: self.type = PyrexTypes.widest_numeric_type( self.operand.type, PyrexTypes.c_int_type) elif self.operand.type.is_enum: self.type = PyrexTypes.c_int_type else: self.type_error() if self.type.is_complex: self.infix = False def py_operation_function(self, code): return "PyNumber_Negative" def calculate_result_code(self): if self.infix: return "(-%s)" % self.operand.result() else: return "%s(%s)" % (self.operand.type.unary_op('-'), self.operand.result()) def get_constant_c_result_code(self): value = self.operand.get_constant_c_result_code() if value: return "(-%s)" % value class TildeNode(UnopNode): # unary '~' operator def analyse_c_operation(self, env): if self.operand.type.is_int: self.type = PyrexTypes.widest_numeric_type( self.operand.type, PyrexTypes.c_int_type) elif self.operand.type.is_enum: self.type = PyrexTypes.c_int_type else: self.type_error() def py_operation_function(self, code): return "PyNumber_Invert" def calculate_result_code(self): return "(~%s)" % self.operand.result() class CUnopNode(UnopNode): def is_py_operation(self): return False class DereferenceNode(CUnopNode): # unary * operator operator = '*' def infer_unop_type(self, env, operand_type): if operand_type.is_ptr: return operand_type.base_type else: return PyrexTypes.error_type def analyse_c_operation(self, env): if self.operand.type.is_ptr: self.type = self.operand.type.base_type else: self.type_error() def calculate_result_code(self): return "(*%s)" % self.operand.result() class DecrementIncrementNode(CUnopNode): # unary ++/-- operator def analyse_c_operation(self, env): if self.operand.type.is_numeric: self.type = 
PyrexTypes.widest_numeric_type( self.operand.type, PyrexTypes.c_int_type) elif self.operand.type.is_ptr: self.type = self.operand.type else: self.type_error() def calculate_result_code(self): if self.is_prefix: return "(%s%s)" % (self.operator, self.operand.result()) else: return "(%s%s)" % (self.operand.result(), self.operator) def inc_dec_constructor(is_prefix, operator): return lambda pos, **kwds: DecrementIncrementNode(pos, is_prefix=is_prefix, operator=operator, **kwds) class AmpersandNode(CUnopNode): # The C address-of operator. # # operand ExprNode operator = '&' def infer_unop_type(self, env, operand_type): return PyrexTypes.c_ptr_type(operand_type) def analyse_types(self, env): self.operand = self.operand.analyse_types(env) argtype = self.operand.type if argtype.is_cpp_class: self.analyse_cpp_operation(env, overload_check=False) if not (argtype.is_cfunction or argtype.is_reference or self.operand.is_addressable()): if argtype.is_memoryviewslice: self.error("Cannot take address of memoryview slice") else: self.error("Taking address of non-lvalue (type %s)" % argtype) return self if argtype.is_pyobject: self.error("Cannot take address of Python %s" % ( "variable '%s'" % self.operand.name if self.operand.is_name else "object attribute '%s'" % self.operand.attribute if self.operand.is_attribute else "object")) return self if not argtype.is_cpp_class or not self.type: self.type = PyrexTypes.c_ptr_type(argtype) return self def check_const(self): return self.operand.check_const_addr() def error(self, mess): error(self.pos, mess) self.type = PyrexTypes.error_type self.result_code = "" def calculate_result_code(self): return "(&%s)" % self.operand.result() def generate_result_code(self, code): if (self.operand.type.is_cpp_class and self.exception_check == '+'): translate_cpp_exception(code, self.pos, "%s = %s %s;" % (self.result(), self.operator, self.operand.result()), self.exception_value, self.in_nogil_context) unop_node_classes = { "+": UnaryPlusNode, "-": UnaryMinusNode, "~": TildeNode, } def unop_node(pos, operator, operand): # Construct unnop node of appropriate class for # given operator. if isinstance(operand, IntNode) and operator == '-': return IntNode(pos = operand.pos, value = str(-Utils.str_to_number(operand.value)), longness=operand.longness, unsigned=operand.unsigned) elif isinstance(operand, UnopNode) and operand.operator == operator in '+-': warning(pos, "Python has no increment/decrement operator: %s%sx == %s(%sx) == x" % ((operator,)*4), 5) return unop_node_classes[operator](pos, operator = operator, operand = operand) class TypecastNode(ExprNode): # C type cast # # operand ExprNode # base_type CBaseTypeNode # declarator CDeclaratorNode # typecheck boolean # # If used from a transform, one can if wanted specify the attribute # "type" directly and leave base_type and declarator to None subexprs = ['operand'] base_type = declarator = type = None def type_dependencies(self, env): return () def infer_type(self, env): if self.type is None: base_type = self.base_type.analyse(env) _, self.type = self.declarator.analyse(base_type, env) return self.type def analyse_types(self, env): if self.type is None: base_type = self.base_type.analyse(env) _, self.type = self.declarator.analyse(base_type, env) if self.operand.has_constant_result(): # Must be done after self.type is resolved. 
self.calculate_constant_result() if self.type.is_cfunction: error(self.pos, "Cannot cast to a function type") self.type = PyrexTypes.error_type self.operand = self.operand.analyse_types(env) if self.type is PyrexTypes.c_bint_type: # short circuit this to a coercion return self.operand.coerce_to_boolean(env) to_py = self.type.is_pyobject from_py = self.operand.type.is_pyobject if from_py and not to_py and self.operand.is_ephemeral(): if not self.type.is_numeric and not self.type.is_cpp_class: error(self.pos, "Casting temporary Python object to non-numeric non-Python type") if to_py and not from_py: if self.type is bytes_type and self.operand.type.is_int: return CoerceIntToBytesNode(self.operand, env) elif self.operand.type.can_coerce_to_pyobject(env): self.result_ctype = py_object_type self.operand = self.operand.coerce_to(self.type, env) else: if self.operand.type.is_ptr: if not (self.operand.type.base_type.is_void or self.operand.type.base_type.is_struct): error(self.pos, "Python objects cannot be cast from pointers of primitive types") else: # Should this be an error? warning(self.pos, "No conversion from %s to %s, python object pointer used." % (self.operand.type, self.type)) self.operand = self.operand.coerce_to_simple(env) elif from_py and not to_py: if self.type.create_from_py_utility_code(env): self.operand = self.operand.coerce_to(self.type, env) elif self.type.is_ptr: if not (self.type.base_type.is_void or self.type.base_type.is_struct): error(self.pos, "Python objects cannot be cast to pointers of primitive types") else: warning(self.pos, "No conversion from %s to %s, python object pointer used." % (self.type, self.operand.type)) elif from_py and to_py: if self.typecheck: self.operand = PyTypeTestNode(self.operand, self.type, env, notnone=True) elif isinstance(self.operand, SliceIndexNode): # This cast can influence the created type of string slices. 
self.operand = self.operand.coerce_to(self.type, env) elif self.type.is_complex and self.operand.type.is_complex: self.operand = self.operand.coerce_to_simple(env) elif self.operand.type.is_fused: self.operand = self.operand.coerce_to(self.type, env) #self.type = self.operand.type return self def is_simple(self): # either temp or a C cast => no side effects other than the operand's return self.operand.is_simple() def is_ephemeral(self): # either temp or a C cast => no side effects other than the operand's return self.operand.is_ephemeral() def nonlocally_immutable(self): return self.is_temp or self.operand.nonlocally_immutable() def nogil_check(self, env): if self.type and self.type.is_pyobject and self.is_temp: self.gil_error() def check_const(self): return self.operand.check_const() def calculate_constant_result(self): self.constant_result = self.calculate_result_code(self.operand.constant_result) def calculate_result_code(self, operand_result = None): if operand_result is None: operand_result = self.operand.result() if self.type.is_complex: operand_result = self.operand.result() if self.operand.type.is_complex: real_part = self.type.real_type.cast_code("__Pyx_CREAL(%s)" % operand_result) imag_part = self.type.real_type.cast_code("__Pyx_CIMAG(%s)" % operand_result) else: real_part = self.type.real_type.cast_code(operand_result) imag_part = "0" return "%s(%s, %s)" % ( self.type.from_parts, real_part, imag_part) else: return self.type.cast_code(operand_result) def get_constant_c_result_code(self): operand_result = self.operand.get_constant_c_result_code() if operand_result: return self.type.cast_code(operand_result) def result_as(self, type): if self.type.is_pyobject and not self.is_temp: # Optimise away some unnecessary casting return self.operand.result_as(type) else: return ExprNode.result_as(self, type) def generate_result_code(self, code): if self.is_temp: code.putln( "%s = (PyObject *)%s;" % ( self.result(), self.operand.result())) code.put_incref(self.result(), self.ctype()) ERR_START = "Start may not be given" ERR_NOT_STOP = "Stop must be provided to indicate shape" ERR_STEPS = ("Strides may only be given to indicate contiguity. " "Consider slicing it after conversion") ERR_NOT_POINTER = "Can only create cython.array from pointer or array" ERR_BASE_TYPE = "Pointer base type does not match cython.array base type" class CythonArrayNode(ExprNode): """ Used when a pointer of base_type is cast to a memoryviewslice with that base type. i.e. p creates a fortran-contiguous cython.array. We leave the type set to object so coercions to object are more efficient and less work. Acquiring a memoryviewslice from this will be just as efficient. ExprNode.coerce_to() will do the additional typecheck on self.compile_time_type This also handles my_c_array operand ExprNode the thing we're casting base_type_node MemoryViewSliceTypeNode the cast expression node """ subexprs = ['operand', 'shapes'] shapes = None is_temp = True mode = "c" array_dtype = None shape_type = PyrexTypes.c_py_ssize_t_type def analyse_types(self, env): from . 
import MemoryView self.operand = self.operand.analyse_types(env) if self.array_dtype: array_dtype = self.array_dtype else: array_dtype = self.base_type_node.base_type_node.analyse(env) axes = self.base_type_node.axes self.type = error_type self.shapes = [] ndim = len(axes) # Base type of the pointer or C array we are converting base_type = self.operand.type if not self.operand.type.is_ptr and not self.operand.type.is_array: error(self.operand.pos, ERR_NOT_POINTER) return self # Dimension sizes of C array array_dimension_sizes = [] if base_type.is_array: while base_type.is_array: array_dimension_sizes.append(base_type.size) base_type = base_type.base_type elif base_type.is_ptr: base_type = base_type.base_type else: error(self.pos, "unexpected base type %s found" % base_type) return self if not (base_type.same_as(array_dtype) or base_type.is_void): error(self.operand.pos, ERR_BASE_TYPE) return self elif self.operand.type.is_array and len(array_dimension_sizes) != ndim: error(self.operand.pos, "Expected %d dimensions, array has %d dimensions" % (ndim, len(array_dimension_sizes))) return self # Verify the start, stop and step values # In case of a C array, use the size of C array in each dimension to # get an automatic cast for axis_no, axis in enumerate(axes): if not axis.start.is_none: error(axis.start.pos, ERR_START) return self if axis.stop.is_none: if array_dimension_sizes: dimsize = array_dimension_sizes[axis_no] axis.stop = IntNode(self.pos, value=str(dimsize), constant_result=dimsize, type=PyrexTypes.c_int_type) else: error(axis.pos, ERR_NOT_STOP) return self axis.stop = axis.stop.analyse_types(env) shape = axis.stop.coerce_to(self.shape_type, env) if not shape.is_literal: shape.coerce_to_temp(env) self.shapes.append(shape) first_or_last = axis_no in (0, ndim - 1) if not axis.step.is_none and first_or_last: # '1' in the first or last dimension denotes F or C contiguity axis.step = axis.step.analyse_types(env) if (not axis.step.type.is_int and axis.step.is_literal and not axis.step.type.is_error): error(axis.step.pos, "Expected an integer literal") return self if axis.step.compile_time_value(env) != 1: error(axis.step.pos, ERR_STEPS) return self if axis_no == 0: self.mode = "fortran" elif not axis.step.is_none and not first_or_last: # step provided in some other dimension error(axis.step.pos, ERR_STEPS) return self if not self.operand.is_name: self.operand = self.operand.coerce_to_temp(env) axes = [('direct', 'follow')] * len(axes) if self.mode == "fortran": axes[0] = ('direct', 'contig') else: axes[-1] = ('direct', 'contig') self.coercion_type = PyrexTypes.MemoryViewSliceType(array_dtype, axes) self.coercion_type.validate_memslice_dtype(self.pos) self.type = self.get_cython_array_type(env) MemoryView.use_cython_array_utility_code(env) env.use_utility_code(MemoryView.typeinfo_to_format_code) return self def allocate_temp_result(self, code): if self.temp_code: raise RuntimeError("temp allocated mulitple times") self.temp_code = code.funcstate.allocate_temp(self.type, True) def infer_type(self, env): return self.get_cython_array_type(env) def get_cython_array_type(self, env): cython_scope = env.global_scope().context.cython_scope cython_scope.load_cythonscope() return cython_scope.viewscope.lookup("array").type def generate_result_code(self, code): from . 
import Buffer shapes = [self.shape_type.cast_code(shape.result()) for shape in self.shapes] dtype = self.coercion_type.dtype shapes_temp = code.funcstate.allocate_temp(py_object_type, True) format_temp = code.funcstate.allocate_temp(py_object_type, True) itemsize = "sizeof(%s)" % dtype.empty_declaration_code() type_info = Buffer.get_type_information_cname(code, dtype) if self.operand.type.is_ptr: code.putln("if (!%s) {" % self.operand.result()) code.putln( 'PyErr_SetString(PyExc_ValueError,' '"Cannot create cython.array from NULL pointer");') code.putln(code.error_goto(self.operand.pos)) code.putln("}") code.putln("%s = __pyx_format_from_typeinfo(&%s);" % (format_temp, type_info)) buildvalue_fmt = " __PYX_BUILD_PY_SSIZE_T " * len(shapes) code.putln('%s = Py_BuildValue((char*) "(" %s ")", %s);' % ( shapes_temp, buildvalue_fmt, ", ".join(shapes))) err = "!%s || !%s || !PyBytes_AsString(%s)" % (format_temp, shapes_temp, format_temp) code.putln(code.error_goto_if(err, self.pos)) code.put_gotref(format_temp) code.put_gotref(shapes_temp) tup = (self.result(), shapes_temp, itemsize, format_temp, self.mode, self.operand.result()) code.putln('%s = __pyx_array_new(' '%s, %s, PyBytes_AS_STRING(%s), ' '(char *) "%s", (char *) %s);' % tup) code.putln(code.error_goto_if_null(self.result(), self.pos)) code.put_gotref(self.result()) def dispose(temp): code.put_decref_clear(temp, py_object_type) code.funcstate.release_temp(temp) dispose(shapes_temp) dispose(format_temp) @classmethod def from_carray(cls, src_node, env): """ Given a C array type, return a CythonArrayNode """ pos = src_node.pos base_type = src_node.type none_node = NoneNode(pos) axes = [] while base_type.is_array: axes.append(SliceNode(pos, start=none_node, stop=none_node, step=none_node)) base_type = base_type.base_type axes[-1].step = IntNode(pos, value="1", is_c_literal=True) memslicenode = Nodes.MemoryViewSliceTypeNode(pos, axes=axes, base_type_node=base_type) result = CythonArrayNode(pos, base_type_node=memslicenode, operand=src_node, array_dtype=base_type) result = result.analyse_types(env) return result class SizeofNode(ExprNode): # Abstract base class for sizeof(x) expression nodes. 
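# --- Illustrative note (not part of the original source) ---------------------
# The SizeofNode subclasses that follow compile Cython's sizeof() expression.
# At the Cython level this looks roughly like (hypothetical user code, shown
# here only as a comment because it is not plain Python):
#
#     cdef double x
#     cdef size_t a = sizeof(double)   # a type argument  -> SizeofTypeNode
#     cdef size_t b = sizeof(x)        # a variable argument -> SizeofVarNode
#
# Both forms end up as C's sizeof operator in the generated code.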
type = PyrexTypes.c_size_t_type def check_const(self): return True def generate_result_code(self, code): pass class SizeofTypeNode(SizeofNode): # C sizeof function applied to a type # # base_type CBaseTypeNode # declarator CDeclaratorNode subexprs = [] arg_type = None def analyse_types(self, env): # we may have incorrectly interpreted a dotted name as a type rather than an attribute # this could be better handled by more uniformly treating types as runtime-available objects if 0 and self.base_type.module_path: path = self.base_type.module_path obj = env.lookup(path[0]) if obj.as_module is None: operand = NameNode(pos=self.pos, name=path[0]) for attr in path[1:]: operand = AttributeNode(pos=self.pos, obj=operand, attribute=attr) operand = AttributeNode(pos=self.pos, obj=operand, attribute=self.base_type.name) self.operand = operand self.__class__ = SizeofVarNode node = self.analyse_types(env) return node if self.arg_type is None: base_type = self.base_type.analyse(env) _, arg_type = self.declarator.analyse(base_type, env) self.arg_type = arg_type self.check_type() return self def check_type(self): arg_type = self.arg_type if not arg_type: return if arg_type.is_pyobject and not arg_type.is_extension_type: error(self.pos, "Cannot take sizeof Python object") elif arg_type.is_void: error(self.pos, "Cannot take sizeof void") elif not arg_type.is_complete(): error(self.pos, "Cannot take sizeof incomplete type '%s'" % arg_type) def calculate_result_code(self): if self.arg_type.is_extension_type: # the size of the pointer is boring # we want the size of the actual struct arg_code = self.arg_type.declaration_code("", deref=1) else: arg_code = self.arg_type.empty_declaration_code() return "(sizeof(%s))" % arg_code class SizeofVarNode(SizeofNode): # C sizeof function applied to a variable # # operand ExprNode subexprs = ['operand'] def analyse_types(self, env): # We may actually be looking at a type rather than a variable... # If we are, traditional analysis would fail... 
operand_as_type = self.operand.analyse_as_type(env) if operand_as_type: self.arg_type = operand_as_type if self.arg_type.is_fused: self.arg_type = self.arg_type.specialize(env.fused_to_specific) self.__class__ = SizeofTypeNode self.check_type() else: self.operand = self.operand.analyse_types(env) return self def calculate_result_code(self): return "(sizeof(%s))" % self.operand.result() def generate_result_code(self, code): pass class TypeidNode(ExprNode): # C++ typeid operator applied to a type or variable # # operand ExprNode # arg_type ExprNode # is_variable boolean type = PyrexTypes.error_type subexprs = ['operand'] arg_type = None is_variable = None is_temp = 1 def get_type_info_type(self, env): env_module = env while not env_module.is_module_scope: env_module = env_module.outer_scope typeinfo_module = env_module.find_module('libcpp.typeinfo', self.pos) typeinfo_entry = typeinfo_module.lookup('type_info') return PyrexTypes.CFakeReferenceType(PyrexTypes.c_const_type(typeinfo_entry.type)) def analyse_types(self, env): type_info = self.get_type_info_type(env) if not type_info: self.error("The 'libcpp.typeinfo' module must be cimported to use the typeid() operator") return self self.type = type_info as_type = self.operand.analyse_as_type(env) if as_type: self.arg_type = as_type self.is_type = True else: self.arg_type = self.operand.analyse_types(env) self.is_type = False if self.arg_type.type.is_pyobject: self.error("Cannot use typeid on a Python object") return self elif self.arg_type.type.is_void: self.error("Cannot use typeid on void") return self elif not self.arg_type.type.is_complete(): self.error("Cannot use typeid on incomplete type '%s'" % self.arg_type.type) return self env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) return self def error(self, mess): error(self.pos, mess) self.type = PyrexTypes.error_type self.result_code = "" def check_const(self): return True def calculate_result_code(self): return self.temp_code def generate_result_code(self, code): if self.is_type: arg_code = self.arg_type.empty_declaration_code() else: arg_code = self.arg_type.result() translate_cpp_exception(code, self.pos, "%s = typeid(%s);" % (self.temp_code, arg_code), None, self.in_nogil_context) class TypeofNode(ExprNode): # Compile-time type of an expression, as a string. # # operand ExprNode # literal StringNode # internal literal = None type = py_object_type subexprs = ['literal'] # 'operand' will be ignored after type analysis! 
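# --- Illustrative note (not part of the original source) ---------------------
# TypeofNode backs cython.typeof(), which evaluates at compile time to the
# static type of an expression as a string. A rough Cython-level sketch
# (kept in a comment because it is not plain Python):
#
#     cimport cython
#     cdef int i = 0
#     print(cython.typeof(i))        # prints "int"
#     print(cython.typeof(i + 1.0))  # prints the promoted floating type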
def analyse_types(self, env): self.operand = self.operand.analyse_types(env) value = StringEncoding.EncodedString(str(self.operand.type)) #self.operand.type.typeof_name()) literal = StringNode(self.pos, value=value) literal = literal.analyse_types(env) self.literal = literal.coerce_to_pyobject(env) return self def may_be_none(self): return False def generate_evaluation_code(self, code): self.literal.generate_evaluation_code(code) def calculate_result_code(self): return self.literal.calculate_result_code() #------------------------------------------------------------------- # # Binary operator nodes # #------------------------------------------------------------------- try: matmul_operator = operator.matmul except AttributeError: def matmul_operator(a, b): try: func = a.__matmul__ except AttributeError: func = b.__rmatmul__ return func(a, b) compile_time_binary_operators = { '<': operator.lt, '<=': operator.le, '==': operator.eq, '!=': operator.ne, '>=': operator.ge, '>': operator.gt, 'is': operator.is_, 'is_not': operator.is_not, '+': operator.add, '&': operator.and_, '/': operator.truediv, '//': operator.floordiv, '<<': operator.lshift, '%': operator.mod, '*': operator.mul, '|': operator.or_, '**': operator.pow, '>>': operator.rshift, '-': operator.sub, '^': operator.xor, '@': matmul_operator, 'in': lambda x, seq: x in seq, 'not_in': lambda x, seq: x not in seq, } def get_compile_time_binop(node): func = compile_time_binary_operators.get(node.operator) if not func: error(node.pos, "Binary '%s' not supported in compile-time expression" % node.operator) return func class BinopNode(ExprNode): # operator string # operand1 ExprNode # operand2 ExprNode # # Processing during analyse_expressions phase: # # analyse_c_operation # Called when neither operand is a pyobject. # - Check operand types and coerce if needed. # - Determine result type and result code fragment. # - Allocate temporary for result if needed. 
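# --- Illustrative sketch (not part of the original source) -------------------
# BinopNode folds constant operands by looking its operator string up in the
# compile_time_binary_operators table above. The same dispatch-table pattern
# as standalone Python (names here are hypothetical, for illustration only):

import operator as _op_mod

_fold_table = {'+': _op_mod.add, '*': _op_mod.mul, '<<': _op_mod.lshift}

def _fold_constant(op, a, b):
    """Return the folded value, mirroring how calculate_constant_result dispatches."""
    return _fold_table[op](a, b)

assert _fold_constant('+', 2, 3) == 5
assert _fold_constant('<<', 1, 4) == 16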
subexprs = ['operand1', 'operand2'] inplace = False def calculate_constant_result(self): func = compile_time_binary_operators[self.operator] self.constant_result = func( self.operand1.constant_result, self.operand2.constant_result) def compile_time_value(self, denv): func = get_compile_time_binop(self) operand1 = self.operand1.compile_time_value(denv) operand2 = self.operand2.compile_time_value(denv) try: return func(operand1, operand2) except Exception as e: self.compile_time_value_error(e) def infer_type(self, env): return self.result_type(self.operand1.infer_type(env), self.operand2.infer_type(env), env) def analyse_types(self, env): self.operand1 = self.operand1.analyse_types(env) self.operand2 = self.operand2.analyse_types(env) self.analyse_operation(env) return self def analyse_operation(self, env): if self.is_pythran_operation(env): self.type = self.result_type(self.operand1.type, self.operand2.type, env) assert self.type.is_pythran_expr self.is_temp = 1 elif self.is_py_operation(): self.coerce_operands_to_pyobjects(env) self.type = self.result_type(self.operand1.type, self.operand2.type, env) assert self.type.is_pyobject self.is_temp = 1 elif self.is_cpp_operation(): self.analyse_cpp_operation(env) else: self.analyse_c_operation(env) def is_py_operation(self): return self.is_py_operation_types(self.operand1.type, self.operand2.type) def is_py_operation_types(self, type1, type2): return type1.is_pyobject or type2.is_pyobject or type1.is_ctuple or type2.is_ctuple def is_pythran_operation(self, env): return self.is_pythran_operation_types(self.operand1.type, self.operand2.type, env) def is_pythran_operation_types(self, type1, type2, env): # Support only expr op supported_type, or supported_type op expr return has_np_pythran(env) and \ (is_pythran_supported_operation_type(type1) and is_pythran_supported_operation_type(type2)) and \ (is_pythran_expr(type1) or is_pythran_expr(type2)) def is_cpp_operation(self): return (self.operand1.type.is_cpp_class or self.operand2.type.is_cpp_class) def analyse_cpp_operation(self, env): entry = env.lookup_operator(self.operator, [self.operand1, self.operand2]) if not entry: self.type_error() return func_type = entry.type self.exception_check = func_type.exception_check self.exception_value = func_type.exception_value if self.exception_check == '+': # Used by NumBinopNodes to break up expressions involving multiple # operators so that exceptions can be handled properly. self.is_temp = 1 if self.exception_value is None: env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) if func_type.is_ptr: func_type = func_type.base_type if len(func_type.args) == 1: self.operand2 = self.operand2.coerce_to(func_type.args[0].type, env) else: self.operand1 = self.operand1.coerce_to(func_type.args[0].type, env) self.operand2 = self.operand2.coerce_to(func_type.args[1].type, env) self.type = func_type.return_type def result_type(self, type1, type2, env): if self.is_pythran_operation_types(type1, type2, env): return PythranExpr(pythran_binop_type(self.operator, type1, type2)) if self.is_py_operation_types(type1, type2): if type2.is_string: type2 = Builtin.bytes_type elif type2.is_pyunicode_ptr: type2 = Builtin.unicode_type if type1.is_string: type1 = Builtin.bytes_type elif type1.is_pyunicode_ptr: type1 = Builtin.unicode_type if type1.is_builtin_type or type2.is_builtin_type: if type1 is type2 and self.operator in '**%+|&^': # FIXME: at least these operators should be safe - others? 
return type1 result_type = self.infer_builtin_types_operation(type1, type2) if result_type is not None: return result_type return py_object_type elif type1.is_error or type2.is_error: return PyrexTypes.error_type else: return self.compute_c_result_type(type1, type2) def infer_builtin_types_operation(self, type1, type2): return None def nogil_check(self, env): if self.is_py_operation(): self.gil_error() def coerce_operands_to_pyobjects(self, env): self.operand1 = self.operand1.coerce_to_pyobject(env) self.operand2 = self.operand2.coerce_to_pyobject(env) def check_const(self): return self.operand1.check_const() and self.operand2.check_const() def is_ephemeral(self): return (super(BinopNode, self).is_ephemeral() or self.operand1.is_ephemeral() or self.operand2.is_ephemeral()) def generate_result_code(self, code): if self.type.is_pythran_expr: code.putln("// Pythran binop") code.putln("__Pyx_call_destructor(%s);" % self.result()) code.putln("new (&%s) decltype(%s){%s %s %s};" % ( self.result(), self.result(), self.operand1.pythran_result(), self.operator, self.operand2.pythran_result())) elif self.operand1.type.is_pyobject: function = self.py_operation_function(code) if self.operator == '**': extra_args = ", Py_None" else: extra_args = "" code.putln( "%s = %s(%s, %s%s); %s" % ( self.result(), function, self.operand1.py_result(), self.operand2.py_result(), extra_args, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) elif self.is_temp: # C++ overloaded operators with exception values are currently all # handled through temporaries. if self.is_cpp_operation() and self.exception_check == '+': translate_cpp_exception(code, self.pos, "%s = %s;" % (self.result(), self.calculate_result_code()), self.exception_value, self.in_nogil_context) else: code.putln("%s = %s;" % (self.result(), self.calculate_result_code())) def type_error(self): if not (self.operand1.type.is_error or self.operand2.type.is_error): error(self.pos, "Invalid operand types for '%s' (%s; %s)" % (self.operator, self.operand1.type, self.operand2.type)) self.type = PyrexTypes.error_type class CBinopNode(BinopNode): def analyse_types(self, env): node = BinopNode.analyse_types(self, env) if node.is_py_operation(): node.type = PyrexTypes.error_type return node def py_operation_function(self, code): return "" def calculate_result_code(self): return "(%s %s %s)" % ( self.operand1.result(), self.operator, self.operand2.result()) def compute_c_result_type(self, type1, type2): cpp_type = None if type1.is_cpp_class or type1.is_ptr: cpp_type = type1.find_cpp_operation_type(self.operator, type2) # FIXME: handle the reversed case? #if cpp_type is None and (type2.is_cpp_class or type2.is_ptr): # cpp_type = type2.find_cpp_operation_type(self.operator, type1) # FIXME: do we need to handle other cases here? return cpp_type def c_binop_constructor(operator): def make_binop_node(pos, **operands): return CBinopNode(pos, operator=operator, **operands) return make_binop_node class NumBinopNode(BinopNode): # Binary operation taking numeric arguments. 
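# --- Illustrative sketch (not part of the original source) -------------------
# NumBinopNode.compute_c_result_type below widens a c_bint result to at least
# int for arithmetic operators, because boolean arithmetic yields integers,
# not booleans; only the bitwise operators '|', '^', '&' keep the bint type.
# Plain Python shows the behaviour the generated C code has to match:

assert False + False == 0
assert type(False + False) is int        # '+' on booleans gives an int, not bool
assert (True | False) is True            # bitwise ops on booleans stay boolean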
infix = True overflow_check = False overflow_bit_node = None def analyse_c_operation(self, env): type1 = self.operand1.type type2 = self.operand2.type self.type = self.compute_c_result_type(type1, type2) if not self.type: self.type_error() return if self.type.is_complex: self.infix = False if (self.type.is_int and env.directives['overflowcheck'] and self.operator in self.overflow_op_names): if (self.operator in ('+', '*') and self.operand1.has_constant_result() and not self.operand2.has_constant_result()): self.operand1, self.operand2 = self.operand2, self.operand1 self.overflow_check = True self.overflow_fold = env.directives['overflowcheck.fold'] self.func = self.type.overflow_check_binop( self.overflow_op_names[self.operator], env, const_rhs = self.operand2.has_constant_result()) self.is_temp = True if not self.infix or (type1.is_numeric and type2.is_numeric): self.operand1 = self.operand1.coerce_to(self.type, env) self.operand2 = self.operand2.coerce_to(self.type, env) def compute_c_result_type(self, type1, type2): if self.c_types_okay(type1, type2): widest_type = PyrexTypes.widest_numeric_type(type1, type2) if widest_type is PyrexTypes.c_bint_type: if self.operator not in '|^&': # False + False == 0 # not False! widest_type = PyrexTypes.c_int_type else: widest_type = PyrexTypes.widest_numeric_type( widest_type, PyrexTypes.c_int_type) return widest_type else: return None def may_be_none(self): if self.type and self.type.is_builtin_type: # if we know the result type, we know the operation, so it can't be None return False type1 = self.operand1.type type2 = self.operand2.type if type1 and type1.is_builtin_type and type2 and type2.is_builtin_type: # XXX: I can't think of any case where a binary operation # on builtin types evaluates to None - add a special case # here if there is one. 
return False return super(NumBinopNode, self).may_be_none() def get_constant_c_result_code(self): value1 = self.operand1.get_constant_c_result_code() value2 = self.operand2.get_constant_c_result_code() if value1 and value2: return "(%s %s %s)" % (value1, self.operator, value2) else: return None def c_types_okay(self, type1, type2): #print "NumBinopNode.c_types_okay:", type1, type2 ### return (type1.is_numeric or type1.is_enum) \ and (type2.is_numeric or type2.is_enum) def generate_evaluation_code(self, code): if self.overflow_check: self.overflow_bit_node = self self.overflow_bit = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) code.putln("%s = 0;" % self.overflow_bit) super(NumBinopNode, self).generate_evaluation_code(code) if self.overflow_check: code.putln("if (unlikely(%s)) {" % self.overflow_bit) code.putln('PyErr_SetString(PyExc_OverflowError, "value too large");') code.putln(code.error_goto(self.pos)) code.putln("}") code.funcstate.release_temp(self.overflow_bit) def calculate_result_code(self): if self.overflow_bit_node is not None: return "%s(%s, %s, &%s)" % ( self.func, self.operand1.result(), self.operand2.result(), self.overflow_bit_node.overflow_bit) elif self.type.is_cpp_class or self.infix: return "(%s %s %s)" % ( self.operand1.result(), self.operator, self.operand2.result()) else: func = self.type.binary_op(self.operator) if func is None: error(self.pos, "binary operator %s not supported for %s" % (self.operator, self.type)) return "%s(%s, %s)" % ( func, self.operand1.result(), self.operand2.result()) def is_py_operation_types(self, type1, type2): return (type1.is_unicode_char or type2.is_unicode_char or BinopNode.is_py_operation_types(self, type1, type2)) def py_operation_function(self, code): function_name = self.py_functions[self.operator] if self.inplace: function_name = function_name.replace('PyNumber_', 'PyNumber_InPlace') return function_name py_functions = { "|": "PyNumber_Or", "^": "PyNumber_Xor", "&": "PyNumber_And", "<<": "PyNumber_Lshift", ">>": "PyNumber_Rshift", "+": "PyNumber_Add", "-": "PyNumber_Subtract", "*": "PyNumber_Multiply", "@": "__Pyx_PyNumber_MatrixMultiply", "/": "__Pyx_PyNumber_Divide", "//": "PyNumber_FloorDivide", "%": "PyNumber_Remainder", "**": "PyNumber_Power", } overflow_op_names = { "+": "add", "-": "sub", "*": "mul", "<<": "lshift", } class IntBinopNode(NumBinopNode): # Binary operation taking integer arguments. def c_types_okay(self, type1, type2): #print "IntBinopNode.c_types_okay:", type1, type2 ### return (type1.is_int or type1.is_enum) \ and (type2.is_int or type2.is_enum) class AddNode(NumBinopNode): # '+' operator. 
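# --- Illustrative sketch (not part of the original source) -------------------
# AddNode.infer_builtin_types_operation below relies on the fact that mixing
# bytes and unicode with '+' is an error on Python 3, so the inferred result
# type only ever needs to cover same-kind concatenation:

assert b"ab" + b"cd" == b"abcd"
assert "ab" + "cd" == "abcd"
try:
    b"ab" + "cd"              # mixing kinds fails on Python 3
except TypeError:
    pass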
def is_py_operation_types(self, type1, type2): if type1.is_string and type2.is_string or type1.is_pyunicode_ptr and type2.is_pyunicode_ptr: return 1 else: return NumBinopNode.is_py_operation_types(self, type1, type2) def infer_builtin_types_operation(self, type1, type2): # b'abc' + 'abc' raises an exception in Py3, # so we can safely infer the Py2 type for bytes here string_types = (bytes_type, str_type, basestring_type, unicode_type) if type1 in string_types and type2 in string_types: return string_types[max(string_types.index(type1), string_types.index(type2))] return None def compute_c_result_type(self, type1, type2): #print "AddNode.compute_c_result_type:", type1, self.operator, type2 ### if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum): return type1 elif (type2.is_ptr or type2.is_array) and (type1.is_int or type1.is_enum): return type2 else: return NumBinopNode.compute_c_result_type( self, type1, type2) def py_operation_function(self, code): is_unicode_concat = False if isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode): is_unicode_concat = True else: type1, type2 = self.operand1.type, self.operand2.type if type1 is unicode_type or type2 is unicode_type: is_unicode_concat = type1.is_builtin_type and type2.is_builtin_type if is_unicode_concat: if self.operand1.may_be_none() or self.operand2.may_be_none(): return '__Pyx_PyUnicode_ConcatSafe' else: return '__Pyx_PyUnicode_Concat' return super(AddNode, self).py_operation_function(code) class SubNode(NumBinopNode): # '-' operator. def compute_c_result_type(self, type1, type2): if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum): return type1 elif (type1.is_ptr or type1.is_array) and (type2.is_ptr or type2.is_array): return PyrexTypes.c_ptrdiff_t_type else: return NumBinopNode.compute_c_result_type( self, type1, type2) class MulNode(NumBinopNode): # '*' operator. def is_py_operation_types(self, type1, type2): if ((type1.is_string and type2.is_int) or (type2.is_string and type1.is_int)): return 1 else: return NumBinopNode.is_py_operation_types(self, type1, type2) def infer_builtin_types_operation(self, type1, type2): # let's assume that whatever builtin type you multiply a string with # will either return a string of the same type or fail with an exception string_types = (bytes_type, str_type, basestring_type, unicode_type) if type1 in string_types and type2.is_builtin_type: return type1 if type2 in string_types and type1.is_builtin_type: return type2 # multiplication of containers/numbers with an integer value # always (?) returns the same type if type1.is_int: return type2 if type2.is_int: return type1 return None class MatMultNode(NumBinopNode): # '@' operator. def is_py_operation_types(self, type1, type2): return True def generate_evaluation_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("MatrixMultiply", "ObjectHandling.c")) super(MatMultNode, self).generate_evaluation_code(code) class DivNode(NumBinopNode): # '/' or '//' operator. 
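# --- Illustrative sketch (not part of the original source) -------------------
# DivNode (and ModNode further down) must bridge two integer-division
# semantics: Python floors the quotient and gives a remainder with the
# divisor's sign, while C truncates toward zero and keeps the dividend's
# sign. With the 'cdivision' directive off (the default), Cython emits the
# Python behaviour for C integers; with cdivision=True it emits the plain
# C operators.

import math

def _c_style_divmod(a, b):
    """Emulate C99 integer '/' and '%' (truncation toward zero) in Python."""
    q = math.trunc(a / b)
    return q, a - q * b

assert divmod(-7, 2) == (-4, 1)            # Python semantics
assert _c_style_divmod(-7, 2) == (-3, -1)  # C semantics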
cdivision = None truedivision = None # == "unknown" if operator == '/' ctruedivision = False cdivision_warnings = False zerodivision_check = None def find_compile_time_binary_operator(self, op1, op2): func = compile_time_binary_operators[self.operator] if self.operator == '/' and self.truedivision is None: # => true div for floats, floor div for integers if isinstance(op1, _py_int_types) and isinstance(op2, _py_int_types): func = compile_time_binary_operators['//'] return func def calculate_constant_result(self): op1 = self.operand1.constant_result op2 = self.operand2.constant_result func = self.find_compile_time_binary_operator(op1, op2) self.constant_result = func( self.operand1.constant_result, self.operand2.constant_result) def compile_time_value(self, denv): operand1 = self.operand1.compile_time_value(denv) operand2 = self.operand2.compile_time_value(denv) try: func = self.find_compile_time_binary_operator( operand1, operand2) return func(operand1, operand2) except Exception as e: self.compile_time_value_error(e) def _check_truedivision(self, env): if self.cdivision or env.directives['cdivision']: self.ctruedivision = False else: self.ctruedivision = self.truedivision def infer_type(self, env): self._check_truedivision(env) return self.result_type( self.operand1.infer_type(env), self.operand2.infer_type(env), env) def analyse_operation(self, env): self._check_truedivision(env) NumBinopNode.analyse_operation(self, env) if self.is_cpp_operation(): self.cdivision = True if not self.type.is_pyobject: self.zerodivision_check = ( self.cdivision is None and not env.directives['cdivision'] and (not self.operand2.has_constant_result() or self.operand2.constant_result == 0)) if self.zerodivision_check or env.directives['cdivision_warnings']: # Need to check ahead of time to warn or raise zero division error self.operand1 = self.operand1.coerce_to_simple(env) self.operand2 = self.operand2.coerce_to_simple(env) def compute_c_result_type(self, type1, type2): if self.operator == '/' and self.ctruedivision: if not type1.is_float and not type2.is_float: widest_type = PyrexTypes.widest_numeric_type(type1, PyrexTypes.c_double_type) widest_type = PyrexTypes.widest_numeric_type(type2, widest_type) return widest_type return NumBinopNode.compute_c_result_type(self, type1, type2) def zero_division_message(self): if self.type.is_int: return "integer division or modulo by zero" else: return "float division" def generate_evaluation_code(self, code): if not self.type.is_pyobject and not self.type.is_complex: if self.cdivision is None: self.cdivision = (code.globalstate.directives['cdivision'] or not self.type.signed or self.type.is_float) if not self.cdivision: code.globalstate.use_utility_code( UtilityCode.load_cached("DivInt", "CMath.c").specialize(self.type)) NumBinopNode.generate_evaluation_code(self, code) self.generate_div_warning_code(code) def generate_div_warning_code(self, code): in_nogil = self.in_nogil_context if not self.type.is_pyobject: if self.zerodivision_check: if not self.infix: zero_test = "%s(%s)" % (self.type.unary_op('zero'), self.operand2.result()) else: zero_test = "%s == 0" % self.operand2.result() code.putln("if (unlikely(%s)) {" % zero_test) if in_nogil: code.put_ensure_gil() code.putln('PyErr_SetString(PyExc_ZeroDivisionError, "%s");' % self.zero_division_message()) if in_nogil: code.put_release_ensured_gil() code.putln(code.error_goto(self.pos)) code.putln("}") if self.type.is_int and self.type.signed and self.operator != '%': 
code.globalstate.use_utility_code(UtilityCode.load_cached("UnaryNegOverflows", "Overflow.c")) if self.operand2.type.signed == 2: # explicitly signed, no runtime check needed minus1_check = 'unlikely(%s == -1)' % self.operand2.result() else: type_of_op2 = self.operand2.type.empty_declaration_code() minus1_check = '(!(((%s)-1) > 0)) && unlikely(%s == (%s)-1)' % ( type_of_op2, self.operand2.result(), type_of_op2) code.putln("else if (sizeof(%s) == sizeof(long) && %s " " && unlikely(UNARY_NEG_WOULD_OVERFLOW(%s))) {" % ( self.type.empty_declaration_code(), minus1_check, self.operand1.result())) if in_nogil: code.put_ensure_gil() code.putln('PyErr_SetString(PyExc_OverflowError, "value too large to perform division");') if in_nogil: code.put_release_ensured_gil() code.putln(code.error_goto(self.pos)) code.putln("}") if code.globalstate.directives['cdivision_warnings'] and self.operator != '/': code.globalstate.use_utility_code( UtilityCode.load_cached("CDivisionWarning", "CMath.c")) code.putln("if (unlikely((%s < 0) ^ (%s < 0))) {" % ( self.operand1.result(), self.operand2.result())) warning_code = "__Pyx_cdivision_warning(%(FILENAME)s, %(LINENO)s)" % { 'FILENAME': Naming.filename_cname, 'LINENO': Naming.lineno_cname, } if in_nogil: result_code = 'result' code.putln("int %s;" % result_code) code.put_ensure_gil() code.putln(code.set_error_info(self.pos, used=True)) code.putln("%s = %s;" % (result_code, warning_code)) code.put_release_ensured_gil() else: result_code = warning_code code.putln(code.set_error_info(self.pos, used=True)) code.put("if (unlikely(%s)) " % result_code) code.put_goto(code.error_label) code.putln("}") def calculate_result_code(self): if self.type.is_complex: return NumBinopNode.calculate_result_code(self) elif self.type.is_float and self.operator == '//': return "floor(%s / %s)" % ( self.operand1.result(), self.operand2.result()) elif self.truedivision or self.cdivision: op1 = self.operand1.result() op2 = self.operand2.result() if self.truedivision: if self.type != self.operand1.type: op1 = self.type.cast_code(op1) if self.type != self.operand2.type: op2 = self.type.cast_code(op2) return "(%s / %s)" % (op1, op2) else: return "__Pyx_div_%s(%s, %s)" % ( self.type.specialization_name(), self.operand1.result(), self.operand2.result()) class ModNode(DivNode): # '%' operator. 
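# --- Illustrative sketch (not part of the original source) -------------------
# ModNode.infer_builtin_types_operation below declines to pin the result type
# of u'...' % x when the left-hand side might be None, because Python then
# falls back to the right-hand operand's reflected method:

class _RhsFormatter(object):          # hypothetical helper, for illustration only
    def __rmod__(self, other):
        return "handled by the RHS"

assert None % _RhsFormatter() == "handled by the RHS"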
def is_py_operation_types(self, type1, type2): return (type1.is_string or type2.is_string or NumBinopNode.is_py_operation_types(self, type1, type2)) def infer_builtin_types_operation(self, type1, type2): # b'%s' % xyz raises an exception in Py3, so it's safe to infer the type for Py2 if type1 is unicode_type: # None + xyz may be implemented by RHS if type2.is_builtin_type or not self.operand1.may_be_none(): return type1 elif type1 in (bytes_type, str_type, basestring_type): if type2 is unicode_type: return type2 elif type2.is_numeric: return type1 elif type1 is bytes_type and not type2.is_builtin_type: return None # RHS might implement '% operator differently in Py3 else: return basestring_type # either str or unicode, can't tell return None def zero_division_message(self): if self.type.is_int: return "integer division or modulo by zero" else: return "float divmod()" def analyse_operation(self, env): DivNode.analyse_operation(self, env) if not self.type.is_pyobject: if self.cdivision is None: self.cdivision = env.directives['cdivision'] or not self.type.signed if not self.cdivision and not self.type.is_int and not self.type.is_float: error(self.pos, "mod operator not supported for type '%s'" % self.type) def generate_evaluation_code(self, code): if not self.type.is_pyobject and not self.cdivision: if self.type.is_int: code.globalstate.use_utility_code( UtilityCode.load_cached("ModInt", "CMath.c").specialize(self.type)) else: # float code.globalstate.use_utility_code( UtilityCode.load_cached("ModFloat", "CMath.c").specialize( self.type, math_h_modifier=self.type.math_h_modifier)) # NOTE: skipping over DivNode here NumBinopNode.generate_evaluation_code(self, code) self.generate_div_warning_code(code) def calculate_result_code(self): if self.cdivision: if self.type.is_float: return "fmod%s(%s, %s)" % ( self.type.math_h_modifier, self.operand1.result(), self.operand2.result()) else: return "(%s %% %s)" % ( self.operand1.result(), self.operand2.result()) else: return "__Pyx_mod_%s(%s, %s)" % ( self.type.specialization_name(), self.operand1.result(), self.operand2.result()) def py_operation_function(self, code): if self.operand1.type is unicode_type: if self.operand1.may_be_none(): return '__Pyx_PyUnicode_FormatSafe' else: return 'PyUnicode_Format' elif self.operand1.type is str_type: if self.operand1.may_be_none(): return '__Pyx_PyString_FormatSafe' else: return '__Pyx_PyString_Format' return super(ModNode, self).py_operation_function(code) class PowNode(NumBinopNode): # '**' operator. def analyse_c_operation(self, env): NumBinopNode.analyse_c_operation(self, env) if self.type.is_complex: if self.type.real_type.is_float: self.operand1 = self.operand1.coerce_to(self.type, env) self.operand2 = self.operand2.coerce_to(self.type, env) self.pow_func = self.type.binary_op('**') else: error(self.pos, "complex int powers not supported") self.pow_func = "" elif self.type.is_float: self.pow_func = "pow" + self.type.math_h_modifier elif self.type.is_int: self.pow_func = "__Pyx_pow_%s" % self.type.empty_declaration_code().replace(' ', '_') env.use_utility_code( UtilityCode.load_cached("IntPow", "CMath.c").specialize( func_name=self.pow_func, type=self.type.empty_declaration_code(), signed=self.type.signed and 1 or 0)) elif not self.type.is_error: error(self.pos, "got unexpected types for C power operator: %s, %s" % (self.operand1.type, self.operand2.type)) def calculate_result_code(self): # Work around MSVC overloading ambiguity. 
def typecast(operand): if self.type == operand.type: return operand.result() else: return self.type.cast_code(operand.result()) return "%s(%s, %s)" % ( self.pow_func, typecast(self.operand1), typecast(self.operand2)) def py_operation_function(self, code): if (self.type.is_pyobject and self.operand1.constant_result == 2 and isinstance(self.operand1.constant_result, _py_int_types) and self.operand2.type is py_object_type): code.globalstate.use_utility_code(UtilityCode.load_cached('PyNumberPow2', 'Optimize.c')) if self.inplace: return '__Pyx_PyNumber_InPlacePowerOf2' else: return '__Pyx_PyNumber_PowerOf2' return super(PowNode, self).py_operation_function(code) class BoolBinopNode(ExprNode): """ Short-circuiting boolean operation. Note that this node provides the same code generation method as BoolBinopResultNode to simplify expression nesting. operator string "and"/"or" operand1 BoolBinopNode/BoolBinopResultNode left operand operand2 BoolBinopNode/BoolBinopResultNode right operand """ subexprs = ['operand1', 'operand2'] is_temp = True operator = None operand1 = None operand2 = None def infer_type(self, env): type1 = self.operand1.infer_type(env) type2 = self.operand2.infer_type(env) return PyrexTypes.independent_spanning_type(type1, type2) def may_be_none(self): if self.operator == 'or': return self.operand2.may_be_none() else: return self.operand1.may_be_none() or self.operand2.may_be_none() def calculate_constant_result(self): operand1 = self.operand1.constant_result operand2 = self.operand2.constant_result if self.operator == 'and': self.constant_result = operand1 and operand2 else: self.constant_result = operand1 or operand2 def compile_time_value(self, denv): operand1 = self.operand1.compile_time_value(denv) operand2 = self.operand2.compile_time_value(denv) if self.operator == 'and': return operand1 and operand2 else: return operand1 or operand2 def is_ephemeral(self): return self.operand1.is_ephemeral() or self.operand2.is_ephemeral() def analyse_types(self, env): # Note: we do not do any coercion here as we most likely do not know the final type anyway. # We even accept to set self.type to ErrorType if both operands do not have a spanning type. # The coercion to the final type and to a "simple" value is left to coerce_to(). operand1 = self.operand1.analyse_types(env) operand2 = self.operand2.analyse_types(env) self.type = PyrexTypes.independent_spanning_type( operand1.type, operand2.type) self.operand1 = self._wrap_operand(operand1, env) self.operand2 = self._wrap_operand(operand2, env) return self def _wrap_operand(self, operand, env): if not isinstance(operand, (BoolBinopNode, BoolBinopResultNode)): operand = BoolBinopResultNode(operand, self.type, env) return operand def wrap_operands(self, env): """ Must get called by transforms that want to create a correct BoolBinopNode after the type analysis phase. 
""" self.operand1 = self._wrap_operand(self.operand1, env) self.operand2 = self._wrap_operand(self.operand2, env) def coerce_to_boolean(self, env): return self.coerce_to(PyrexTypes.c_bint_type, env) def coerce_to(self, dst_type, env): operand1 = self.operand1.coerce_to(dst_type, env) operand2 = self.operand2.coerce_to(dst_type, env) return BoolBinopNode.from_node( self, type=dst_type, operator=self.operator, operand1=operand1, operand2=operand2) def generate_bool_evaluation_code(self, code, final_result_temp, and_label, or_label, end_label, fall_through): code.mark_pos(self.pos) outer_labels = (and_label, or_label) if self.operator == 'and': my_label = and_label = code.new_label('next_and') else: my_label = or_label = code.new_label('next_or') self.operand1.generate_bool_evaluation_code( code, final_result_temp, and_label, or_label, end_label, my_label) and_label, or_label = outer_labels code.put_label(my_label) self.operand2.generate_bool_evaluation_code( code, final_result_temp, and_label, or_label, end_label, fall_through) def generate_evaluation_code(self, code): self.allocate_temp_result(code) or_label = and_label = None end_label = code.new_label('bool_binop_done') self.generate_bool_evaluation_code(code, self.result(), and_label, or_label, end_label, end_label) code.put_label(end_label) gil_message = "Truth-testing Python object" def check_const(self): return self.operand1.check_const() and self.operand2.check_const() def generate_subexpr_disposal_code(self, code): pass # nothing to do here, all done in generate_evaluation_code() def free_subexpr_temps(self, code): pass # nothing to do here, all done in generate_evaluation_code() def generate_operand1_test(self, code): # Generate code to test the truth of the first operand. if self.type.is_pyobject: test_result = code.funcstate.allocate_temp( PyrexTypes.c_bint_type, manage_ref=False) code.putln( "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( test_result, self.operand1.py_result(), code.error_goto_if_neg(test_result, self.pos))) else: test_result = self.operand1.result() return (test_result, self.type.is_pyobject) class BoolBinopResultNode(ExprNode): """ Intermediate result of a short-circuiting and/or expression. Tests the result for 'truthiness' and takes care of coercing the final result of the overall expression to the target type. Note that this node provides the same code generation method as BoolBinopNode to simplify expression nesting. arg ExprNode the argument to test value ExprNode the coerced result value node """ subexprs = ['arg', 'value'] is_temp = True arg = None value = None def __init__(self, arg, result_type, env): # using 'arg' multiple times, so it must be a simple/temp value arg = arg.coerce_to_simple(env) # wrap in ProxyNode, in case a transform wants to replace self.arg later arg = ProxyNode(arg) super(BoolBinopResultNode, self).__init__( arg.pos, arg=arg, type=result_type, value=CloneNode(arg).coerce_to(result_type, env)) def coerce_to_boolean(self, env): return self.coerce_to(PyrexTypes.c_bint_type, env) def coerce_to(self, dst_type, env): # unwrap, coerce, rewrap arg = self.arg.arg if dst_type is PyrexTypes.c_bint_type: arg = arg.coerce_to_boolean(env) # TODO: unwrap more coercion nodes? return BoolBinopResultNode(arg, dst_type, env) def nogil_check(self, env): # let's leave all errors to BoolBinopNode pass def generate_operand_test(self, code): # Generate code to test the truth of the first operand. 
if self.arg.type.is_pyobject: test_result = code.funcstate.allocate_temp( PyrexTypes.c_bint_type, manage_ref=False) code.putln( "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( test_result, self.arg.py_result(), code.error_goto_if_neg(test_result, self.pos))) else: test_result = self.arg.result() return (test_result, self.arg.type.is_pyobject) def generate_bool_evaluation_code(self, code, final_result_temp, and_label, or_label, end_label, fall_through): code.mark_pos(self.pos) # x => x # x and ... or ... => next 'and' / 'or' # False ... or x => next 'or' # True and x => next 'and' # True or x => True (operand) self.arg.generate_evaluation_code(code) if and_label or or_label: test_result, uses_temp = self.generate_operand_test(code) if uses_temp and (and_label and or_label): # cannot become final result => free early # disposal: uses_temp and (and_label and or_label) self.arg.generate_disposal_code(code) sense = '!' if or_label else '' code.putln("if (%s%s) {" % (sense, test_result)) if uses_temp: code.funcstate.release_temp(test_result) if not uses_temp or not (and_label and or_label): # disposal: (not uses_temp) or {not (and_label and or_label) [if]} self.arg.generate_disposal_code(code) if or_label and or_label != fall_through: # value is false => short-circuit to next 'or' code.put_goto(or_label) if and_label: # value is true => go to next 'and' if or_label: code.putln("} else {") if not uses_temp: # disposal: (not uses_temp) and {(and_label and or_label) [else]} self.arg.generate_disposal_code(code) if and_label != fall_through: code.put_goto(and_label) if not and_label or not or_label: # if no next 'and' or 'or', we provide the result if and_label or or_label: code.putln("} else {") self.value.generate_evaluation_code(code) self.value.make_owned_reference(code) code.putln("%s = %s;" % (final_result_temp, self.value.result())) self.value.generate_post_assignment_code(code) # disposal: {not (and_label and or_label) [else]} self.arg.generate_disposal_code(code) self.value.free_temps(code) if end_label != fall_through: code.put_goto(end_label) if and_label or or_label: code.putln("}") self.arg.free_temps(code) class CondExprNode(ExprNode): # Short-circuiting conditional expression. 
# # test ExprNode # true_val ExprNode # false_val ExprNode true_val = None false_val = None subexprs = ['test', 'true_val', 'false_val'] def type_dependencies(self, env): return self.true_val.type_dependencies(env) + self.false_val.type_dependencies(env) def infer_type(self, env): return PyrexTypes.independent_spanning_type( self.true_val.infer_type(env), self.false_val.infer_type(env)) def calculate_constant_result(self): if self.test.constant_result: self.constant_result = self.true_val.constant_result else: self.constant_result = self.false_val.constant_result def is_ephemeral(self): return self.true_val.is_ephemeral() or self.false_val.is_ephemeral() def analyse_types(self, env): self.test = self.test.analyse_types(env).coerce_to_boolean(env) self.true_val = self.true_val.analyse_types(env) self.false_val = self.false_val.analyse_types(env) self.is_temp = 1 return self.analyse_result_type(env) def analyse_result_type(self, env): self.type = PyrexTypes.independent_spanning_type( self.true_val.type, self.false_val.type) if self.type.is_reference: self.type = PyrexTypes.CFakeReferenceType(self.type.ref_base_type) if self.type.is_pyobject: self.result_ctype = py_object_type elif self.true_val.is_ephemeral() or self.false_val.is_ephemeral(): error(self.pos, "Unsafe C derivative of temporary Python reference used in conditional expression") if self.true_val.type.is_pyobject or self.false_val.type.is_pyobject: self.true_val = self.true_val.coerce_to(self.type, env) self.false_val = self.false_val.coerce_to(self.type, env) if self.type.is_error: self.type_error() return self def coerce_to_integer(self, env): self.true_val = self.true_val.coerce_to_integer(env) self.false_val = self.false_val.coerce_to_integer(env) self.result_ctype = None return self.analyse_result_type(env) def coerce_to(self, dst_type, env): self.true_val = self.true_val.coerce_to(dst_type, env) self.false_val = self.false_val.coerce_to(dst_type, env) self.result_ctype = None return self.analyse_result_type(env) def type_error(self): if not (self.true_val.type.is_error or self.false_val.type.is_error): error(self.pos, "Incompatible types in conditional expression (%s; %s)" % (self.true_val.type, self.false_val.type)) self.type = PyrexTypes.error_type def check_const(self): return (self.test.check_const() and self.true_val.check_const() and self.false_val.check_const()) def generate_evaluation_code(self, code): # Because subexprs may not be evaluated we can use a more optimal # subexpr allocation strategy than the default, so override evaluation_code. 
code.mark_pos(self.pos) self.allocate_temp_result(code) self.test.generate_evaluation_code(code) code.putln("if (%s) {" % self.test.result()) self.eval_and_get(code, self.true_val) code.putln("} else {") self.eval_and_get(code, self.false_val) code.putln("}") self.test.generate_disposal_code(code) self.test.free_temps(code) def eval_and_get(self, code, expr): expr.generate_evaluation_code(code) if self.type.is_memoryviewslice: expr.make_owned_memoryviewslice(code) else: expr.make_owned_reference(code) code.putln('%s = %s;' % (self.result(), expr.result_as(self.ctype()))) expr.generate_post_assignment_code(code) expr.free_temps(code) def generate_subexpr_disposal_code(self, code): pass # done explicitly above (cleanup must separately happen within the if/else blocks) def free_subexpr_temps(self, code): pass # done explicitly above (cleanup must separately happen within the if/else blocks) richcmp_constants = { "<" : "Py_LT", "<=": "Py_LE", "==": "Py_EQ", "!=": "Py_NE", "<>": "Py_NE", ">" : "Py_GT", ">=": "Py_GE", # the following are faked by special compare functions "in" : "Py_EQ", "not_in": "Py_NE", } class CmpNode(object): # Mixin class containing code common to PrimaryCmpNodes # and CascadedCmpNodes. special_bool_cmp_function = None special_bool_cmp_utility_code = None def infer_type(self, env): # TODO: Actually implement this (after merging with -unstable). return py_object_type def calculate_cascaded_constant_result(self, operand1_result): func = compile_time_binary_operators[self.operator] operand2_result = self.operand2.constant_result if (isinstance(operand1_result, any_string_type) and isinstance(operand2_result, any_string_type) and type(operand1_result) != type(operand2_result)): # string comparison of different types isn't portable return if self.operator in ('in', 'not_in'): if isinstance(self.operand2, (ListNode, TupleNode, SetNode)): if not self.operand2.args: self.constant_result = self.operator == 'not_in' return elif isinstance(self.operand2, ListNode) and not self.cascade: # tuples are more efficient to store than lists self.operand2 = self.operand2.as_tuple() elif isinstance(self.operand2, DictNode): if not self.operand2.key_value_pairs: self.constant_result = self.operator == 'not_in' return self.constant_result = func(operand1_result, operand2_result) def cascaded_compile_time_value(self, operand1, denv): func = get_compile_time_binop(self) operand2 = self.operand2.compile_time_value(denv) try: result = func(operand1, operand2) except Exception as e: self.compile_time_value_error(e) result = None if result: cascade = self.cascade if cascade: result = result and cascade.cascaded_compile_time_value(operand2, denv) return result def is_cpp_comparison(self): return self.operand1.type.is_cpp_class or self.operand2.type.is_cpp_class def find_common_int_type(self, env, op, operand1, operand2): # type1 != type2 and at least one of the types is not a C int type1 = operand1.type type2 = operand2.type type1_can_be_int = False type2_can_be_int = False if operand1.is_string_literal and operand1.can_coerce_to_char_literal(): type1_can_be_int = True if operand2.is_string_literal and operand2.can_coerce_to_char_literal(): type2_can_be_int = True if type1.is_int: if type2_can_be_int: return type1 elif type2.is_int: if type1_can_be_int: return type2 elif type1_can_be_int: if type2_can_be_int: if Builtin.unicode_type in (type1, type2): return PyrexTypes.c_py_ucs4_type else: return PyrexTypes.c_uchar_type return None def find_common_type(self, env, op, operand1, common_type=None): 
operand2 = self.operand2 type1 = operand1.type type2 = operand2.type new_common_type = None # catch general errors if (type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type))): error(self.pos, "Comparisons between bytes/unicode and str are not portable to Python 3") new_common_type = error_type # try to use numeric comparisons where possible elif type1.is_complex or type2.is_complex: if (op not in ('==', '!=') and (type1.is_complex or type1.is_numeric) and (type2.is_complex or type2.is_numeric)): error(self.pos, "complex types are unordered") new_common_type = error_type elif type1.is_pyobject: new_common_type = Builtin.complex_type if type1.subtype_of(Builtin.complex_type) else py_object_type elif type2.is_pyobject: new_common_type = Builtin.complex_type if type2.subtype_of(Builtin.complex_type) else py_object_type else: new_common_type = PyrexTypes.widest_numeric_type(type1, type2) elif type1.is_numeric and type2.is_numeric: new_common_type = PyrexTypes.widest_numeric_type(type1, type2) elif common_type is None or not common_type.is_pyobject: new_common_type = self.find_common_int_type(env, op, operand1, operand2) if new_common_type is None: # fall back to generic type compatibility tests if type1.is_ctuple or type2.is_ctuple: new_common_type = py_object_type elif type1 == type2: new_common_type = type1 elif type1.is_pyobject or type2.is_pyobject: if type2.is_numeric or type2.is_string: if operand2.check_for_coercion_error(type1, env): new_common_type = error_type else: new_common_type = py_object_type elif type1.is_numeric or type1.is_string: if operand1.check_for_coercion_error(type2, env): new_common_type = error_type else: new_common_type = py_object_type elif py_object_type.assignable_from(type1) and py_object_type.assignable_from(type2): new_common_type = py_object_type else: # one Python type and one non-Python type, not assignable self.invalid_types_error(operand1, op, operand2) new_common_type = error_type elif type1.assignable_from(type2): new_common_type = type1 elif type2.assignable_from(type1): new_common_type = type2 else: # C types that we couldn't handle up to here are an error self.invalid_types_error(operand1, op, operand2) new_common_type = error_type if new_common_type.is_string and (isinstance(operand1, BytesNode) or isinstance(operand2, BytesNode)): # special case when comparing char* to bytes literal: must # compare string values! 
new_common_type = bytes_type # recursively merge types if common_type is None or new_common_type.is_error: common_type = new_common_type else: # we could do a lot better by splitting the comparison # into a non-Python part and a Python part, but this is # safer for now common_type = PyrexTypes.spanning_type(common_type, new_common_type) if self.cascade: common_type = self.cascade.find_common_type(env, self.operator, operand2, common_type) return common_type def invalid_types_error(self, operand1, op, operand2): error(self.pos, "Invalid types for '%s' (%s, %s)" % (op, operand1.type, operand2.type)) def is_python_comparison(self): return (not self.is_ptr_contains() and not self.is_c_string_contains() and (self.has_python_operands() or (self.cascade and self.cascade.is_python_comparison()) or self.operator in ('in', 'not_in'))) def coerce_operands_to(self, dst_type, env): operand2 = self.operand2 if operand2.type != dst_type: self.operand2 = operand2.coerce_to(dst_type, env) if self.cascade: self.cascade.coerce_operands_to(dst_type, env) def is_python_result(self): return ((self.has_python_operands() and self.special_bool_cmp_function is None and self.operator not in ('is', 'is_not', 'in', 'not_in') and not self.is_c_string_contains() and not self.is_ptr_contains()) or (self.cascade and self.cascade.is_python_result())) def is_c_string_contains(self): return self.operator in ('in', 'not_in') and \ ((self.operand1.type.is_int and (self.operand2.type.is_string or self.operand2.type is bytes_type)) or (self.operand1.type.is_unicode_char and self.operand2.type is unicode_type)) def is_ptr_contains(self): if self.operator in ('in', 'not_in'): container_type = self.operand2.type return (container_type.is_ptr or container_type.is_array) \ and not container_type.is_string def find_special_bool_compare_function(self, env, operand1, result_is_bool=False): # note: currently operand1 must get coerced to a Python object if we succeed here! 
if self.operator in ('==', '!='): type1, type2 = operand1.type, self.operand2.type if result_is_bool or (type1.is_builtin_type and type2.is_builtin_type): if type1 is Builtin.unicode_type or type2 is Builtin.unicode_type: self.special_bool_cmp_utility_code = UtilityCode.load_cached("UnicodeEquals", "StringTools.c") self.special_bool_cmp_function = "__Pyx_PyUnicode_Equals" return True elif type1 is Builtin.bytes_type or type2 is Builtin.bytes_type: self.special_bool_cmp_utility_code = UtilityCode.load_cached("BytesEquals", "StringTools.c") self.special_bool_cmp_function = "__Pyx_PyBytes_Equals" return True elif type1 is Builtin.basestring_type or type2 is Builtin.basestring_type: self.special_bool_cmp_utility_code = UtilityCode.load_cached("UnicodeEquals", "StringTools.c") self.special_bool_cmp_function = "__Pyx_PyUnicode_Equals" return True elif type1 is Builtin.str_type or type2 is Builtin.str_type: self.special_bool_cmp_utility_code = UtilityCode.load_cached("StrEquals", "StringTools.c") self.special_bool_cmp_function = "__Pyx_PyString_Equals" return True elif self.operator in ('in', 'not_in'): if self.operand2.type is Builtin.dict_type: self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyDictContains", "ObjectHandling.c") self.special_bool_cmp_function = "__Pyx_PyDict_ContainsTF" return True elif self.operand2.type is Builtin.unicode_type: self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyUnicodeContains", "StringTools.c") self.special_bool_cmp_function = "__Pyx_PyUnicode_ContainsTF" return True else: if not self.operand2.type.is_pyobject: self.operand2 = self.operand2.coerce_to_pyobject(env) self.special_bool_cmp_utility_code = UtilityCode.load_cached("PySequenceContains", "ObjectHandling.c") self.special_bool_cmp_function = "__Pyx_PySequence_ContainsTF" return True return False def generate_operation_code(self, code, result_code, operand1, op , operand2): if self.type.is_pyobject: error_clause = code.error_goto_if_null got_ref = "__Pyx_XGOTREF(%s); " % result_code if self.special_bool_cmp_function: code.globalstate.use_utility_code( UtilityCode.load_cached("PyBoolOrNullFromLong", "ObjectHandling.c")) coerce_result = "__Pyx_PyBoolOrNull_FromLong" else: coerce_result = "__Pyx_PyBool_FromLong" else: error_clause = code.error_goto_if_neg got_ref = "" coerce_result = "" if self.special_bool_cmp_function: if operand1.type.is_pyobject: result1 = operand1.py_result() else: result1 = operand1.result() if operand2.type.is_pyobject: result2 = operand2.py_result() else: result2 = operand2.result() if self.special_bool_cmp_utility_code: code.globalstate.use_utility_code(self.special_bool_cmp_utility_code) code.putln( "%s = %s(%s(%s, %s, %s)); %s%s" % ( result_code, coerce_result, self.special_bool_cmp_function, result1, result2, richcmp_constants[op], got_ref, error_clause(result_code, self.pos))) elif operand1.type.is_pyobject and op not in ('is', 'is_not'): assert op not in ('in', 'not_in'), op code.putln("%s = PyObject_RichCompare(%s, %s, %s); %s%s" % ( result_code, operand1.py_result(), operand2.py_result(), richcmp_constants[op], got_ref, error_clause(result_code, self.pos))) elif operand1.type.is_complex: code.putln("%s = %s(%s%s(%s, %s));" % ( result_code, coerce_result, op == "!=" and "!" 
or "", operand1.type.unary_op('eq'), operand1.result(), operand2.result())) else: type1 = operand1.type type2 = operand2.type if (type1.is_extension_type or type2.is_extension_type) \ and not type1.same_as(type2): common_type = py_object_type elif type1.is_numeric: common_type = PyrexTypes.widest_numeric_type(type1, type2) else: common_type = type1 code1 = operand1.result_as(common_type) code2 = operand2.result_as(common_type) statement = "%s = %s(%s %s %s);" % ( result_code, coerce_result, code1, self.c_operator(op), code2) if self.is_cpp_comparison() and self.exception_check == '+': translate_cpp_exception(code, self.pos, statement, self.exception_value, self.in_nogil_context) code.putln(statement) def c_operator(self, op): if op == 'is': return "==" elif op == 'is_not': return "!=" else: return op class PrimaryCmpNode(ExprNode, CmpNode): # Non-cascaded comparison or first comparison of # a cascaded sequence. # # operator string # operand1 ExprNode # operand2 ExprNode # cascade CascadedCmpNode # We don't use the subexprs mechanism, because # things here are too complicated for it to handle. # Instead, we override all the framework methods # which use it. child_attrs = ['operand1', 'operand2', 'coerced_operand2', 'cascade'] cascade = None coerced_operand2 = None is_memslice_nonecheck = False def infer_type(self, env): # TODO: Actually implement this (after merging with -unstable). return py_object_type def type_dependencies(self, env): return () def calculate_constant_result(self): assert not self.cascade self.calculate_cascaded_constant_result(self.operand1.constant_result) def compile_time_value(self, denv): operand1 = self.operand1.compile_time_value(denv) return self.cascaded_compile_time_value(operand1, denv) def analyse_types(self, env): self.operand1 = self.operand1.analyse_types(env) self.operand2 = self.operand2.analyse_types(env) if self.is_cpp_comparison(): self.analyse_cpp_comparison(env) if self.cascade: error(self.pos, "Cascading comparison not yet supported for cpp types.") return self type1 = self.operand1.type type2 = self.operand2.type if is_pythran_expr(type1) or is_pythran_expr(type2): if is_pythran_supported_type(type1) and is_pythran_supported_type(type2): self.type = PythranExpr(pythran_binop_type(self.operator, type1, type2)) self.is_pycmp = False return self if self.analyse_memoryviewslice_comparison(env): return self if self.cascade: self.cascade = self.cascade.analyse_types(env) if self.operator in ('in', 'not_in'): if self.is_c_string_contains(): self.is_pycmp = False common_type = None if self.cascade: error(self.pos, "Cascading comparison not yet supported for 'int_val in string'.") return self if self.operand2.type is unicode_type: env.use_utility_code(UtilityCode.load_cached("PyUCS4InUnicode", "StringTools.c")) else: if self.operand1.type is PyrexTypes.c_uchar_type: self.operand1 = self.operand1.coerce_to(PyrexTypes.c_char_type, env) if self.operand2.type is not bytes_type: self.operand2 = self.operand2.coerce_to(bytes_type, env) env.use_utility_code(UtilityCode.load_cached("BytesContains", "StringTools.c")) self.operand2 = self.operand2.as_none_safe_node( "argument of type 'NoneType' is not iterable") elif self.is_ptr_contains(): if self.cascade: error(self.pos, "Cascading comparison not supported for 'val in sliced pointer'.") self.type = PyrexTypes.c_bint_type # Will be transformed by IterationTransform return self elif self.find_special_bool_compare_function(env, self.operand1): if not self.operand1.type.is_pyobject: self.operand1 = 
self.operand1.coerce_to_pyobject(env) common_type = None # if coercion needed, the method call above has already done it self.is_pycmp = False # result is bint else: common_type = py_object_type self.is_pycmp = True elif self.find_special_bool_compare_function(env, self.operand1): if not self.operand1.type.is_pyobject: self.operand1 = self.operand1.coerce_to_pyobject(env) common_type = None # if coercion needed, the method call above has already done it self.is_pycmp = False # result is bint else: common_type = self.find_common_type(env, self.operator, self.operand1) self.is_pycmp = common_type.is_pyobject if common_type is not None and not common_type.is_error: if self.operand1.type != common_type: self.operand1 = self.operand1.coerce_to(common_type, env) self.coerce_operands_to(common_type, env) if self.cascade: self.operand2 = self.operand2.coerce_to_simple(env) self.cascade.coerce_cascaded_operands_to_temp(env) operand2 = self.cascade.optimise_comparison(self.operand2, env) if operand2 is not self.operand2: self.coerced_operand2 = operand2 if self.is_python_result(): self.type = PyrexTypes.py_object_type else: self.type = PyrexTypes.c_bint_type cdr = self.cascade while cdr: cdr.type = self.type cdr = cdr.cascade if self.is_pycmp or self.cascade or self.special_bool_cmp_function: # 1) owned reference, 2) reused value, 3) potential function error return value self.is_temp = 1 return self def analyse_cpp_comparison(self, env): type1 = self.operand1.type type2 = self.operand2.type self.is_pycmp = False entry = env.lookup_operator(self.operator, [self.operand1, self.operand2]) if entry is None: error(self.pos, "Invalid types for '%s' (%s, %s)" % (self.operator, type1, type2)) self.type = PyrexTypes.error_type self.result_code = "" return func_type = entry.type if func_type.is_ptr: func_type = func_type.base_type self.exception_check = func_type.exception_check self.exception_value = func_type.exception_value if self.exception_check == '+': self.is_temp = True if self.exception_value is None: env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) if len(func_type.args) == 1: self.operand2 = self.operand2.coerce_to(func_type.args[0].type, env) else: self.operand1 = self.operand1.coerce_to(func_type.args[0].type, env) self.operand2 = self.operand2.coerce_to(func_type.args[1].type, env) self.type = func_type.return_type def analyse_memoryviewslice_comparison(self, env): have_none = self.operand1.is_none or self.operand2.is_none have_slice = (self.operand1.type.is_memoryviewslice or self.operand2.type.is_memoryviewslice) ops = ('==', '!=', 'is', 'is_not') if have_slice and have_none and self.operator in ops: self.is_pycmp = False self.type = PyrexTypes.c_bint_type self.is_memslice_nonecheck = True return True return False def coerce_to_boolean(self, env): if self.is_pycmp: # coercing to bool => may allow for more efficient comparison code if self.find_special_bool_compare_function( env, self.operand1, result_is_bool=True): self.is_pycmp = False self.type = PyrexTypes.c_bint_type self.is_temp = 1 if self.cascade: operand2 = self.cascade.optimise_comparison( self.operand2, env, result_is_bool=True) if operand2 is not self.operand2: self.coerced_operand2 = operand2 return self # TODO: check if we can optimise parts of the cascade here return ExprNode.coerce_to_boolean(self, env) def has_python_operands(self): return (self.operand1.type.is_pyobject or self.operand2.type.is_pyobject) def check_const(self): if self.cascade: self.not_const() return False else: return 
self.operand1.check_const() and self.operand2.check_const() def calculate_result_code(self): if self.operand1.type.is_complex: if self.operator == "!=": negation = "!" else: negation = "" return "(%s%s(%s, %s))" % ( negation, self.operand1.type.binary_op('=='), self.operand1.result(), self.operand2.result()) elif self.is_c_string_contains(): if self.operand2.type is unicode_type: method = "__Pyx_UnicodeContainsUCS4" else: method = "__Pyx_BytesContains" if self.operator == "not_in": negation = "!" else: negation = "" return "(%s%s(%s, %s))" % ( negation, method, self.operand2.result(), self.operand1.result()) else: result1 = self.operand1.result() result2 = self.operand2.result() if self.is_memslice_nonecheck: if self.operand1.type.is_memoryviewslice: result1 = "((PyObject *) %s.memview)" % result1 else: result2 = "((PyObject *) %s.memview)" % result2 return "(%s %s %s)" % ( result1, self.c_operator(self.operator), result2) def generate_evaluation_code(self, code): self.operand1.generate_evaluation_code(code) self.operand2.generate_evaluation_code(code) if self.is_temp: self.allocate_temp_result(code) self.generate_operation_code(code, self.result(), self.operand1, self.operator, self.operand2) if self.cascade: self.cascade.generate_evaluation_code( code, self.result(), self.coerced_operand2 or self.operand2, needs_evaluation=self.coerced_operand2 is not None) self.operand1.generate_disposal_code(code) self.operand1.free_temps(code) self.operand2.generate_disposal_code(code) self.operand2.free_temps(code) def generate_subexpr_disposal_code(self, code): # If this is called, it is a non-cascaded cmp, # so only need to dispose of the two main operands. self.operand1.generate_disposal_code(code) self.operand2.generate_disposal_code(code) def free_subexpr_temps(self, code): # If this is called, it is a non-cascaded cmp, # so only need to dispose of the two main operands. self.operand1.free_temps(code) self.operand2.free_temps(code) def annotate(self, code): self.operand1.annotate(code) self.operand2.annotate(code) if self.cascade: self.cascade.annotate(code) class CascadedCmpNode(Node, CmpNode): # A CascadedCmpNode is not a complete expression node. It # hangs off the side of another comparison node, shares # its left operand with that node, and shares its result # with the PrimaryCmpNode at the head of the chain. # # operator string # operand2 ExprNode # cascade CascadedCmpNode child_attrs = ['operand2', 'coerced_operand2', 'cascade'] cascade = None coerced_operand2 = None constant_result = constant_value_not_set # FIXME: where to calculate this? def infer_type(self, env): # TODO: Actually implement this (after merging with -unstable). return py_object_type def type_dependencies(self, env): return () def has_constant_result(self): return self.constant_result is not constant_value_not_set and \ self.constant_result is not not_a_constant def analyse_types(self, env): self.operand2 = self.operand2.analyse_types(env) if self.cascade: self.cascade = self.cascade.analyse_types(env) return self def has_python_operands(self): return self.operand2.type.is_pyobject def is_cpp_comparison(self): # cascaded comparisons aren't currently implemented for c++ classes. 
return False def optimise_comparison(self, operand1, env, result_is_bool=False): if self.find_special_bool_compare_function(env, operand1, result_is_bool): self.is_pycmp = False self.type = PyrexTypes.c_bint_type if not operand1.type.is_pyobject: operand1 = operand1.coerce_to_pyobject(env) if self.cascade: operand2 = self.cascade.optimise_comparison(self.operand2, env, result_is_bool) if operand2 is not self.operand2: self.coerced_operand2 = operand2 return operand1 def coerce_operands_to_pyobjects(self, env): self.operand2 = self.operand2.coerce_to_pyobject(env) if self.operand2.type is dict_type and self.operator in ('in', 'not_in'): self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") if self.cascade: self.cascade.coerce_operands_to_pyobjects(env) def coerce_cascaded_operands_to_temp(self, env): if self.cascade: #self.operand2 = self.operand2.coerce_to_temp(env) #CTT self.operand2 = self.operand2.coerce_to_simple(env) self.cascade.coerce_cascaded_operands_to_temp(env) def generate_evaluation_code(self, code, result, operand1, needs_evaluation=False): if self.type.is_pyobject: code.putln("if (__Pyx_PyObject_IsTrue(%s)) {" % result) code.put_decref(result, self.type) else: code.putln("if (%s) {" % result) if needs_evaluation: operand1.generate_evaluation_code(code) self.operand2.generate_evaluation_code(code) self.generate_operation_code(code, result, operand1, self.operator, self.operand2) if self.cascade: self.cascade.generate_evaluation_code( code, result, self.coerced_operand2 or self.operand2, needs_evaluation=self.coerced_operand2 is not None) if needs_evaluation: operand1.generate_disposal_code(code) operand1.free_temps(code) # Cascaded cmp result is always temp self.operand2.generate_disposal_code(code) self.operand2.free_temps(code) code.putln("}") def annotate(self, code): self.operand2.annotate(code) if self.cascade: self.cascade.annotate(code) binop_node_classes = { "or": BoolBinopNode, "and": BoolBinopNode, "|": IntBinopNode, "^": IntBinopNode, "&": IntBinopNode, "<<": IntBinopNode, ">>": IntBinopNode, "+": AddNode, "-": SubNode, "*": MulNode, "@": MatMultNode, "/": DivNode, "//": DivNode, "%": ModNode, "**": PowNode, } def binop_node(pos, operator, operand1, operand2, inplace=False, **kwargs): # Construct binop node of appropriate class for # given operator. return binop_node_classes[operator]( pos, operator=operator, operand1=operand1, operand2=operand2, inplace=inplace, **kwargs) #------------------------------------------------------------------- # # Coercion nodes # # Coercion nodes are special in that they are created during # the analyse_types phase of parse tree processing. # Their __init__ methods consequently incorporate some aspects # of that phase. # #------------------------------------------------------------------- class CoercionNode(ExprNode): # Abstract base class for coercion nodes. 
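# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the compiler sources): every coercion node
# follows the same wrapping pattern -- it is created during analyse_types(),
# keeps the node being coerced as its single child 'arg', and presents a
# (possibly different) 'type' to its own parent.  The toy class below only
# models that shape; all names in it are hypothetical.
class _ToyCoercion(object):
    def __init__(self, arg, dst_type):
        self.arg = arg          # the wrapped child node
        self.type = dst_type    # the type this wrapper presents upwards
    def result(self):
        # many of the concrete subclasses below simply cast or forward the
        # child's result instead of allocating a temporary of their own
        return "((%s)%s)" % (self.type, self.arg.result())
# ---------------------------------------------------------------------------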
# # arg ExprNode node being coerced subexprs = ['arg'] constant_result = not_a_constant def __init__(self, arg): super(CoercionNode, self).__init__(arg.pos) self.arg = arg if debug_coercion: print("%s Coercing %s" % (self, self.arg)) def calculate_constant_result(self): # constant folding can break type coercion, so this is disabled pass def annotate(self, code): self.arg.annotate(code) if self.arg.type != self.type: file, line, col = self.pos code.annotate((file, line, col-1), AnnotationItem( style='coerce', tag='coerce', text='[%s] to [%s]' % (self.arg.type, self.type))) class CoerceToMemViewSliceNode(CoercionNode): """ Coerce an object to a memoryview slice. This holds a new reference in a managed temp. """ def __init__(self, arg, dst_type, env): assert dst_type.is_memoryviewslice assert not arg.type.is_memoryviewslice CoercionNode.__init__(self, arg) self.type = dst_type self.is_temp = 1 self.env = env self.use_managed_ref = True self.arg = arg def generate_result_code(self, code): self.type.create_from_py_utility_code(self.env) code.putln("%s = %s(%s);" % (self.result(), self.type.from_py_function, self.arg.py_result())) error_cond = self.type.error_condition(self.result()) code.putln(code.error_goto_if(error_cond, self.pos)) class CastNode(CoercionNode): # Wrap a node in a C type cast. def __init__(self, arg, new_type): CoercionNode.__init__(self, arg) self.type = new_type def may_be_none(self): return self.arg.may_be_none() def calculate_result_code(self): return self.arg.result_as(self.type) def generate_result_code(self, code): self.arg.generate_result_code(code) class PyTypeTestNode(CoercionNode): # This node is used to check that a generic Python # object is an instance of a particular extension type. # This node borrows the result of its argument node. exact_builtin_type = True def __init__(self, arg, dst_type, env, notnone=False): # The arg is know to be a Python object, and # the dst_type is known to be an extension type. 
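# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the compiler sources): PyTypeTestNode
# "borrows" its argument's result -- it adds a runtime type check but reuses
# the same C variable as its own result instead of creating a temporary.  The
# toy below only models that borrow-and-guard shape; 'type_test' stands for
# the string produced by dst_type.type_test_code() in the real node, and all
# other names are hypothetical.
class _ToyTypeTest(object):
    def __init__(self, arg, type_test):
        self.arg = arg
        self.type_test = type_test
    def calculate_result_code(self):
        return self.arg.result()       # borrow: no new temp of its own
    def generate_result_code(self, emit):
        # only a guard is emitted; on failure the generated C jumps to the
        # enclosing error label, as code.error_goto() does in the real node
        emit("if (!(%s)) goto bad;" % self.type_test)
# ---------------------------------------------------------------------------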
assert dst_type.is_extension_type or dst_type.is_builtin_type, "PyTypeTest on non extension type" CoercionNode.__init__(self, arg) self.type = dst_type self.result_ctype = arg.ctype() self.notnone = notnone nogil_check = Node.gil_error gil_message = "Python type test" def analyse_types(self, env): return self def may_be_none(self): if self.notnone: return False return self.arg.may_be_none() def is_simple(self): return self.arg.is_simple() def result_in_temp(self): return self.arg.result_in_temp() def is_ephemeral(self): return self.arg.is_ephemeral() def nonlocally_immutable(self): return self.arg.nonlocally_immutable() def calculate_constant_result(self): # FIXME pass def calculate_result_code(self): return self.arg.result() def generate_result_code(self, code): if self.type.typeobj_is_available(): if self.type.is_builtin_type: type_test = self.type.type_test_code( self.arg.py_result(), self.notnone, exact=self.exact_builtin_type) else: type_test = self.type.type_test_code( self.arg.py_result(), self.notnone) code.globalstate.use_utility_code( UtilityCode.load_cached("ExtTypeTest", "ObjectHandling.c")) code.putln("if (!(%s)) %s" % ( type_test, code.error_goto(self.pos))) else: error(self.pos, "Cannot test type of extern C class " "without type object name specification") def generate_post_assignment_code(self, code): self.arg.generate_post_assignment_code(code) def free_temps(self, code): self.arg.free_temps(code) class NoneCheckNode(CoercionNode): # This node is used to check that a Python object is not None and # raises an appropriate exception (as specified by the creating # transform). is_nonecheck = True def __init__(self, arg, exception_type_cname, exception_message, exception_format_args): CoercionNode.__init__(self, arg) self.type = arg.type self.result_ctype = arg.ctype() self.exception_type_cname = exception_type_cname self.exception_message = exception_message self.exception_format_args = tuple(exception_format_args or ()) nogil_check = None # this node only guards an operation that would fail already def analyse_types(self, env): return self def may_be_none(self): return False def is_simple(self): return self.arg.is_simple() def result_in_temp(self): return self.arg.result_in_temp() def nonlocally_immutable(self): return self.arg.nonlocally_immutable() def calculate_result_code(self): return self.arg.result() def condition(self): if self.type.is_pyobject: return self.arg.py_result() elif self.type.is_memoryviewslice: return "((PyObject *) %s.memview)" % self.arg.result() else: raise Exception("unsupported type") def put_nonecheck(self, code): code.putln( "if (unlikely(%s == Py_None)) {" % self.condition()) if self.in_nogil_context: code.put_ensure_gil() escape = StringEncoding.escape_byte_string if self.exception_format_args: code.putln('PyErr_Format(%s, "%s", %s);' % ( self.exception_type_cname, StringEncoding.escape_byte_string( self.exception_message.encode('UTF-8')), ', '.join([ '"%s"' % escape(str(arg).encode('UTF-8')) for arg in self.exception_format_args ]))) else: code.putln('PyErr_SetString(%s, "%s");' % ( self.exception_type_cname, escape(self.exception_message.encode('UTF-8')))) if self.in_nogil_context: code.put_release_ensured_gil() code.putln(code.error_goto(self.pos)) code.putln("}") def generate_result_code(self, code): self.put_nonecheck(code) def generate_post_assignment_code(self, code): self.arg.generate_post_assignment_code(code) def free_temps(self, code): self.arg.free_temps(code) class CoerceToPyTypeNode(CoercionNode): # This node is used to convert a C 
data type # to a Python object. type = py_object_type target_type = py_object_type is_temp = 1 def __init__(self, arg, env, type=py_object_type): if not arg.type.create_to_py_utility_code(env): error(arg.pos, "Cannot convert '%s' to Python object" % arg.type) elif arg.type.is_complex: # special case: complex coercion is so complex that it # uses a macro ("__pyx_PyComplex_FromComplex()"), for # which the argument must be simple arg = arg.coerce_to_simple(env) CoercionNode.__init__(self, arg) if type is py_object_type: # be specific about some known types if arg.type.is_string or arg.type.is_cpp_string: self.type = default_str_type(env) elif arg.type.is_pyunicode_ptr or arg.type.is_unicode_char: self.type = unicode_type elif arg.type.is_complex: self.type = Builtin.complex_type self.target_type = self.type elif arg.type.is_string or arg.type.is_cpp_string: if (type not in (bytes_type, bytearray_type) and not env.directives['c_string_encoding']): error(arg.pos, "default encoding required for conversion from '%s' to '%s'" % (arg.type, type)) self.type = self.target_type = type else: # FIXME: check that the target type and the resulting type are compatible self.target_type = type gil_message = "Converting to Python object" def may_be_none(self): # FIXME: is this always safe? return False def coerce_to_boolean(self, env): arg_type = self.arg.type if (arg_type == PyrexTypes.c_bint_type or (arg_type.is_pyobject and arg_type.name == 'bool')): return self.arg.coerce_to_temp(env) else: return CoerceToBooleanNode(self, env) def coerce_to_integer(self, env): # If not already some C integer type, coerce to longint. if self.arg.type.is_int: return self.arg else: return self.arg.coerce_to(PyrexTypes.c_long_type, env) def analyse_types(self, env): # The arg is always already analysed return self def generate_result_code(self, code): code.putln('%s; %s' % ( self.arg.type.to_py_call_code( self.arg.result(), self.result(), self.target_type), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) class CoerceIntToBytesNode(CoerceToPyTypeNode): # This node is used to convert a C int type to a Python bytes # object. is_temp = 1 def __init__(self, arg, env): arg = arg.coerce_to_simple(env) CoercionNode.__init__(self, arg) self.type = Builtin.bytes_type def generate_result_code(self, code): arg = self.arg arg_result = arg.result() if arg.type not in (PyrexTypes.c_char_type, PyrexTypes.c_uchar_type, PyrexTypes.c_schar_type): if arg.type.signed: code.putln("if ((%s < 0) || (%s > 255)) {" % ( arg_result, arg_result)) else: code.putln("if (%s > 255) {" % arg_result) code.putln('PyErr_SetString(PyExc_OverflowError, ' '"value too large to pack into a byte"); %s' % ( code.error_goto(self.pos))) code.putln('}') temp = None if arg.type is not PyrexTypes.c_char_type: temp = code.funcstate.allocate_temp(PyrexTypes.c_char_type, manage_ref=False) code.putln("%s = (char)%s;" % (temp, arg_result)) arg_result = temp code.putln('%s = PyBytes_FromStringAndSize(&%s, 1); %s' % ( self.result(), arg_result, code.error_goto_if_null(self.result(), self.pos))) if temp is not None: code.funcstate.release_temp(temp) code.put_gotref(self.py_result()) class CoerceFromPyTypeNode(CoercionNode): # This node is used to convert a Python object # to a C data type. 
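# ---------------------------------------------------------------------------
# Illustrative example (not part of the compiler sources), written in the same
# style as the unicode code fragments used by the test modules further down:
# the assignments below are the typical places where a CoerceFromPyTypeNode
# and a CoerceToPyTypeNode get inserted during analyse_types -- a Python value
# flowing into a typed C variable and a C value flowing back out.  The
# function name and fragment are hypothetical.
_example_coercion_fragment = u"""
def convert(obj):
    cdef int n
    n = obj          # Python object -> C int:  CoerceFromPyTypeNode
    return n + 1     # C int -> Python object:  CoerceToPyTypeNode
"""
# ---------------------------------------------------------------------------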
def __init__(self, result_type, arg, env): CoercionNode.__init__(self, arg) self.type = result_type self.is_temp = 1 if not result_type.create_from_py_utility_code(env): error(arg.pos, "Cannot convert Python object to '%s'" % result_type) if self.type.is_string or self.type.is_pyunicode_ptr: if self.arg.is_name and self.arg.entry and self.arg.entry.is_pyglobal: warning(arg.pos, "Obtaining '%s' from externally modifiable global Python value" % result_type, level=1) def analyse_types(self, env): # The arg is always already analysed return self def is_ephemeral(self): return (self.type.is_ptr and not self.type.is_array) and self.arg.is_ephemeral() def generate_result_code(self, code): code.putln(self.type.from_py_call_code( self.arg.py_result(), self.result(), self.pos, code)) if self.type.is_pyobject: code.put_gotref(self.py_result()) def nogil_check(self, env): error(self.pos, "Coercion from Python not allowed without the GIL") class CoerceToBooleanNode(CoercionNode): # This node is used when a result needs to be used # in a boolean context. type = PyrexTypes.c_bint_type _special_builtins = { Builtin.list_type: 'PyList_GET_SIZE', Builtin.tuple_type: 'PyTuple_GET_SIZE', Builtin.set_type: 'PySet_GET_SIZE', Builtin.frozenset_type: 'PySet_GET_SIZE', Builtin.bytes_type: 'PyBytes_GET_SIZE', Builtin.unicode_type: '__Pyx_PyUnicode_IS_TRUE', } def __init__(self, arg, env): CoercionNode.__init__(self, arg) if arg.type.is_pyobject: self.is_temp = 1 def nogil_check(self, env): if self.arg.type.is_pyobject and self._special_builtins.get(self.arg.type) is None: self.gil_error() gil_message = "Truth-testing Python object" def check_const(self): if self.is_temp: self.not_const() return False return self.arg.check_const() def calculate_result_code(self): return "(%s != 0)" % self.arg.result() def generate_result_code(self, code): if not self.is_temp: return test_func = self._special_builtins.get(self.arg.type) if test_func is not None: code.putln("%s = (%s != Py_None) && (%s(%s) != 0);" % ( self.result(), self.arg.py_result(), test_func, self.arg.py_result())) else: code.putln( "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( self.result(), self.arg.py_result(), code.error_goto_if_neg(self.result(), self.pos))) class CoerceToComplexNode(CoercionNode): def __init__(self, arg, dst_type, env): if arg.type.is_complex: arg = arg.coerce_to_simple(env) self.type = dst_type CoercionNode.__init__(self, arg) dst_type.create_declaration_utility_code(env) def calculate_result_code(self): if self.arg.type.is_complex: real_part = "__Pyx_CREAL(%s)" % self.arg.result() imag_part = "__Pyx_CIMAG(%s)" % self.arg.result() else: real_part = self.arg.result() imag_part = "0" return "%s(%s, %s)" % ( self.type.from_parts, real_part, imag_part) def generate_result_code(self, code): pass class CoerceToTempNode(CoercionNode): # This node is used to force the result of another node # to be stored in a temporary. It is only used if the # argument node's result is not already in a temporary. 
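# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the compiler sources): the node below is
# only wrapped around results that do not already live in a temporary, giving
# the value a stable, owned home (an incref'd temp for Python objects) before
# it is reused.  A toy model of that decision; the names here are
# hypothetical, only result_in_temp() mirrors the existing ExprNode API.
def _toy_ensure_temp(node, make_temp):
    return node if node.result_in_temp() else make_temp(node)
# ---------------------------------------------------------------------------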
def __init__(self, arg, env): CoercionNode.__init__(self, arg) self.type = self.arg.type.as_argument_type() self.constant_result = self.arg.constant_result self.is_temp = 1 if self.type.is_pyobject: self.result_ctype = py_object_type gil_message = "Creating temporary Python reference" def analyse_types(self, env): # The arg is always already analysed return self def coerce_to_boolean(self, env): self.arg = self.arg.coerce_to_boolean(env) if self.arg.is_simple(): return self.arg self.type = self.arg.type self.result_ctype = self.type return self def generate_result_code(self, code): #self.arg.generate_evaluation_code(code) # Already done # by generic generate_subexpr_evaluation_code! code.putln("%s = %s;" % ( self.result(), self.arg.result_as(self.ctype()))) if self.use_managed_ref: if self.type.is_pyobject: code.put_incref(self.result(), self.ctype()) elif self.type.is_memoryviewslice: code.put_incref_memoryviewslice(self.result(), not self.in_nogil_context) class ProxyNode(CoercionNode): """ A node that should not be replaced by transforms or other means, and hence can be useful to wrap the argument to a clone node MyNode -> ProxyNode -> ArgNode CloneNode -^ """ nogil_check = None def __init__(self, arg): super(ProxyNode, self).__init__(arg) self.constant_result = arg.constant_result self._proxy_type() def analyse_types(self, env): self.arg = self.arg.analyse_expressions(env) self._proxy_type() return self def infer_type(self, env): return self.arg.infer_type(env) def _proxy_type(self): if hasattr(self.arg, 'type'): self.type = self.arg.type self.result_ctype = self.arg.result_ctype if hasattr(self.arg, 'entry'): self.entry = self.arg.entry def generate_result_code(self, code): self.arg.generate_result_code(code) def result(self): return self.arg.result() def is_simple(self): return self.arg.is_simple() def may_be_none(self): return self.arg.may_be_none() def generate_evaluation_code(self, code): self.arg.generate_evaluation_code(code) def generate_disposal_code(self, code): self.arg.generate_disposal_code(code) def free_temps(self, code): self.arg.free_temps(code) class CloneNode(CoercionNode): # This node is employed when the result of another node needs # to be used multiple times. The argument node's result must # be in a temporary. This node "borrows" the result from the # argument node, and does not generate any evaluation or # disposal code for it. The original owner of the argument # node is responsible for doing those things. 
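# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the compiler sources): the typical pairing
# is one owner node (often wrapped in the ProxyNode above) that evaluates and
# later disposes of a temporary, plus any number of CloneNodes that reuse the
# value without generating evaluation or disposal code of their own.  The toy
# class below only models that borrowing contract; its name is hypothetical.
class _ToyClone(object):
    def __init__(self, arg):
        self.arg = arg                       # must already be held in a temp
    def result(self):
        return self.arg.result()             # borrow, never own
    def generate_evaluation_code(self, code):
        pass                                 # the owner already evaluated it
    def generate_disposal_code(self, code):
        pass                                 # ... and will dispose of it
# ---------------------------------------------------------------------------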
subexprs = [] # Arg is not considered a subexpr nogil_check = None def __init__(self, arg): CoercionNode.__init__(self, arg) self.constant_result = arg.constant_result if hasattr(arg, 'type'): self.type = arg.type self.result_ctype = arg.result_ctype if hasattr(arg, 'entry'): self.entry = arg.entry def result(self): return self.arg.result() def may_be_none(self): return self.arg.may_be_none() def type_dependencies(self, env): return self.arg.type_dependencies(env) def infer_type(self, env): return self.arg.infer_type(env) def analyse_types(self, env): self.type = self.arg.type self.result_ctype = self.arg.result_ctype self.is_temp = 1 if hasattr(self.arg, 'entry'): self.entry = self.arg.entry return self def coerce_to(self, dest_type, env): if self.arg.is_literal: return self.arg.coerce_to(dest_type, env) return super(CloneNode, self).coerce_to(dest_type, env) def is_simple(self): return True # result is always in a temp (or a name) def generate_evaluation_code(self, code): pass def generate_result_code(self, code): pass def generate_disposal_code(self, code): pass def free_temps(self, code): pass class CMethodSelfCloneNode(CloneNode): # Special CloneNode for the self argument of builtin C methods # that accepts subtypes of the builtin type. This is safe only # for 'final' subtypes, as subtypes of the declared type may # override the C method. def coerce_to(self, dst_type, env): if dst_type.is_builtin_type and self.type.subtype_of(dst_type): return self return CloneNode.coerce_to(self, dst_type, env) class ModuleRefNode(ExprNode): # Simple returns the module object type = py_object_type is_temp = False subexprs = [] def analyse_types(self, env): return self def may_be_none(self): return False def calculate_result_code(self): return Naming.module_cname def generate_result_code(self, code): pass class DocstringRefNode(ExprNode): # Extracts the docstring of the body element subexprs = ['body'] type = py_object_type is_temp = True def __init__(self, pos, body): ExprNode.__init__(self, pos) assert body.type.is_pyobject self.body = body def analyse_types(self, env): return self def generate_result_code(self, code): code.putln('%s = __Pyx_GetAttr(%s, %s); %s' % ( self.result(), self.body.result(), code.intern_identifier(StringEncoding.EncodedString("__doc__")), code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.result()) #------------------------------------------------------------------------------------ # # Runtime support code # #------------------------------------------------------------------------------------ pyerr_occurred_withgil_utility_code= UtilityCode( proto = """ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ """, impl = """ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) { int err; #ifdef WITH_THREAD PyGILState_STATE _save = PyGILState_Ensure(); #endif err = !!PyErr_Occurred(); #ifdef WITH_THREAD PyGILState_Release(_save); #endif return err; } """ ) #------------------------------------------------------------------------------------ raise_unbound_local_error_utility_code = UtilityCode( proto = """ static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); """, impl = """ static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); } """) raise_closure_name_error_utility_code = UtilityCode( proto = """ static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname); """, impl = """ static 
CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname) { PyErr_Format(PyExc_NameError, "free variable '%s' referenced before assignment in enclosing scope", varname); } """) # Don't inline the function, it should really never be called in production raise_unbound_memoryview_utility_code_nogil = UtilityCode( proto = """ static void __Pyx_RaiseUnboundMemoryviewSliceNogil(const char *varname); """, impl = """ static void __Pyx_RaiseUnboundMemoryviewSliceNogil(const char *varname) { #ifdef WITH_THREAD PyGILState_STATE gilstate = PyGILState_Ensure(); #endif __Pyx_RaiseUnboundLocalError(varname); #ifdef WITH_THREAD PyGILState_Release(gilstate); #endif } """, requires = [raise_unbound_local_error_utility_code]) #------------------------------------------------------------------------------------ raise_too_many_values_to_unpack = UtilityCode.load_cached("RaiseTooManyValuesToUnpack", "ObjectHandling.c") raise_need_more_values_to_unpack = UtilityCode.load_cached("RaiseNeedMoreValuesToUnpack", "ObjectHandling.c") tuple_unpacking_error_code = UtilityCode.load_cached("UnpackTupleError", "ObjectHandling.c") Cython-0.26.1/Cython/Compiler/Tests/0000775000175000017500000000000013151203436017746 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Compiler/Tests/TestTypes.py0000664000175000017500000000123513150045407022266 0ustar stefanstefan00000000000000from __future__ import absolute_import import unittest import Cython.Compiler.PyrexTypes as PT class TestMethodDispatcherTransform(unittest.TestCase): def test_widest_numeric_type(self): def assert_widest(type1, type2, widest): self.assertEqual(widest, PT.widest_numeric_type(type1, type2)) assert_widest(PT.c_int_type, PT.c_long_type, PT.c_long_type) assert_widest(PT.c_double_type, PT.c_long_type, PT.c_double_type) assert_widest(PT.c_longdouble_type, PT.c_long_type, PT.c_longdouble_type) cenum = PT.CEnumType("E", "cenum", typedef_flag=False) assert_widest(PT.c_int_type, cenum, PT.c_int_type) Cython-0.26.1/Cython/Compiler/Tests/TestMemView.py0000664000175000017500000000472013143605603022537 0ustar stefanstefan00000000000000from Cython.TestUtils import CythonTest import Cython.Compiler.Errors as Errors from Cython.Compiler.Nodes import * from Cython.Compiler.ParseTreeTransforms import * from Cython.Compiler.Buffer import * class TestMemviewParsing(CythonTest): def parse(self, s): return self.should_not_fail(lambda: self.fragment(s)).root def not_parseable(self, expected_error, s): e = self.should_fail(lambda: self.fragment(s), Errors.CompileError) self.assertEqual(expected_error, e.message_only) def test_default_1dim(self): self.parse(u"cdef int[:] x") self.parse(u"cdef short int[:] x") def test_default_ndim(self): self.parse(u"cdef int[:,:,:,:,:] x") self.parse(u"cdef unsigned long int[:,:,:,:,:] x") self.parse(u"cdef unsigned int[:,:,:,:,:] x") def test_zero_offset(self): self.parse(u"cdef long double[0:] x") self.parse(u"cdef int[0:] x") def test_zero_offset_ndim(self): self.parse(u"cdef int[0:,0:,0:,0:] x") def test_def_arg(self): self.parse(u"def foo(int[:,:] x): pass") def test_cdef_arg(self): self.parse(u"cdef foo(int[:,:] x): pass") def test_general_slice(self): self.parse(u'cdef float[::ptr, ::direct & contig, 0::full & strided] x') def test_non_slice_memview(self): self.not_parseable(u"An axis specification in memoryview declaration does not have a ':'.", u"cdef double[:foo, bar] x") self.not_parseable(u"An axis specification in memoryview declaration does not have a ':'.", u"cdef double[0:foo, bar] x") def test_basic(self): t = 
self.parse(u"cdef int[:] x") memv_node = t.stats[0].base_type self.assert_(isinstance(memv_node, MemoryViewSliceTypeNode)) # we also test other similar declarations (buffers, anonymous C arrays) # since the parsing has to distinguish between them. def disable_test_no_buf_arg(self): # TODO self.not_parseable(u"Expected ']'", u"cdef extern foo(object[int, ndim=2])") def disable_test_parse_sizeof(self): # TODO self.parse(u"sizeof(int[NN])") self.parse(u"sizeof(int[])") self.parse(u"sizeof(int[][NN])") self.not_parseable(u"Expected an identifier or literal", u"sizeof(int[:NN])") self.not_parseable(u"Expected ']'", u"sizeof(foo[dtype=bar]") if __name__ == '__main__': import unittest unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestCmdLine.py0000664000175000017500000001047612542002467022507 0ustar stefanstefan00000000000000 import sys from unittest import TestCase try: from StringIO import StringIO except ImportError: from io import StringIO # doesn't accept 'str' in Py2 from .. import Options from ..CmdLine import parse_command_line class CmdLineParserTest(TestCase): def setUp(self): backup = {} for name, value in vars(Options).items(): backup[name] = value self._options_backup = backup def tearDown(self): no_value = object() for name, orig_value in self._options_backup.items(): if getattr(Options, name, no_value) != orig_value: setattr(Options, name, orig_value) def test_short_options(self): options, sources = parse_command_line([ '-V', '-l', '-+', '-t', '-v', '-v', '-v', '-p', '-D', '-a', '-3', ]) self.assertFalse(sources) self.assertTrue(options.show_version) self.assertTrue(options.use_listing_file) self.assertTrue(options.cplus) self.assertTrue(options.timestamps) self.assertTrue(options.verbose >= 3) self.assertTrue(Options.embed_pos_in_docstring) self.assertFalse(Options.docstrings) self.assertTrue(Options.annotate) self.assertEqual(options.language_level, 3) options, sources = parse_command_line([ '-f', '-2', 'source.pyx', ]) self.assertTrue(sources) self.assertTrue(len(sources) == 1) self.assertFalse(options.timestamps) self.assertEqual(options.language_level, 2) def test_long_options(self): options, sources = parse_command_line([ '--version', '--create-listing', '--cplus', '--embed', '--timestamps', '--verbose', '--verbose', '--verbose', '--embed-positions', '--no-docstrings', '--annotate', '--lenient', ]) self.assertFalse(sources) self.assertTrue(options.show_version) self.assertTrue(options.use_listing_file) self.assertTrue(options.cplus) self.assertEqual(Options.embed, 'main') self.assertTrue(options.timestamps) self.assertTrue(options.verbose >= 3) self.assertTrue(Options.embed_pos_in_docstring) self.assertFalse(Options.docstrings) self.assertTrue(Options.annotate) self.assertFalse(Options.error_on_unknown_names) self.assertFalse(Options.error_on_uninitialized) options, sources = parse_command_line([ '--force', 'source.pyx', ]) self.assertTrue(sources) self.assertTrue(len(sources) == 1) self.assertFalse(options.timestamps) def test_options_with_values(self): options, sources = parse_command_line([ '--embed=huhu', '-I/test/include/dir1', '--include-dir=/test/include/dir2', '--include-dir', '/test/include/dir3', '--working=/work/dir', 'source.pyx', '--output-file=/output/dir', '--pre-import=/pre/import', '--cleanup=3', '--annotate-coverage=cov.xml', '--gdb-outdir=/gdb/outdir', '--directive=wraparound=false', ]) self.assertEqual(sources, ['source.pyx']) self.assertEqual(Options.embed, 'huhu') self.assertEqual(options.include_path, ['/test/include/dir1', '/test/include/dir2', 
'/test/include/dir3']) self.assertEqual(options.working_path, '/work/dir') self.assertEqual(options.output_file, '/output/dir') self.assertEqual(Options.pre_import, '/pre/import') self.assertEqual(Options.generate_cleanup_code, 3) self.assertTrue(Options.annotate) self.assertEqual(Options.annotate_coverage_xml, 'cov.xml') self.assertTrue(options.gdb_debug) self.assertEqual(options.output_dir, '/gdb/outdir') def test_errors(self): def error(*args): old_stderr = sys.stderr stderr = sys.stderr = StringIO() try: self.assertRaises(SystemExit, parse_command_line, list(args)) finally: sys.stderr = old_stderr self.assertTrue(stderr.getvalue()) error('-1') error('-I') error('--version=-a') error('--version=--annotate=true') error('--working') error('--verbose=1') error('--verbose=1') error('--cleanup') Cython-0.26.1/Cython/Compiler/Tests/TestBuffer.py0000664000175000017500000001003212542002467022371 0ustar stefanstefan00000000000000from Cython.TestUtils import CythonTest import Cython.Compiler.Errors as Errors from Cython.Compiler.Nodes import * from Cython.Compiler.ParseTreeTransforms import * from Cython.Compiler.Buffer import * class TestBufferParsing(CythonTest): # First, we only test the raw parser, i.e. # the number and contents of arguments are NOT checked. # However "dtype"/the first positional argument is special-cased # to parse a type argument rather than an expression def parse(self, s): return self.should_not_fail(lambda: self.fragment(s)).root def not_parseable(self, expected_error, s): e = self.should_fail(lambda: self.fragment(s), Errors.CompileError) self.assertEqual(expected_error, e.message_only) def test_basic(self): t = self.parse(u"cdef object[float, 4, ndim=2, foo=foo] x") bufnode = t.stats[0].base_type self.assert_(isinstance(bufnode, TemplatedTypeNode)) self.assertEqual(2, len(bufnode.positional_args)) # print bufnode.dump() # should put more here... 
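# Illustrative example (not part of the original test suite): the same
# fragment-based pattern also covers keyword buffer options; only the
# positional argument count is asserted here because that is what the raw
# parser is known to record (the method name and fragment are hypothetical
# additions, shown to clarify the pattern used by test_basic above).
def _example_keyword_args(self):
    t = self.parse(u"cdef object[float, ndim=2] x")
    bufnode = t.stats[0].base_type
    self.assert_(isinstance(bufnode, TemplatedTypeNode))
    self.assertEqual(1, len(bufnode.positional_args))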
def test_type_pos(self): self.parse(u"cdef object[short unsigned int, 3] x") def test_type_keyword(self): self.parse(u"cdef object[foo=foo, dtype=short unsigned int] x") def test_pos_after_key(self): self.not_parseable("Non-keyword arg following keyword arg", u"cdef object[foo=1, 2] x") # See also tests/error/e_bufaccess.pyx and tets/run/bufaccess.pyx # THESE TESTS ARE NOW DISABLED, the code they test was pretty much # refactored away class TestBufferOptions(CythonTest): # Tests the full parsing of the options within the brackets def nonfatal_error(self, error): # We're passing self as context to transform to trap this self.error = error self.assert_(self.expect_error) def parse_opts(self, opts, expect_error=False): assert opts != "" s = u"def f():\n cdef object[%s] x" % opts self.expect_error = expect_error root = self.fragment(s, pipeline=[NormalizeTree(self), PostParse(self)]).root if not expect_error: vardef = root.stats[0].body.stats[0] assert isinstance(vardef, CVarDefNode) # use normal assert as this is to validate the test code buftype = vardef.base_type self.assert_(isinstance(buftype, TemplatedTypeNode)) self.assert_(isinstance(buftype.base_type_node, CSimpleBaseTypeNode)) self.assertEqual(u"object", buftype.base_type_node.name) return buftype else: self.assert_(len(root.stats[0].body.stats) == 0) def non_parse(self, expected_err, opts): self.parse_opts(opts, expect_error=True) # e = self.should_fail(lambda: self.parse_opts(opts)) self.assertEqual(expected_err, self.error.message_only) def __test_basic(self): buf = self.parse_opts(u"unsigned short int, 3") self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) self.assert_(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_dict(self): buf = self.parse_opts(u"ndim=3, dtype=unsigned short int") self.assert_(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) self.assert_(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_ndim(self): self.parse_opts(u"int, 2") self.non_parse(ERR_BUF_NDIM, u"int, 'a'") self.non_parse(ERR_BUF_NDIM, u"int, -34") def __test_use_DEF(self): t = self.fragment(u""" DEF ndim = 3 def f(): cdef object[int, ndim] x cdef object[ndim=ndim, dtype=int] y """, pipeline=[NormalizeTree(self), PostParse(self)]).root stats = t.stats[0].body.stats self.assert_(stats[0].base_type.ndim == 3) self.assert_(stats[1].base_type.ndim == 3) # add exotic and impossible combinations as they come along... 
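# Illustrative usage note (not part of the original file): these CythonTest
# based classes are ordinary unittest test cases, so besides the __main__
# block below they can also be collected programmatically with the standard
# library loader; the helper name below is hypothetical.
def _load_buffer_tests():
    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestBufferParsing)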
if __name__ == '__main__': import unittest unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestParseTreeTransforms.py0000664000175000017500000002046413150045407025140 0ustar stefanstefan00000000000000import os from Cython.TestUtils import TransformTest from Cython.Compiler.ParseTreeTransforms import * from Cython.Compiler.Nodes import * from Cython.Compiler import Main, Symtab class TestNormalizeTree(TransformTest): def test_parserbehaviour_is_what_we_coded_for(self): t = self.fragment(u"if x: y").root self.assertLines(u""" (root): StatListNode stats[0]: IfStatNode if_clauses[0]: IfClauseNode condition: NameNode body: ExprStatNode expr: NameNode """, self.treetypes(t)) def test_wrap_singlestat(self): t = self.run_pipeline([NormalizeTree(None)], u"if x: y") self.assertLines(u""" (root): StatListNode stats[0]: IfStatNode if_clauses[0]: IfClauseNode condition: NameNode body: StatListNode stats[0]: ExprStatNode expr: NameNode """, self.treetypes(t)) def test_wrap_multistat(self): t = self.run_pipeline([NormalizeTree(None)], u""" if z: x y """) self.assertLines(u""" (root): StatListNode stats[0]: IfStatNode if_clauses[0]: IfClauseNode condition: NameNode body: StatListNode stats[0]: ExprStatNode expr: NameNode stats[1]: ExprStatNode expr: NameNode """, self.treetypes(t)) def test_statinexpr(self): t = self.run_pipeline([NormalizeTree(None)], u""" a, b = x, y """) self.assertLines(u""" (root): StatListNode stats[0]: SingleAssignmentNode lhs: TupleNode args[0]: NameNode args[1]: NameNode rhs: TupleNode args[0]: NameNode args[1]: NameNode """, self.treetypes(t)) def test_wrap_offagain(self): t = self.run_pipeline([NormalizeTree(None)], u""" x y if z: x """) self.assertLines(u""" (root): StatListNode stats[0]: ExprStatNode expr: NameNode stats[1]: ExprStatNode expr: NameNode stats[2]: IfStatNode if_clauses[0]: IfClauseNode condition: NameNode body: StatListNode stats[0]: ExprStatNode expr: NameNode """, self.treetypes(t)) def test_pass_eliminated(self): t = self.run_pipeline([NormalizeTree(None)], u"pass") self.assert_(len(t.stats) == 0) class TestWithTransform(object): # (TransformTest): # Disabled! def test_simplified(self): t = self.run_pipeline([WithTransform(None)], u""" with x: y = z ** 3 """) self.assertCode(u""" $0_0 = x $0_2 = $0_0.__exit__ $0_0.__enter__() $0_1 = True try: try: $1_0 = None y = z ** 3 except: $0_1 = False if (not $0_2($1_0)): raise finally: if $0_1: $0_2(None, None, None) """, t) def test_basic(self): t = self.run_pipeline([WithTransform(None)], u""" with x as y: y = z ** 3 """) self.assertCode(u""" $0_0 = x $0_2 = $0_0.__exit__ $0_3 = $0_0.__enter__() $0_1 = True try: try: $1_0 = None y = $0_3 y = z ** 3 except: $0_1 = False if (not $0_2($1_0)): raise finally: if $0_1: $0_2(None, None, None) """, t) class TestInterpretCompilerDirectives(TransformTest): """ This class tests the parallel directives AST-rewriting and importing. 
""" # Test the parallel directives (c)importing import_code = u""" cimport cython.parallel cimport cython.parallel as par from cython cimport parallel as par2 from cython cimport parallel from cython.parallel cimport threadid as tid from cython.parallel cimport threadavailable as tavail from cython.parallel cimport prange """ expected_directives_dict = { u'cython.parallel': u'cython.parallel', u'par': u'cython.parallel', u'par2': u'cython.parallel', u'parallel': u'cython.parallel', u"tid": u"cython.parallel.threadid", u"tavail": u"cython.parallel.threadavailable", u"prange": u"cython.parallel.prange", } def setUp(self): super(TestInterpretCompilerDirectives, self).setUp() compilation_options = Main.CompilationOptions(Main.default_options) ctx = compilation_options.create_context() transform = InterpretCompilerDirectives(ctx, ctx.compiler_directives) transform.module_scope = Symtab.ModuleScope('__main__', None, ctx) self.pipeline = [transform] self.debug_exception_on_error = DebugFlags.debug_exception_on_error def tearDown(self): DebugFlags.debug_exception_on_error = self.debug_exception_on_error def test_parallel_directives_cimports(self): self.run_pipeline(self.pipeline, self.import_code) parallel_directives = self.pipeline[0].parallel_directives self.assertEqual(parallel_directives, self.expected_directives_dict) def test_parallel_directives_imports(self): self.run_pipeline(self.pipeline, self.import_code.replace(u'cimport', u'import')) parallel_directives = self.pipeline[0].parallel_directives self.assertEqual(parallel_directives, self.expected_directives_dict) # TODO: Re-enable once they're more robust. if False: from Cython.Debugger import DebugWriter from Cython.Debugger.Tests.TestLibCython import DebuggerTestCase else: # skip test, don't let it inherit unittest.TestCase DebuggerTestCase = object class TestDebugTransform(DebuggerTestCase): def elem_hasattrs(self, elem, attrs): return all(attr in elem.attrib for attr in attrs) def test_debug_info(self): try: assert os.path.exists(self.debug_dest) t = DebugWriter.etree.parse(self.debug_dest) # the xpath of the standard ElementTree is primitive, don't use # anything fancy L = list(t.find('/Module/Globals')) assert L xml_globals = dict((e.attrib['name'], e.attrib['type']) for e in L) self.assertEqual(len(L), len(xml_globals)) L = list(t.find('/Module/Functions')) assert L xml_funcs = dict((e.attrib['qualified_name'], e) for e in L) self.assertEqual(len(L), len(xml_funcs)) # test globals self.assertEqual('CObject', xml_globals.get('c_var')) self.assertEqual('PythonObject', xml_globals.get('python_var')) # test functions funcnames = ('codefile.spam', 'codefile.ham', 'codefile.eggs', 'codefile.closure', 'codefile.inner') required_xml_attrs = 'name', 'cname', 'qualified_name' assert all(f in xml_funcs for f in funcnames) spam, ham, eggs = [xml_funcs[funcname] for funcname in funcnames] self.assertEqual(spam.attrib['name'], 'spam') self.assertNotEqual('spam', spam.attrib['cname']) assert self.elem_hasattrs(spam, required_xml_attrs) # test locals of functions spam_locals = list(spam.find('Locals')) assert spam_locals spam_locals.sort(key=lambda e: e.attrib['name']) names = [e.attrib['name'] for e in spam_locals] self.assertEqual(list('abcd'), names) assert self.elem_hasattrs(spam_locals[0], required_xml_attrs) # test arguments of functions spam_arguments = list(spam.find('Arguments')) assert spam_arguments self.assertEqual(1, len(list(spam_arguments))) # test step-into functions step_into = spam.find('StepIntoFunctions') spam_stepinto = 
[x.attrib['name'] for x in step_into] assert spam_stepinto self.assertEqual(2, len(spam_stepinto)) assert 'puts' in spam_stepinto assert 'some_c_function' in spam_stepinto except: f = open(self.debug_dest) try: print(f.read()) finally: f.close() raise if __name__ == "__main__": import unittest unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestTreeFragment.py0000664000175000017500000000422012542002467023545 0ustar stefanstefan00000000000000from Cython.TestUtils import CythonTest from Cython.Compiler.TreeFragment import * from Cython.Compiler.Nodes import * from Cython.Compiler.UtilNodes import * import Cython.Compiler.Naming as Naming class TestTreeFragments(CythonTest): def test_basic(self): F = self.fragment(u"x = 4") T = F.copy() self.assertCode(u"x = 4", T) def test_copy_is_taken(self): F = self.fragment(u"if True: x = 4") T1 = F.root T2 = F.copy() self.assertEqual("x", T2.stats[0].if_clauses[0].body.lhs.name) T2.stats[0].if_clauses[0].body.lhs.name = "other" self.assertEqual("x", T1.stats[0].if_clauses[0].body.lhs.name) def test_substitutions_are_copied(self): T = self.fragment(u"y + y").substitute({"y": NameNode(pos=None, name="x")}) self.assertEqual("x", T.stats[0].expr.operand1.name) self.assertEqual("x", T.stats[0].expr.operand2.name) self.assert_(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2) def test_substitution(self): F = self.fragment(u"x = 4") y = NameNode(pos=None, name=u"y") T = F.substitute({"x" : y}) self.assertCode(u"y = 4", T) def test_exprstat(self): F = self.fragment(u"PASS") pass_stat = PassStatNode(pos=None) T = F.substitute({"PASS" : pass_stat}) self.assert_(isinstance(T.stats[0], PassStatNode), T) def test_pos_is_transferred(self): F = self.fragment(u""" x = y x = u * v ** w """) T = F.substitute({"v" : NameNode(pos=None, name="a")}) v = F.root.stats[1].rhs.operand2.operand1 a = T.stats[1].rhs.operand2.operand1 self.assertEquals(v.pos, a.pos) def test_temps(self): TemplateTransform.temp_name_counter = 0 F = self.fragment(u""" TMP x = TMP """) T = F.substitute(temps=[u"TMP"]) s = T.body.stats self.assert_(isinstance(s[0].expr, TempRefNode)) self.assert_(isinstance(s[1].rhs, TempRefNode)) self.assert_(s[0].expr.handle is s[1].rhs.handle) if __name__ == "__main__": import unittest unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestFlowControl.py0000664000175000017500000000347013023021033023421 0ustar stefanstefan00000000000000 from __future__ import absolute_import from copy import deepcopy from unittest import TestCase from Cython.Compiler.FlowControl import ( NameAssignment, StaticAssignment, Argument, NameDeletion) class FakeType(object): is_pyobject = True class FakeNode(object): pos = ('filename.pyx', 1, 2) cf_state = None type = FakeType() def infer_type(self, scope): return self.type class FakeEntry(object): type = FakeType() class TestGraph(TestCase): def test_deepcopy(self): lhs, rhs = FakeNode(), FakeNode() entry = FakeEntry() entry.pos = lhs.pos name_ass = NameAssignment(lhs, rhs, entry) ass = deepcopy(name_ass) self.assertTrue(ass.lhs) self.assertTrue(ass.rhs) self.assertTrue(ass.entry) self.assertEqual(ass.pos, name_ass.pos) self.assertFalse(ass.is_arg) self.assertFalse(ass.is_deletion) static_ass = StaticAssignment(entry) ass = deepcopy(static_ass) self.assertTrue(ass.lhs) self.assertTrue(ass.rhs) self.assertTrue(ass.entry) self.assertEqual(ass.pos, static_ass.pos) self.assertFalse(ass.is_arg) self.assertFalse(ass.is_deletion) arg_ass = Argument(lhs, rhs, entry) ass = deepcopy(arg_ass) self.assertTrue(ass.lhs) 
self.assertTrue(ass.rhs) self.assertTrue(ass.entry) self.assertEqual(ass.pos, arg_ass.pos) self.assertTrue(ass.is_arg) self.assertFalse(ass.is_deletion) name_del = NameDeletion(lhs, entry) ass = deepcopy(name_del) self.assertTrue(ass.lhs) self.assertTrue(ass.rhs) self.assertTrue(ass.entry) self.assertEqual(ass.pos, name_del.pos) self.assertFalse(ass.is_arg) self.assertTrue(ass.is_deletion) Cython-0.26.1/Cython/Compiler/Tests/TestTreePath.py0000664000175000017500000001026212542002467022701 0ustar stefanstefan00000000000000import unittest from Cython.Compiler.Visitor import PrintTree from Cython.TestUtils import TransformTest from Cython.Compiler.TreePath import find_first, find_all from Cython.Compiler import Nodes, ExprNodes class TestTreePath(TransformTest): _tree = None def _build_tree(self): if self._tree is None: self._tree = self.run_pipeline([], u""" def decorator(fun): # DefNode return fun # ReturnStatNode, NameNode @decorator # NameNode def decorated(): # DefNode pass """) return self._tree def test_node_path(self): t = self._build_tree() self.assertEquals(2, len(find_all(t, "//DefNode"))) self.assertEquals(2, len(find_all(t, "//NameNode"))) self.assertEquals(1, len(find_all(t, "//ReturnStatNode"))) self.assertEquals(1, len(find_all(t, "//DefNode//ReturnStatNode"))) def test_node_path_star(self): t = self._build_tree() self.assertEquals(10, len(find_all(t, "//*"))) self.assertEquals(8, len(find_all(t, "//DefNode//*"))) self.assertEquals(0, len(find_all(t, "//NameNode//*"))) def test_node_path_attribute(self): t = self._build_tree() self.assertEquals(2, len(find_all(t, "//NameNode/@name"))) self.assertEquals(['fun', 'decorator'], find_all(t, "//NameNode/@name")) def test_node_path_attribute_dotted(self): t = self._build_tree() self.assertEquals(1, len(find_all(t, "//ReturnStatNode/@value.name"))) self.assertEquals(['fun'], find_all(t, "//ReturnStatNode/@value.name")) def test_node_path_child(self): t = self._build_tree() self.assertEquals(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode"))) self.assertEquals(1, len(find_all(t, "//ReturnStatNode/NameNode"))) def test_node_path_node_predicate(self): t = self._build_tree() self.assertEquals(0, len(find_all(t, "//DefNode[.//ForInStatNode]"))) self.assertEquals(2, len(find_all(t, "//DefNode[.//NameNode]"))) self.assertEquals(1, len(find_all(t, "//ReturnStatNode[./NameNode]"))) self.assertEquals(Nodes.ReturnStatNode, type(find_first(t, "//ReturnStatNode[./NameNode]"))) def test_node_path_node_predicate_step(self): t = self._build_tree() self.assertEquals(2, len(find_all(t, "//DefNode[.//NameNode]"))) self.assertEquals(8, len(find_all(t, "//DefNode[.//NameNode]//*"))) self.assertEquals(1, len(find_all(t, "//DefNode[.//NameNode]//ReturnStatNode"))) self.assertEquals(Nodes.ReturnStatNode, type(find_first(t, "//DefNode[.//NameNode]//ReturnStatNode"))) def test_node_path_attribute_exists(self): t = self._build_tree() self.assertEquals(2, len(find_all(t, "//NameNode[@name]"))) self.assertEquals(ExprNodes.NameNode, type(find_first(t, "//NameNode[@name]"))) def test_node_path_attribute_exists_not(self): t = self._build_tree() self.assertEquals(0, len(find_all(t, "//NameNode[not(@name)]"))) self.assertEquals(2, len(find_all(t, "//NameNode[not(@honking)]"))) def test_node_path_and(self): t = self._build_tree() self.assertEquals(1, len(find_all(t, "//DefNode[.//ReturnStatNode and .//NameNode]"))) self.assertEquals(0, len(find_all(t, "//NameNode[@honking and @name]"))) self.assertEquals(0, len(find_all(t, "//NameNode[@name and @honking]"))) 
self.assertEquals(2, len(find_all(t, "//DefNode[.//NameNode[@name] and @name]"))) def test_node_path_attribute_string_predicate(self): t = self._build_tree() self.assertEquals(1, len(find_all(t, "//NameNode[@name = 'decorator']"))) def test_node_path_recursive_predicate(self): t = self._build_tree() self.assertEquals(2, len(find_all(t, "//DefNode[.//NameNode[@name]]"))) self.assertEquals(1, len(find_all(t, "//DefNode[.//NameNode[@name = 'decorator']]"))) self.assertEquals(1, len(find_all(t, "//DefNode[.//ReturnStatNode[./NameNode[@name = 'fun']]/NameNode]"))) if __name__ == '__main__': unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestUtilityLoad.py0000664000175000017500000000642712542002467023440 0ustar stefanstefan00000000000000import unittest from Cython.Compiler import Code, UtilityCode def strip_2tup(tup): return tup[0] and tup[0].strip(), tup[1] and tup[1].strip() class TestUtilityLoader(unittest.TestCase): """ Test loading UtilityCodes """ expected = "test {{loader}} prototype", "test {{loader}} impl" required = "req {{loader}} proto", "req {{loader}} impl" context = dict(loader='Loader') name = "TestUtilityLoader" filename = "TestUtilityLoader.c" cls = Code.UtilityCode def test_load_as_string(self): got = strip_2tup(self.cls.load_as_string(self.name)) self.assertEquals(got, self.expected) got = strip_2tup(self.cls.load_as_string(self.name, self.filename)) self.assertEquals(got, self.expected) def test_load(self): utility = self.cls.load(self.name) got = strip_2tup((utility.proto, utility.impl)) self.assertEquals(got, self.expected) required, = utility.requires got = strip_2tup((required.proto, required.impl)) self.assertEquals(got, self.required) utility = self.cls.load(self.name, from_file=self.filename) got = strip_2tup((utility.proto, utility.impl)) self.assertEquals(got, self.expected) utility = self.cls.load_cached(self.name, from_file=self.filename) got = strip_2tup((utility.proto, utility.impl)) self.assertEquals(got, self.expected) class TestTempitaUtilityLoader(TestUtilityLoader): """ Test loading UtilityCodes with Tempita substitution """ expected_tempita = (TestUtilityLoader.expected[0].replace('{{loader}}', 'Loader'), TestUtilityLoader.expected[1].replace('{{loader}}', 'Loader')) required_tempita = (TestUtilityLoader.required[0].replace('{{loader}}', 'Loader'), TestUtilityLoader.required[1].replace('{{loader}}', 'Loader')) cls = Code.TempitaUtilityCode def test_load_as_string(self): got = strip_2tup(self.cls.load_as_string(self.name, context=self.context)) self.assertEquals(got, self.expected_tempita) def test_load(self): utility = self.cls.load(self.name, context=self.context) got = strip_2tup((utility.proto, utility.impl)) self.assertEquals(got, self.expected_tempita) required, = utility.requires got = strip_2tup((required.proto, required.impl)) self.assertEquals(got, self.required_tempita) utility = self.cls.load(self.name, from_file=self.filename, context=self.context) got = strip_2tup((utility.proto, utility.impl)) self.assertEquals(got, self.expected_tempita) class TestCythonUtilityLoader(TestTempitaUtilityLoader): """ Test loading CythonUtilityCodes """ # Just change the attributes and run the same tests expected = None, "test {{cy_loader}} impl" expected_tempita = None, "test CyLoader impl" required = None, "req {{cy_loader}} impl" required_tempita = None, "req CyLoader impl" context = dict(cy_loader='CyLoader') name = "TestCyUtilityLoader" filename = "TestCyUtilityLoader.pyx" cls = UtilityCode.CythonUtilityCode # Small hack to pass our tests above 
cls.proto = None test_load = TestUtilityLoader.test_load test_load_tempita = TestTempitaUtilityLoader.test_load Cython-0.26.1/Cython/Compiler/Tests/__init__.py0000664000175000017500000000001512542002467022057 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Compiler/Tests/TestGrammar.py0000664000175000017500000000656313023021033022545 0ustar stefanstefan00000000000000# mode: run # tag: syntax """ Uses TreeFragment to test invalid syntax. """ from __future__ import absolute_import from ...TestUtils import CythonTest from ..Errors import CompileError from .. import ExprNodes # Copied from CPython's test_grammar.py VALID_UNDERSCORE_LITERALS = [ '0_0_0', '4_2', '1_0000_0000', '0b1001_0100', '0xffff_ffff', '0o5_7_7', '1_00_00.5', '1_00_00.5j', '1_00_00.5e5', '1_00_00j', '1_00_00e5_1', '1e1_0', '.1_4', '.1_4e1', '.1_4j', ] # Copied from CPython's test_grammar.py INVALID_UNDERSCORE_LITERALS = [ # Trailing underscores: '0_', '42_', '1.4j_', '0b1_', '0xf_', '0o5_', # Underscores in the base selector: '0_b0', '0_xf', '0_o5', # Underscore right after the base selector: '0b_0', '0x_f', '0o_5', # Old-style octal, still disallowed: #'0_7', #'09_99', # Special case with exponent: '0 if 1_Else 1', # Underscore right before a dot: '1_.4', '1_.4j', # Underscore right after a dot: '1._4', '1._4j', '._5', # Underscore right after a sign: '1.0e+_1', # Multiple consecutive underscores: '4_______2', '0.1__4', '0b1001__0100', '0xffff__ffff', '0o5__77', '1e1__0', # Underscore right before j: '1.4_j', '1.4e5_j', # Underscore right before e: '1_e1', '1.4_e1', # Underscore right after e: '1e_1', '1.4e_1', # Whitespace in literals '1_ 2', '1 _2', '1_2.2_ 1', '1_2.2 _1', '1_2e _1', '1_2e2 _1', '1_2e 2_1', ] class TestGrammar(CythonTest): def test_invalid_number_literals(self): for literal in INVALID_UNDERSCORE_LITERALS: for expression in ['%s', '1 + %s', '%s + 1', '2 * %s', '%s * 2']: code = 'x = ' + expression % literal try: self.fragment(u'''\ # cython: language_level=3 ''' + code) except CompileError as exc: assert code in [s.strip() for s in str(exc).splitlines()], str(exc) else: assert False, "Invalid Cython code '%s' failed to raise an exception" % code def test_valid_number_literals(self): for literal in VALID_UNDERSCORE_LITERALS: for i, expression in enumerate(['%s', '1 + %s', '%s + 1', '2 * %s', '%s * 2']): code = 'x = ' + expression % literal node = self.fragment(u'''\ # cython: language_level=3 ''' + code).root assert node is not None literal_node = node.stats[0].rhs # StatListNode([SingleAssignmentNode('x', expr)]) if i > 0: # Add/MulNode() -> literal is first or second operand literal_node = literal_node.operand2 if i % 2 else literal_node.operand1 if 'j' in literal or 'J' in literal: assert isinstance(literal_node, ExprNodes.ImagNode) elif '.' 
in literal or 'e' in literal or 'E' in literal and not ('0x' in literal or '0X' in literal): assert isinstance(literal_node, ExprNodes.FloatNode) else: assert isinstance(literal_node, ExprNodes.IntNode) if __name__ == "__main__": import unittest unittest.main() Cython-0.26.1/Cython/Compiler/Tests/TestVisitor.py0000664000175000017500000000426412542002467022631 0ustar stefanstefan00000000000000from Cython.Compiler.ModuleNode import ModuleNode from Cython.Compiler.Symtab import ModuleScope from Cython.TestUtils import TransformTest from Cython.Compiler.Visitor import MethodDispatcherTransform from Cython.Compiler.ParseTreeTransforms import ( NormalizeTree, AnalyseDeclarationsTransform, AnalyseExpressionsTransform, InterpretCompilerDirectives) class TestMethodDispatcherTransform(TransformTest): _tree = None def _build_tree(self): if self._tree is None: context = None def fake_module(node): scope = ModuleScope('test', None, None) return ModuleNode(node.pos, doc=None, body=node, scope=scope, full_module_name='test', directive_comments={}) pipeline = [ fake_module, NormalizeTree(context), InterpretCompilerDirectives(context, {}), AnalyseDeclarationsTransform(context), AnalyseExpressionsTransform(context), ] self._tree = self.run_pipeline(pipeline, u""" cdef bytes s = b'asdfg' cdef dict d = {1:2} x = s * 3 d.get('test') """) return self._tree def test_builtin_method(self): calls = [0] class Test(MethodDispatcherTransform): def _handle_simple_method_dict_get(self, node, func, args, unbound): calls[0] += 1 return node tree = self._build_tree() Test(None)(tree) self.assertEqual(1, calls[0]) def test_binop_method(self): calls = {'bytes': 0, 'object': 0} class Test(MethodDispatcherTransform): def _handle_simple_method_bytes___mul__(self, node, func, args, unbound): calls['bytes'] += 1 return node def _handle_simple_method_object___mul__(self, node, func, args, unbound): calls['object'] += 1 return node tree = self._build_tree() Test(None)(tree) self.assertEqual(1, calls['bytes']) self.assertEqual(0, calls['object']) Cython-0.26.1/Cython/Compiler/Tests/TestSignatureMatching.py0000664000175000017500000000641213023021033024564 0ustar stefanstefan00000000000000import unittest from Cython.Compiler import PyrexTypes as pt from Cython.Compiler.ExprNodes import NameNode from Cython.Compiler.PyrexTypes import CFuncTypeArg def cfunctype(*arg_types): return pt.CFuncType(pt.c_int_type, [ CFuncTypeArg("name", arg_type, None) for arg_type in arg_types ]) def cppclasstype(name, base_classes): return pt.CppClassType(name, None, 'CPP_'+name, base_classes) class SignatureMatcherTest(unittest.TestCase): """ Test the signature matching algorithm for overloaded signatures. 
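(Editor's note: each test below builds NameNode stand-ins for overloaded C function signatures and expects pt.best_match() to pick the signature whose parameter types best accommodate the given argument types.)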
""" def assertMatches(self, expected_type, arg_types, functions): match = pt.best_match(arg_types, functions) if expected_type is not None: self.assertNotEqual(None, match) self.assertEqual(expected_type, match.type) def test_cpp_reference_single_arg(self): function_types = [ cfunctype(pt.CReferenceType(pt.c_int_type)), cfunctype(pt.CReferenceType(pt.c_long_type)), cfunctype(pt.CReferenceType(pt.c_double_type)), ] functions = [ NameNode(None, type=t) for t in function_types ] self.assertMatches(function_types[0], [pt.c_int_type], functions) self.assertMatches(function_types[1], [pt.c_long_type], functions) self.assertMatches(function_types[2], [pt.c_double_type], functions) def test_cpp_reference_two_args(self): function_types = [ cfunctype( pt.CReferenceType(pt.c_int_type), pt.CReferenceType(pt.c_long_type)), cfunctype( pt.CReferenceType(pt.c_long_type), pt.CReferenceType(pt.c_long_type)), ] functions = [ NameNode(None, type=t) for t in function_types ] self.assertMatches(function_types[0], [pt.c_int_type, pt.c_long_type], functions) self.assertMatches(function_types[1], [pt.c_long_type, pt.c_long_type], functions) self.assertMatches(function_types[1], [pt.c_long_type, pt.c_int_type], functions) def test_cpp_reference_cpp_class(self): classes = [ cppclasstype("Test%d"%i, []) for i in range(2) ] function_types = [ cfunctype(pt.CReferenceType(classes[0])), cfunctype(pt.CReferenceType(classes[1])), ] functions = [ NameNode(None, type=t) for t in function_types ] self.assertMatches(function_types[0], [classes[0]], functions) self.assertMatches(function_types[1], [classes[1]], functions) def test_cpp_reference_cpp_class_and_int(self): classes = [ cppclasstype("Test%d"%i, []) for i in range(2) ] function_types = [ cfunctype(pt.CReferenceType(classes[0]), pt.c_int_type), cfunctype(pt.CReferenceType(classes[0]), pt.c_long_type), cfunctype(pt.CReferenceType(classes[1]), pt.c_int_type), cfunctype(pt.CReferenceType(classes[1]), pt.c_long_type), ] functions = [ NameNode(None, type=t) for t in function_types ] self.assertMatches(function_types[0], [classes[0], pt.c_int_type], functions) self.assertMatches(function_types[1], [classes[0], pt.c_long_type], functions) self.assertMatches(function_types[2], [classes[1], pt.c_int_type], functions) self.assertMatches(function_types[3], [classes[1], pt.c_long_type], functions) Cython-0.26.1/Cython/Compiler/Code.pxd0000664000175000017500000000504713023021033020226 0ustar stefanstefan00000000000000 from __future__ import absolute_import cimport cython #cdef class UtilityCodeBase(object): # cdef public object name # cdef public object proto # cdef public object impl # cdef public object init # cdef public object cleanup # cdef public object requires # cdef public dict _cache # cdef public list specialize_list # cdef public object proto_block # cdef public object file # # cpdef format_code(self, code_string, replace_empty_lines=*) cdef class FunctionState: cdef public set names_taken cdef public object owner cdef public object scope cdef public object error_label cdef public size_t label_counter cdef public set labels_used cdef public object return_label cdef public object continue_label cdef public object break_label cdef public list yield_labels cdef public object return_from_error_cleanup_label # not used in __init__ ? 
cdef public object exc_vars cdef public bint in_try_finally cdef public bint can_trace cdef public bint gil_owned cdef public list temps_allocated cdef public dict temps_free cdef public dict temps_used_type cdef public size_t temp_counter cdef public list collect_temps_stack cdef public object closure_temps cdef public bint should_declare_error_indicator cdef public bint uses_error_indicator @cython.locals(n=size_t) cpdef new_label(self, name=*) cpdef tuple get_loop_labels(self) cpdef set_loop_labels(self, labels) cpdef tuple get_all_labels(self) cpdef set_all_labels(self, labels) cpdef start_collecting_temps(self) cpdef stop_collecting_temps(self) cpdef list temps_in_use(self) cdef class IntConst: cdef public object cname cdef public object value cdef public bint is_long cdef class PyObjectConst: cdef public object cname cdef public object type cdef class StringConst: cdef public object cname cdef public object text cdef public object escaped_value cdef public dict py_strings cdef public list py_versions @cython.locals(intern=bint, is_str=bint, is_unicode=bint) cpdef get_py_string_const(self, encoding, identifier=*, is_str=*, py3str_cstring=*) ## cdef class PyStringConst: ## cdef public object cname ## cdef public object encoding ## cdef public bint is_str ## cdef public bint is_unicode ## cdef public bint intern #class GlobalState(object): #def funccontext_property(name): #class CCodeWriter(object): cdef class PyrexCodeWriter: cdef public object f cdef public Py_ssize_t level Cython-0.26.1/Cython/Compiler/Scanning.py0000664000175000017500000004316113143605603020766 0ustar stefanstefan00000000000000# cython: infer_types=True, language_level=3, py2_import=True # # Cython Scanner # from __future__ import absolute_import import cython cython.declare(make_lexicon=object, lexicon=object, print_function=object, error=object, warning=object, os=object, platform=object) import os import platform from .. 
import Utils from ..Plex.Scanners import Scanner from ..Plex.Errors import UnrecognizedInput from .Errors import error, warning from .Lexicon import any_string_prefix, make_lexicon, IDENT from .Future import print_function debug_scanner = 0 trace_scanner = 0 scanner_debug_flags = 0 scanner_dump_file = None lexicon = None def get_lexicon(): global lexicon if not lexicon: lexicon = make_lexicon() return lexicon #------------------------------------------------------------------ py_reserved_words = [ "global", "nonlocal", "def", "class", "print", "del", "pass", "break", "continue", "return", "raise", "import", "exec", "try", "except", "finally", "while", "if", "elif", "else", "for", "in", "assert", "and", "or", "not", "is", "in", "lambda", "from", "yield", "with", "nonlocal", ] pyx_reserved_words = py_reserved_words + [ "include", "ctypedef", "cdef", "cpdef", "cimport", "DEF", "IF", "ELIF", "ELSE" ] class Method(object): def __init__(self, name, **kwargs): self.name = name self.kwargs = kwargs or None self.__name__ = name # for Plex tracing def __call__(self, stream, text): method = getattr(stream, self.name) # self.kwargs is almost always unused => avoid call overhead return method(text, **self.kwargs) if self.kwargs is not None else method(text) #------------------------------------------------------------------ class CompileTimeScope(object): def __init__(self, outer=None): self.entries = {} self.outer = outer def declare(self, name, value): self.entries[name] = value def update(self, other): self.entries.update(other) def lookup_here(self, name): return self.entries[name] def __contains__(self, name): return name in self.entries def lookup(self, name): try: return self.lookup_here(name) except KeyError: outer = self.outer if outer: return outer.lookup(name) else: raise def initial_compile_time_env(): benv = CompileTimeScope() names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE', 'UNAME_VERSION', 'UNAME_MACHINE') for name, value in zip(names, platform.uname()): benv.declare(name, value) try: import __builtin__ as builtins except ImportError: import builtins names = ( 'False', 'True', 'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray', 'bytes', 'chr', 'cmp', 'complex', 'dict', 'divmod', 'enumerate', 'filter', 'float', 'format', 'frozenset', 'hash', 'hex', 'int', 'len', 'list', 'map', 'max', 'min', 'oct', 'ord', 'pow', 'range', 'repr', 'reversed', 'round', 'set', 'slice', 'sorted', 'str', 'sum', 'tuple', 'zip', ### defined below in a platform independent way # 'long', 'unicode', 'reduce', 'xrange' ) for name in names: try: benv.declare(name, getattr(builtins, name)) except AttributeError: # ignore, likely Py3 pass # Py2/3 adaptations from functools import reduce benv.declare('reduce', reduce) benv.declare('unicode', getattr(builtins, 'unicode', getattr(builtins, 'str'))) benv.declare('long', getattr(builtins, 'long', getattr(builtins, 'int'))) benv.declare('xrange', getattr(builtins, 'xrange', getattr(builtins, 'range'))) denv = CompileTimeScope(benv) return denv #------------------------------------------------------------------ class SourceDescriptor(object): """ A SourceDescriptor should be considered immutable. 
""" _file_type = 'pyx' _escaped_description = None _cmp_name = '' def __str__(self): assert False # To catch all places where a descriptor is used directly as a filename def set_file_type_from_name(self, filename): name, ext = os.path.splitext(filename) self._file_type = ext in ('.pyx', '.pxd', '.py') and ext[1:] or 'pyx' def is_cython_file(self): return self._file_type in ('pyx', 'pxd') def is_python_file(self): return self._file_type == 'py' def get_escaped_description(self): if self._escaped_description is None: esc_desc = \ self.get_description().encode('ASCII', 'replace').decode("ASCII") # Use foreward slashes on Windows since these paths # will be used in the #line directives in the C/C++ files. self._escaped_description = esc_desc.replace('\\', '/') return self._escaped_description def __gt__(self, other): # this is only used to provide some sort of order try: return self._cmp_name > other._cmp_name except AttributeError: return False def __lt__(self, other): # this is only used to provide some sort of order try: return self._cmp_name < other._cmp_name except AttributeError: return False def __le__(self, other): # this is only used to provide some sort of order try: return self._cmp_name <= other._cmp_name except AttributeError: return False class FileSourceDescriptor(SourceDescriptor): """ Represents a code source. A code source is a more generic abstraction for a "filename" (as sometimes the code doesn't come from a file). Instances of code sources are passed to Scanner.__init__ as the optional name argument and will be passed back when asking for the position()-tuple. """ def __init__(self, filename, path_description=None): filename = Utils.decode_filename(filename) self.path_description = path_description or filename self.filename = filename # Prefer relative paths to current directory (which is most likely the project root) over absolute paths. workdir = os.path.abspath('.') + os.sep self.file_path = filename[len(workdir):] if filename.startswith(workdir) else filename self.set_file_type_from_name(filename) self._cmp_name = filename self._lines = {} def get_lines(self, encoding=None, error_handling=None): # we cache the lines only the second time this is called, in # order to save memory when they are only used once key = (encoding, error_handling) try: lines = self._lines[key] if lines is not None: return lines except KeyError: pass with Utils.open_source_file(self.filename, encoding=encoding, error_handling=error_handling) as f: lines = list(f) if key in self._lines: self._lines[key] = lines else: # do not cache the first access, but remember that we # already read it once self._lines[key] = None return lines def get_description(self): try: return os.path.relpath(self.path_description) except ValueError: # path not under current directory => use complete file path return self.path_description def get_error_description(self): path = self.filename cwd = Utils.decode_filename(os.getcwd() + os.path.sep) if path.startswith(cwd): return path[len(cwd):] return path def get_filenametable_entry(self): return self.file_path def __eq__(self, other): return isinstance(other, FileSourceDescriptor) and self.filename == other.filename def __hash__(self): return hash(self.filename) def __repr__(self): return "" % self.filename class StringSourceDescriptor(SourceDescriptor): """ Instances of this class can be used instead of a filenames if the code originates from a string object. 
""" filename = None def __init__(self, name, code): self.name = name #self.set_file_type_from_name(name) self.codelines = [x + "\n" for x in code.split("\n")] self._cmp_name = name def get_lines(self, encoding=None, error_handling=None): if not encoding: return self.codelines else: return [line.encode(encoding, error_handling).decode(encoding) for line in self.codelines] def get_description(self): return self.name get_error_description = get_description def get_filenametable_entry(self): return "stringsource" def __hash__(self): return id(self) # Do not hash on the name, an identical string source should be the # same object (name is often defaulted in other places) # return hash(self.name) def __eq__(self, other): return isinstance(other, StringSourceDescriptor) and self.name == other.name def __repr__(self): return "" % self.name #------------------------------------------------------------------ class PyrexScanner(Scanner): # context Context Compilation context # included_files [string] Files included with 'include' statement # compile_time_env dict Environment for conditional compilation # compile_time_eval boolean In a true conditional compilation context # compile_time_expr boolean In a compile-time expression context def __init__(self, file, filename, parent_scanner=None, scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None): Scanner.__init__(self, get_lexicon(), file, filename, initial_pos) if parent_scanner: self.context = parent_scanner.context self.included_files = parent_scanner.included_files self.compile_time_env = parent_scanner.compile_time_env self.compile_time_eval = parent_scanner.compile_time_eval self.compile_time_expr = parent_scanner.compile_time_expr else: self.context = context self.included_files = scope.included_files self.compile_time_env = initial_compile_time_env() self.compile_time_eval = 1 self.compile_time_expr = 0 if getattr(context.options, 'compile_time_env', None): self.compile_time_env.update(context.options.compile_time_env) self.parse_comments = parse_comments self.source_encoding = source_encoding if filename.is_python_file(): self.in_python_file = True self.keywords = set(py_reserved_words) else: self.in_python_file = False self.keywords = set(pyx_reserved_words) self.trace = trace_scanner self.indentation_stack = [0] self.indentation_char = None self.bracket_nesting_level = 0 self.async_enabled = 0 self.begin('INDENT') self.sy = '' self.next() def commentline(self, text): if self.parse_comments: self.produce('commentline', text) def strip_underscores(self, text, symbol): self.produce(symbol, text.replace('_', '')) def current_level(self): return self.indentation_stack[-1] def open_bracket_action(self, text): self.bracket_nesting_level += 1 return text def close_bracket_action(self, text): self.bracket_nesting_level -= 1 return text def newline_action(self, text): if self.bracket_nesting_level == 0: self.begin('INDENT') self.produce('NEWLINE', '') string_states = { "'": 'SQ_STRING', '"': 'DQ_STRING', "'''": 'TSQ_STRING', '"""': 'TDQ_STRING' } def begin_string_action(self, text): while text[:1] in any_string_prefix: text = text[1:] self.begin(self.string_states[text]) self.produce('BEGIN_STRING') def end_string_action(self, text): self.begin('') self.produce('END_STRING') def unclosed_string_action(self, text): self.end_string_action(text) self.error("Unclosed string literal") def indentation_action(self, text): self.begin('') # Indentation within brackets should be ignored. 
#if self.bracket_nesting_level > 0: # return # Check that tabs and spaces are being used consistently. if text: c = text[0] #print "Scanner.indentation_action: indent with", repr(c) ### if self.indentation_char is None: self.indentation_char = c #print "Scanner.indentation_action: setting indent_char to", repr(c) else: if self.indentation_char != c: self.error("Mixed use of tabs and spaces") if text.replace(c, "") != "": self.error("Mixed use of tabs and spaces") # Figure out how many indents/dedents to do current_level = self.current_level() new_level = len(text) #print "Changing indent level from", current_level, "to", new_level ### if new_level == current_level: return elif new_level > current_level: #print "...pushing level", new_level ### self.indentation_stack.append(new_level) self.produce('INDENT', '') else: while new_level < self.current_level(): #print "...popping level", self.indentation_stack[-1] ### self.indentation_stack.pop() self.produce('DEDENT', '') #print "...current level now", self.current_level() ### if new_level != self.current_level(): self.error("Inconsistent indentation") def eof_action(self, text): while len(self.indentation_stack) > 1: self.produce('DEDENT', '') self.indentation_stack.pop() self.produce('EOF', '') def next(self): try: sy, systring = self.read() except UnrecognizedInput: self.error("Unrecognized character") return # just a marker, error() always raises if sy == IDENT: if systring in self.keywords: if systring == u'print' and print_function in self.context.future_directives: self.keywords.discard('print') elif systring == u'exec' and self.context.language_level >= 3: self.keywords.discard('exec') else: sy = systring systring = self.context.intern_ustring(systring) self.sy = sy self.systring = systring if False: # debug_scanner: _, line, col = self.position() if not self.systring or self.sy == self.systring: t = self.sy else: t = "%s %s" % (self.sy, self.systring) print("--- %3d %2d %s" % (line, col, t)) def peek(self): saved = self.sy, self.systring self.next() next = self.sy, self.systring self.unread(*next) self.sy, self.systring = saved return next def put_back(self, sy, systring): self.unread(self.sy, self.systring) self.sy = sy self.systring = systring def unread(self, token, value): # This method should be added to Plex self.queue.insert(0, (token, value)) def error(self, message, pos=None, fatal=True): if pos is None: pos = self.position() if self.sy == 'INDENT': error(pos, "Possible inconsistent indentation") err = error(pos, message) if fatal: raise err def expect(self, what, message=None): if self.sy == what: self.next() else: self.expected(what, message) def expect_keyword(self, what, message=None): if self.sy == IDENT and self.systring == what: self.next() else: self.expected(what, message) def expected(self, what, message=None): if message: self.error(message) else: if self.sy == IDENT: found = self.systring else: found = self.sy self.error("Expected '%s', found '%s'" % (what, found)) def expect_indent(self): self.expect('INDENT', "Expected an increase in indentation level") def expect_dedent(self): self.expect('DEDENT', "Expected a decrease in indentation level") def expect_newline(self, message="Expected a newline", ignore_semicolon=False): # Expect either a newline or end of file useless_trailing_semicolon = None if ignore_semicolon and self.sy == ';': useless_trailing_semicolon = self.position() self.next() if self.sy != 'EOF': self.expect('NEWLINE', message) if useless_trailing_semicolon is not None: 
warning(useless_trailing_semicolon, "useless trailing semicolon") def enter_async(self): self.async_enabled += 1 if self.async_enabled == 1: self.keywords.add('async') self.keywords.add('await') def exit_async(self): assert self.async_enabled > 0 self.async_enabled -= 1 if not self.async_enabled: self.keywords.discard('await') self.keywords.discard('async') if self.sy in ('async', 'await'): self.sy, self.systring = IDENT, self.context.intern_ustring(self.sy) Cython-0.26.1/Cython/Compiler/DebugFlags.py0000664000175000017500000000115712542002467021231 0ustar stefanstefan00000000000000# Can be enabled at the command line with --debug-xxx. debug_disposal_code = 0 debug_temp_alloc = 0 debug_coercion = 0 # Write comments into the C code that show where temporary variables # are allocated and released. debug_temp_code_comments = 0 # Write a call trace of the code generation phase into the C code. debug_trace_code_generation = 0 # Do not replace exceptions with user-friendly error messages. debug_no_exception_intercept = 0 # Print a message each time a new stage in the pipeline is entered. debug_verbose_pipeline = 0 # Raise an exception when an error is encountered. debug_exception_on_error = 0 Cython-0.26.1/Cython/Compiler/Options.py0000664000175000017500000003573713143605603020673 0ustar stefanstefan00000000000000# # Cython - Compilation-wide options and pragma declarations # from __future__ import absolute_import class ShouldBeFromDirective(object): known_directives = [] def __init__(self, options_name, directive_name=None, disallow=False): self.options_name = options_name self.directive_name = directive_name or options_name self.disallow = disallow self.known_directives.append(self) def __nonzero__(self): self._bad_access() def __int__(self): self._bad_access() def _bad_access(self): raise RuntimeError(repr(self)) def __repr__(self): return ( "Illegal access of '%s' from Options module rather than directive '%s'" % (self.options_name, self.directive_name)) # Include docstrings. docstrings = True # Embed the source code position in the docstrings of functions and classes. embed_pos_in_docstring = False # Copy the original source code line by line into C code comments # in the generated code file to help with understanding the output. emit_code_comments = True pre_import = None # undocumented # Decref global variables in this module on exit for garbage collection. # 0: None, 1+: interned objects, 2+: cdef globals, 3+: types objects # Mostly for reducing noise in Valgrind, only executes at process exit # (when all memory will be reclaimed anyways). generate_cleanup_code = False # Should tp_clear() set object fields to None instead of clearing them to NULL? clear_to_none = True # Generate an annotated HTML version of the input source files. annotate = False # When annotating source files in HTML, include coverage information from # this file. annotate_coverage_xml = None # This will abort the compilation on the first error occurred rather than trying # to keep going and printing further error messages. fast_fail = False # Make all warnings into errors. warning_errors = False # Make unknown names an error. Python raises a NameError when # encountering unknown names at runtime, whereas this option makes # them a compile time error. If you want full Python compatibility, # you should disable this option and also 'cache_builtins'. error_on_unknown_names = True # Make uninitialized local variable reference a compile time error. 
# Python raises UnboundLocalError at runtime, whereas this option makes # them a compile time error. Note that this option affects only variables # of "python object" type. error_on_uninitialized = True # This will convert statements of the form "for i in range(...)" # to "for i from ..." when i is a cdef'd integer type, and the direction # (i.e. sign of step) can be determined. # WARNING: This may change the semantics if the range causes assignment to # i to overflow. Specifically, if this option is set, an error will be # raised before the loop is entered, whereas without this option the loop # will execute until an overflowing value is encountered. convert_range = True # Perform lookups on builtin names only once, at module initialisation # time. This will prevent the module from getting imported if a # builtin name that it uses cannot be found during initialisation. cache_builtins = True # Generate branch prediction hints to speed up error handling etc. gcc_branch_hints = True # Enable this to allow one to write your_module.foo = ... to overwrite the # definition if the cpdef function foo, at the cost of an extra dictionary # lookup on every call. # If this is false it generates only the Python wrapper and no override check. lookup_module_cpdef = False # Whether or not to embed the Python interpreter, for use in making a # standalone executable or calling from external libraries. # This will provide a method which initialises the interpreter and # executes the body of this module. embed = None # In previous iterations of Cython, globals() gave the first non-Cython module # globals in the call stack. Sage relies on this behavior for variable injection. old_style_globals = ShouldBeFromDirective('old_style_globals') # Allows cimporting from a pyx file without a pxd file. cimport_from_pyx = False # max # of dims for buffers -- set lower than number of dimensions in numpy, as # slices are passed by value and involve a lot of copying buffer_max_dims = 8 # Number of function closure instances to keep in a freelist (0: no freelists) closure_freelist_size = 8 def get_directive_defaults(): # To add an item to this list, all accesses should be changed to use the new # directive, and the global option itself should be set to an instance of # ShouldBeFromDirective. for old_option in ShouldBeFromDirective.known_directives: value = globals().get(old_option.options_name) assert old_option.directive_name in _directive_defaults if not isinstance(value, ShouldBeFromDirective): if old_option.disallow: raise RuntimeError( "Option '%s' must be set from directive '%s'" % ( old_option.option_name, old_option.directive_name)) else: # Warn? 
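# (editor's note) Otherwise the legacy module-level value is silently migrated into the directive defaults below.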
_directive_defaults[old_option.directive_name] = value return _directive_defaults # Declare compiler directives _directive_defaults = { 'boundscheck' : True, 'nonecheck' : False, 'initializedcheck' : True, 'embedsignature' : False, 'locals' : {}, 'auto_cpdef': False, 'auto_pickle': None, 'cdivision': False, # was True before 0.12 'cdivision_warnings': False, 'overflowcheck': False, 'overflowcheck.fold': True, 'always_allow_keywords': False, 'allow_none_for_extension_args': True, 'wraparound' : True, 'ccomplex' : False, # use C99/C++ for complex types and arith 'callspec' : "", 'final' : False, 'internal' : False, 'profile': False, 'no_gc_clear': False, 'no_gc': False, 'linetrace': False, 'emit_code_comments': True, # copy original source code into C code comments 'annotation_typing': False, # read type declarations from Python function annotations 'infer_types': None, 'infer_types.verbose': False, 'autotestdict': True, 'autotestdict.cdef': False, 'autotestdict.all': False, 'language_level': 2, 'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere. 'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode 'c_string_type': 'bytes', 'c_string_encoding': '', 'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types 'unraisable_tracebacks': True, 'old_style_globals': False, 'np_pythran': False, 'fast_gil': False, # set __file__ and/or __path__ to known source/target path at import time (instead of not having them available) 'set_initial_path' : None, # SOURCEFILE or "/full/path/to/module" 'warn': None, 'warn.undeclared': False, 'warn.unreachable': True, 'warn.maybe_uninitialized': False, 'warn.unused': False, 'warn.unused_arg': False, 'warn.unused_result': False, 'warn.multiple_declarators': True, # optimizations 'optimize.inline_defnode_calls': True, 'optimize.unpack_method_calls': True, # increases code size when True 'optimize.use_switch': True, # remove unreachable code 'remove_unreachable': True, # control flow debug directives 'control_flow.dot_output': "", # Graphviz output filename 'control_flow.dot_annotate_defs': False, # Annotate definitions # test support 'test_assert_path_exists' : [], 'test_fail_if_path_exists' : [], # experimental, subject to change 'binding': None, 'freelist': 0, 'formal_grammar': False, } # Extra warning directives extra_warnings = { 'warn.maybe_uninitialized': True, 'warn.unreachable': True, 'warn.unused': True, } def one_of(*args): def validate(name, value): if value not in args: raise ValueError("%s directive must be one of %s, got '%s'" % ( name, args, value)) else: return value return validate def normalise_encoding_name(option_name, encoding): """ >>> normalise_encoding_name('c_string_encoding', 'ascii') 'ascii' >>> normalise_encoding_name('c_string_encoding', 'AsCIi') 'ascii' >>> normalise_encoding_name('c_string_encoding', 'us-ascii') 'ascii' >>> normalise_encoding_name('c_string_encoding', 'utF8') 'utf8' >>> normalise_encoding_name('c_string_encoding', 'utF-8') 'utf8' >>> normalise_encoding_name('c_string_encoding', 'deFAuLT') 'default' >>> normalise_encoding_name('c_string_encoding', 'default') 'default' >>> normalise_encoding_name('c_string_encoding', 'SeriousLyNoSuch--Encoding') 'SeriousLyNoSuch--Encoding' """ if not encoding: return '' if encoding.lower() in ('default', 'ascii', 'utf8'): return encoding.lower() import codecs try: decoder = codecs.getdecoder(encoding) except LookupError: return encoding # may exists at runtime ... 
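# (editor's note) Unknown encodings are returned unchanged; recognised ones are canonicalised to 'ascii' or 'utf8' below by comparing their codec decoders.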
for name in ('ascii', 'utf8'): if codecs.getdecoder(name) == decoder: return name return encoding # Override types possibilities above, if needed directive_types = { 'auto_pickle': bool, 'final' : bool, # final cdef classes and methods 'internal' : bool, # cdef class visibility in the module dict 'infer_types' : bool, # values can be True/None/False 'binding' : bool, 'cfunc' : None, # decorators do not take directive value 'ccall' : None, 'inline' : None, 'staticmethod' : None, 'cclass' : None, 'returns' : type, 'set_initial_path': str, 'freelist': int, 'c_string_type': one_of('bytes', 'bytearray', 'str', 'unicode'), 'c_string_encoding': normalise_encoding_name, } for key, val in _directive_defaults.items(): if key not in directive_types: directive_types[key] = type(val) directive_scopes = { # defaults to available everywhere # 'module', 'function', 'class', 'with statement' 'auto_pickle': ('module', 'cclass'), 'final' : ('cclass', 'function'), 'inline' : ('function',), 'staticmethod' : ('function',), # FIXME: analysis currently lacks more specific function scope 'no_gc_clear' : ('cclass',), 'no_gc' : ('cclass',), 'internal' : ('cclass',), 'autotestdict' : ('module',), 'autotestdict.all' : ('module',), 'autotestdict.cdef' : ('module',), 'set_initial_path' : ('module',), 'test_assert_path_exists' : ('function', 'class', 'cclass'), 'test_fail_if_path_exists' : ('function', 'class', 'cclass'), 'freelist': ('cclass',), 'emit_code_comments': ('module',), 'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope # Avoid scope-specific to/from_py_functions for c_string. 'c_string_type': ('module',), 'c_string_encoding': ('module',), 'type_version_tag': ('module', 'cclass'), 'language_level': ('module',), # globals() could conceivably be controlled at a finer granularity, # but that would complicate the implementation 'old_style_globals': ('module',), 'np_pythran': ('module',), 'fast_gil': ('module',), } def parse_directive_value(name, value, relaxed_bool=False): """ Parses value as an option value for the given name and returns the interpreted value. None is returned if the option does not exist. >>> print(parse_directive_value('nonexisting', 'asdf asdfd')) None >>> parse_directive_value('boundscheck', 'True') True >>> parse_directive_value('boundscheck', 'true') Traceback (most recent call last): ... 
ValueError: boundscheck directive must be set to True or False, got 'true' >>> parse_directive_value('c_string_encoding', 'us-ascii') 'ascii' >>> parse_directive_value('c_string_type', 'str') 'str' >>> parse_directive_value('c_string_type', 'bytes') 'bytes' >>> parse_directive_value('c_string_type', 'bytearray') 'bytearray' >>> parse_directive_value('c_string_type', 'unicode') 'unicode' >>> parse_directive_value('c_string_type', 'unnicode') Traceback (most recent call last): ValueError: c_string_type directive must be one of ('bytes', 'bytearray', 'str', 'unicode'), got 'unnicode' """ type = directive_types.get(name) if not type: return None orig_value = value if type is bool: value = str(value) if value == 'True': return True if value == 'False': return False if relaxed_bool: value = value.lower() if value in ("true", "yes"): return True elif value in ("false", "no"): return False raise ValueError("%s directive must be set to True or False, got '%s'" % ( name, orig_value)) elif type is int: try: return int(value) except ValueError: raise ValueError("%s directive must be set to an integer, got '%s'" % ( name, orig_value)) elif type is str: return str(value) elif callable(type): return type(name, value) else: assert False def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False, current_settings=None): """ Parses a comma-separated list of pragma options. Whitespace is not considered. >>> parse_directive_list(' ') {} >>> (parse_directive_list('boundscheck=True') == ... {'boundscheck': True}) True >>> parse_directive_list(' asdf') Traceback (most recent call last): ... ValueError: Expected "=" in option "asdf" >>> parse_directive_list('boundscheck=hey') Traceback (most recent call last): ... ValueError: boundscheck directive must be set to True or False, got 'hey' >>> parse_directive_list('unknown=True') Traceback (most recent call last): ... ValueError: Unknown option: "unknown" >>> warnings = parse_directive_list('warn.all=True') >>> len(warnings) > 1 True >>> sum(warnings.values()) == len(warnings) # all true. True """ if current_settings is None: result = {} else: result = current_settings for item in s.split(','): item = item.strip() if not item: continue if not '=' in item: raise ValueError('Expected "=" in option "%s"' % item) name, value = [s.strip() for s in item.strip().split('=', 1)] if name not in _directive_defaults: found = False if name.endswith('.all'): prefix = name[:-3] for directive in _directive_defaults: if directive.startswith(prefix): found = True parsed_value = parse_directive_value(directive, value, relaxed_bool=relaxed_bool) result[directive] = parsed_value if not found and not ignore_unknown: raise ValueError('Unknown option: "%s"' % name) else: parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool) result[name] = parsed_value return result Cython-0.26.1/Cython/Compiler/__init__.py0000664000175000017500000000001512542002467020755 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Compiler/TypeInference.py0000664000175000017500000005134313023021033021751 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Errors import error, message from . import ExprNodes from . import Nodes from . import Builtin from . import PyrexTypes from .. 
import Utils from .PyrexTypes import py_object_type, unspecified_type from .Visitor import CythonTransform, EnvTransform try: reduce except NameError: from functools import reduce class TypedExprNode(ExprNodes.ExprNode): # Used for declaring assignments of a specified type without a known entry. subexprs = [] def __init__(self, type, pos=None): super(TypedExprNode, self).__init__(pos, type=type) object_expr = TypedExprNode(py_object_type) class MarkParallelAssignments(EnvTransform): # Collects assignments inside parallel blocks prange, with parallel. # Perhaps it's better to move it to ControlFlowAnalysis. # tells us whether we're in a normal loop in_loop = False parallel_errors = False def __init__(self, context): # Track the parallel block scopes (with parallel, for i in prange()) self.parallel_block_stack = [] super(MarkParallelAssignments, self).__init__(context) def mark_assignment(self, lhs, rhs, inplace_op=None): if isinstance(lhs, (ExprNodes.NameNode, Nodes.PyArgDeclNode)): if lhs.entry is None: # TODO: This shouldn't happen... return if self.parallel_block_stack: parallel_node = self.parallel_block_stack[-1] previous_assignment = parallel_node.assignments.get(lhs.entry) # If there was a previous assignment to the variable, keep the # previous assignment position if previous_assignment: pos, previous_inplace_op = previous_assignment if (inplace_op and previous_inplace_op and inplace_op != previous_inplace_op): # x += y; x *= y t = (inplace_op, previous_inplace_op) error(lhs.pos, "Reduction operator '%s' is inconsistent " "with previous reduction operator '%s'" % t) else: pos = lhs.pos parallel_node.assignments[lhs.entry] = (pos, inplace_op) parallel_node.assigned_nodes.append(lhs) elif isinstance(lhs, ExprNodes.SequenceNode): for i, arg in enumerate(lhs.args): if not rhs or arg.is_starred: item_node = None else: item_node = rhs.inferable_item_node(i) self.mark_assignment(arg, item_node) else: # Could use this info to infer cdef class attributes... pass def visit_WithTargetAssignmentStatNode(self, node): self.mark_assignment(node.lhs, node.with_node.enter_call) self.visitchildren(node) return node def visit_SingleAssignmentNode(self, node): self.mark_assignment(node.lhs, node.rhs) self.visitchildren(node) return node def visit_CascadedAssignmentNode(self, node): for lhs in node.lhs_list: self.mark_assignment(lhs, node.rhs) self.visitchildren(node) return node def visit_InPlaceAssignmentNode(self, node): self.mark_assignment(node.lhs, node.create_binop_node(), node.operator) self.visitchildren(node) return node def visit_ForInStatNode(self, node): # TODO: Remove redundancy with range optimization... 
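# (editor's note) reversed(), enumerate() and (x)range() calls are special-cased below so the loop target is marked with suitably typed assignments; other iterables fall back to marking an IndexNode item lookup on the sequence.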
is_special = False sequence = node.iterator.sequence target = node.target if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: entry = self.current_env().lookup(function.name) if not entry or entry.is_builtin: if function.name == 'reversed' and len(sequence.args) == 1: sequence = sequence.args[0] elif function.name == 'enumerate' and len(sequence.args) == 1: if target.is_sequence_constructor and len(target.args) == 2: iterator = sequence.args[0] if iterator.is_name: iterator_type = iterator.infer_type(self.current_env()) if iterator_type.is_builtin_type: # assume that builtin types have a length within Py_ssize_t self.mark_assignment( target.args[0], ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX', type=PyrexTypes.c_py_ssize_t_type)) target = target.args[1] sequence = sequence.args[0] if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: entry = self.current_env().lookup(function.name) if not entry or entry.is_builtin: if function.name in ('range', 'xrange'): is_special = True for arg in sequence.args[:2]: self.mark_assignment(target, arg) if len(sequence.args) > 2: self.mark_assignment( target, ExprNodes.binop_node(node.pos, '+', sequence.args[0], sequence.args[2])) if not is_special: # A for-loop basically translates to subsequent calls to # __getitem__(), so using an IndexNode here allows us to # naturally infer the base type of pointers, C arrays, # Python strings, etc., while correctly falling back to an # object type when the base type cannot be handled. self.mark_assignment(target, ExprNodes.IndexNode( node.pos, base=sequence, index=ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX', type=PyrexTypes.c_py_ssize_t_type))) self.visitchildren(node) return node def visit_ForFromStatNode(self, node): self.mark_assignment(node.target, node.bound1) if node.step is not None: self.mark_assignment(node.target, ExprNodes.binop_node(node.pos, '+', node.bound1, node.step)) self.visitchildren(node) return node def visit_WhileStatNode(self, node): self.visitchildren(node) return node def visit_ExceptClauseNode(self, node): if node.target is not None: self.mark_assignment(node.target, object_expr) self.visitchildren(node) return node def visit_FromCImportStatNode(self, node): pass # Can't be assigned to... def visit_FromImportStatNode(self, node): for name, target in node.items: if name != "*": self.mark_assignment(target, object_expr) self.visitchildren(node) return node def visit_DefNode(self, node): # use fake expressions with the right result type if node.star_arg: self.mark_assignment( node.star_arg, TypedExprNode(Builtin.tuple_type, node.pos)) if node.starstar_arg: self.mark_assignment( node.starstar_arg, TypedExprNode(Builtin.dict_type, node.pos)) EnvTransform.visit_FuncDefNode(self, node) return node def visit_DelStatNode(self, node): for arg in node.args: self.mark_assignment(arg, arg) self.visitchildren(node) return node def visit_ParallelStatNode(self, node): if self.parallel_block_stack: node.parent = self.parallel_block_stack[-1] else: node.parent = None nested = False if node.is_prange: if not node.parent: node.is_parallel = True else: node.is_parallel = (node.parent.is_prange or not node.parent.is_parallel) nested = node.parent.is_prange else: node.is_parallel = True # Note: nested with parallel() blocks are handled by # ParallelRangeTransform! 
# nested = node.parent nested = node.parent and node.parent.is_prange self.parallel_block_stack.append(node) nested = nested or len(self.parallel_block_stack) > 2 if not self.parallel_errors and nested and not node.is_prange: error(node.pos, "Only prange() may be nested") self.parallel_errors = True if node.is_prange: child_attrs = node.child_attrs node.child_attrs = ['body', 'target', 'args'] self.visitchildren(node) node.child_attrs = child_attrs self.parallel_block_stack.pop() if node.else_clause: node.else_clause = self.visit(node.else_clause) else: self.visitchildren(node) self.parallel_block_stack.pop() self.parallel_errors = False return node def visit_YieldExprNode(self, node): if self.parallel_block_stack: error(node.pos, "Yield not allowed in parallel sections") return node def visit_ReturnStatNode(self, node): node.in_parallel = bool(self.parallel_block_stack) return node class MarkOverflowingArithmetic(CythonTransform): # It may be possible to integrate this with the above for # performance improvements (though likely not worth it). might_overflow = False def __call__(self, root): self.env_stack = [] self.env = root.scope return super(MarkOverflowingArithmetic, self).__call__(root) def visit_safe_node(self, node): self.might_overflow, saved = False, self.might_overflow self.visitchildren(node) self.might_overflow = saved return node def visit_neutral_node(self, node): self.visitchildren(node) return node def visit_dangerous_node(self, node): self.might_overflow, saved = True, self.might_overflow self.visitchildren(node) self.might_overflow = saved return node def visit_FuncDefNode(self, node): self.env_stack.append(self.env) self.env = node.local_scope self.visit_safe_node(node) self.env = self.env_stack.pop() return node def visit_NameNode(self, node): if self.might_overflow: entry = node.entry or self.env.lookup(node.name) if entry: entry.might_overflow = True return node def visit_BinopNode(self, node): if node.operator in '&|^': return self.visit_neutral_node(node) else: return self.visit_dangerous_node(node) visit_UnopNode = visit_neutral_node visit_UnaryMinusNode = visit_dangerous_node visit_InPlaceAssignmentNode = visit_dangerous_node visit_Node = visit_safe_node def visit_assignment(self, lhs, rhs): if (isinstance(rhs, ExprNodes.IntNode) and isinstance(lhs, ExprNodes.NameNode) and Utils.long_literal(rhs.value)): entry = lhs.entry or self.env.lookup(lhs.name) if entry: entry.might_overflow = True def visit_SingleAssignmentNode(self, node): self.visit_assignment(node.lhs, node.rhs) self.visitchildren(node) return node def visit_CascadedAssignmentNode(self, node): for lhs in node.lhs_list: self.visit_assignment(lhs, node.rhs) self.visitchildren(node) return node class PyObjectTypeInferer(object): """ If it's not declared, it's a PyObject. """ def infer_types(self, scope): """ Given a dict of entries, map all unspecified types to a specified type. """ for name, entry in scope.entries.items(): if entry.type is unspecified_type: entry.type = py_object_type class SimpleAssignmentTypeInferer(object): """ Very basic type inference. Note: in order to support cross-closure type inference, this must be applies to nested scopes in top-down order. 
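(Editor's note: the inference below iterates to a fixed point: assignments whose dependencies are fully resolved are inferred first, circular dependencies are then broken by partial inference, and inferred entry types are re-checked until they stop changing.)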
""" def set_entry_type(self, entry, entry_type): entry.type = entry_type for e in entry.all_entries(): e.type = entry_type def infer_types(self, scope): enabled = scope.directives['infer_types'] verbose = scope.directives['infer_types.verbose'] if enabled == True: spanning_type = aggressive_spanning_type elif enabled is None: # safe mode spanning_type = safe_spanning_type else: for entry in scope.entries.values(): if entry.type is unspecified_type: self.set_entry_type(entry, py_object_type) return # Set of assignemnts assignments = set() assmts_resolved = set() dependencies = {} assmt_to_names = {} for name, entry in scope.entries.items(): for assmt in entry.cf_assignments: names = assmt.type_dependencies() assmt_to_names[assmt] = names assmts = set() for node in names: assmts.update(node.cf_state) dependencies[assmt] = assmts if entry.type is unspecified_type: assignments.update(entry.cf_assignments) else: assmts_resolved.update(entry.cf_assignments) def infer_name_node_type(node): types = [assmt.inferred_type for assmt in node.cf_state] if not types: node_type = py_object_type else: entry = node.entry node_type = spanning_type( types, entry.might_overflow, entry.pos, scope) node.inferred_type = node_type def infer_name_node_type_partial(node): types = [assmt.inferred_type for assmt in node.cf_state if assmt.inferred_type is not None] if not types: return entry = node.entry return spanning_type(types, entry.might_overflow, entry.pos, scope) def resolve_assignments(assignments): resolved = set() for assmt in assignments: deps = dependencies[assmt] # All assignments are resolved if assmts_resolved.issuperset(deps): for node in assmt_to_names[assmt]: infer_name_node_type(node) # Resolve assmt inferred_type = assmt.infer_type() assmts_resolved.add(assmt) resolved.add(assmt) assignments.difference_update(resolved) return resolved def partial_infer(assmt): partial_types = [] for node in assmt_to_names[assmt]: partial_type = infer_name_node_type_partial(node) if partial_type is None: return False partial_types.append((node, partial_type)) for node, partial_type in partial_types: node.inferred_type = partial_type assmt.infer_type() return True partial_assmts = set() def resolve_partial(assignments): # try to handle circular references partials = set() for assmt in assignments: if assmt in partial_assmts: continue if partial_infer(assmt): partials.add(assmt) assmts_resolved.add(assmt) partial_assmts.update(partials) return partials # Infer assignments while True: if not resolve_assignments(assignments): if not resolve_partial(assignments): break inferred = set() # First pass for entry in scope.entries.values(): if entry.type is not unspecified_type: continue entry_type = py_object_type if assmts_resolved.issuperset(entry.cf_assignments): types = [assmt.inferred_type for assmt in entry.cf_assignments] if types and all(types): entry_type = spanning_type( types, entry.might_overflow, entry.pos, scope) inferred.add(entry) self.set_entry_type(entry, entry_type) def reinfer(): dirty = False for entry in inferred: types = [assmt.infer_type() for assmt in entry.cf_assignments] new_type = spanning_type(types, entry.might_overflow, entry.pos, scope) if new_type != entry.type: self.set_entry_type(entry, new_type) dirty = True return dirty # types propagation while reinfer(): pass if verbose: for entry in inferred: message(entry.pos, "inferred '%s' to be of type '%s'" % ( entry.name, entry.type)) def find_spanning_type(type1, type2): if type1 is type2: result_type = type1 elif type1 is PyrexTypes.c_bint_type 
or type2 is PyrexTypes.c_bint_type: # type inference can break the coercion back to a Python bool # if it returns an arbitrary int type here return py_object_type else: result_type = PyrexTypes.spanning_type(type1, type2) if result_type in (PyrexTypes.c_double_type, PyrexTypes.c_float_type, Builtin.float_type): # Python's float type is just a C double, so it's safe to # use the C type instead return PyrexTypes.c_double_type return result_type def simply_type(result_type, pos): if result_type.is_reference: result_type = result_type.ref_base_type if result_type.is_const: result_type = result_type.const_base_type if result_type.is_cpp_class: result_type.check_nullary_constructor(pos) if result_type.is_array: result_type = PyrexTypes.c_ptr_type(result_type.base_type) return result_type def aggressive_spanning_type(types, might_overflow, pos, scope): return simply_type(reduce(find_spanning_type, types), pos) def safe_spanning_type(types, might_overflow, pos, scope): result_type = simply_type(reduce(find_spanning_type, types), pos) if result_type.is_pyobject: # In theory, any specific Python type is always safe to # infer. However, inferring str can cause some existing code # to break, since we are also now much more strict about # coercion from str to char *. See trac #553. if result_type.name == 'str': return py_object_type else: return result_type elif result_type is PyrexTypes.c_double_type: # Python's float type is just a C double, so it's safe to use # the C type instead return result_type elif result_type is PyrexTypes.c_bint_type: # find_spanning_type() only returns 'bint' for clean boolean # operations without other int types, so this is safe, too return result_type elif result_type.is_ptr: # Any pointer except (signed|unsigned|) char* can't implicitly # become a PyObject, and inferring char* is now accepted, too. return result_type elif result_type.is_cpp_class: # These can't implicitly become Python objects either. return result_type elif result_type.is_struct: # Though we have struct -> object for some structs, this is uncommonly # used, won't arise in pure Python, and there shouldn't be side # effects, so I'm declaring this safe. return result_type # TODO: double complex should be OK as well, but we need # to make sure everything is supported. elif (result_type.is_int or result_type.is_enum) and not might_overflow: return result_type elif (not result_type.can_coerce_to_pyobject(scope) and not result_type.is_error): return result_type return py_object_type def get_type_inferer(): return SimpleAssignmentTypeInferer() Cython-0.26.1/Cython/Compiler/Visitor.py0000664000175000017500000007020713023021033020650 0ustar stefanstefan00000000000000# cython: infer_types=True # # Tree visitor and transform framework # from __future__ import absolute_import, print_function import sys import inspect from . import TypeSlots from . import Builtin from . import Nodes from . import ExprNodes from . import Errors from . import DebugFlags from . import Future import cython cython.declare(_PRINTABLE=tuple) if sys.version_info[0] >= 3: _PRINTABLE = (bytes, str, int, float) else: _PRINTABLE = (str, unicode, long, int, float) class TreeVisitor(object): """ Base class for writing visitors for a Cython tree, contains utilities for recursing such trees using visitors. Each node is expected to have a child_attrs iterable containing the names of attributes containing child nodes or lists of child nodes. Lists are not considered part of the tree structure (i.e. 
contained nodes are considered direct children of the parent node). visit_children visits each of the children of a given node (see the visit_children documentation). When recursing the tree using visit_children, an attribute access_path is maintained which gives information about the current location in the tree as a stack of tuples: (parent_node, attrname, index), representing the node, attribute and optional list index that was taken in each step in the path to the current node. Example: >>> class SampleNode(object): ... child_attrs = ["head", "body"] ... def __init__(self, value, head=None, body=None): ... self.value = value ... self.head = head ... self.body = body ... def __repr__(self): return "SampleNode(%s)" % self.value ... >>> tree = SampleNode(0, SampleNode(1), [SampleNode(2), SampleNode(3)]) >>> class MyVisitor(TreeVisitor): ... def visit_SampleNode(self, node): ... print("in %s %s" % (node.value, self.access_path)) ... self.visitchildren(node) ... print("out %s" % node.value) ... >>> MyVisitor().visit(tree) in 0 [] in 1 [(SampleNode(0), 'head', None)] out 1 in 2 [(SampleNode(0), 'body', 0)] out 2 in 3 [(SampleNode(0), 'body', 1)] out 3 out 0 """ def __init__(self): super(TreeVisitor, self).__init__() self.dispatch_table = {} self.access_path = [] def dump_node(self, node, indent=0): ignored = list(node.child_attrs or []) + [ u'child_attrs', u'pos', u'gil_message', u'cpp_message', u'subexprs'] values = [] pos = getattr(node, 'pos', None) if pos: source = pos[0] if source: import os.path source = os.path.basename(source.get_description()) values.append(u'%s:%s:%s' % (source, pos[1], pos[2])) attribute_names = dir(node) attribute_names.sort() for attr in attribute_names: if attr in ignored: continue if attr.startswith('_') or attr.endswith('_'): continue try: value = getattr(node, attr) except AttributeError: continue if value is None or value == 0: continue elif isinstance(value, list): value = u'[...]/%d' % len(value) elif not isinstance(value, _PRINTABLE): continue else: value = repr(value) values.append(u'%s = %s' % (attr, value)) return u'%s(%s)' % (node.__class__.__name__, u',\n '.join(values)) def _find_node_path(self, stacktrace): import os.path last_traceback = stacktrace nodes = [] while hasattr(stacktrace, 'tb_frame'): frame = stacktrace.tb_frame node = frame.f_locals.get(u'self') if isinstance(node, Nodes.Node): code = frame.f_code method_name = code.co_name pos = (os.path.basename(code.co_filename), frame.f_lineno) nodes.append((node, method_name, pos)) last_traceback = stacktrace stacktrace = stacktrace.tb_next return (last_traceback, nodes) def _raise_compiler_error(self, child, e): trace = [''] for parent, attribute, index in self.access_path: node = getattr(parent, attribute) if index is None: index = '' else: node = node[index] index = u'[%d]' % index trace.append(u'%s.%s%s = %s' % ( parent.__class__.__name__, attribute, index, self.dump_node(node))) stacktrace, called_nodes = self._find_node_path(sys.exc_info()[2]) last_node = child for node, method_name, pos in called_nodes: last_node = node trace.append(u"File '%s', line %d, in %s: %s" % ( pos[0], pos[1], method_name, self.dump_node(node))) raise Errors.CompilerCrash( getattr(last_node, 'pos', None), self.__class__.__name__, u'\n'.join(trace), e, stacktrace) @cython.final def find_handler(self, obj): # to resolve, try entire hierarchy cls = type(obj) pattern = "visit_%s" mro = inspect.getmro(cls) handler_method = None for mro_cls in mro: handler_method = getattr(self, pattern % mro_cls.__name__, None) if 
handler_method is not None: return handler_method print(type(self), cls) if self.access_path: print(self.access_path) print(self.access_path[-1][0].pos) print(self.access_path[-1][0].__dict__) raise RuntimeError("Visitor %r does not accept object: %s" % (self, obj)) def visit(self, obj): return self._visit(obj) @cython.final def _visit(self, obj): try: try: handler_method = self.dispatch_table[type(obj)] except KeyError: handler_method = self.find_handler(obj) self.dispatch_table[type(obj)] = handler_method return handler_method(obj) except Errors.CompileError: raise except Errors.AbortError: raise except Exception as e: if DebugFlags.debug_no_exception_intercept: raise self._raise_compiler_error(obj, e) @cython.final def _visitchild(self, child, parent, attrname, idx): self.access_path.append((parent, attrname, idx)) result = self._visit(child) self.access_path.pop() return result def visitchildren(self, parent, attrs=None): return self._visitchildren(parent, attrs) @cython.final @cython.locals(idx=int) def _visitchildren(self, parent, attrs): """ Visits the children of the given parent. If parent is None, returns immediately (returning None). The return value is a dictionary giving the results for each child (mapping the attribute name to either the return value or a list of return values (in the case of multiple children in an attribute)). """ if parent is None: return None result = {} for attr in parent.child_attrs: if attrs is not None and attr not in attrs: continue child = getattr(parent, attr) if child is not None: if type(child) is list: childretval = [self._visitchild(x, parent, attr, idx) for idx, x in enumerate(child)] else: childretval = self._visitchild(child, parent, attr, None) assert not isinstance(childretval, list), 'Cannot insert list here: %s in %r' % (attr, parent) result[attr] = childretval return result class VisitorTransform(TreeVisitor): """ A tree transform is a base class for visitors that wants to do stream processing of the structure (rather than attributes etc.) of a tree. It implements __call__ to simply visit the argument node. It requires the visitor methods to return the nodes which should take the place of the visited node in the result tree (which can be the same or one or more replacement). Specifically, if the return value from a visitor method is: - [] or None; the visited node will be removed (set to None if an attribute and removed if in a list) - A single node; the visited node will be replaced by the returned node. - A list of nodes; the visited nodes will be replaced by all the nodes in the list. This will only work if the node was already a member of a list; if it was not, an exception will be raised. (Typically you want to ensure that you are within a StatListNode or similar before doing this.) """ def visitchildren(self, parent, attrs=None): result = self._visitchildren(parent, attrs) for attr, newnode in result.items(): if type(newnode) is not list: setattr(parent, attr, newnode) else: # Flatten the list one level and remove any None newlist = [] for x in newnode: if x is not None: if type(x) is list: newlist += x else: newlist.append(x) setattr(parent, attr, newlist) return result def recurse_to_children(self, node): self.visitchildren(node) return node def __call__(self, root): return self._visit(root) class CythonTransform(VisitorTransform): """ Certain common conventions and utilities for Cython transforms. 
- Sets up the context of the pipeline in self.context - Tracks directives in effect in self.current_directives """ def __init__(self, context): super(CythonTransform, self).__init__() self.context = context def __call__(self, node): from . import ModuleNode if isinstance(node, ModuleNode.ModuleNode): self.current_directives = node.directives return super(CythonTransform, self).__call__(node) def visit_CompilerDirectivesNode(self, node): old = self.current_directives self.current_directives = node.directives self.visitchildren(node) self.current_directives = old return node def visit_Node(self, node): self.visitchildren(node) return node class ScopeTrackingTransform(CythonTransform): # Keeps track of type of scopes #scope_type: can be either of 'module', 'function', 'cclass', 'pyclass', 'struct' #scope_node: the node that owns the current scope def visit_ModuleNode(self, node): self.scope_type = 'module' self.scope_node = node self.visitchildren(node) return node def visit_scope(self, node, scope_type): prev = self.scope_type, self.scope_node self.scope_type = scope_type self.scope_node = node self.visitchildren(node) self.scope_type, self.scope_node = prev return node def visit_CClassDefNode(self, node): return self.visit_scope(node, 'cclass') def visit_PyClassDefNode(self, node): return self.visit_scope(node, 'pyclass') def visit_FuncDefNode(self, node): return self.visit_scope(node, 'function') def visit_CStructOrUnionDefNode(self, node): return self.visit_scope(node, 'struct') class EnvTransform(CythonTransform): """ This transformation keeps a stack of the environments. """ def __call__(self, root): self.env_stack = [] self.enter_scope(root, root.scope) return super(EnvTransform, self).__call__(root) def current_env(self): return self.env_stack[-1][1] def current_scope_node(self): return self.env_stack[-1][0] def global_scope(self): return self.current_env().global_scope() def enter_scope(self, node, scope): self.env_stack.append((node, scope)) def exit_scope(self): self.env_stack.pop() def visit_FuncDefNode(self, node): self.enter_scope(node, node.local_scope) self.visitchildren(node) self.exit_scope() return node def visit_GeneratorBodyDefNode(self, node): self.visitchildren(node) return node def visit_ClassDefNode(self, node): self.enter_scope(node, node.scope) self.visitchildren(node) self.exit_scope() return node def visit_CStructOrUnionDefNode(self, node): self.enter_scope(node, node.scope) self.visitchildren(node) self.exit_scope() return node def visit_ScopedExprNode(self, node): if node.expr_scope: self.enter_scope(node, node.expr_scope) self.visitchildren(node) self.exit_scope() else: self.visitchildren(node) return node def visit_CArgDeclNode(self, node): # default arguments are evaluated in the outer scope if node.default: attrs = [attr for attr in node.child_attrs if attr != 'default'] self.visitchildren(node, attrs) self.enter_scope(node, self.current_env().outer_scope) self.visitchildren(node, ('default',)) self.exit_scope() else: self.visitchildren(node) return node class NodeRefCleanupMixin(object): """ Clean up references to nodes that were replaced. NOTE: this implementation assumes that the replacement is done first, before hitting any further references during normal tree traversal. This needs to be arranged by calling "self.visitchildren()" at a proper place in the transform and by ordering the "child_attrs" of nodes appropriately. 
""" def __init__(self, *args): super(NodeRefCleanupMixin, self).__init__(*args) self._replacements = {} def visit_CloneNode(self, node): arg = node.arg if arg not in self._replacements: self.visitchildren(arg) node.arg = self._replacements.get(arg, arg) return node def visit_ResultRefNode(self, node): expr = node.expression if expr is None or expr not in self._replacements: self.visitchildren(node) expr = node.expression if expr is not None: node.expression = self._replacements.get(expr, expr) return node def replace(self, node, replacement): self._replacements[node] = replacement return replacement find_special_method_for_binary_operator = { '<': '__lt__', '<=': '__le__', '==': '__eq__', '!=': '__ne__', '>=': '__ge__', '>': '__gt__', '+': '__add__', '&': '__and__', '/': '__div__', '//': '__floordiv__', '<<': '__lshift__', '%': '__mod__', '*': '__mul__', '|': '__or__', '**': '__pow__', '>>': '__rshift__', '-': '__sub__', '^': '__xor__', 'in': '__contains__', }.get find_special_method_for_unary_operator = { 'not': '__not__', '~': '__inv__', '-': '__neg__', '+': '__pos__', }.get class MethodDispatcherTransform(EnvTransform): """ Base class for transformations that want to intercept on specific builtin functions or methods of builtin types, including special methods triggered by Python operators. Must run after declaration analysis when entries were assigned. Naming pattern for handler methods is as follows: * builtin functions: _handle_(general|simple|any)_function_NAME * builtin methods: _handle_(general|simple|any)_method_TYPENAME_METHODNAME """ # only visit call nodes and Python operations def visit_GeneralCallNode(self, node): self.visitchildren(node) function = node.function if not function.type.is_pyobject: return node arg_tuple = node.positional_args if not isinstance(arg_tuple, ExprNodes.TupleNode): return node keyword_args = node.keyword_args if keyword_args and not isinstance(keyword_args, ExprNodes.DictNode): # can't handle **kwargs return node args = arg_tuple.args return self._dispatch_to_handler(node, function, args, keyword_args) def visit_SimpleCallNode(self, node): self.visitchildren(node) function = node.function if function.type.is_pyobject: arg_tuple = node.arg_tuple if not isinstance(arg_tuple, ExprNodes.TupleNode): return node args = arg_tuple.args else: args = node.args return self._dispatch_to_handler(node, function, args, None) def visit_PrimaryCmpNode(self, node): if node.cascade: # not currently handled below self.visitchildren(node) return node return self._visit_binop_node(node) def visit_BinopNode(self, node): return self._visit_binop_node(node) def _visit_binop_node(self, node): self.visitchildren(node) # FIXME: could special case 'not_in' special_method_name = find_special_method_for_binary_operator(node.operator) if special_method_name: operand1, operand2 = node.operand1, node.operand2 if special_method_name == '__contains__': operand1, operand2 = operand2, operand1 elif special_method_name == '__div__': if Future.division in self.current_env().global_scope().context.future_directives: special_method_name = '__truediv__' obj_type = operand1.type if obj_type.is_builtin_type: type_name = obj_type.name else: type_name = "object" # safety measure node = self._dispatch_to_method_handler( special_method_name, None, False, type_name, node, None, [operand1, operand2], None) return node def visit_UnopNode(self, node): self.visitchildren(node) special_method_name = find_special_method_for_unary_operator(node.operator) if special_method_name: operand = node.operand 
obj_type = operand.type if obj_type.is_builtin_type: type_name = obj_type.name else: type_name = "object" # safety measure node = self._dispatch_to_method_handler( special_method_name, None, False, type_name, node, None, [operand], None) return node ### dispatch to specific handlers def _find_handler(self, match_name, has_kwargs): call_type = has_kwargs and 'general' or 'simple' handler = getattr(self, '_handle_%s_%s' % (call_type, match_name), None) if handler is None: handler = getattr(self, '_handle_any_%s' % match_name, None) return handler def _delegate_to_assigned_value(self, node, function, arg_list, kwargs): assignment = function.cf_state[0] value = assignment.rhs if value.is_name: if not value.entry or len(value.entry.cf_assignments) > 1: # the variable might have been reassigned => play safe return node elif value.is_attribute and value.obj.is_name: if not value.obj.entry or len(value.obj.entry.cf_assignments) > 1: # the underlying variable might have been reassigned => play safe return node else: return node return self._dispatch_to_handler( node, value, arg_list, kwargs) def _dispatch_to_handler(self, node, function, arg_list, kwargs): if function.is_name: # we only consider functions that are either builtin # Python functions or builtins that were already replaced # into a C function call (defined in the builtin scope) if not function.entry: return node is_builtin = ( function.entry.is_builtin or function.entry is self.current_env().builtin_scope().lookup_here(function.name)) if not is_builtin: if function.cf_state and function.cf_state.is_single: # we know the value of the variable # => see if it's usable instead return self._delegate_to_assigned_value( node, function, arg_list, kwargs) return node function_handler = self._find_handler( "function_%s" % function.name, kwargs) if function_handler is None: return self._handle_function(node, function.name, function, arg_list, kwargs) if kwargs: return function_handler(node, function, arg_list, kwargs) else: return function_handler(node, function, arg_list) elif function.is_attribute: attr_name = function.attribute if function.type.is_pyobject: self_arg = function.obj elif node.self and function.entry: entry = function.entry.as_variable if not entry or not entry.is_builtin: return node # C implementation of a Python builtin method - see if we find further matches self_arg = node.self arg_list = arg_list[1:] # drop CloneNode of self argument else: return node obj_type = self_arg.type is_unbound_method = False if obj_type.is_builtin_type: if (obj_type is Builtin.type_type and self_arg.is_name and arg_list and arg_list[0].type.is_pyobject): # calling an unbound method like 'list.append(L,x)' # (ignoring 'type.mro()' here ...) 
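# Illustrative sketch (added comment only, reusing the 'list.append(L,x)' example
# from the comment above): for such a call the branch below records type_name as
# "list", clears self_arg and marks is_unbound_method, while arg_list keeps [L, x];
# the later handler lookup then searches the transform subclass for a method named
# _handle_(general|simple|any)_method_list_append, matching the naming pattern
# documented in the class docstring.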
type_name = self_arg.name self_arg = None is_unbound_method = True else: type_name = obj_type.name else: type_name = "object" # safety measure return self._dispatch_to_method_handler( attr_name, self_arg, is_unbound_method, type_name, node, function, arg_list, kwargs) else: return node def _dispatch_to_method_handler(self, attr_name, self_arg, is_unbound_method, type_name, node, function, arg_list, kwargs): method_handler = self._find_handler( "method_%s_%s" % (type_name, attr_name), kwargs) if method_handler is None: if (attr_name in TypeSlots.method_name_to_slot or attr_name == '__new__'): method_handler = self._find_handler( "slot%s" % attr_name, kwargs) if method_handler is None: return self._handle_method( node, type_name, attr_name, function, arg_list, is_unbound_method, kwargs) if self_arg is not None: arg_list = [self_arg] + list(arg_list) if kwargs: result = method_handler( node, function, arg_list, is_unbound_method, kwargs) else: result = method_handler( node, function, arg_list, is_unbound_method) return result def _handle_function(self, node, function_name, function, arg_list, kwargs): """Fallback handler""" return node def _handle_method(self, node, type_name, attr_name, function, arg_list, is_unbound_method, kwargs): """Fallback handler""" return node class RecursiveNodeReplacer(VisitorTransform): """ Recursively replace all occurrences of a node in a subtree by another node. """ def __init__(self, orig_node, new_node): super(RecursiveNodeReplacer, self).__init__() self.orig_node, self.new_node = orig_node, new_node def visit_CloneNode(self, node): if node is self.orig_node: return self.new_node if node.arg is self.orig_node: node.arg = self.new_node return node def visit_Node(self, node): self.visitchildren(node) if node is self.orig_node: return self.new_node else: return node def recursively_replace_node(tree, old_node, new_node): replace_in = RecursiveNodeReplacer(old_node, new_node) replace_in(tree) class NodeFinder(TreeVisitor): """ Find out if a node appears in a subtree. """ def __init__(self, node): super(NodeFinder, self).__init__() self.node = node self.found = False def visit_Node(self, node): if self.found: pass # short-circuit elif node is self.node: self.found = True else: self._visitchildren(node, None) def tree_contains(tree, node): finder = NodeFinder(node) finder.visit(tree) return finder.found # Utils def replace_node(ptr, value): """Replaces a node. ptr is of the form used on the access path stack (parent, attrname, listidx|None) """ parent, attrname, listidx = ptr if listidx is None: setattr(parent, attrname, value) else: getattr(parent, attrname)[listidx] = value class PrintTree(TreeVisitor): """Prints a representation of the tree to standard output. Subclass and override repr_of to provide more information about nodes. """ def __init__(self, start=None, end=None): TreeVisitor.__init__(self) self._indent = "" if start is not None or end is not None: self._line_range = (start or 0, end or 2**30) else: self._line_range = None def indent(self): self._indent += " " def unindent(self): self._indent = self._indent[:-2] def __call__(self, tree, phase=None): print("Parse tree dump at phase '%s'" % phase) self.visit(tree) return tree # Don't do anything about process_list, the defaults gives # nice-looking name[idx] nodes which will visually appear # under the parent-node, not displaying the list itself in # the hierarchy. 
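# Illustrative usage sketch (added comment only; the variable names are just for
# illustration, the class and arguments are the ones defined here):
#
#   from Cython.Compiler.Visitor import PrintTree
#   dump = PrintTree(start=10, end=20)   # restrict output to source lines 10..20
#   tree = dump(tree, phase="parsing")   # prints the dump header, then each node
#
# Both constructor arguments are optional; with neither given, the whole tree is
# printed.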
def visit_Node(self, node): self._print_node(node) self.indent() self.visitchildren(node) self.unindent() return node def visit_CloneNode(self, node): self._print_node(node) self.indent() line = node.pos[1] if self._line_range is None or self._line_range[0] <= line <= self._line_range[1]: print("%s- %s: %s" % (self._indent, 'arg', self.repr_of(node.arg))) self.indent() self.visitchildren(node.arg) self.unindent() self.unindent() return node def _print_node(self, node): line = node.pos[1] if self._line_range is None or self._line_range[0] <= line <= self._line_range[1]: if len(self.access_path) == 0: name = "(root)" else: parent, attr, idx = self.access_path[-1] if idx is not None: name = "%s[%d]" % (attr, idx) else: name = attr print("%s- %s: %s" % (self._indent, name, self.repr_of(node))) def repr_of(self, node): if node is None: return "(none)" else: result = node.__class__.__name__ if isinstance(node, ExprNodes.NameNode): result += "(type=%s, name=\"%s\")" % (repr(node.type), node.name) elif isinstance(node, Nodes.DefNode): result += "(name=\"%s\")" % node.name elif isinstance(node, ExprNodes.ExprNode): t = node.type result += "(type=%s)" % repr(t) elif node.pos: pos = node.pos path = pos[0].get_description() if '/' in path: path = path.split('/')[-1] if '\\' in path: path = path.split('\\')[-1] result += "(pos=(%s:%s:%s))" % (path, pos[1], pos[2]) return result if __name__ == "__main__": import doctest doctest.testmod() Cython-0.26.1/Cython/Compiler/ParseTreeTransforms.py0000664000175000017500000040016613150045407023177 0ustar stefanstefan00000000000000from __future__ import absolute_import import cython cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object, Options=object, UtilNodes=object, LetNode=object, LetRefNode=object, TreeFragment=object, EncodedString=object, error=object, warning=object, copy=object, _unicode=object) import copy import hashlib from . import PyrexTypes from . import Naming from . import ExprNodes from . import Nodes from . import Options from . import Builtin from .Visitor import VisitorTransform, TreeVisitor from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform from .UtilNodes import LetNode, LetRefNode from .TreeFragment import TreeFragment from .StringEncoding import EncodedString, _unicode from .Errors import error, warning, CompileError, InternalError from .Code import UtilityCode class NameNodeCollector(TreeVisitor): """Collect all NameNodes of a (sub-)tree in the ``name_nodes`` attribute. """ def __init__(self): super(NameNodeCollector, self).__init__() self.name_nodes = [] def visit_NameNode(self, node): self.name_nodes.append(node) def visit_Node(self, node): self._visitchildren(node, None) class SkipDeclarations(object): """ Variable and function declarations can often have a deep tree structure, and yet most transformations don't need to descend to this depth. Declaration nodes are removed after AnalyseDeclarationsTransform, so there is no need to use this for transformations after that point. """ def visit_CTypeDefNode(self, node): return node def visit_CVarDefNode(self, node): return node def visit_CDeclaratorNode(self, node): return node def visit_CBaseTypeNode(self, node): return node def visit_CEnumDefNode(self, node): return node def visit_CStructOrUnionDefNode(self, node): return node class NormalizeTree(CythonTransform): """ This transform fixes up a few things after parsing in order to make the parse tree more suitable for transforms. 
a) After parsing, blocks with only one statement will be represented by that statement, not by a StatListNode. When doing transforms this is annoying and inconsistent, as one cannot in general remove a statement in a consistent way and so on. This transform wraps any single statements in a StatListNode containing a single statement. b) The PassStatNode is a noop and serves no purpose beyond plugging such one-statement blocks; i.e., once parsed a ` "pass" can just as well be represented using an empty StatListNode. This means less special cases to worry about in subsequent transforms (one always checks to see if a StatListNode has no children to see if the block is empty). """ def __init__(self, context): super(NormalizeTree, self).__init__(context) self.is_in_statlist = False self.is_in_expr = False def visit_ExprNode(self, node): stacktmp = self.is_in_expr self.is_in_expr = True self.visitchildren(node) self.is_in_expr = stacktmp return node def visit_StatNode(self, node, is_listcontainer=False): stacktmp = self.is_in_statlist self.is_in_statlist = is_listcontainer self.visitchildren(node) self.is_in_statlist = stacktmp if not self.is_in_statlist and not self.is_in_expr: return Nodes.StatListNode(pos=node.pos, stats=[node]) else: return node def visit_StatListNode(self, node): self.is_in_statlist = True self.visitchildren(node) self.is_in_statlist = False return node def visit_ParallelAssignmentNode(self, node): return self.visit_StatNode(node, True) def visit_CEnumDefNode(self, node): return self.visit_StatNode(node, True) def visit_CStructOrUnionDefNode(self, node): return self.visit_StatNode(node, True) def visit_PassStatNode(self, node): """Eliminate PassStatNode""" if not self.is_in_statlist: return Nodes.StatListNode(pos=node.pos, stats=[]) else: return [] def visit_ExprStatNode(self, node): """Eliminate useless string literals""" if node.expr.is_string_literal: return self.visit_PassStatNode(node) else: return self.visit_StatNode(node) def visit_CDeclaratorNode(self, node): return node class PostParseError(CompileError): pass # error strings checked by unit tests, so define them ERR_CDEF_INCLASS = 'Cannot assign default value to fields in cdef classes, structs or unions' ERR_BUF_DEFAULTS = 'Invalid buffer defaults specification (see docs)' ERR_INVALID_SPECIALATTR_TYPE = 'Special attributes must not have a type declared' class PostParse(ScopeTrackingTransform): """ Basic interpretation of the parse tree, as well as validity checking that can be done on a very basic level on the parse tree (while still not being a problem with the basic syntax, as such). Specifically: - Default values to cdef assignments are turned into single assignments following the declaration (everywhere but in class bodies, where they raise a compile error) - Interpret some node structures into Python runtime values. Some nodes take compile-time arguments (currently: TemplatedTypeNode[args] and __cythonbufferdefaults__ = {args}), which should be interpreted. This happens in a general way and other steps should be taken to ensure validity. Type arguments cannot be interpreted in this way. - For __cythonbufferdefaults__ the arguments are checked for validity. TemplatedTypeNode has its directives interpreted: Any first positional argument goes into the "dtype" attribute, any "ndim" keyword argument goes into the "ndim" attribute and so on. Also it is checked that the directive combination is valid. - __cythonbufferdefaults__ attributes are parsed and put into the type information. 
Note: Currently Parsing.py does a lot of interpretation and reorganization that can be refactored into this transform if a more pure Abstract Syntax Tree is wanted. """ def __init__(self, context): super(PostParse, self).__init__(context) self.specialattribute_handlers = { '__cythonbufferdefaults__' : self.handle_bufferdefaults } def visit_LambdaNode(self, node): # unpack a lambda expression into the corresponding DefNode collector = YieldNodeCollector() collector.visitchildren(node.result_expr) if collector.yields or collector.awaits or isinstance(node.result_expr, ExprNodes.YieldExprNode): body = Nodes.ExprStatNode( node.result_expr.pos, expr=node.result_expr) else: body = Nodes.ReturnStatNode( node.result_expr.pos, value=node.result_expr) node.def_node = Nodes.DefNode( node.pos, name=node.name, args=node.args, star_arg=node.star_arg, starstar_arg=node.starstar_arg, body=body, doc=None) self.visitchildren(node) return node def visit_GeneratorExpressionNode(self, node): # unpack a generator expression into the corresponding DefNode node.def_node = Nodes.DefNode(node.pos, name=node.name, doc=None, args=[], star_arg=None, starstar_arg=None, body=node.loop) self.visitchildren(node) return node # cdef variables def handle_bufferdefaults(self, decl): if not isinstance(decl.default, ExprNodes.DictNode): raise PostParseError(decl.pos, ERR_BUF_DEFAULTS) self.scope_node.buffer_defaults_node = decl.default self.scope_node.buffer_defaults_pos = decl.pos def visit_CVarDefNode(self, node): # This assumes only plain names and pointers are assignable on # declaration. Also, it makes use of the fact that a cdef decl # must appear before the first use, so we don't have to deal with # "i = 3; cdef int i = i" and can simply move the nodes around. try: self.visitchildren(node) stats = [node] newdecls = [] for decl in node.declarators: declbase = decl while isinstance(declbase, Nodes.CPtrDeclaratorNode): declbase = declbase.base if isinstance(declbase, Nodes.CNameDeclaratorNode): if declbase.default is not None: if self.scope_type in ('cclass', 'pyclass', 'struct'): if isinstance(self.scope_node, Nodes.CClassDefNode): handler = self.specialattribute_handlers.get(decl.name) if handler: if decl is not declbase: raise PostParseError(decl.pos, ERR_INVALID_SPECIALATTR_TYPE) handler(decl) continue # Remove declaration raise PostParseError(decl.pos, ERR_CDEF_INCLASS) first_assignment = self.scope_type != 'module' stats.append(Nodes.SingleAssignmentNode(node.pos, lhs=ExprNodes.NameNode(node.pos, name=declbase.name), rhs=declbase.default, first=first_assignment)) declbase.default = None newdecls.append(decl) node.declarators = newdecls return stats except PostParseError as e: # An error in a cdef clause is ok, simply remove the declaration # and try to move on to report more errors self.context.nonfatal_error(e) return None # Split parallel assignments (a,b = b,a) into separate partial # assignments that are executed rhs-first using temps. This # restructuring must be applied before type analysis so that known # types on rhs and lhs can be matched directly. It is required in # the case that the types cannot be coerced to a Python type in # order to assign from a tuple. 
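# A rough example of the effect described above (added comment only, throwaway
# names): a parallel assignment such as
#
#   a, b = x, f(y)
#
# is flattened into the separate single assignments  a = x  and  b = f(y).
# If the same non-trivial rhs value would end up in several of the resulting
# assignments (as can happen with cascades like  a, b = c = x, f(y) ),
# eliminate_rhs_duplicates() below lifts it into a LetRefNode temporary so that
# it is evaluated only once.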
def visit_SingleAssignmentNode(self, node): self.visitchildren(node) return self._visit_assignment_node(node, [node.lhs, node.rhs]) def visit_CascadedAssignmentNode(self, node): self.visitchildren(node) return self._visit_assignment_node(node, node.lhs_list + [node.rhs]) def _visit_assignment_node(self, node, expr_list): """Flatten parallel assignments into separate single assignments or cascaded assignments. """ if sum([ 1 for expr in expr_list if expr.is_sequence_constructor or expr.is_string_literal ]) < 2: # no parallel assignments => nothing to do return node expr_list_list = [] flatten_parallel_assignments(expr_list, expr_list_list) temp_refs = [] eliminate_rhs_duplicates(expr_list_list, temp_refs) nodes = [] for expr_list in expr_list_list: lhs_list = expr_list[:-1] rhs = expr_list[-1] if len(lhs_list) == 1: node = Nodes.SingleAssignmentNode(rhs.pos, lhs = lhs_list[0], rhs = rhs) else: node = Nodes.CascadedAssignmentNode(rhs.pos, lhs_list = lhs_list, rhs = rhs) nodes.append(node) if len(nodes) == 1: assign_node = nodes[0] else: assign_node = Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes) if temp_refs: duplicates_and_temps = [ (temp.expression, temp) for temp in temp_refs ] sort_common_subsequences(duplicates_and_temps) for _, temp_ref in duplicates_and_temps[::-1]: assign_node = LetNode(temp_ref, assign_node) return assign_node def _flatten_sequence(self, seq, result): for arg in seq.args: if arg.is_sequence_constructor: self._flatten_sequence(arg, result) else: result.append(arg) return result def visit_DelStatNode(self, node): self.visitchildren(node) node.args = self._flatten_sequence(node, []) return node def visit_ExceptClauseNode(self, node): if node.is_except_as: # except-as must delete NameNode target at the end del_target = Nodes.DelStatNode( node.pos, args=[ExprNodes.NameNode( node.target.pos, name=node.target.name)], ignore_nonexisting=True) node.body = Nodes.StatListNode( node.pos, stats=[Nodes.TryFinallyStatNode( node.pos, body=node.body, finally_clause=Nodes.StatListNode( node.pos, stats=[del_target]))]) self.visitchildren(node) return node def eliminate_rhs_duplicates(expr_list_list, ref_node_sequence): """Replace rhs items by LetRefNodes if they appear more than once. Creates a sequence of LetRefNodes that set up the required temps and appends them to ref_node_sequence. The input list is modified in-place. 
""" seen_nodes = set() ref_nodes = {} def find_duplicates(node): if node.is_literal or node.is_name: # no need to replace those; can't include attributes here # as their access is not necessarily side-effect free return if node in seen_nodes: if node not in ref_nodes: ref_node = LetRefNode(node) ref_nodes[node] = ref_node ref_node_sequence.append(ref_node) else: seen_nodes.add(node) if node.is_sequence_constructor: for item in node.args: find_duplicates(item) for expr_list in expr_list_list: rhs = expr_list[-1] find_duplicates(rhs) if not ref_nodes: return def substitute_nodes(node): if node in ref_nodes: return ref_nodes[node] elif node.is_sequence_constructor: node.args = list(map(substitute_nodes, node.args)) return node # replace nodes inside of the common subexpressions for node in ref_nodes: if node.is_sequence_constructor: node.args = list(map(substitute_nodes, node.args)) # replace common subexpressions on all rhs items for expr_list in expr_list_list: expr_list[-1] = substitute_nodes(expr_list[-1]) def sort_common_subsequences(items): """Sort items/subsequences so that all items and subsequences that an item contains appear before the item itself. This is needed because each rhs item must only be evaluated once, so its value must be evaluated first and then reused when packing sequences that contain it. This implies a partial order, and the sort must be stable to preserve the original order as much as possible, so we use a simple insertion sort (which is very fast for short sequences, the normal case in practice). """ def contains(seq, x): for item in seq: if item is x: return True elif item.is_sequence_constructor and contains(item.args, x): return True return False def lower_than(a,b): return b.is_sequence_constructor and contains(b.args, a) for pos, item in enumerate(items): key = item[1] # the ResultRefNode which has already been injected into the sequences new_pos = pos for i in range(pos-1, -1, -1): if lower_than(key, items[i][0]): new_pos = i if new_pos != pos: for i in range(pos, new_pos, -1): items[i] = items[i-1] items[new_pos] = item def unpack_string_to_character_literals(literal): chars = [] pos = literal.pos stype = literal.__class__ sval = literal.value sval_type = sval.__class__ for char in sval: cval = sval_type(char) chars.append(stype(pos, value=cval, constant_result=cval)) return chars def flatten_parallel_assignments(input, output): # The input is a list of expression nodes, representing the LHSs # and RHS of one (possibly cascaded) assignment statement. For # sequence constructors, rearranges the matching parts of both # sides into a list of equivalent assignments between the # individual elements. This transformation is applied # recursively, so that nested structures get matched as well. 
rhs = input[-1] if (not (rhs.is_sequence_constructor or isinstance(rhs, ExprNodes.UnicodeNode)) or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])): output.append(input) return complete_assignments = [] if rhs.is_sequence_constructor: rhs_args = rhs.args elif rhs.is_string_literal: rhs_args = unpack_string_to_character_literals(rhs) rhs_size = len(rhs_args) lhs_targets = [[] for _ in range(rhs_size)] starred_assignments = [] for lhs in input[:-1]: if not lhs.is_sequence_constructor: if lhs.is_starred: error(lhs.pos, "starred assignment target must be in a list or tuple") complete_assignments.append(lhs) continue lhs_size = len(lhs.args) starred_targets = sum([1 for expr in lhs.args if expr.is_starred]) if starred_targets > 1: error(lhs.pos, "more than 1 starred expression in assignment") output.append([lhs,rhs]) continue elif lhs_size - starred_targets > rhs_size: error(lhs.pos, "need more than %d value%s to unpack" % (rhs_size, (rhs_size != 1) and 's' or '')) output.append([lhs,rhs]) continue elif starred_targets: map_starred_assignment(lhs_targets, starred_assignments, lhs.args, rhs_args) elif lhs_size < rhs_size: error(lhs.pos, "too many values to unpack (expected %d, got %d)" % (lhs_size, rhs_size)) output.append([lhs,rhs]) continue else: for targets, expr in zip(lhs_targets, lhs.args): targets.append(expr) if complete_assignments: complete_assignments.append(rhs) output.append(complete_assignments) # recursively flatten partial assignments for cascade, rhs in zip(lhs_targets, rhs_args): if cascade: cascade.append(rhs) flatten_parallel_assignments(cascade, output) # recursively flatten starred assignments for cascade in starred_assignments: if cascade[0].is_sequence_constructor: flatten_parallel_assignments(cascade, output) else: output.append(cascade) def map_starred_assignment(lhs_targets, starred_assignments, lhs_args, rhs_args): # Appends the fixed-position LHS targets to the target list that # appear left and right of the starred argument. # # The starred_assignments list receives a new tuple # (lhs_target, rhs_values_list) that maps the remaining arguments # (those that match the starred target) to a list. # left side of the starred target for i, (targets, expr) in enumerate(zip(lhs_targets, lhs_args)): if expr.is_starred: starred = i lhs_remaining = len(lhs_args) - i - 1 break targets.append(expr) else: raise InternalError("no starred arg found when splitting starred assignment") # right side of the starred target for i, (targets, expr) in enumerate(zip(lhs_targets[-lhs_remaining:], lhs_args[starred + 1:])): targets.append(expr) # the starred target itself, must be assigned a (potentially empty) list target = lhs_args[starred].target # unpack starred node starred_rhs = rhs_args[starred:] if lhs_remaining: starred_rhs = starred_rhs[:-lhs_remaining] if starred_rhs: pos = starred_rhs[0].pos else: pos = target.pos starred_assignments.append([ target, ExprNodes.ListNode(pos=pos, args=starred_rhs)]) class PxdPostParse(CythonTransform, SkipDeclarations): """ Basic interpretation/validity checking that should only be done on pxd trees. A lot of this checking currently happens in the parser; but what is listed below happens here. 
- "def" functions are let through only if they fill the getbuffer/releasebuffer slots - cdef functions are let through only if they are on the top level and are declared "inline" """ ERR_INLINE_ONLY = "function definition in pxd file must be declared 'cdef inline'" ERR_NOGO_WITH_INLINE = "inline function definition in pxd file cannot be '%s'" def __call__(self, node): self.scope_type = 'pxd' return super(PxdPostParse, self).__call__(node) def visit_CClassDefNode(self, node): old = self.scope_type self.scope_type = 'cclass' self.visitchildren(node) self.scope_type = old return node def visit_FuncDefNode(self, node): # FuncDefNode always come with an implementation (without # an imp they are CVarDefNodes..) err = self.ERR_INLINE_ONLY if (isinstance(node, Nodes.DefNode) and self.scope_type == 'cclass' and node.name in ('__getbuffer__', '__releasebuffer__')): err = None # allow these slots if isinstance(node, Nodes.CFuncDefNode): if (u'inline' in node.modifiers and self.scope_type in ('pxd', 'cclass')): node.inline_in_pxd = True if node.visibility != 'private': err = self.ERR_NOGO_WITH_INLINE % node.visibility elif node.api: err = self.ERR_NOGO_WITH_INLINE % 'api' else: err = None # allow inline function else: err = self.ERR_INLINE_ONLY if err: self.context.nonfatal_error(PostParseError(node.pos, err)) return None else: return node class TrackNumpyAttributes(CythonTransform, SkipDeclarations): def __init__(self, context): super(TrackNumpyAttributes, self).__init__(context) self.numpy_module_names = set() def visit_CImportStatNode(self, node): if node.module_name == u"numpy": self.numpy_module_names.add(node.as_name or u"numpy") return node def visit_AttributeNode(self, node): self.visitchildren(node) if node.obj.is_name and node.obj.name in self.numpy_module_names: node.is_numpy_attribute = True return node class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): """ After parsing, directives can be stored in a number of places: - #cython-comments at the top of the file (stored in ModuleNode) - Command-line arguments overriding these - @cython.directivename decorators - with cython.directivename: statements This transform is responsible for interpreting these various sources and store the directive in two ways: - Set the directives attribute of the ModuleNode for global directives. - Use a CompilerDirectivesNode to override directives for a subtree. (The first one is primarily to not have to modify with the tree structure, so that ModuleNode stay on top.) The directives are stored in dictionaries from name to value in effect. Each such dictionary is always filled in for all possible directives, using default values where no value is given by the user. The available directives are controlled in Options.py. Note that we have to run this prior to analysis, and so some minor duplication of functionality has to occur: We manually track cimports and which names the "cython" module may have been imported to. """ unop_method_nodes = { 'typeof': ExprNodes.TypeofNode, 'operator.address': ExprNodes.AmpersandNode, 'operator.dereference': ExprNodes.DereferenceNode, 'operator.preincrement' : ExprNodes.inc_dec_constructor(True, '++'), 'operator.predecrement' : ExprNodes.inc_dec_constructor(True, '--'), 'operator.postincrement': ExprNodes.inc_dec_constructor(False, '++'), 'operator.postdecrement': ExprNodes.inc_dec_constructor(False, '--'), 'operator.typeid' : ExprNodes.TypeidNode, # For backwards compatibility. 
'address': ExprNodes.AmpersandNode, } binop_method_nodes = { 'operator.comma' : ExprNodes.c_binop_constructor(','), } special_methods = set(['declare', 'union', 'struct', 'typedef', 'sizeof', 'cast', 'pointer', 'compiled', 'NULL', 'fused_type', 'parallel']) special_methods.update(unop_method_nodes) valid_parallel_directives = set([ "parallel", "prange", "threadid", #"threadsavailable", ]) def __init__(self, context, compilation_directive_defaults): super(InterpretCompilerDirectives, self).__init__(context) self.cython_module_names = set() self.directive_names = {'staticmethod': 'staticmethod'} self.parallel_directives = {} directives = copy.deepcopy(Options.get_directive_defaults()) for key, value in compilation_directive_defaults.items(): directives[_unicode(key)] = copy.deepcopy(value) self.directives = directives def check_directive_scope(self, pos, directive, scope): legal_scopes = Options.directive_scopes.get(directive, None) if legal_scopes and scope not in legal_scopes: self.context.nonfatal_error(PostParseError(pos, 'The %s compiler directive ' 'is not allowed in %s scope' % (directive, scope))) return False else: if directive not in Options.directive_types: error(pos, "Invalid directive: '%s'." % (directive,)) return True # Set up processing and handle the cython: comments. def visit_ModuleNode(self, node): for key in sorted(node.directive_comments): if not self.check_directive_scope(node.pos, key, 'module'): self.wrong_scope_error(node.pos, key, 'module') del node.directive_comments[key] self.module_scope = node.scope self.directives.update(node.directive_comments) node.directives = self.directives node.parallel_directives = self.parallel_directives self.visitchildren(node) node.cython_module_names = self.cython_module_names return node # The following four functions track imports and cimports that # begin with "cython" def is_cython_directive(self, name): return (name in Options.directive_types or name in self.special_methods or PyrexTypes.parse_basic_type(name)) def is_parallel_directive(self, full_name, pos): """ Checks to see if fullname (e.g. cython.parallel.prange) is a valid parallel directive. If it is a star import it also updates the parallel_directives. 
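        For example, "cython.parallel.prange" is accepted here, while an unknown
        name such as "cython.parallel.bogus" (made-up name) is reported as an
        error below.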
""" result = (full_name + ".").startswith("cython.parallel.") if result: directive = full_name.split('.') if full_name == u"cython.parallel": self.parallel_directives[u"parallel"] = u"cython.parallel" elif full_name == u"cython.parallel.*": for name in self.valid_parallel_directives: self.parallel_directives[name] = u"cython.parallel.%s" % name elif (len(directive) != 3 or directive[-1] not in self.valid_parallel_directives): error(pos, "No such directive: %s" % full_name) self.module_scope.use_utility_code( UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c")) return result def visit_CImportStatNode(self, node): if node.module_name == u"cython": self.cython_module_names.add(node.as_name or u"cython") elif node.module_name.startswith(u"cython."): if node.module_name.startswith(u"cython.parallel."): error(node.pos, node.module_name + " is not a module") if node.module_name == u"cython.parallel": if node.as_name and node.as_name != u"cython": self.parallel_directives[node.as_name] = node.module_name else: self.cython_module_names.add(u"cython") self.parallel_directives[ u"cython.parallel"] = node.module_name self.module_scope.use_utility_code( UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c")) elif node.as_name: self.directive_names[node.as_name] = node.module_name[7:] else: self.cython_module_names.add(u"cython") # if this cimport was a compiler directive, we don't # want to leave the cimport node sitting in the tree return None return node def visit_FromCImportStatNode(self, node): if not node.relative_level and ( node.module_name == u"cython" or node.module_name.startswith(u"cython.")): submodule = (node.module_name + u".")[7:] newimp = [] for pos, name, as_name, kind in node.imported_names: full_name = submodule + name qualified_name = u"cython." + full_name if self.is_parallel_directive(qualified_name, node.pos): # from cython cimport parallel, or # from cython.parallel cimport parallel, prange, ... self.parallel_directives[as_name or name] = qualified_name elif self.is_cython_directive(full_name): self.directive_names[as_name or name] = full_name if kind is not None: self.context.nonfatal_error(PostParseError(pos, "Compiler directive imports must be plain imports")) else: newimp.append((pos, name, as_name, kind)) if not newimp: return None node.imported_names = newimp return node def visit_FromImportStatNode(self, node): if (node.module.module_name.value == u"cython") or \ node.module.module_name.value.startswith(u"cython."): submodule = (node.module.module_name.value + u".")[7:] newimp = [] for name, name_node in node.items: full_name = submodule + name qualified_name = u"cython." 
+ full_name if self.is_parallel_directive(qualified_name, node.pos): self.parallel_directives[name_node.name] = qualified_name elif self.is_cython_directive(full_name): self.directive_names[name_node.name] = full_name else: newimp.append((name, name_node)) if not newimp: return None node.items = newimp return node def visit_SingleAssignmentNode(self, node): if isinstance(node.rhs, ExprNodes.ImportNode): module_name = node.rhs.module_name.value is_parallel = (module_name + u".").startswith(u"cython.parallel.") if module_name != u"cython" and not is_parallel: return node module_name = node.rhs.module_name.value as_name = node.lhs.name node = Nodes.CImportStatNode(node.pos, module_name = module_name, as_name = as_name) node = self.visit_CImportStatNode(node) else: self.visitchildren(node) return node def visit_NameNode(self, node): if node.name in self.cython_module_names: node.is_cython_module = True else: node.cython_attribute = self.directive_names.get(node.name) return node def try_to_parse_directives(self, node): # If node is the contents of an directive (in a with statement or # decorator), returns a list of (directivename, value) pairs. # Otherwise, returns None if isinstance(node, ExprNodes.CallNode): self.visit(node.function) optname = node.function.as_cython_attribute() if optname: directivetype = Options.directive_types.get(optname) if directivetype: args, kwds = node.explicit_args_kwds() directives = [] key_value_pairs = [] if kwds is not None and directivetype is not dict: for keyvalue in kwds.key_value_pairs: key, value = keyvalue sub_optname = "%s.%s" % (optname, key.value) if Options.directive_types.get(sub_optname): directives.append(self.try_to_parse_directive(sub_optname, [value], None, keyvalue.pos)) else: key_value_pairs.append(keyvalue) if not key_value_pairs: kwds = None else: kwds.key_value_pairs = key_value_pairs if directives and not kwds and not args: return directives directives.append(self.try_to_parse_directive(optname, args, kwds, node.function.pos)) return directives elif isinstance(node, (ExprNodes.AttributeNode, ExprNodes.NameNode)): self.visit(node) optname = node.as_cython_attribute() if optname: directivetype = Options.directive_types.get(optname) if directivetype is bool: arg = ExprNodes.BoolNode(node.pos, value=True) return [self.try_to_parse_directive(optname, [arg], None, node.pos)] elif directivetype is None: return [(optname, None)] else: raise PostParseError( node.pos, "The '%s' directive should be used as a function call." % optname) return None def try_to_parse_directive(self, optname, args, kwds, pos): directivetype = Options.directive_types.get(optname) if optname == 'np_pythran' and not self.context.cpp: raise PostParseError(pos, 'The %s directive can only be used in C++ mode.' 
% optname) if len(args) == 1 and isinstance(args[0], ExprNodes.NoneNode): return optname, Options.get_directive_defaults()[optname] elif directivetype is bool: if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.BoolNode): raise PostParseError(pos, 'The %s directive takes one compile-time boolean argument' % optname) return (optname, args[0].value) elif directivetype is int: if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.IntNode): raise PostParseError(pos, 'The %s directive takes one compile-time integer argument' % optname) return (optname, int(args[0].value)) elif directivetype is str: if kwds is not None or len(args) != 1 or not isinstance( args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)): raise PostParseError(pos, 'The %s directive takes one compile-time string argument' % optname) return (optname, str(args[0].value)) elif directivetype is type: if kwds is not None or len(args) != 1: raise PostParseError(pos, 'The %s directive takes one type argument' % optname) return (optname, args[0]) elif directivetype is dict: if len(args) != 0: raise PostParseError(pos, 'The %s directive takes no prepositional arguments' % optname) return optname, dict([(key.value, value) for key, value in kwds.key_value_pairs]) elif directivetype is list: if kwds and len(kwds) != 0: raise PostParseError(pos, 'The %s directive takes no keyword arguments' % optname) return optname, [ str(arg.value) for arg in args ] elif callable(directivetype): if kwds is not None or len(args) != 1 or not isinstance( args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)): raise PostParseError(pos, 'The %s directive takes one compile-time string argument' % optname) return (optname, directivetype(optname, str(args[0].value))) else: assert False def visit_with_directives(self, body, directives): olddirectives = self.directives newdirectives = copy.copy(olddirectives) newdirectives.update(directives) self.directives = newdirectives assert isinstance(body, Nodes.StatListNode), body retbody = self.visit_Node(body) directive = Nodes.CompilerDirectivesNode(pos=retbody.pos, body=retbody, directives=newdirectives) self.directives = olddirectives return directive # Handle decorators def visit_FuncDefNode(self, node): directives = self._extract_directives(node, 'function') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CVarDefNode(self, node): directives = self._extract_directives(node, 'function') if not directives: return node for name, value in directives.items(): if name == 'locals': node.directive_locals = value elif name not in ('final', 'staticmethod'): self.context.nonfatal_error(PostParseError( node.pos, "Cdef functions can only take cython.locals(), " "staticmethod, or final decorators, got %s." 
% name)) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CClassDefNode(self, node): directives = self._extract_directives(node, 'cclass') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CppClassNode(self, node): directives = self._extract_directives(node, 'cppclass') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_PyClassDefNode(self, node): directives = self._extract_directives(node, 'class') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def _extract_directives(self, node, scope_name): if not node.decorators: return {} # Split the decorators into two lists -- real decorators and directives directives = [] realdecs = [] both = [] for dec in node.decorators: new_directives = self.try_to_parse_directives(dec.decorator) if new_directives is not None: for directive in new_directives: if self.check_directive_scope(node.pos, directive[0], scope_name): name, value = directive if self.directives.get(name, object()) != value: directives.append(directive) if directive[0] == 'staticmethod': both.append(dec) else: realdecs.append(dec) if realdecs and isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode)): raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.") else: node.decorators = realdecs + both # merge or override repeated directives optdict = {} directives.reverse() # Decorators coming first take precedence for directive in directives: name, value = directive if name in optdict: old_value = optdict[name] # keywords and arg lists can be merged, everything # else overrides completely if isinstance(old_value, dict): old_value.update(value) elif isinstance(old_value, list): old_value.extend(value) else: optdict[name] = value else: optdict[name] = value return optdict # Handle with statements def visit_WithStatNode(self, node): directive_dict = {} for directive in self.try_to_parse_directives(node.manager) or []: if directive is not None: if node.target is not None: self.context.nonfatal_error( PostParseError(node.pos, "Compiler directive with statements cannot contain 'as'")) else: name, value = directive if name in ('nogil', 'gil'): # special case: in pure mode, "with nogil" spells "with cython.nogil" node = Nodes.GILStatNode(node.pos, state = name, body = node.body) return self.visit_Node(node) if self.check_directive_scope(node.pos, name, 'with statement'): directive_dict[name] = value if directive_dict: return self.visit_with_directives(node.body, directive_dict) return self.visit_Node(node) class ParallelRangeTransform(CythonTransform, SkipDeclarations): """ Transform cython.parallel stuff. The parallel_directives come from the module node, set there by InterpretCompilerDirectives. x = cython.parallel.threadavailable() -> ParallelThreadAvailableNode with nogil, cython.parallel.parallel(): -> ParallelWithBlockNode print cython.parallel.threadid() -> ParallelThreadIdNode for i in cython.parallel.prange(...): -> ParallelRangeNode ... 
""" # a list of names, maps 'cython.parallel.prange' in the code to # ['cython', 'parallel', 'prange'] parallel_directive = None # Indicates whether a namenode in an expression is the cython module namenode_is_cython_module = False # Keep track of whether we are the context manager of a 'with' statement in_context_manager_section = False # One of 'prange' or 'with parallel'. This is used to disallow closely # nested 'with parallel:' blocks state = None directive_to_node = { u"cython.parallel.parallel": Nodes.ParallelWithBlockNode, # u"cython.parallel.threadsavailable": ExprNodes.ParallelThreadsAvailableNode, u"cython.parallel.threadid": ExprNodes.ParallelThreadIdNode, u"cython.parallel.prange": Nodes.ParallelRangeNode, } def node_is_parallel_directive(self, node): return node.name in self.parallel_directives or node.is_cython_module def get_directive_class_node(self, node): """ Figure out which parallel directive was used and return the associated Node class. E.g. for a cython.parallel.prange() call we return ParallelRangeNode """ if self.namenode_is_cython_module: directive = '.'.join(self.parallel_directive) else: directive = self.parallel_directives[self.parallel_directive[0]] directive = '%s.%s' % (directive, '.'.join(self.parallel_directive[1:])) directive = directive.rstrip('.') cls = self.directive_to_node.get(directive) if cls is None and not (self.namenode_is_cython_module and self.parallel_directive[0] != 'parallel'): error(node.pos, "Invalid directive: %s" % directive) self.namenode_is_cython_module = False self.parallel_directive = None return cls def visit_ModuleNode(self, node): """ If any parallel directives were imported, copy them over and visit the AST """ if node.parallel_directives: self.parallel_directives = node.parallel_directives return self.visit_Node(node) # No parallel directives were imported, so they can't be used :) return node def visit_NameNode(self, node): if self.node_is_parallel_directive(node): self.parallel_directive = [node.name] self.namenode_is_cython_module = node.is_cython_module return node def visit_AttributeNode(self, node): self.visitchildren(node) if self.parallel_directive: self.parallel_directive.append(node.attribute) return node def visit_CallNode(self, node): self.visit(node.function) if not self.parallel_directive: return node # We are a parallel directive, replace this node with the # corresponding ParallelSomethingSomething node if isinstance(node, ExprNodes.GeneralCallNode): args = node.positional_args.args kwargs = node.keyword_args else: args = node.args kwargs = {} parallel_directive_class = self.get_directive_class_node(node) if parallel_directive_class: # Note: in case of a parallel() the body is set by # visit_WithStatNode node = parallel_directive_class(node.pos, args=args, kwargs=kwargs) return node def visit_WithStatNode(self, node): "Rewrite with cython.parallel.parallel() blocks" newnode = self.visit(node.manager) if isinstance(newnode, Nodes.ParallelWithBlockNode): if self.state == 'parallel with': error(node.manager.pos, "Nested parallel with blocks are disallowed") self.state = 'parallel with' body = self.visit(node.body) self.state = None newnode.body = body return newnode elif self.parallel_directive: parallel_directive_class = self.get_directive_class_node(node) if not parallel_directive_class: # There was an error, stop here and now return None if parallel_directive_class is Nodes.ParallelWithBlockNode: error(node.pos, "The parallel directive must be called") return None node.body = self.visit(node.body) return node 
def visit_ForInStatNode(self, node): "Rewrite 'for i in cython.parallel.prange(...):'" self.visit(node.iterator) self.visit(node.target) in_prange = isinstance(node.iterator.sequence, Nodes.ParallelRangeNode) previous_state = self.state if in_prange: # This will replace the entire ForInStatNode, so copy the # attributes parallel_range_node = node.iterator.sequence parallel_range_node.target = node.target parallel_range_node.body = node.body parallel_range_node.else_clause = node.else_clause node = parallel_range_node if not isinstance(node.target, ExprNodes.NameNode): error(node.target.pos, "Can only iterate over an iteration variable") self.state = 'prange' self.visit(node.body) self.state = previous_state self.visit(node.else_clause) return node def visit(self, node): "Visit a node that may be None" if node is not None: return super(ParallelRangeTransform, self).visit(node) class WithTransform(CythonTransform, SkipDeclarations): def visit_WithStatNode(self, node): self.visitchildren(node, 'body') pos = node.pos is_async = node.is_async body, target, manager = node.body, node.target, node.manager node.enter_call = ExprNodes.SimpleCallNode( pos, function=ExprNodes.AttributeNode( pos, obj=ExprNodes.CloneNode(manager), attribute=EncodedString('__aenter__' if is_async else '__enter__'), is_special_lookup=True), args=[], is_temp=True) if is_async: node.enter_call = ExprNodes.AwaitExprNode(pos, arg=node.enter_call) if target is not None: body = Nodes.StatListNode( pos, stats=[ Nodes.WithTargetAssignmentStatNode( pos, lhs=target, with_node=node), body]) excinfo_target = ExprNodes.TupleNode(pos, slow=True, args=[ ExprNodes.ExcValueNode(pos) for _ in range(3)]) except_clause = Nodes.ExceptClauseNode( pos, body=Nodes.IfStatNode( pos, if_clauses=[ Nodes.IfClauseNode( pos, condition=ExprNodes.NotNode( pos, operand=ExprNodes.WithExitCallNode( pos, with_stat=node, test_if_run=False, args=excinfo_target, await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)), body=Nodes.ReraiseStatNode(pos), ), ], else_clause=None), pattern=None, target=None, excinfo_target=excinfo_target, ) node.body = Nodes.TryFinallyStatNode( pos, body=Nodes.TryExceptStatNode( pos, body=body, except_clauses=[except_clause], else_clause=None, ), finally_clause=Nodes.ExprStatNode( pos, expr=ExprNodes.WithExitCallNode( pos, with_stat=node, test_if_run=True, args=ExprNodes.TupleNode( pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]), await=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)), handle_error_case=False, ) return node def visit_ExprNode(self, node): # With statements are never inside expressions. return node class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations): """ Transforms method decorators in cdef classes into nested calls or properties. Python-style decorator properties are transformed into a PropertyNode with up to the three getter, setter and deleter DefNodes. The functional style isn't supported yet. """ _properties = None _map_property_attribute = { 'getter': '__get__', 'setter': '__set__', 'deleter': '__del__', }.get def visit_CClassDefNode(self, node): if self._properties is None: self._properties = [] self._properties.append({}) super(DecoratorTransform, self).visit_CClassDefNode(node) self._properties.pop() return node def visit_PropertyNode(self, node): # Low-level warning for other code until we can convert all our uses over. 
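        # For reference, the two spellings in a cdef class look roughly like:
        #
        #     property foo:                 # legacy syntax, warned about here
        #         def __get__(self):
        #             return self._foo
        #
        #     @property                     # preferred decorator form
        #     def foo(self):
        #         return self._foo
        #
        # Both forms end up as a PropertyNode; the decorator form is assembled in
        # visit_DefNode below ('self._foo' is only an illustrative attribute name).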
level = 2 if isinstance(node.pos[0], str) else 0 warning(node.pos, "'property %s:' syntax is deprecated, use '@property'" % node.name, level) return node def visit_DefNode(self, node): scope_type = self.scope_type node = self.visit_FuncDefNode(node) if scope_type != 'cclass' or not node.decorators: return node # transform @property decorators properties = self._properties[-1] for decorator_node in node.decorators[::-1]: decorator = decorator_node.decorator if decorator.is_name and decorator.name == 'property': if len(node.decorators) > 1: return self._reject_decorated_property(node, decorator_node) name = node.name node.name = EncodedString('__get__') node.decorators.remove(decorator_node) stat_list = [node] if name in properties: prop = properties[name] prop.pos = node.pos prop.doc = node.doc prop.body.stats = stat_list return [] prop = Nodes.PropertyNode(node.pos, name=name) prop.doc = node.doc prop.body = Nodes.StatListNode(node.pos, stats=stat_list) properties[name] = prop return [prop] elif decorator.is_attribute and decorator.obj.name in properties: handler_name = self._map_property_attribute(decorator.attribute) if handler_name: assert decorator.obj.name == node.name if len(node.decorators) > 1: return self._reject_decorated_property(node, decorator_node) return self._add_to_property(properties, node, handler_name, decorator_node) # we clear node.decorators, so we need to set the # is_staticmethod/is_classmethod attributes now for decorator in node.decorators: func = decorator.decorator if func.is_name: node.is_classmethod |= func.name == 'classmethod' node.is_staticmethod |= func.name == 'staticmethod' # transform normal decorators decs = node.decorators node.decorators = None return self.chain_decorators(node, decs, node.name) @staticmethod def _reject_decorated_property(node, decorator_node): # restrict transformation to outermost decorator as wrapped properties will probably not work for deco in node.decorators: if deco != decorator_node: error(deco.pos, "Property methods with additional decorators are not supported") return node @staticmethod def _add_to_property(properties, node, name, decorator): prop = properties[node.name] node.name = name node.decorators.remove(decorator) stats = prop.body.stats for i, stat in enumerate(stats): if stat.name == name: stats[i] = node break else: stats.append(node) return [] @staticmethod def chain_decorators(node, decorators, name): """ Decorators are applied directly in DefNode and PyClassDefNode to avoid reassignments to the function/class name - except for cdef class methods. For those, the reassignment is required as methods are originally defined in the PyMethodDef struct. The IndirectionNode allows DefNode to override the decorator. """ decorator_result = ExprNodes.NameNode(node.pos, name=name) for decorator in decorators[::-1]: decorator_result = ExprNodes.SimpleCallNode( decorator.pos, function=decorator.decorator, args=[decorator_result]) name_node = ExprNodes.NameNode(node.pos, name=name) reassignment = Nodes.SingleAssignmentNode( node.pos, lhs=name_node, rhs=decorator_result) reassignment = Nodes.IndirectionNode([reassignment]) node.decorator_indirection = reassignment return [node, reassignment] class CnameDirectivesTransform(CythonTransform, SkipDeclarations): """ Only part of the CythonUtilityCode pipeline. Must be run before DecoratorTransform in case this is a decorator for a cdef class. It filters out @cname('my_cname') decorators and rewrites them to CnameDecoratorNodes. 
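    A rough sketch of the kind of utility code this applies to (the cname used
    here is purely illustrative):

        @cname('__pyx_some_helper')
        cdef some_helper(x): ...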
""" def handle_function(self, node): if not getattr(node, 'decorators', None): return self.visit_Node(node) for i, decorator in enumerate(node.decorators): decorator = decorator.decorator if (isinstance(decorator, ExprNodes.CallNode) and decorator.function.is_name and decorator.function.name == 'cname'): args, kwargs = decorator.explicit_args_kwds() if kwargs: raise AssertionError( "cname decorator does not take keyword arguments") if len(args) != 1: raise AssertionError( "cname decorator takes exactly one argument") if not (args[0].is_literal and args[0].type == Builtin.str_type): raise AssertionError( "argument to cname decorator must be a string literal") cname = args[0].compile_time_value(None) del node.decorators[i] node = Nodes.CnameDecoratorNode(pos=node.pos, node=node, cname=cname) break return self.visit_Node(node) visit_FuncDefNode = handle_function visit_CClassDefNode = handle_function visit_CEnumDefNode = handle_function visit_CStructOrUnionDefNode = handle_function class ForwardDeclareTypes(CythonTransform): def visit_CompilerDirectivesNode(self, node): env = self.module_scope old = env.directives env.directives = node.directives self.visitchildren(node) env.directives = old return node def visit_ModuleNode(self, node): self.module_scope = node.scope self.module_scope.directives = node.directives self.visitchildren(node) return node def visit_CDefExternNode(self, node): old_cinclude_flag = self.module_scope.in_cinclude self.module_scope.in_cinclude = 1 self.visitchildren(node) self.module_scope.in_cinclude = old_cinclude_flag return node def visit_CEnumDefNode(self, node): node.declare(self.module_scope) return node def visit_CStructOrUnionDefNode(self, node): if node.name not in self.module_scope.entries: node.declare(self.module_scope) return node def visit_CClassDefNode(self, node): if node.class_name not in self.module_scope.entries: node.declare(self.module_scope) return node class AnalyseDeclarationsTransform(EnvTransform): basic_property = TreeFragment(u""" property NAME: def __get__(self): return ATTR def __set__(self, value): ATTR = value """, level='c_class', pipeline=[NormalizeTree(None)]) basic_pyobject_property = TreeFragment(u""" property NAME: def __get__(self): return ATTR def __set__(self, value): ATTR = value def __del__(self): ATTR = None """, level='c_class', pipeline=[NormalizeTree(None)]) basic_property_ro = TreeFragment(u""" property NAME: def __get__(self): return ATTR """, level='c_class', pipeline=[NormalizeTree(None)]) struct_or_union_wrapper = TreeFragment(u""" cdef class NAME: cdef TYPE value def __init__(self, MEMBER=None): cdef int count count = 0 INIT_ASSIGNMENTS if IS_UNION and count > 1: raise ValueError, "At most one union member should be specified." def __str__(self): return STR_FORMAT % MEMBER_TUPLE def __repr__(self): return REPR_FORMAT % MEMBER_TUPLE """, pipeline=[NormalizeTree(None)]) init_assignment = TreeFragment(u""" if VALUE is not None: ATTR = VALUE count += 1 """, pipeline=[NormalizeTree(None)]) fused_function = None in_lambda = 0 def __call__(self, root): # needed to determine if a cdef var is declared after it's used. self.seen_vars_stack = [] self.fused_error_funcs = set() super_class = super(AnalyseDeclarationsTransform, self) self._super_visit_FuncDefNode = super_class.visit_FuncDefNode return super_class.__call__(root) def visit_NameNode(self, node): self.seen_vars_stack[-1].add(node.name) return node def visit_ModuleNode(self, node): # Pickling support requires injecting module-level nodes. 
self.extra_module_declarations = [] self.seen_vars_stack.append(set()) node.analyse_declarations(self.current_env()) self.visitchildren(node) self.seen_vars_stack.pop() node.body.stats.extend(self.extra_module_declarations) return node def visit_LambdaNode(self, node): self.in_lambda += 1 node.analyse_declarations(self.current_env()) self.visitchildren(node) self.in_lambda -= 1 return node def visit_CClassDefNode(self, node): node = self.visit_ClassDefNode(node) if node.scope and node.scope.implemented and node.body: stats = [] for entry in node.scope.var_entries: if entry.needs_property: property = self.create_Property(entry) property.analyse_declarations(node.scope) self.visit(property) stats.append(property) if stats: node.body.stats += stats if (node.visibility != 'extern' and not node.scope.lookup('__reduce__') and not node.scope.lookup('__reduce_ex__')): self._inject_pickle_methods(node) return node def _inject_pickle_methods(self, node): env = self.current_env() if node.scope.directives['auto_pickle'] is False: # None means attempt it. # Old behavior of not doing anything. return auto_pickle_forced = node.scope.directives['auto_pickle'] is True all_members = [] cls = node.entry.type cinit = None inherited_reduce = None while cls is not None: all_members.extend(e for e in cls.scope.var_entries if e.name not in ('__weakref__', '__dict__')) cinit = cinit or cls.scope.lookup('__cinit__') inherited_reduce = inherited_reduce or cls.scope.lookup('__reduce__') or cls.scope.lookup('__reduce_ex__') cls = cls.base_type all_members.sort(key=lambda e: e.name) if inherited_reduce: # This is not failsafe, as we may not know whether a cimported class defines a __reduce__. # This is why we define __reduce_cython__ and only replace __reduce__ # (via ExtensionTypes.SetupReduce utility code) at runtime on class creation. return non_py = [ e for e in all_members if not e.type.is_pyobject and (not e.type.can_coerce_to_pyobject(env) or not e.type.can_coerce_from_pyobject(env)) ] structs = [e for e in all_members if e.type.is_struct_or_union] if cinit or non_py or (structs and not auto_pickle_forced): if cinit: # TODO(robertwb): We could allow this if __cinit__ has no require arguments. msg = 'no default __reduce__ due to non-trivial __cinit__' elif non_py: msg = "%s cannot be converted to a Python object for pickling" % ','.join("self.%s" % e.name for e in non_py) else: # Extern structs may be only partially defined. # TODO(robertwb): Limit the restriction to extern # (and recursively extern-containing) structs. msg = ("Pickling of struct members such as %s must be explicitly requested " "with @auto_pickle(True)" % ','.join("self.%s" % e.name for e in structs)) if auto_pickle_forced: error(node.pos, msg) pickle_func = TreeFragment(u""" def __reduce_cython__(self): raise TypeError("%(msg)s") def __setstate_cython__(self, __pyx_state): raise TypeError("%(msg)s") """ % {'msg': msg}, level='c_class', pipeline=[NormalizeTree(None)]).substitute({}) pickle_func.analyse_declarations(node.scope) self.visit(pickle_func) node.body.stats.append(pickle_func) else: for e in all_members: if not e.type.is_pyobject: e.type.create_to_py_utility_code(env) e.type.create_from_py_utility_code(env) all_members_names = sorted([e.name for e in all_members]) checksum = '0x%s' % hashlib.md5(' '.join(all_members_names).encode('utf-8')).hexdigest()[:7] unpickle_func_name = '__pyx_unpickle_%s' % node.class_name # TODO(robertwb): Move the state into the third argument # so it can be pickled *after* self is memoized. 
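            # 'checksum' above is a truncated md5 over the sorted member names; the
            # generated __pyx_unpickle_* function below rejects pickled state whose
            # checksum does not match, so unpickling fails loudly if the attribute
            # layout of the class has changed since the data was pickled.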
unpickle_func = TreeFragment(u""" def %(unpickle_func_name)s(__pyx_type, long __pyx_checksum, __pyx_state): if __pyx_checksum != %(checksum)s: from pickle import PickleError as __pyx_PickleError raise __pyx_PickleError("Incompatible checksums (%%s vs %(checksum)s = (%(members)s))" %% __pyx_checksum) __pyx_result = %(class_name)s.__new__(__pyx_type) if __pyx_state is not None: %(unpickle_func_name)s__set_state(<%(class_name)s> __pyx_result, __pyx_state) return __pyx_result cdef %(unpickle_func_name)s__set_state(%(class_name)s __pyx_result, tuple __pyx_state): %(assignments)s if len(__pyx_state) > %(num_members)d and hasattr(__pyx_result, '__dict__'): __pyx_result.__dict__.update(__pyx_state[%(num_members)d]) """ % { 'unpickle_func_name': unpickle_func_name, 'checksum': checksum, 'members': ', '.join(all_members_names), 'class_name': node.class_name, 'assignments': '; '.join( '__pyx_result.%s = __pyx_state[%s]' % (v, ix) for ix, v in enumerate(all_members_names)), 'num_members': len(all_members_names), }, level='module', pipeline=[NormalizeTree(None)]).substitute({}) unpickle_func.analyse_declarations(node.entry.scope) self.visit(unpickle_func) self.extra_module_declarations.append(unpickle_func) pickle_func = TreeFragment(u""" def __reduce_cython__(self): cdef bint use_setstate state = (%(members)s) _dict = getattr(self, '__dict__', None) if _dict is not None: state += (_dict,) use_setstate = True else: use_setstate = %(any_notnone_members)s if use_setstate: return %(unpickle_func_name)s, (type(self), %(checksum)s, None), state else: return %(unpickle_func_name)s, (type(self), %(checksum)s, state) def __setstate_cython__(self, __pyx_state): %(unpickle_func_name)s__set_state(self, __pyx_state) """ % { 'unpickle_func_name': unpickle_func_name, 'checksum': checksum, 'members': ', '.join('self.%s' % v for v in all_members_names) + (',' if len(all_members_names) == 1 else ''), # Even better, we could check PyType_IS_GC. 'any_notnone_members' : ' or '.join(['self.%s is not None' % e.name for e in all_members if e.type.is_pyobject] or ['False']), }, level='c_class', pipeline=[NormalizeTree(None)]).substitute({}) pickle_func.analyse_declarations(node.scope) self.visit(pickle_func) node.body.stats.append(pickle_func) def _handle_fused_def_decorators(self, old_decorators, env, node): """ Create function calls to the decorators and reassignments to the function. """ # Delete staticmethod and classmethod decorators, this is # handled directly by the fused function object. 
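        # Rough sketch of the kind of code that reaches this point: a def method
        # whose signature uses a fused type, combined with a builtin decorator,
        # e.g.
        #
        #     ctypedef fused number:
        #         int
        #         double
        #
        #     @staticmethod
        #     def f(number x): ...
        #
        # staticmethod/classmethod are filtered out below because the fused
        # function object applies them itself.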
decorators = [] for decorator in old_decorators: func = decorator.decorator if (not func.is_name or func.name not in ('staticmethod', 'classmethod') or env.lookup_here(func.name)): # not a static or classmethod decorators.append(decorator) if decorators: transform = DecoratorTransform(self.context) def_node = node.node _, reassignments = transform.chain_decorators( def_node, decorators, def_node.name) reassignments.analyse_declarations(env) node = [node, reassignments] return node def _handle_def(self, decorators, env, node): "Handle def or cpdef fused functions" # Create PyCFunction nodes for each specialization node.stats.insert(0, node.py_func) node.py_func = self.visit(node.py_func) node.update_fused_defnode_entry(env) pycfunc = ExprNodes.PyCFunctionNode.from_defnode(node.py_func, binding=True) pycfunc = ExprNodes.ProxyNode(pycfunc.coerce_to_temp(env)) node.resulting_fused_function = pycfunc # Create assignment node for our def function node.fused_func_assignment = self._create_assignment( node.py_func, ExprNodes.CloneNode(pycfunc), env) if decorators: node = self._handle_fused_def_decorators(decorators, env, node) return node def _create_fused_function(self, env, node): "Create a fused function for a DefNode with fused arguments" from . import FusedNode if self.fused_function or self.in_lambda: if self.fused_function not in self.fused_error_funcs: if self.in_lambda: error(node.pos, "Fused lambdas not allowed") else: error(node.pos, "Cannot nest fused functions") self.fused_error_funcs.add(self.fused_function) node.body = Nodes.PassStatNode(node.pos) for arg in node.args: if arg.type.is_fused: arg.type = arg.type.get_fused_types()[0] return node decorators = getattr(node, 'decorators', None) node = FusedNode.FusedCFuncDefNode(node, env) self.fused_function = node self.visitchildren(node) self.fused_function = None if node.py_func: node = self._handle_def(decorators, env, node) return node def _handle_nogil_cleanup(self, lenv, node): "Handle cleanup for 'with gil' blocks in nogil functions." if lenv.nogil and lenv.has_with_gil_block: # Acquire the GIL for cleanup in 'nogil' functions, by wrapping # the entire function body in try/finally. # The corresponding release will be taken care of by # Nodes.FuncDefNode.generate_function_definitions() node.body = Nodes.NogilTryFinallyStatNode( node.body.pos, body=node.body, finally_clause=Nodes.EnsureGILNode(node.body.pos), finally_except_clause=Nodes.EnsureGILNode(node.body.pos)) def _handle_fused(self, node): if node.is_generator and node.has_fused_arguments: node.has_fused_arguments = False error(node.pos, "Fused generators not supported") node.gbody = Nodes.StatListNode(node.pos, stats=[], body=Nodes.PassStatNode(node.pos)) return node.has_fused_arguments def visit_FuncDefNode(self, node): """ Analyse a function and its body, as that hasn't happend yet. Also analyse the directive_locals set by @cython.locals(). Then, if we are a function with fused arguments, replace the function (after it has declared itself in the symbol table!) with a FusedCFuncDefNode, and analyse its children (which are in turn normal functions). If we're a normal function, just analyse the body of the function. """ env = self.current_env() self.seen_vars_stack.append(set()) lenv = node.local_scope node.declare_arguments(lenv) # @cython.locals(...) 
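        # directive_locals comes from the @cython.locals(...) decorator, which in
        # pure-Python mode looks roughly like:
        #
        #     @cython.locals(i=cython.int, total=cython.double)
        #     def mean(values): ...
        #
        # Each entry maps a local variable name to a type node; the loop below
        # declares it in the local scope unless the name is already declared there
        # (e.g. as an argument).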
for var, type_node in node.directive_locals.items(): if not lenv.lookup_here(var): # don't redeclare args type = type_node.analyse_as_type(lenv) if type: lenv.declare_var(var, type, type_node.pos) else: error(type_node.pos, "Not a type") if self._handle_fused(node): node = self._create_fused_function(env, node) else: node.body.analyse_declarations(lenv) self._handle_nogil_cleanup(lenv, node) self._super_visit_FuncDefNode(node) self.seen_vars_stack.pop() return node def visit_DefNode(self, node): node = self.visit_FuncDefNode(node) env = self.current_env() if isinstance(node, Nodes.DefNode) and node.is_wrapper: env = env.parent_scope if (not isinstance(node, Nodes.DefNode) or node.fused_py_func or node.is_generator_body or not node.needs_assignment_synthesis(env)): return node return [node, self._synthesize_assignment(node, env)] def visit_GeneratorBodyDefNode(self, node): return self.visit_FuncDefNode(node) def _synthesize_assignment(self, node, env): # Synthesize assignment node and put it right after defnode genv = env while genv.is_py_class_scope or genv.is_c_class_scope: genv = genv.outer_scope if genv.is_closure_scope: rhs = node.py_cfunc_node = ExprNodes.InnerFunctionNode( node.pos, def_node=node, pymethdef_cname=node.entry.pymethdef_cname, code_object=ExprNodes.CodeObjectNode(node)) else: binding = self.current_directives.get('binding') rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding) node.code_object = rhs.code_object if env.is_py_class_scope: rhs.binding = True node.is_cyfunction = rhs.binding return self._create_assignment(node, rhs, env) def _create_assignment(self, def_node, rhs, env): if def_node.decorators: for decorator in def_node.decorators[::-1]: rhs = ExprNodes.SimpleCallNode( decorator.pos, function = decorator.decorator, args = [rhs]) def_node.decorators = None assmt = Nodes.SingleAssignmentNode( def_node.pos, lhs=ExprNodes.NameNode(def_node.pos, name=def_node.name), rhs=rhs) assmt.analyse_declarations(env) return assmt def visit_ScopedExprNode(self, node): env = self.current_env() node.analyse_declarations(env) # the node may or may not have a local scope if node.has_local_scope: self.seen_vars_stack.append(set(self.seen_vars_stack[-1])) self.enter_scope(node, node.expr_scope) node.analyse_scoped_declarations(node.expr_scope) self.visitchildren(node) self.exit_scope() self.seen_vars_stack.pop() else: node.analyse_scoped_declarations(env) self.visitchildren(node) return node def visit_TempResultFromStatNode(self, node): self.visitchildren(node) node.analyse_declarations(self.current_env()) return node def visit_CppClassNode(self, node): if node.visibility == 'extern': return None else: return self.visit_ClassDefNode(node) def visit_CStructOrUnionDefNode(self, node): # Create a wrapper node if needed. # We want to use the struct type information (so it can't happen # before this phase) but also create new objects to be declared # (so it can't happen later). # Note that we don't return the original node, as it is # never used after this phase. 
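        # The unconditional early return below disables the struct/union wrapper
        # generation that follows, so the remaining code in this method is kept
        # but currently unreachable.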
if True: # private (default) return None self_value = ExprNodes.AttributeNode( pos = node.pos, obj = ExprNodes.NameNode(pos=node.pos, name=u"self"), attribute = EncodedString(u"value")) var_entries = node.entry.type.scope.var_entries attributes = [] for entry in var_entries: attributes.append(ExprNodes.AttributeNode(pos = entry.pos, obj = self_value, attribute = entry.name)) # __init__ assignments init_assignments = [] for entry, attr in zip(var_entries, attributes): # TODO: branch on visibility init_assignments.append(self.init_assignment.substitute({ u"VALUE": ExprNodes.NameNode(entry.pos, name = entry.name), u"ATTR": attr, }, pos = entry.pos)) # create the class str_format = u"%s(%s)" % (node.entry.type.name, ("%s, " * len(attributes))[:-2]) wrapper_class = self.struct_or_union_wrapper.substitute({ u"INIT_ASSIGNMENTS": Nodes.StatListNode(node.pos, stats = init_assignments), u"IS_UNION": ExprNodes.BoolNode(node.pos, value = not node.entry.type.is_struct), u"MEMBER_TUPLE": ExprNodes.TupleNode(node.pos, args=attributes), u"STR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format)), u"REPR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format.replace("%s", "%r"))), }, pos = node.pos).stats[0] wrapper_class.class_name = node.name wrapper_class.shadow = True class_body = wrapper_class.body.stats # fix value type assert isinstance(class_body[0].base_type, Nodes.CSimpleBaseTypeNode) class_body[0].base_type.name = node.name # fix __init__ arguments init_method = class_body[1] assert isinstance(init_method, Nodes.DefNode) and init_method.name == '__init__' arg_template = init_method.args[1] if not node.entry.type.is_struct: arg_template.kw_only = True del init_method.args[1] for entry, attr in zip(var_entries, attributes): arg = copy.deepcopy(arg_template) arg.declarator.name = entry.name init_method.args.append(arg) # setters/getters for entry, attr in zip(var_entries, attributes): # TODO: branch on visibility if entry.type.is_pyobject: template = self.basic_pyobject_property else: template = self.basic_property property = template.substitute({ u"ATTR": attr, }, pos = entry.pos).stats[0] property.name = entry.name wrapper_class.body.stats.append(property) wrapper_class.analyse_declarations(self.current_env()) return self.visit_CClassDefNode(wrapper_class) # Some nodes are no longer needed after declaration # analysis and can be dropped. The analysis was performed # on these nodes in a seperate recursive process from the # enclosing function or module, so we can simply drop them. def visit_CDeclaratorNode(self, node): # necessary to ensure that all CNameDeclaratorNodes are visited. self.visitchildren(node) return node def visit_CTypeDefNode(self, node): return node def visit_CBaseTypeNode(self, node): return None def visit_CEnumDefNode(self, node): if node.visibility == 'public': return node else: return None def visit_CNameDeclaratorNode(self, node): if node.name in self.seen_vars_stack[-1]: entry = self.current_env().lookup(node.name) if (entry is None or entry.visibility != 'extern' and not entry.scope.is_c_class_scope): warning(node.pos, "cdef variable '%s' declared after it is used" % node.name, 2) self.visitchildren(node) return node def visit_CVarDefNode(self, node): # to ensure all CNameDeclaratorNodes are visited. 
self.visitchildren(node) return None def visit_CnameDecoratorNode(self, node): child_node = self.visit(node.node) if not child_node: return None if type(child_node) is list: # Assignment synthesized node.child_node = child_node[0] return [node] + child_node[1:] node.node = child_node return node def create_Property(self, entry): if entry.visibility == 'public': if entry.type.is_pyobject: template = self.basic_pyobject_property else: template = self.basic_property elif entry.visibility == 'readonly': template = self.basic_property_ro property = template.substitute({ u"ATTR": ExprNodes.AttributeNode(pos=entry.pos, obj=ExprNodes.NameNode(pos=entry.pos, name="self"), attribute=entry.name), }, pos=entry.pos).stats[0] property.name = entry.name property.doc = entry.doc return property class CalculateQualifiedNamesTransform(EnvTransform): """ Calculate and store the '__qualname__' and the global module name on some nodes. """ def visit_ModuleNode(self, node): self.module_name = self.global_scope().qualified_name self.qualified_name = [] _super = super(CalculateQualifiedNamesTransform, self) self._super_visit_FuncDefNode = _super.visit_FuncDefNode self._super_visit_ClassDefNode = _super.visit_ClassDefNode self.visitchildren(node) return node def _set_qualname(self, node, name=None): if name: qualname = self.qualified_name[:] qualname.append(name) else: qualname = self.qualified_name node.qualname = EncodedString('.'.join(qualname)) node.module_name = self.module_name def _append_entry(self, entry): if entry.is_pyglobal and not entry.is_pyclass_attr: self.qualified_name = [entry.name] else: self.qualified_name.append(entry.name) def visit_ClassNode(self, node): self._set_qualname(node, node.name) self.visitchildren(node) return node def visit_PyClassNamespaceNode(self, node): # class name was already added by parent node self._set_qualname(node) self.visitchildren(node) return node def visit_PyCFunctionNode(self, node): orig_qualified_name = self.qualified_name[:] if node.def_node.is_wrapper and self.qualified_name and self.qualified_name[-1] == '': self.qualified_name.pop() self._set_qualname(node) else: self._set_qualname(node, node.def_node.name) self.visitchildren(node) self.qualified_name = orig_qualified_name return node def visit_DefNode(self, node): if node.is_wrapper and self.qualified_name: assert self.qualified_name[-1] == '', self.qualified_name orig_qualified_name = self.qualified_name[:] self.qualified_name.pop() self._set_qualname(node) self._super_visit_FuncDefNode(node) self.qualified_name = orig_qualified_name else: self._set_qualname(node, node.name) self.visit_FuncDefNode(node) return node def visit_FuncDefNode(self, node): orig_qualified_name = self.qualified_name[:] if getattr(node, 'name', None) == '': self.qualified_name.append('') else: self._append_entry(node.entry) self.qualified_name.append('') self._super_visit_FuncDefNode(node) self.qualified_name = orig_qualified_name return node def visit_ClassDefNode(self, node): orig_qualified_name = self.qualified_name[:] entry = (getattr(node, 'entry', None) or # PyClass self.current_env().lookup_here(node.name)) # CClass self._append_entry(entry) self._super_visit_ClassDefNode(node) self.qualified_name = orig_qualified_name return node class AnalyseExpressionsTransform(CythonTransform): def visit_ModuleNode(self, node): node.scope.infer_types() node.body = node.body.analyse_expressions(node.scope) self.visitchildren(node) return node def visit_FuncDefNode(self, node): node.local_scope.infer_types() node.body = 
node.body.analyse_expressions(node.local_scope) self.visitchildren(node) return node def visit_ScopedExprNode(self, node): if node.has_local_scope: node.expr_scope.infer_types() node = node.analyse_scoped_expressions(node.expr_scope) self.visitchildren(node) return node def visit_IndexNode(self, node): """ Replace index nodes used to specialize cdef functions with fused argument types with the Attribute- or NameNode referring to the function. We then need to copy over the specialization properties to the attribute or name node. Because the indexing might be a Python indexing operation on a fused function, or (usually) a Cython indexing operation, we need to re-analyse the types. """ self.visit_Node(node) if node.is_fused_index and not node.type.is_error: node = node.base return node class FindInvalidUseOfFusedTypes(CythonTransform): def visit_FuncDefNode(self, node): # Errors related to use in functions with fused args will already # have been detected if not node.has_fused_arguments: if not node.is_generator_body and node.return_type.is_fused: error(node.pos, "Return type is not specified as argument type") else: self.visitchildren(node) return node def visit_ExprNode(self, node): if node.type and node.type.is_fused: error(node.pos, "Invalid use of fused types, type cannot be specialized") else: self.visitchildren(node) return node class ExpandInplaceOperators(EnvTransform): def visit_InPlaceAssignmentNode(self, node): lhs = node.lhs rhs = node.rhs if lhs.type.is_cpp_class: # No getting around this exact operator here. return node if isinstance(lhs, ExprNodes.BufferIndexNode): # There is code to handle this case in InPlaceAssignmentNode return node env = self.current_env() def side_effect_free_reference(node, setting=False): if node.is_name: return node, [] elif node.type.is_pyobject and not setting: node = LetRefNode(node) return node, [node] elif node.is_subscript: base, temps = side_effect_free_reference(node.base) index = LetRefNode(node.index) return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index] elif node.is_attribute: obj, temps = side_effect_free_reference(node.obj) return ExprNodes.AttributeNode(node.pos, obj=obj, attribute=node.attribute), temps elif isinstance(node, ExprNodes.BufferIndexNode): raise ValueError("Don't allow things like attributes of buffer indexing operations") else: node = LetRefNode(node) return node, [node] try: lhs, let_ref_nodes = side_effect_free_reference(lhs, setting=True) except ValueError: return node dup = lhs.__class__(**lhs.__dict__) binop = ExprNodes.binop_node(node.pos, operator = node.operator, operand1 = dup, operand2 = rhs, inplace=True) # Manually analyse types for new node. lhs.analyse_target_types(env) dup.analyse_types(env) binop.analyse_operation(env) node = Nodes.SingleAssignmentNode( node.pos, lhs = lhs, rhs=binop.coerce_to(lhs.type, env)) # Use LetRefNode to avoid side effects. let_ref_nodes.reverse() for t in let_ref_nodes: node = LetNode(t, node) return node def visit_ExprNode(self, node): # In-place assignments can't happen within an expression. 
return node class AdjustDefByDirectives(CythonTransform, SkipDeclarations): """ Adjust function and class definitions by the decorator directives: @cython.cfunc @cython.cclass @cython.ccall @cython.inline """ def visit_ModuleNode(self, node): self.directives = node.directives self.in_py_class = False self.visitchildren(node) return node def visit_CompilerDirectivesNode(self, node): old_directives = self.directives self.directives = node.directives self.visitchildren(node) self.directives = old_directives return node def visit_DefNode(self, node): modifiers = [] if 'inline' in self.directives: modifiers.append('inline') if 'ccall' in self.directives: node = node.as_cfunction( overridable=True, returns=self.directives.get('returns'), modifiers=modifiers) return self.visit(node) if 'cfunc' in self.directives: if self.in_py_class: error(node.pos, "cfunc directive is not allowed here") else: node = node.as_cfunction( overridable=False, returns=self.directives.get('returns'), modifiers=modifiers) return self.visit(node) if 'inline' in modifiers: error(node.pos, "Python functions cannot be declared 'inline'") self.visitchildren(node) return node def visit_PyClassDefNode(self, node): if 'cclass' in self.directives: node = node.as_cclass() return self.visit(node) else: old_in_pyclass = self.in_py_class self.in_py_class = True self.visitchildren(node) self.in_py_class = old_in_pyclass return node def visit_CClassDefNode(self, node): old_in_pyclass = self.in_py_class self.in_py_class = False self.visitchildren(node) self.in_py_class = old_in_pyclass return node class AlignFunctionDefinitions(CythonTransform): """ This class takes the signatures from a .pxd file and applies them to the def methods in a .py file. """ def visit_ModuleNode(self, node): self.scope = node.scope self.directives = node.directives self.imported_names = set() # hack, see visit_FromImportStatNode() self.visitchildren(node) return node def visit_PyClassDefNode(self, node): pxd_def = self.scope.lookup(node.name) if pxd_def: if pxd_def.is_cclass: return self.visit_CClassDefNode(node.as_cclass(), pxd_def) elif not pxd_def.scope or not pxd_def.scope.is_builtin_scope: error(node.pos, "'%s' redeclared" % node.name) if pxd_def.pos: error(pxd_def.pos, "previous declaration here") return None return node def visit_CClassDefNode(self, node, pxd_def=None): if pxd_def is None: pxd_def = self.scope.lookup(node.class_name) if pxd_def: if not pxd_def.defined_in_pxd: return node outer_scope = self.scope self.scope = pxd_def.type.scope self.visitchildren(node) if pxd_def: self.scope = outer_scope return node def visit_DefNode(self, node): pxd_def = self.scope.lookup(node.name) if pxd_def and (not pxd_def.scope or not pxd_def.scope.is_builtin_scope): if not pxd_def.is_cfunction: error(node.pos, "'%s' redeclared" % node.name) if pxd_def.pos: error(pxd_def.pos, "previous declaration here") return None node = node.as_cfunction(pxd_def) elif (self.scope.is_module_scope and self.directives['auto_cpdef'] and not node.name in self.imported_names and node.is_cdef_func_compatible()): # FIXME: cpdef-ing should be done in analyse_declarations() node = node.as_cfunction(scope=self.scope) # Enable this when nested cdef functions are allowed. 
# self.visitchildren(node) return node def visit_FromImportStatNode(self, node): # hack to prevent conditional import fallback functions from # being cdpef-ed (global Python variables currently conflict # with imports) if self.scope.is_module_scope: for name, _ in node.items: self.imported_names.add(name) return node def visit_ExprNode(self, node): # ignore lambdas and everything else that appears in expressions return node class RemoveUnreachableCode(CythonTransform): def visit_StatListNode(self, node): if not self.current_directives['remove_unreachable']: return node self.visitchildren(node) for idx, stat in enumerate(node.stats): idx += 1 if stat.is_terminator: if idx < len(node.stats): if self.current_directives['warn.unreachable']: warning(node.stats[idx].pos, "Unreachable code", 2) node.stats = node.stats[:idx] node.is_terminator = True break return node def visit_IfClauseNode(self, node): self.visitchildren(node) if node.body.is_terminator: node.is_terminator = True return node def visit_IfStatNode(self, node): self.visitchildren(node) if node.else_clause and node.else_clause.is_terminator: for clause in node.if_clauses: if not clause.is_terminator: break else: node.is_terminator = True return node def visit_TryExceptStatNode(self, node): self.visitchildren(node) if node.body.is_terminator and node.else_clause: if self.current_directives['warn.unreachable']: warning(node.else_clause.pos, "Unreachable code", 2) node.else_clause = None return node class YieldNodeCollector(TreeVisitor): def __init__(self): super(YieldNodeCollector, self).__init__() self.yields = [] self.awaits = [] self.returns = [] self.has_return_value = False def visit_Node(self, node): self.visitchildren(node) def visit_YieldExprNode(self, node): self.yields.append(node) self.visitchildren(node) def visit_AwaitExprNode(self, node): self.awaits.append(node) self.visitchildren(node) def visit_ReturnStatNode(self, node): self.visitchildren(node) if node.value: self.has_return_value = True self.returns.append(node) def visit_ClassDefNode(self, node): pass def visit_FuncDefNode(self, node): pass def visit_LambdaNode(self, node): pass def visit_GeneratorExpressionNode(self, node): pass def visit_CArgDeclNode(self, node): # do not look into annotations # FIXME: support (yield) in default arguments (currently crashes) pass class MarkClosureVisitor(CythonTransform): def visit_ModuleNode(self, node): self.needs_closure = False self.visitchildren(node) return node def visit_FuncDefNode(self, node): self.needs_closure = False self.visitchildren(node) node.needs_closure = self.needs_closure self.needs_closure = True collector = YieldNodeCollector() collector.visitchildren(node) if node.is_async_def: if collector.yields: error(collector.yields[0].pos, "'yield' not allowed in async coroutines (use 'await')") yields = collector.awaits elif collector.yields: if collector.awaits: error(collector.yields[0].pos, "'await' not allowed in generators (use 'yield')") yields = collector.yields else: return node for i, yield_expr in enumerate(yields, 1): yield_expr.label_num = i for retnode in collector.returns: retnode.in_generator = True gbody = Nodes.GeneratorBodyDefNode( pos=node.pos, name=node.name, body=node.body) coroutine = (Nodes.AsyncDefNode if node.is_async_def else Nodes.GeneratorDefNode)( pos=node.pos, name=node.name, args=node.args, star_arg=node.star_arg, starstar_arg=node.starstar_arg, doc=node.doc, decorators=node.decorators, gbody=gbody, lambda_name=node.lambda_name) return coroutine def visit_CFuncDefNode(self, node): 
self.needs_closure = False self.visitchildren(node) node.needs_closure = self.needs_closure self.needs_closure = True if node.needs_closure and node.overridable: error(node.pos, "closures inside cpdef functions not yet supported") return node def visit_LambdaNode(self, node): self.needs_closure = False self.visitchildren(node) node.needs_closure = self.needs_closure self.needs_closure = True return node def visit_ClassDefNode(self, node): self.visitchildren(node) self.needs_closure = True return node class CreateClosureClasses(CythonTransform): # Output closure classes in module scope for all functions # that really need it. def __init__(self, context): super(CreateClosureClasses, self).__init__(context) self.path = [] self.in_lambda = False def visit_ModuleNode(self, node): self.module_scope = node.scope self.visitchildren(node) return node def find_entries_used_in_closures(self, node): from_closure = [] in_closure = [] for name, entry in node.local_scope.entries.items(): if entry.from_closure: from_closure.append((name, entry)) elif entry.in_closure: in_closure.append((name, entry)) return from_closure, in_closure def create_class_from_scope(self, node, target_module_scope, inner_node=None): # move local variables into closure if node.is_generator: for entry in node.local_scope.entries.values(): if not entry.from_closure: entry.in_closure = True from_closure, in_closure = self.find_entries_used_in_closures(node) in_closure.sort() # Now from the begining node.needs_closure = False node.needs_outer_scope = False func_scope = node.local_scope cscope = node.entry.scope while cscope.is_py_class_scope or cscope.is_c_class_scope: cscope = cscope.outer_scope if not from_closure and (self.path or inner_node): if not inner_node: if not node.py_cfunc_node: raise InternalError("DefNode does not have assignment node") inner_node = node.py_cfunc_node inner_node.needs_self_code = False node.needs_outer_scope = False if node.is_generator: pass elif not in_closure and not from_closure: return elif not in_closure: func_scope.is_passthrough = True func_scope.scope_class = cscope.scope_class node.needs_outer_scope = True return as_name = '%s_%s' % ( target_module_scope.next_id(Naming.closure_class_prefix), node.entry.cname) entry = target_module_scope.declare_c_class( name=as_name, pos=node.pos, defining=True, implementing=True) entry.type.is_final_type = True func_scope.scope_class = entry class_scope = entry.type.scope class_scope.is_internal = True class_scope.is_closure_class_scope = True if Options.closure_freelist_size: class_scope.directives['freelist'] = Options.closure_freelist_size if from_closure: assert cscope.is_closure_scope class_scope.declare_var(pos=node.pos, name=Naming.outer_scope_cname, cname=Naming.outer_scope_cname, type=cscope.scope_class.type, is_cdef=True) node.needs_outer_scope = True for name, entry in in_closure: closure_entry = class_scope.declare_var(pos=entry.pos, name=entry.name, cname=entry.cname, type=entry.type, is_cdef=True) if entry.is_declared_generic: closure_entry.is_declared_generic = 1 node.needs_closure = True # Do it here because other classes are already checked target_module_scope.check_c_class(func_scope.scope_class) def visit_LambdaNode(self, node): if not isinstance(node.def_node, Nodes.DefNode): # fused function, an error has been previously issued return node was_in_lambda = self.in_lambda self.in_lambda = True self.create_class_from_scope(node.def_node, self.module_scope, node) self.visitchildren(node) self.in_lambda = was_in_lambda return node def 
visit_FuncDefNode(self, node): if self.in_lambda: self.visitchildren(node) return node if node.needs_closure or self.path: self.create_class_from_scope(node, self.module_scope) self.path.append(node) self.visitchildren(node) self.path.pop() return node def visit_GeneratorBodyDefNode(self, node): self.visitchildren(node) return node def visit_CFuncDefNode(self, node): if not node.overridable: return self.visit_FuncDefNode(node) else: self.visitchildren(node) return node class GilCheck(VisitorTransform): """ Call `node.gil_check(env)` on each node to make sure we hold the GIL when we need it. Raise an error when on Python operations inside a `nogil` environment. Additionally, raise exceptions for closely nested with gil or with nogil statements. The latter would abort Python. """ def __call__(self, root): self.env_stack = [root.scope] self.nogil = False # True for 'cdef func() nogil:' functions, as the GIL may be held while # calling this function (thus contained 'nogil' blocks may be valid). self.nogil_declarator_only = False return super(GilCheck, self).__call__(root) def visit_FuncDefNode(self, node): self.env_stack.append(node.local_scope) was_nogil = self.nogil self.nogil = node.local_scope.nogil if self.nogil: self.nogil_declarator_only = True if self.nogil and node.nogil_check: node.nogil_check(node.local_scope) self.visitchildren(node) # This cannot be nested, so it doesn't need backup/restore self.nogil_declarator_only = False self.env_stack.pop() self.nogil = was_nogil return node def visit_GILStatNode(self, node): if self.nogil and node.nogil_check: node.nogil_check() was_nogil = self.nogil self.nogil = (node.state == 'nogil') if was_nogil == self.nogil and not self.nogil_declarator_only: if not was_nogil: error(node.pos, "Trying to acquire the GIL while it is " "already held.") else: error(node.pos, "Trying to release the GIL while it was " "previously released.") if isinstance(node.finally_clause, Nodes.StatListNode): # The finally clause of the GILStatNode is a GILExitNode, # which is wrapped in a StatListNode. Just unpack that. node.finally_clause, = node.finally_clause.stats self.visitchildren(node) self.nogil = was_nogil return node def visit_ParallelRangeNode(self, node): if node.nogil: node.nogil = False node = Nodes.GILStatNode(node.pos, state='nogil', body=node) return self.visit_GILStatNode(node) if not self.nogil: error(node.pos, "prange() can only be used without the GIL") # Forget about any GIL-related errors that may occur in the body return None node.nogil_check(self.env_stack[-1]) self.visitchildren(node) return node def visit_ParallelWithBlockNode(self, node): if not self.nogil: error(node.pos, "The parallel section may only be used without " "the GIL") return None if node.nogil_check: # It does not currently implement this, but test for it anyway to # avoid potential future surprises node.nogil_check(self.env_stack[-1]) self.visitchildren(node) return node def visit_TryFinallyStatNode(self, node): """ Take care of try/finally statements in nogil code sections. """ if not self.nogil or isinstance(node, Nodes.GILStatNode): return self.visit_Node(node) node.nogil_check = None node.is_try_finally_in_nogil = True self.visitchildren(node) return node def visit_Node(self, node): if self.env_stack and self.nogil and node.nogil_check: node.nogil_check(self.env_stack[-1]) self.visitchildren(node) node.in_nogil_context = self.nogil return node class TransformBuiltinMethods(EnvTransform): """ Replace Cython's own cython.* builtins by the corresponding tree nodes. 
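    This covers, for example, cython.cast(), cython.sizeof(), cython.cdiv() and
    cython.cmod(), as well as attributes like cython.compiled and cython.NULL.
    It also handles the locals()/vars()/dir() builtins and injects the missing
    arguments for eval() and no-argument super() calls.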
""" def visit_SingleAssignmentNode(self, node): if node.declaration_only: return None else: self.visitchildren(node) return node def visit_AttributeNode(self, node): self.visitchildren(node) return self.visit_cython_attribute(node) def visit_NameNode(self, node): return self.visit_cython_attribute(node) def visit_cython_attribute(self, node): attribute = node.as_cython_attribute() if attribute: if attribute == u'compiled': node = ExprNodes.BoolNode(node.pos, value=True) elif attribute == u'__version__': from .. import __version__ as version node = ExprNodes.StringNode(node.pos, value=EncodedString(version)) elif attribute == u'NULL': node = ExprNodes.NullNode(node.pos) elif attribute in (u'set', u'frozenset', u'staticmethod'): node = ExprNodes.NameNode(node.pos, name=EncodedString(attribute), entry=self.current_env().builtin_scope().lookup_here(attribute)) elif PyrexTypes.parse_basic_type(attribute): pass elif self.context.cython_scope.lookup_qualified_name(attribute): pass else: error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute) return node def visit_ExecStatNode(self, node): lenv = self.current_env() self.visitchildren(node) if len(node.args) == 1: node.args.append(ExprNodes.GlobalsExprNode(node.pos)) if not lenv.is_module_scope: node.args.append( ExprNodes.LocalsExprNode( node.pos, self.current_scope_node(), lenv)) return node def _inject_locals(self, node, func_name): # locals()/dir()/vars() builtins lenv = self.current_env() entry = lenv.lookup_here(func_name) if entry: # not the builtin return node pos = node.pos if func_name in ('locals', 'vars'): if func_name == 'locals' and len(node.args) > 0: error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d" % len(node.args)) return node elif func_name == 'vars': if len(node.args) > 1: error(self.pos, "Builtin 'vars()' called with wrong number of args, expected 0-1, got %d" % len(node.args)) if len(node.args) > 0: return node # nothing to do return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv) else: # dir() if len(node.args) > 1: error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d" % len(node.args)) if len(node.args) > 0: # optimised in Builtin.py return node if lenv.is_py_class_scope or lenv.is_module_scope: if lenv.is_py_class_scope: pyclass = self.current_scope_node() locals_dict = ExprNodes.CloneNode(pyclass.dict) else: locals_dict = ExprNodes.GlobalsExprNode(pos) return ExprNodes.SortedDictKeysNode(locals_dict) local_names = sorted(var.name for var in lenv.entries.values() if var.name) items = [ExprNodes.IdentifierStringNode(pos, value=var) for var in local_names] return ExprNodes.ListNode(pos, args=items) def visit_PrimaryCmpNode(self, node): # special case: for in/not-in test, we do not need to sort locals() self.visitchildren(node) if node.operator in 'not_in': # in/not_in if isinstance(node.operand2, ExprNodes.SortedDictKeysNode): arg = node.operand2.arg if isinstance(arg, ExprNodes.NoneCheckNode): arg = arg.arg node.operand2 = arg return node def visit_CascadedCmpNode(self, node): return self.visit_PrimaryCmpNode(node) def _inject_eval(self, node, func_name): lenv = self.current_env() entry = lenv.lookup_here(func_name) if entry or len(node.args) != 1: return node # Inject globals and locals node.args.append(ExprNodes.GlobalsExprNode(node.pos)) if not lenv.is_module_scope: node.args.append( ExprNodes.LocalsExprNode( node.pos, self.current_scope_node(), lenv)) return node def _inject_super(self, node, 
func_name): lenv = self.current_env() entry = lenv.lookup_here(func_name) if entry or node.args: return node # Inject no-args super def_node = self.current_scope_node() if (not isinstance(def_node, Nodes.DefNode) or not def_node.args or len(self.env_stack) < 2): return node class_node, class_scope = self.env_stack[-2] if class_scope.is_py_class_scope: def_node.requires_classobj = True class_node.class_cell.is_active = True node.args = [ ExprNodes.ClassCellNode( node.pos, is_generator=def_node.is_generator), ExprNodes.NameNode(node.pos, name=def_node.args[0].name) ] elif class_scope.is_c_class_scope: node.args = [ ExprNodes.NameNode( node.pos, name=class_node.scope.name, entry=class_node.entry), ExprNodes.NameNode(node.pos, name=def_node.args[0].name) ] return node def visit_SimpleCallNode(self, node): # cython.foo function = node.function.as_cython_attribute() if function: if function in InterpretCompilerDirectives.unop_method_nodes: if len(node.args) != 1: error(node.function.pos, u"%s() takes exactly one argument" % function) else: node = InterpretCompilerDirectives.unop_method_nodes[function]( node.function.pos, operand=node.args[0]) elif function in InterpretCompilerDirectives.binop_method_nodes: if len(node.args) != 2: error(node.function.pos, u"%s() takes exactly two arguments" % function) else: node = InterpretCompilerDirectives.binop_method_nodes[function]( node.function.pos, operand1=node.args[0], operand2=node.args[1]) elif function == u'cast': if len(node.args) != 2: error(node.function.pos, u"cast() takes exactly two arguments and an optional typecheck keyword") else: type = node.args[0].analyse_as_type(self.current_env()) if type: node = ExprNodes.TypecastNode( node.function.pos, type=type, operand=node.args[1], typecheck=False) else: error(node.args[0].pos, "Not a type") elif function == u'sizeof': if len(node.args) != 1: error(node.function.pos, u"sizeof() takes exactly one argument") else: type = node.args[0].analyse_as_type(self.current_env()) if type: node = ExprNodes.SizeofTypeNode(node.function.pos, arg_type=type) else: node = ExprNodes.SizeofVarNode(node.function.pos, operand=node.args[0]) elif function == 'cmod': if len(node.args) != 2: error(node.function.pos, u"cmod() takes exactly two arguments") else: node = ExprNodes.binop_node(node.function.pos, '%', node.args[0], node.args[1]) node.cdivision = True elif function == 'cdiv': if len(node.args) != 2: error(node.function.pos, u"cdiv() takes exactly two arguments") else: node = ExprNodes.binop_node(node.function.pos, '/', node.args[0], node.args[1]) node.cdivision = True elif function == u'set': node.function = ExprNodes.NameNode(node.pos, name=EncodedString('set')) elif function == u'staticmethod': node.function = ExprNodes.NameNode(node.pos, name=EncodedString('staticmethod')) elif self.context.cython_scope.lookup_qualified_name(function): pass else: error(node.function.pos, u"'%s' not a valid cython language construct" % function) self.visitchildren(node) if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name: func_name = node.function.name if func_name in ('dir', 'locals', 'vars'): return self._inject_locals(node, func_name) if func_name == 'eval': return self._inject_eval(node, func_name) if func_name == 'super': return self._inject_super(node, func_name) return node def visit_GeneralCallNode(self, node): function = node.function.as_cython_attribute() if function == u'cast': # NOTE: assuming simple tuple/dict nodes for positional_args and keyword_args args = node.positional_args.args kwargs = 
node.keyword_args.compile_time_value(None) if (len(args) != 2 or len(kwargs) > 1 or (len(kwargs) == 1 and 'typecheck' not in kwargs)): error(node.function.pos, u"cast() takes exactly two arguments and an optional typecheck keyword") else: type = args[0].analyse_as_type(self.current_env()) if type: typecheck = kwargs.get('typecheck', False) node = ExprNodes.TypecastNode( node.function.pos, type=type, operand=args[1], typecheck=typecheck) else: error(args[0].pos, "Not a type") self.visitchildren(node) return node class ReplaceFusedTypeChecks(VisitorTransform): """ This is not a transform in the pipeline. It is invoked on the specific versions of a cdef function with fused argument types. It filters out any type branches that don't match. e.g. if fused_t is mytype: ... elif fused_t in other_fused_type: ... """ def __init__(self, local_scope): super(ReplaceFusedTypeChecks, self).__init__() self.local_scope = local_scope # defer the import until now to avoid circular import time dependencies from .Optimize import ConstantFolding self.transform = ConstantFolding(reevaluate=True) def visit_IfStatNode(self, node): """ Filters out any if clauses with false compile time type check expression. """ self.visitchildren(node) return self.transform(node) def visit_PrimaryCmpNode(self, node): type1 = node.operand1.analyse_as_type(self.local_scope) type2 = node.operand2.analyse_as_type(self.local_scope) if type1 and type2: false_node = ExprNodes.BoolNode(node.pos, value=False) true_node = ExprNodes.BoolNode(node.pos, value=True) type1 = self.specialize_type(type1, node.operand1.pos) op = node.operator if op in ('is', 'is_not', '==', '!='): type2 = self.specialize_type(type2, node.operand2.pos) is_same = type1.same_as(type2) eq = op in ('is', '==') if (is_same and eq) or (not is_same and not eq): return true_node elif op in ('in', 'not_in'): # We have to do an instance check directly, as operand2 # needs to be a fused type and not a type with a subtype # that is fused. First unpack the typedef if isinstance(type2, PyrexTypes.CTypedefType): type2 = type2.typedef_base_type if type1.is_fused: error(node.operand1.pos, "Type is fused") elif not type2.is_fused: error(node.operand2.pos, "Can only use 'in' or 'not in' on a fused type") else: types = PyrexTypes.get_specialized_types(type2) for specialized_type in types: if type1.same_as(specialized_type): if op == 'in': return true_node else: return false_node if op == 'not_in': return true_node return false_node return node def specialize_type(self, type, pos): try: return type.specialize(self.local_scope.fused_to_specific) except KeyError: error(pos, "Type is not specific") return type def visit_Node(self, node): self.visitchildren(node) return node class DebugTransform(CythonTransform): """ Write debug information for this Cython module. 
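    The generated output is the XML consumed by the cygdb debugger support in
    Cython.Debugger (written through the CythonDebugWriter referenced below).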
""" def __init__(self, context, options, result): super(DebugTransform, self).__init__(context) self.visited = set() # our treebuilder and debug output writer # (see Cython.Debugger.debug_output.CythonDebugWriter) self.tb = self.context.gdb_debug_outputwriter #self.c_output_file = options.output_file self.c_output_file = result.c_file # Closure support, basically treat nested functions as if the AST were # never nested self.nested_funcdefs = [] # tells visit_NameNode whether it should register step-into functions self.register_stepinto = False def visit_ModuleNode(self, node): self.tb.module_name = node.full_module_name attrs = dict( module_name=node.full_module_name, filename=node.pos[0].filename, c_filename=self.c_output_file) self.tb.start('Module', attrs) # serialize functions self.tb.start('Functions') # First, serialize functions normally... self.visitchildren(node) # ... then, serialize nested functions for nested_funcdef in self.nested_funcdefs: self.visit_FuncDefNode(nested_funcdef) self.register_stepinto = True self.serialize_modulenode_as_function(node) self.register_stepinto = False self.tb.end('Functions') # 2.3 compatibility. Serialize global variables self.tb.start('Globals') entries = {} for k, v in node.scope.entries.items(): if (v.qualified_name not in self.visited and not v.name.startswith('__pyx_') and not v.type.is_cfunction and not v.type.is_extension_type): entries[k]= v self.serialize_local_variables(entries) self.tb.end('Globals') # self.tb.end('Module') # end Module after the line number mapping in # Cython.Compiler.ModuleNode.ModuleNode._serialize_lineno_map return node def visit_FuncDefNode(self, node): self.visited.add(node.local_scope.qualified_name) if getattr(node, 'is_wrapper', False): return node if self.register_stepinto: self.nested_funcdefs.append(node) return node # node.entry.visibility = 'extern' if node.py_func is None: pf_cname = '' else: pf_cname = node.py_func.entry.func_cname attrs = dict( name=node.entry.name or getattr(node, 'name', ''), cname=node.entry.func_cname, pf_cname=pf_cname, qualified_name=node.local_scope.qualified_name, lineno=str(node.pos[1])) self.tb.start('Function', attrs=attrs) self.tb.start('Locals') self.serialize_local_variables(node.local_scope.entries) self.tb.end('Locals') self.tb.start('Arguments') for arg in node.local_scope.arg_entries: self.tb.start(arg.name) self.tb.end(arg.name) self.tb.end('Arguments') self.tb.start('StepIntoFunctions') self.register_stepinto = True self.visitchildren(node) self.register_stepinto = False self.tb.end('StepIntoFunctions') self.tb.end('Function') return node def visit_NameNode(self, node): if (self.register_stepinto and node.type is not None and node.type.is_cfunction and getattr(node, 'is_called', False) and node.entry.func_cname is not None): # don't check node.entry.in_cinclude, as 'cdef extern: ...' # declared functions are not 'in_cinclude'. # This means we will list called 'cdef' functions as # "step into functions", but this is not an issue as they will be # recognized as Cython functions anyway. attrs = dict(name=node.entry.func_cname) self.tb.start('StepIntoFunction', attrs=attrs) self.tb.end('StepIntoFunction') self.visitchildren(node) return node def serialize_modulenode_as_function(self, node): """ Serialize the module-level code as a function so the debugger will know it's a "relevant frame" and it will know where to set the breakpoint for 'break modulename'. 
""" name = node.full_module_name.rpartition('.')[-1] cname_py2 = 'init' + name cname_py3 = 'PyInit_' + name py2_attrs = dict( name=name, cname=cname_py2, pf_cname='', # Ignore the qualified_name, breakpoints should be set using # `cy break modulename:lineno` for module-level breakpoints. qualified_name='', lineno='1', is_initmodule_function="True", ) py3_attrs = dict(py2_attrs, cname=cname_py3) self._serialize_modulenode_as_function(node, py2_attrs) self._serialize_modulenode_as_function(node, py3_attrs) def _serialize_modulenode_as_function(self, node, attrs): self.tb.start('Function', attrs=attrs) self.tb.start('Locals') self.serialize_local_variables(node.scope.entries) self.tb.end('Locals') self.tb.start('Arguments') self.tb.end('Arguments') self.tb.start('StepIntoFunctions') self.register_stepinto = True self.visitchildren(node) self.register_stepinto = False self.tb.end('StepIntoFunctions') self.tb.end('Function') def serialize_local_variables(self, entries): for entry in entries.values(): if not entry.cname: # not a local variable continue if entry.type.is_pyobject: vartype = 'PythonObject' else: vartype = 'CObject' if entry.from_closure: # We're dealing with a closure where a variable from an outer # scope is accessed, get it from the scope object. cname = '%s->%s' % (Naming.cur_scope_cname, entry.outer_entry.cname) qname = '%s.%s.%s' % (entry.scope.outer_scope.qualified_name, entry.scope.name, entry.name) elif entry.in_closure: cname = '%s->%s' % (Naming.cur_scope_cname, entry.cname) qname = entry.qualified_name else: cname = entry.cname qname = entry.qualified_name if not entry.pos: # this happens for variables that are not in the user's code, # e.g. for the global __builtins__, __doc__, etc. We can just # set the lineno to 0 for those. lineno = '0' else: lineno = str(entry.pos[1]) attrs = dict( name=entry.name, cname=cname, qualified_name=qname, type=vartype, lineno=lineno) self.tb.start('LocalVar', attrs) self.tb.end('LocalVar') Cython-0.26.1/Cython/Compiler/UtilNodes.py0000664000175000017500000002645013023021033021120 0ustar stefanstefan00000000000000# # Nodes used as utilities and support for transforms etc. # These often make up sets including both Nodes and ExprNodes # so it is convenient to have them in a seperate module. # from __future__ import absolute_import from . import Nodes from . 
import ExprNodes from .Nodes import Node from .ExprNodes import AtomicExprNode from .PyrexTypes import c_ptr_type class TempHandle(object): # THIS IS DEPRECATED, USE LetRefNode instead temp = None needs_xdecref = False def __init__(self, type, needs_cleanup=None): self.type = type if needs_cleanup is None: self.needs_cleanup = type.is_pyobject else: self.needs_cleanup = needs_cleanup def ref(self, pos): return TempRefNode(pos, handle=self, type=self.type) class TempRefNode(AtomicExprNode): # THIS IS DEPRECATED, USE LetRefNode instead # handle TempHandle def analyse_types(self, env): assert self.type == self.handle.type return self def analyse_target_types(self, env): assert self.type == self.handle.type return self def analyse_target_declaration(self, env): pass def calculate_result_code(self): result = self.handle.temp if result is None: result = "" # might be called and overwritten return result def generate_result_code(self, code): pass def generate_assignment_code(self, rhs, code, overloaded_assignment=False): if self.type.is_pyobject: rhs.make_owned_reference(code) # TODO: analyse control flow to see if this is necessary code.put_xdecref(self.result(), self.ctype()) code.putln('%s = %s;' % ( self.result(), rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()), )) rhs.generate_post_assignment_code(code) rhs.free_temps(code) class TempsBlockNode(Node): # THIS IS DEPRECATED, USE LetNode instead """ Creates a block which allocates temporary variables. This is used by transforms to output constructs that need to make use of a temporary variable. Simply pass the types of the needed temporaries to the constructor. The variables can be referred to using a TempRefNode (which can be constructed by calling get_ref_node). """ # temps [TempHandle] # body StatNode child_attrs = ["body"] def generate_execution_code(self, code): for handle in self.temps: handle.temp = code.funcstate.allocate_temp( handle.type, manage_ref=handle.needs_cleanup) self.body.generate_execution_code(code) for handle in self.temps: if handle.needs_cleanup: if handle.needs_xdecref: code.put_xdecref_clear(handle.temp, handle.type) else: code.put_decref_clear(handle.temp, handle.type) code.funcstate.release_temp(handle.temp) def analyse_declarations(self, env): self.body.analyse_declarations(env) def analyse_expressions(self, env): self.body = self.body.analyse_expressions(env) return self def generate_function_definitions(self, env, code): self.body.generate_function_definitions(env, code) def annotate(self, code): self.body.annotate(code) class ResultRefNode(AtomicExprNode): # A reference to the result of an expression. The result_code # must be set externally (usually a temp name). 
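    #
    # Typical use from a transform (illustrative sketch; the helper that builds
    # the body node is hypothetical, while LetRefNode and EvalWithTempExprNode
    # are defined further down in this module):
    #
    #   ref = LetRefNode(some_expr)               # evaluate the expression once
    #   body = build_node_that_uses(ref)          # reuse the temp any number of times
    #   node = EvalWithTempExprNode(ref, body)    # wraps body and manages the temp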
subexprs = [] lhs_of_first_assignment = False def __init__(self, expression=None, pos=None, type=None, may_hold_none=True, is_temp=False): self.expression = expression self.pos = None self.may_hold_none = may_hold_none if expression is not None: self.pos = expression.pos if hasattr(expression, "type"): self.type = expression.type if pos is not None: self.pos = pos if type is not None: self.type = type if is_temp: self.is_temp = True assert self.pos is not None def clone_node(self): # nothing to do here return self def type_dependencies(self, env): if self.expression: return self.expression.type_dependencies(env) else: return () def update_expression(self, expression): self.expression = expression if hasattr(expression, "type"): self.type = expression.type def analyse_types(self, env): if self.expression is not None: if not self.expression.type: self.expression = self.expression.analyse_types(env) self.type = self.expression.type return self def infer_type(self, env): if self.type is not None: return self.type if self.expression is not None: if self.expression.type is not None: return self.expression.type return self.expression.infer_type(env) assert False, "cannot infer type of ResultRefNode" def may_be_none(self): if not self.type.is_pyobject: return False return self.may_hold_none def _DISABLED_may_be_none(self): # not sure if this is safe - the expression may not be the # only value that gets assigned if self.expression is not None: return self.expression.may_be_none() if self.type is not None: return self.type.is_pyobject return True # play safe def is_simple(self): return True def result(self): try: return self.result_code except AttributeError: if self.expression is not None: self.result_code = self.expression.result() return self.result_code def generate_evaluation_code(self, code): pass def generate_result_code(self, code): pass def generate_disposal_code(self, code): pass def generate_assignment_code(self, rhs, code, overloaded_assignment=False): if self.type.is_pyobject: rhs.make_owned_reference(code) if not self.lhs_of_first_assignment: code.put_decref(self.result(), self.ctype()) code.putln('%s = %s;' % ( self.result(), rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()), )) rhs.generate_post_assignment_code(code) rhs.free_temps(code) def allocate_temps(self, env): pass def release_temp(self, env): pass def free_temps(self, code): pass class LetNodeMixin: def set_temp_expr(self, lazy_temp): self.lazy_temp = lazy_temp self.temp_expression = lazy_temp.expression def setup_temp_expr(self, code): self.temp_expression.generate_evaluation_code(code) self.temp_type = self.temp_expression.type if self.temp_type.is_array: self.temp_type = c_ptr_type(self.temp_type.base_type) self._result_in_temp = self.temp_expression.result_in_temp() if self._result_in_temp: self.temp = self.temp_expression.result() else: self.temp_expression.make_owned_reference(code) self.temp = code.funcstate.allocate_temp( self.temp_type, manage_ref=True) code.putln("%s = %s;" % (self.temp, self.temp_expression.result())) self.temp_expression.generate_disposal_code(code) self.temp_expression.free_temps(code) self.lazy_temp.result_code = self.temp def teardown_temp_expr(self, code): if self._result_in_temp: self.temp_expression.generate_disposal_code(code) self.temp_expression.free_temps(code) else: if self.temp_type.is_pyobject: code.put_decref_clear(self.temp, self.temp_type) code.funcstate.release_temp(self.temp) class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin): # A wrapper around a 
subexpression that moves an expression into a # temp variable and provides it to the subexpression. subexprs = ['temp_expression', 'subexpression'] def __init__(self, lazy_temp, subexpression): self.set_temp_expr(lazy_temp) self.pos = subexpression.pos self.subexpression = subexpression # if called after type analysis, we already know the type here self.type = self.subexpression.type def infer_type(self, env): return self.subexpression.infer_type(env) def result(self): return self.subexpression.result() def analyse_types(self, env): self.temp_expression = self.temp_expression.analyse_types(env) self.lazy_temp.update_expression(self.temp_expression) # overwrite in case it changed self.subexpression = self.subexpression.analyse_types(env) self.type = self.subexpression.type return self def free_subexpr_temps(self, code): self.subexpression.free_temps(code) def generate_subexpr_disposal_code(self, code): self.subexpression.generate_disposal_code(code) def generate_evaluation_code(self, code): self.setup_temp_expr(code) self.subexpression.generate_evaluation_code(code) self.teardown_temp_expr(code) LetRefNode = ResultRefNode class LetNode(Nodes.StatNode, LetNodeMixin): # Implements a local temporary variable scope. Imagine this # syntax being present: # let temp = VALUE: # BLOCK (can modify temp) # if temp is an object, decref # # Usually used after analysis phase, but forwards analysis methods # to its children child_attrs = ['temp_expression', 'body'] def __init__(self, lazy_temp, body): self.set_temp_expr(lazy_temp) self.pos = body.pos self.body = body def analyse_declarations(self, env): self.temp_expression.analyse_declarations(env) self.body.analyse_declarations(env) def analyse_expressions(self, env): self.temp_expression = self.temp_expression.analyse_expressions(env) self.body = self.body.analyse_expressions(env) return self def generate_execution_code(self, code): self.setup_temp_expr(code) self.body.generate_execution_code(code) self.teardown_temp_expr(code) def generate_function_definitions(self, env, code): self.temp_expression.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) class TempResultFromStatNode(ExprNodes.ExprNode): # An ExprNode wrapper around a StatNode that executes the StatNode # body. Requires a ResultRefNode that it sets up to refer to its # own temp result. The StatNode must assign a value to the result # node, which then becomes the result of this node. subexprs = [] child_attrs = ['body'] def __init__(self, result_ref, body): self.result_ref = result_ref self.pos = body.pos self.body = body self.type = result_ref.type self.is_temp = 1 def analyse_declarations(self, env): self.body.analyse_declarations(env) def analyse_types(self, env): self.body = self.body.analyse_expressions(env) return self def generate_result_code(self, code): self.result_ref.result_code = self.result() self.body.generate_execution_code(code) Cython-0.26.1/Cython/Compiler/CmdLine.py0000664000175000017500000002127212574327400020543 0ustar stefanstefan00000000000000# # Cython - Command Line Parsing # from __future__ import absolute_import import os import sys from . import Options usage = """\ Cython (http://cython.org) is a compiler for code written in the Cython language. Cython is based on Pyrex by Greg Ewing. Usage: cython [options] sourcefile.{pyx,py} ... 
Options: -V, --version Display version number of cython compiler -l, --create-listing Write error messages to a listing file -I, --include-dir Search for include files in named directory (multiple include directories are allowed). -o, --output-file Specify name of generated C file -t, --timestamps Only compile newer source files -f, --force Compile all source files (overrides implied -t) -v, --verbose Be verbose, print file names on multiple compilation -p, --embed-positions If specified, the positions in Cython files of each function definition is embedded in its docstring. --cleanup Release interned objects on python exit, for memory debugging. Level indicates aggressiveness, default 0 releases nothing. -w, --working Sets the working directory for Cython (the directory modules are searched from) --gdb Output debug information for cygdb --gdb-outdir Specify gdb debug information output directory. Implies --gdb. -D, --no-docstrings Strip docstrings from the compiled module. -a, --annotate Produce a colorized HTML version of the source. --annotate-coverage Annotate and include coverage information from cov.xml. --line-directives Produce #line directives pointing to the .pyx source --cplus Output a C++ rather than C file. --embed[=] Generate a main() function that embeds the Python interpreter. -2 Compile based on Python-2 syntax and code semantics. -3 Compile based on Python-3 syntax and code semantics. --lenient Change some compile time errors to runtime errors to improve Python compatibility --capi-reexport-cincludes Add cincluded headers to any auto-generated header files. --fast-fail Abort the compilation on the first error --warning-errors, -Werror Make all warnings into errors --warning-extra, -Wextra Enable extra warnings -X, --directive =[, 1: sys.stderr.write( "cython: Only one source file allowed when using -o\n") sys.exit(1) if len(sources) == 0 and not options.show_version: bad_usage() if Options.embed and len(sources) > 1: sys.stderr.write( "cython: Only one source file allowed when using -embed\n") sys.exit(1) return options, sources Cython-0.26.1/Cython/Compiler/Version.py0000664000175000017500000000026512542002467020652 0ustar stefanstefan00000000000000# for backwards compatibility from __future__ import absolute_import from .. import __version__ as version # For 'generated by' header line in C files. watermark = str(version) Cython-0.26.1/Cython/Compiler/StringEncoding.py0000664000175000017500000002264113143605603022143 0ustar stefanstefan00000000000000# # Cython -- encoding related tools # from __future__ import absolute_import import re import sys if sys.version_info[0] >= 3: _unicode, _str, _bytes, _unichr = str, str, bytes, chr IS_PYTHON3 = True else: _unicode, _str, _bytes, _unichr = unicode, str, str, unichr IS_PYTHON3 = False empty_bytes = _bytes() empty_unicode = _unicode() join_bytes = empty_bytes.join class UnicodeLiteralBuilder(object): """Assemble a unicode string. 
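    Illustrative use:

        builder = UnicodeLiteralBuilder()
        builder.append(u"abc")
        builder.append_charval(0x20AC)    # appends U+20AC EURO SIGN
        builder.getstring()               # -> EncodedString(u"abc\u20ac")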
""" def __init__(self): self.chars = [] def append(self, characters): if isinstance(characters, _bytes): # this came from a Py2 string literal in the parser code characters = characters.decode("ASCII") assert isinstance(characters, _unicode), str(type(characters)) self.chars.append(characters) if sys.maxunicode == 65535: def append_charval(self, char_number): if char_number > 65535: # wide Unicode character on narrow platform => replace # by surrogate pair char_number -= 0x10000 self.chars.append( _unichr((char_number // 1024) + 0xD800) ) self.chars.append( _unichr((char_number % 1024) + 0xDC00) ) else: self.chars.append( _unichr(char_number) ) else: def append_charval(self, char_number): self.chars.append( _unichr(char_number) ) def append_uescape(self, char_number, escape_string): self.append_charval(char_number) def getstring(self): return EncodedString(u''.join(self.chars)) def getstrings(self): return (None, self.getstring()) class BytesLiteralBuilder(object): """Assemble a byte string or char value. """ def __init__(self, target_encoding): self.chars = [] self.target_encoding = target_encoding def append(self, characters): if isinstance(characters, _unicode): characters = characters.encode(self.target_encoding) assert isinstance(characters, _bytes), str(type(characters)) self.chars.append(characters) def append_charval(self, char_number): self.chars.append( _unichr(char_number).encode('ISO-8859-1') ) def append_uescape(self, char_number, escape_string): self.append(escape_string) def getstring(self): # this *must* return a byte string! return bytes_literal(join_bytes(self.chars), self.target_encoding) def getchar(self): # this *must* return a byte string! return self.getstring() def getstrings(self): return (self.getstring(), None) class StrLiteralBuilder(object): """Assemble both a bytes and a unicode representation of a string. """ def __init__(self, target_encoding): self._bytes = BytesLiteralBuilder(target_encoding) self._unicode = UnicodeLiteralBuilder() def append(self, characters): self._bytes.append(characters) self._unicode.append(characters) def append_charval(self, char_number): self._bytes.append_charval(char_number) self._unicode.append_charval(char_number) def append_uescape(self, char_number, escape_string): self._bytes.append(escape_string) self._unicode.append_charval(char_number) def getstrings(self): return (self._bytes.getstring(), self._unicode.getstring()) class EncodedString(_unicode): # unicode string subclass to keep track of the original encoding. # 'encoding' is None for unicode strings and the source encoding # otherwise encoding = None def __deepcopy__(self, memo): return self def byteencode(self): assert self.encoding is not None return self.encode(self.encoding) def utf8encode(self): assert self.encoding is None return self.encode("UTF-8") @property def is_unicode(self): return self.encoding is None def contains_surrogates(self): return string_contains_surrogates(self) def as_utf8_string(self): return bytes_literal(self.utf8encode(), 'utf8') def string_contains_surrogates(ustring): """ Check if the unicode string contains surrogate code points on a CPython platform with wide (UCS-4) or narrow (UTF-16) Unicode, i.e. characters that would be spelled as two separate code units on a narrow platform. 
""" for c in map(ord, ustring): if c > 65535: # can only happen on wide platforms return True if 0xD800 <= c <= 0xDFFF: return True return False class BytesLiteral(_bytes): # bytes subclass that is compatible with EncodedString encoding = None def __deepcopy__(self, memo): return self def byteencode(self): if IS_PYTHON3: return _bytes(self) else: # fake-recode the string to make it a plain bytes object return self.decode('ISO-8859-1').encode('ISO-8859-1') def utf8encode(self): assert False, "this is not a unicode string: %r" % self def __str__(self): """Fake-decode the byte string to unicode to support % formatting of unicode strings. """ return self.decode('ISO-8859-1') is_unicode = False def as_c_string_literal(self): value = split_string_literal(escape_byte_string(self)) return '"%s"' % value def bytes_literal(s, encoding): assert isinstance(s, bytes) s = BytesLiteral(s) s.encoding = encoding return s char_from_escape_sequence = { r'\a' : u'\a', r'\b' : u'\b', r'\f' : u'\f', r'\n' : u'\n', r'\r' : u'\r', r'\t' : u'\t', r'\v' : u'\v', }.get _c_special = ('\\', '??', '"') + tuple(map(chr, range(32))) def _to_escape_sequence(s): if s in '\n\r\t': return repr(s)[1:-1] elif s == '"': return r'\"' elif s == '\\': return r'\\' else: # within a character sequence, oct passes much better than hex return ''.join(['\\%03o' % ord(c) for c in s]) def _build_specials_replacer(): subexps = [] replacements = {} for special in _c_special: regexp = ''.join(['[%s]' % c.replace('\\', '\\\\') for c in special]) subexps.append(regexp) replacements[special.encode('ASCII')] = _to_escape_sequence(special).encode('ASCII') sub = re.compile(('(%s)' % '|'.join(subexps)).encode('ASCII')).sub def replace_specials(m): return replacements[m.group(1)] def replace(s): return sub(replace_specials, s) return replace _replace_specials = _build_specials_replacer() def escape_char(c): if IS_PYTHON3: c = c.decode('ISO-8859-1') if c in '\n\r\t\\': return repr(c)[1:-1] elif c == "'": return "\\'" n = ord(c) if n < 32 or n > 127: # hex works well for characters return "\\x%02X" % n else: return c def escape_byte_string(s): """Escape a byte string so that it can be written into C code. Note that this returns a Unicode string instead which, when encoded as ISO-8859-1, will result in the correct byte sequence being written. """ s = _replace_specials(s) try: return s.decode("ASCII") # trial decoding: plain ASCII => done except UnicodeDecodeError: pass if IS_PYTHON3: s_new = bytearray() append, extend = s_new.append, s_new.extend for b in s: if b >= 128: extend(('\\%3o' % b).encode('ASCII')) else: append(b) return s_new.decode('ISO-8859-1') else: l = [] append = l.append for c in s: o = ord(c) if o >= 128: append('\\%3o' % o) else: append(c) return join_bytes(l).decode('ISO-8859-1') def split_string_literal(s, limit=2000): # MSVC can't handle long string literals. if len(s) < limit: return s else: start = 0 chunks = [] while start < len(s): end = start + limit if len(s) > end-4 and '\\' in s[end-4:end]: end -= 4 - s[end-4:end].find('\\') # just before the backslash while s[end-1] == '\\': end -= 1 if end == start: # must have been a long line of backslashes end = start + limit - (limit % 2) - 4 break chunks.append(s[start:end]) start = end return '""'.join(chunks) def encode_pyunicode_string(s): """Create Py_UNICODE[] representation of a given unicode string. 
""" s = list(map(ord, s)) + [0] if sys.maxunicode >= 0x10000: # Wide build or Py3.3 utf16, utf32 = [], s for code_point in s: if code_point >= 0x10000: # outside of BMP high, low = divmod(code_point - 0x10000, 1024) utf16.append(high + 0xD800) utf16.append(low + 0xDC00) else: utf16.append(code_point) else: utf16, utf32 = s, [] for code_unit in s: if 0xDC00 <= code_unit <= 0xDFFF and utf32 and 0xD800 <= utf32[-1] <= 0xDBFF: high, low = utf32[-1], code_unit utf32[-1] = ((high & 0x3FF) << 10) + (low & 0x3FF) + 0x10000 else: utf32.append(code_unit) if utf16 == utf32: utf16 = [] return ",".join(map(_unicode, utf16)), ",".join(map(_unicode, utf32)) Cython-0.26.1/Cython/Compiler/Visitor.pxd0000664000175000017500000000312512542002467021025 0ustar stefanstefan00000000000000from __future__ import absolute_import cimport cython cdef class TreeVisitor: cdef public list access_path cdef dict dispatch_table cpdef visit(self, obj) cdef _visit(self, obj) cdef find_handler(self, obj) cdef _visitchild(self, child, parent, attrname, idx) cdef dict _visitchildren(self, parent, attrs) cpdef visitchildren(self, parent, attrs=*) cdef class VisitorTransform(TreeVisitor): cpdef visitchildren(self, parent, attrs=*) cpdef recurse_to_children(self, node) cdef class CythonTransform(VisitorTransform): cdef public context cdef public current_directives cdef class ScopeTrackingTransform(CythonTransform): cdef public scope_type cdef public scope_node cdef visit_scope(self, node, scope_type) cdef class EnvTransform(CythonTransform): cdef public list env_stack cdef class MethodDispatcherTransform(EnvTransform): @cython.final cdef _visit_binop_node(self, node) @cython.final cdef _find_handler(self, match_name, bint has_kwargs) @cython.final cdef _delegate_to_assigned_value(self, node, function, arg_list, kwargs) @cython.final cdef _dispatch_to_handler(self, node, function, arg_list, kwargs) @cython.final cdef _dispatch_to_method_handler(self, attr_name, self_arg, is_unbound_method, type_name, node, function, arg_list, kwargs) cdef class RecursiveNodeReplacer(VisitorTransform): cdef public orig_node cdef public new_node cdef class NodeFinder(TreeVisitor): cdef node cdef public bint found Cython-0.26.1/Cython/Compiler/UtilityCode.py0000664000175000017500000002205513143605603021463 0ustar stefanstefan00000000000000from __future__ import absolute_import from .TreeFragment import parse_from_strings, StringParseContext from . import Symtab from . import Naming from . import Code class NonManglingModuleScope(Symtab.ModuleScope): cpp = False def __init__(self, prefix, *args, **kw): self.prefix = prefix self.cython_scope = None Symtab.ModuleScope.__init__(self, *args, **kw) def add_imported_entry(self, name, entry, pos): entry.used = True return super(NonManglingModuleScope, self).add_imported_entry(name, entry, pos) def mangle(self, prefix, name=None): if name: if prefix in (Naming.typeobj_prefix, Naming.func_prefix, Naming.var_prefix, Naming.pyfunc_prefix): # Functions, classes etc. 
gets a manually defined prefix easily # manually callable instead (the one passed to CythonUtilityCode) prefix = self.prefix return "%s%s" % (prefix, name) else: return Symtab.ModuleScope.mangle(self, prefix) class CythonUtilityCodeContext(StringParseContext): scope = None def find_module(self, module_name, relative_to=None, pos=None, need_pxd=True, absolute_fallback=True): if relative_to: raise AssertionError("Relative imports not supported in utility code.") if module_name != self.module_name: if module_name not in self.modules: raise AssertionError("Only the cython cimport is supported.") else: return self.modules[module_name] if self.scope is None: self.scope = NonManglingModuleScope( self.prefix, module_name, parent_module=None, context=self) return self.scope class CythonUtilityCode(Code.UtilityCodeBase): """ Utility code written in the Cython language itself. The @cname decorator can set the cname for a function, method of cdef class. Functions decorated with @cname('c_func_name') get the given cname. For cdef classes the rules are as follows: obj struct -> _obj obj type ptr -> _type methods -> _ For methods the cname decorator is optional, but without the decorator the methods will not be prototyped. See Cython.Compiler.CythonScope and tests/run/cythonscope.pyx for examples. """ is_cython_utility = True def __init__(self, impl, name="__pyxutil", prefix="", requires=None, file=None, from_scope=None, context=None, compiler_directives=None, outer_module_scope=None): # 1) We need to delay the parsing/processing, so that all modules can be # imported without import loops # 2) The same utility code object can be used for multiple source files; # while the generated node trees can be altered in the compilation of a # single file. # Hence, delay any processing until later. context_types = {} if context is not None: from .PyrexTypes import BaseType for key, value in context.items(): if isinstance(value, BaseType): context[key] = key context_types[key] = value impl = Code.sub_tempita(impl, context, file, name) self.impl = impl self.name = name self.file = file self.prefix = prefix self.requires = requires or [] self.from_scope = from_scope self.outer_module_scope = outer_module_scope self.compiler_directives = compiler_directives self.context_types = context_types def __eq__(self, other): if isinstance(other, CythonUtilityCode): return self._equality_params() == other._equality_params() else: return False def _equality_params(self): outer_scope = self.outer_module_scope while isinstance(outer_scope, NonManglingModuleScope): outer_scope = outer_scope.outer_scope return self.impl, outer_scope, self.compiler_directives def __hash__(self): return hash(self.impl) def get_tree(self, entries_only=False, cython_scope=None): from .AnalysedTreeTransforms import AutoTestDictTransform # The AutoTestDictTransform creates the statement "__test__ = {}", # which when copied into the main ModuleNode overwrites # any __test__ in user code; not desired excludes = [AutoTestDictTransform] from . 
import Pipeline, ParseTreeTransforms context = CythonUtilityCodeContext( self.name, compiler_directives=self.compiler_directives) context.prefix = self.prefix context.cython_scope = cython_scope #context = StringParseContext(self.name) tree = parse_from_strings( self.name, self.impl, context=context, allow_struct_enum_decorator=True) pipeline = Pipeline.create_pipeline(context, 'pyx', exclude_classes=excludes) if entries_only: p = [] for t in pipeline: p.append(t) if isinstance(p, ParseTreeTransforms.AnalyseDeclarationsTransform): break pipeline = p transform = ParseTreeTransforms.CnameDirectivesTransform(context) # InterpretCompilerDirectives already does a cdef declarator check #before = ParseTreeTransforms.DecoratorTransform before = ParseTreeTransforms.InterpretCompilerDirectives pipeline = Pipeline.insert_into_pipeline(pipeline, transform, before=before) def merge_scope(scope): def merge_scope_transform(module_node): module_node.scope.merge_in(scope) return module_node return merge_scope_transform if self.from_scope: pipeline = Pipeline.insert_into_pipeline( pipeline, merge_scope(self.from_scope), before=ParseTreeTransforms.AnalyseDeclarationsTransform) for dep in self.requires: if isinstance(dep, CythonUtilityCode) and hasattr(dep, 'tree') and not cython_scope: pipeline = Pipeline.insert_into_pipeline( pipeline, merge_scope(dep.tree.scope), before=ParseTreeTransforms.AnalyseDeclarationsTransform) if self.outer_module_scope: # inject outer module between utility code module and builtin module def scope_transform(module_node): module_node.scope.outer_scope = self.outer_module_scope return module_node pipeline = Pipeline.insert_into_pipeline( pipeline, scope_transform, before=ParseTreeTransforms.AnalyseDeclarationsTransform) if self.context_types: # inject types into module scope def scope_transform(module_node): for name, type in self.context_types.items(): entry = module_node.scope.declare_type(name, type, None, visibility='extern') entry.in_cinclude = True return module_node pipeline = Pipeline.insert_into_pipeline( pipeline, scope_transform, before=ParseTreeTransforms.AnalyseDeclarationsTransform) (err, tree) = Pipeline.run_pipeline(pipeline, tree, printtree=False) assert not err, err self.tree = tree return tree def put_code(self, output): pass @classmethod def load_as_string(cls, util_code_name, from_file=None, **kwargs): """ Load a utility code as a string. Returns (proto, implementation) """ util = cls.load(util_code_name, from_file, **kwargs) return util.proto, util.impl # keep line numbers => no lstrip() def declare_in_scope(self, dest_scope, used=False, cython_scope=None, whitelist=None): """ Declare all entries from the utility code in dest_scope. Code will only be included for used entries. If module_name is given, declare the type entries with that name. """ tree = self.get_tree(entries_only=True, cython_scope=cython_scope) entries = tree.scope.entries entries.pop('__name__') entries.pop('__file__') entries.pop('__builtins__') entries.pop('__doc__') for entry in entries.values(): entry.utility_code_definition = self entry.used = used original_scope = tree.scope dest_scope.merge_in(original_scope, merge_unused=True, whitelist=whitelist) tree.scope = dest_scope for dep in self.requires: if dep.is_cython_utility: dep.declare_in_scope(dest_scope) return original_scope def declare_declarations_in_scope(declaration_string, env, private_type=True, *args, **kwargs): """ Declare some declarations given as Cython code in declaration_string in scope env. 
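    Any additional positional and keyword arguments are passed on to the
    CythonUtilityCode constructor.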
""" CythonUtilityCode(declaration_string, *args, **kwargs).declare_in_scope(env) Cython-0.26.1/Cython/Compiler/AutoDocTransforms.py0000664000175000017500000002102513023021033022620 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Visitor import CythonTransform from .StringEncoding import EncodedString from . import Options from . import PyrexTypes, ExprNodes class EmbedSignature(CythonTransform): def __init__(self, context): super(EmbedSignature, self).__init__(context) self.denv = None # XXX self.class_name = None self.class_node = None unop_precedence = 11 binop_precedence = { 'or': 1, 'and': 2, 'not': 3, 'in': 4, 'not in': 4, 'is': 4, 'is not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4, '|': 5, '^': 6, '&': 7, '<<': 8, '>>': 8, '+': 9, '-': 9, '*': 10, '/': 10, '//': 10, '%': 10, # unary: '+': 11, '-': 11, '~': 11 '**': 12} def _fmt_expr_node(self, node, precedence=0): if isinstance(node, ExprNodes.BinopNode) and not node.inplace: new_prec = self.binop_precedence.get(node.operator, 0) result = '%s %s %s' % (self._fmt_expr_node(node.operand1, new_prec), node.operator, self._fmt_expr_node(node.operand2, new_prec)) if precedence > new_prec: result = '(%s)' % result elif isinstance(node, ExprNodes.UnopNode): result = '%s%s' % (node.operator, self._fmt_expr_node(node.operand, self.unop_precedence)) if precedence > self.unop_precedence: result = '(%s)' % result elif isinstance(node, ExprNodes.AttributeNode): result = '%s.%s' % (self._fmt_expr_node(node.obj), node.attribute) else: result = node.name return result def _fmt_arg_defv(self, arg): default_val = arg.default if not default_val: return None if isinstance(default_val, ExprNodes.NullNode): return 'NULL' try: denv = self.denv # XXX ctval = default_val.compile_time_value(self.denv) repr_val = repr(ctval) if isinstance(default_val, ExprNodes.UnicodeNode): if repr_val[:1] != 'u': return u'u%s' % repr_val elif isinstance(default_val, ExprNodes.BytesNode): if repr_val[:1] != 'b': return u'b%s' % repr_val elif isinstance(default_val, ExprNodes.StringNode): if repr_val[:1] in 'ub': return repr_val[1:] return repr_val except Exception: try: return self._fmt_expr_node(default_val) except AttributeError: return '' def _fmt_arg(self, arg): if arg.type is PyrexTypes.py_object_type or arg.is_self_arg: doc = arg.name else: doc = arg.type.declaration_code(arg.name, for_display=1) if arg.default: arg_defv = self._fmt_arg_defv(arg) if arg_defv: doc = doc + ('=%s' % arg_defv) return doc def _fmt_arglist(self, args, npargs=0, pargs=None, nkargs=0, kargs=None, hide_self=False): arglist = [] for arg in args: if not hide_self or not arg.entry.is_self_arg: arg_doc = self._fmt_arg(arg) arglist.append(arg_doc) if pargs: arglist.insert(npargs, '*%s' % pargs.name) elif nkargs: arglist.insert(npargs, '*') if kargs: arglist.append('**%s' % kargs.name) return arglist def _fmt_ret_type(self, ret): if ret is PyrexTypes.py_object_type: return None else: return ret.declaration_code("", for_display=1) def _fmt_signature(self, cls_name, func_name, args, npargs=0, pargs=None, nkargs=0, kargs=None, return_type=None, hide_self=False): arglist = self._fmt_arglist(args, npargs, pargs, nkargs, kargs, hide_self=hide_self) arglist_doc = ', '.join(arglist) func_doc = '%s(%s)' % (func_name, arglist_doc) if cls_name: func_doc = '%s.%s' % (cls_name, func_doc) if return_type: ret_doc = self._fmt_ret_type(return_type) if ret_doc: func_doc = '%s -> %s' % (func_doc, ret_doc) return func_doc def _embed_signature(self, signature, node_doc): if 
node_doc: return "%s\n%s" % (signature, node_doc) else: return signature def __call__(self, node): if not Options.docstrings: return node else: return super(EmbedSignature, self).__call__(node) def visit_ClassDefNode(self, node): oldname = self.class_name oldclass = self.class_node self.class_node = node try: # PyClassDefNode self.class_name = node.name except AttributeError: # CClassDefNode self.class_name = node.class_name self.visitchildren(node) self.class_name = oldname self.class_node = oldclass return node def visit_LambdaNode(self, node): # lambda expressions so not have signature or inner functions return node def visit_DefNode(self, node): if not self.current_directives['embedsignature']: return node is_constructor = False hide_self = False if node.entry.is_special: is_constructor = self.class_node and node.name == '__init__' if not is_constructor: return node class_name, func_name = None, self.class_name hide_self = True else: class_name, func_name = self.class_name, node.name nkargs = getattr(node, 'num_kwonly_args', 0) npargs = len(node.args) - nkargs signature = self._fmt_signature( class_name, func_name, node.args, npargs, node.star_arg, nkargs, node.starstar_arg, return_type=None, hide_self=hide_self) if signature: if is_constructor: doc_holder = self.class_node.entry.type.scope else: doc_holder = node.entry if doc_holder.doc is not None: old_doc = doc_holder.doc elif not is_constructor and getattr(node, 'py_func', None) is not None: old_doc = node.py_func.entry.doc else: old_doc = None new_doc = self._embed_signature(signature, old_doc) doc_holder.doc = EncodedString(new_doc) if not is_constructor and getattr(node, 'py_func', None) is not None: node.py_func.entry.doc = EncodedString(new_doc) return node def visit_CFuncDefNode(self, node): if not self.current_directives['embedsignature']: return node if not node.overridable: # not cpdef FOO(...): return node signature = self._fmt_signature( self.class_name, node.declarator.base.name, node.declarator.args, return_type=node.return_type) if signature: if node.entry.doc is not None: old_doc = node.entry.doc elif getattr(node, 'py_func', None) is not None: old_doc = node.py_func.entry.doc else: old_doc = None new_doc = self._embed_signature(signature, old_doc) node.entry.doc = EncodedString(new_doc) if hasattr(node, 'py_func') and node.py_func is not None: node.py_func.entry.doc = EncodedString(new_doc) return node def visit_PropertyNode(self, node): if not self.current_directives['embedsignature']: return node entry = node.entry if entry.visibility == 'public': # property synthesised from a cdef public attribute type_name = entry.type.declaration_code("", for_display=1) if not entry.type.is_pyobject: type_name = "'%s'" % type_name elif entry.type.is_extension_type: type_name = entry.type.module_name + '.' + type_name signature = '%s: %s' % (entry.name, type_name) new_doc = self._embed_signature(signature, entry.doc) entry.doc = EncodedString(new_doc) return node Cython-0.26.1/Cython/Compiler/PyrexTypes.py0000664000175000017500000051235413150045407021365 0ustar stefanstefan00000000000000# # Cython/Python language types # from __future__ import absolute_import import collections import copy import re try: reduce except NameError: from functools import reduce from Cython.Utils import cached_function from .Code import UtilityCode, LazyUtilityCode, TempitaUtilityCode from . import StringEncoding from . import Naming from .Errors import error, warning class BaseType(object): # # Base class for all Cython types including pseudo-types. 
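    #
    # Illustrative behaviour of the helpers defined below, using the plain
    # C int type (c_int_type) that this module defines later on:
    #
    #   c_int_type.empty_declaration_code()   # -> "int"
    #   c_int_type.cast_code("x")             # -> "((int)x)"
    #   c_int_type.specialization_name()      # -> "int"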
# List of attribute names of any subtypes subtypes = [] _empty_declaration = None _specialization_name = None default_format_spec = None def can_coerce_to_pyobject(self, env): return False def can_coerce_from_pyobject(self, env): return False def can_coerce_to_pystring(self, env, format_spec=None): return False def convert_to_pystring(self, cvalue, code, format_spec=None): raise NotImplementedError("C types that support string formatting must override this method") def cast_code(self, expr_code): return "((%s)%s)" % (self.empty_declaration_code(), expr_code) def empty_declaration_code(self): if self._empty_declaration is None: self._empty_declaration = self.declaration_code('') return self._empty_declaration def specialization_name(self): if self._specialization_name is None: # This is not entirely robust. common_subs = (self.empty_declaration_code() .replace("unsigned ", "unsigned_") .replace("long long", "long_long") .replace(" ", "__")) self._specialization_name = re.sub( '[^a-zA-Z0-9_]', lambda x: '_%x_' % ord(x.group(0)), common_subs) return self._specialization_name def base_declaration_code(self, base_code, entity_code): if entity_code: return "%s %s" % (base_code, entity_code) else: return base_code def __deepcopy__(self, memo): """ Types never need to be copied, if we do copy, Unfortunate Things Will Happen! """ return self def get_fused_types(self, result=None, seen=None, subtypes=None): subtypes = subtypes or self.subtypes if not subtypes: return None if result is None: result = [] seen = set() for attr in subtypes: list_or_subtype = getattr(self, attr) if list_or_subtype: if isinstance(list_or_subtype, BaseType): list_or_subtype.get_fused_types(result, seen) else: for subtype in list_or_subtype: subtype.get_fused_types(result, seen) return result def specialize_fused(self, env): if env.fused_to_specific: return self.specialize(env.fused_to_specific) return self @property def is_fused(self): """ Whether this type or any of its subtypes is a fused type """ # Add this indirection for the is_fused property to allow overriding # get_fused_types in subclasses. return self.get_fused_types() def deduce_template_params(self, actual): """ Deduce any template params in this (argument) type given the actual argument type. http://en.cppreference.com/w/cpp/language/function_template#Template_argument_deduction """ return {} def __lt__(self, other): """ For sorting. The sorting order should correspond to the preference of conversion from Python types. Override to provide something sensible. This is only implemented so that python 3 doesn't trip """ return id(type(self)) < id(type(other)) def py_type_name(self): """ Return the name of the Python type that can coerce to this type. """ def typeof_name(self): """ Return the string with which fused python functions can be indexed. """ if self.is_builtin_type or self.py_type_name() == 'object': index_name = self.py_type_name() else: index_name = str(self) return index_name def check_for_null_code(self, cname): """ Return the code for a NULL-check in case an UnboundLocalError should be raised if an entry of this type is referenced before assignment. Returns None if no check should be performed. """ return None def invalid_value(self): """ Returns the most invalid value an object of this type can assume as a C expression string. Returns None if no such value exists. 
""" class PyrexType(BaseType): # # Base class for all Cython types # # is_pyobject boolean Is a Python object type # is_extension_type boolean Is a Python extension type # is_final_type boolean Is a final extension type # is_numeric boolean Is a C numeric type # is_int boolean Is a C integer type # is_float boolean Is a C floating point type # is_complex boolean Is a C complex type # is_void boolean Is the C void type # is_array boolean Is a C array type # is_ptr boolean Is a C pointer type # is_null_ptr boolean Is the type of NULL # is_reference boolean Is a C reference type # is_const boolean Is a C const type. # is_cfunction boolean Is a C function type # is_struct_or_union boolean Is a C struct or union type # is_struct boolean Is a C struct type # is_enum boolean Is a C enum type # is_typedef boolean Is a typedef type # is_string boolean Is a C char * type # is_pyunicode_ptr boolean Is a C PyUNICODE * type # is_cpp_string boolean Is a C++ std::string type # is_unicode_char boolean Is either Py_UCS4 or Py_UNICODE # is_returncode boolean Is used only to signal exceptions # is_error boolean Is the dummy error type # is_buffer boolean Is buffer access type # is_pythran_expr boolean Is Pythran expr # is_numpy_buffer boolean Is Numpy array buffer # has_attributes boolean Has C dot-selectable attributes # default_value string Initial value # entry Entry The Entry for this type # # declaration_code(entity_code, # for_display = 0, dll_linkage = None, pyrex = 0) # Returns a code fragment for the declaration of an entity # of this type, given a code fragment for the entity. # * If for_display, this is for reading by a human in an error # message; otherwise it must be valid C code. # * If dll_linkage is not None, it must be 'DL_EXPORT' or # 'DL_IMPORT', and will be added to the base type part of # the declaration. # * If pyrex = 1, this is for use in a 'cdef extern' # statement of a Cython include file. # # assignable_from(src_type) # Tests whether a variable of this type can be # assigned a value of type src_type. # # same_as(other_type) # Tests whether this type represents the same type # as other_type. # # as_argument_type(): # Coerces array and C function types into pointer type for use as # a formal argument type. # is_pyobject = 0 is_unspecified = 0 is_extension_type = 0 is_final_type = 0 is_builtin_type = 0 is_numeric = 0 is_int = 0 is_float = 0 is_complex = 0 is_void = 0 is_array = 0 is_ptr = 0 is_null_ptr = 0 is_reference = 0 is_const = 0 is_cfunction = 0 is_struct_or_union = 0 is_cpp_class = 0 is_cpp_string = 0 is_struct = 0 is_enum = 0 is_typedef = 0 is_string = 0 is_pyunicode_ptr = 0 is_unicode_char = 0 is_returncode = 0 is_error = 0 is_buffer = 0 is_ctuple = 0 is_memoryviewslice = 0 is_pythran_expr = 0 is_numpy_buffer = 0 has_attributes = 0 default_value = "" def resolve(self): # If a typedef, returns the base type. return self def specialize(self, values): # TODO(danilo): Override wherever it makes sense. return self def literal_code(self, value): # Returns a C code fragment representing a literal # value of this type. 
return str(value) def __str__(self): return self.declaration_code("", for_display = 1).strip() def same_as(self, other_type, **kwds): return self.same_as_resolved_type(other_type.resolve(), **kwds) def same_as_resolved_type(self, other_type): return self == other_type or other_type is error_type def subtype_of(self, other_type): return self.subtype_of_resolved_type(other_type.resolve()) def subtype_of_resolved_type(self, other_type): return self.same_as(other_type) def assignable_from(self, src_type): return self.assignable_from_resolved_type(src_type.resolve()) def assignable_from_resolved_type(self, src_type): return self.same_as(src_type) def as_argument_type(self): return self def is_complete(self): # A type is incomplete if it is an unsized array, # a struct whose attributes are not defined, etc. return 1 def is_simple_buffer_dtype(self): return (self.is_int or self.is_float or self.is_complex or self.is_pyobject or self.is_extension_type or self.is_ptr) def struct_nesting_depth(self): # Returns the number levels of nested structs. This is # used for constructing a stack for walking the run-time # type information of the struct. return 1 def global_init_code(self, entry, code): # abstract pass def needs_nonecheck(self): return 0 def public_decl(base_code, dll_linkage): if dll_linkage: return "%s(%s)" % (dll_linkage, base_code.replace(',', ' __PYX_COMMA ')) else: return base_code def create_typedef_type(name, base_type, cname, is_external=0, namespace=None): is_fused = base_type.is_fused if base_type.is_complex or is_fused: if is_external: if is_fused: msg = "Fused" else: msg = "Complex" raise ValueError("%s external typedefs not supported" % msg) return base_type else: return CTypedefType(name, base_type, cname, is_external, namespace) class CTypedefType(BaseType): # # Pseudo-type defined with a ctypedef statement in a # 'cdef extern from' block. # Delegates most attribute lookups to the base type. # (Anything not defined here or in the BaseType is delegated.) # # qualified_name string # typedef_name string # typedef_cname string # typedef_base_type PyrexType # typedef_is_external bool is_typedef = 1 typedef_is_external = 0 to_py_utility_code = None from_py_utility_code = None subtypes = ['typedef_base_type'] def __init__(self, name, base_type, cname, is_external=0, namespace=None): assert not base_type.is_complex self.typedef_name = name self.typedef_cname = cname self.typedef_base_type = base_type self.typedef_is_external = is_external self.typedef_namespace = namespace def invalid_value(self): return self.typedef_base_type.invalid_value() def resolve(self): return self.typedef_base_type.resolve() def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = self.typedef_name else: base_code = public_decl(self.typedef_cname, dll_linkage) if self.typedef_namespace is not None and not pyrex: base_code = "%s::%s" % (self.typedef_namespace.empty_declaration_code(), base_code) return self.base_declaration_code(base_code, entity_code) def as_argument_type(self): return self def cast_code(self, expr_code): # If self is really an array (rather than pointer), we can't cast. # For example, the gmp mpz_t. 
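        # Illustrative sketch with a hypothetical "ctypedef int arr3[3]":
        # "((arr3)x)" would not be valid C, so we cast to a pointer to the
        # element type instead and emit "((int *)x)".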
if self.typedef_base_type.is_array: base_type = self.typedef_base_type.base_type return CPtrType(base_type).cast_code(expr_code) else: return BaseType.cast_code(self, expr_code) def specialize(self, values): base_type = self.typedef_base_type.specialize(values) namespace = self.typedef_namespace.specialize(values) if self.typedef_namespace else None if base_type is self.typedef_base_type and namespace is self.typedef_namespace: return self else: return create_typedef_type(self.typedef_name, base_type, self.typedef_cname, 0, namespace) def __repr__(self): return "" % self.typedef_cname def __str__(self): return self.typedef_name def _create_utility_code(self, template_utility_code, template_function_name): type_name = type_identifier(self.typedef_cname) utility_code = template_utility_code.specialize( type = self.typedef_cname, TypeName = type_name) function_name = template_function_name % type_name return utility_code, function_name def create_to_py_utility_code(self, env): if self.typedef_is_external: if not self.to_py_utility_code: base_type = self.typedef_base_type if type(base_type) is CIntType: self.to_py_function = "__Pyx_PyInt_From_" + self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntToPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "TO_PY_FUNCTION": self.to_py_function})) return True elif base_type.is_float: pass # XXX implement! elif base_type.is_complex: pass # XXX implement! pass elif base_type.is_cpp_string: cname = "__pyx_convert_PyObject_string_to_py_%s" % type_identifier(self) context = { 'cname': cname, 'type': self.typedef_cname, } from .UtilityCode import CythonUtilityCode env.use_utility_code(CythonUtilityCode.load( "string.to_py", "CppConvert.pyx", context=context)) self.to_py_function = cname return True if self.to_py_utility_code: env.use_utility_code(self.to_py_utility_code) return True # delegation return self.typedef_base_type.create_to_py_utility_code(env) def create_from_py_utility_code(self, env): if self.typedef_is_external: if not self.from_py_utility_code: base_type = self.typedef_base_type if type(base_type) is CIntType: self.from_py_function = "__Pyx_PyInt_As_" + self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntFromPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "FROM_PY_FUNCTION": self.from_py_function})) return True elif base_type.is_float: pass # XXX implement! elif base_type.is_complex: pass # XXX implement! 
elif base_type.is_cpp_string: cname = '__pyx_convert_string_from_py_%s' % type_identifier(self) context = { 'cname': cname, 'type': self.typedef_cname, } from .UtilityCode import CythonUtilityCode env.use_utility_code(CythonUtilityCode.load( "string.from_py", "CppConvert.pyx", context=context)) self.from_py_function = cname return True if self.from_py_utility_code: env.use_utility_code(self.from_py_utility_code) return True # delegation return self.typedef_base_type.create_from_py_utility_code(env) def to_py_call_code(self, source_code, result_code, result_type, to_py_function=None): if to_py_function is None: to_py_function = self.to_py_function return self.typedef_base_type.to_py_call_code( source_code, result_code, result_type, to_py_function) def from_py_call_code(self, source_code, result_code, error_pos, code, from_py_function=None, error_condition=None): if from_py_function is None: from_py_function = self.from_py_function if error_condition is None: error_condition = self.error_condition(result_code) return self.typedef_base_type.from_py_call_code( source_code, result_code, error_pos, code, from_py_function, error_condition) def overflow_check_binop(self, binop, env, const_rhs=False): env.use_utility_code(UtilityCode.load("Common", "Overflow.c")) type = self.empty_declaration_code() name = self.specialization_name() if binop == "lshift": env.use_utility_code(TempitaUtilityCode.load_cached( "LeftShift", "Overflow.c", context={'TYPE': type, 'NAME': name, 'SIGNED': self.signed})) else: if const_rhs: binop += "_const" _load_overflow_base(env) env.use_utility_code(TempitaUtilityCode.load_cached( "SizeCheck", "Overflow.c", context={'TYPE': type, 'NAME': name})) env.use_utility_code(TempitaUtilityCode.load_cached( "Binop", "Overflow.c", context={'TYPE': type, 'NAME': name, 'BINOP': binop})) return "__Pyx_%s_%s_checking_overflow" % (binop, name) def error_condition(self, result_code): if self.typedef_is_external: if self.exception_value: condition = "(%s == %s)" % ( result_code, self.cast_code(self.exception_value)) if self.exception_check: condition += " && PyErr_Occurred()" return condition # delegation return self.typedef_base_type.error_condition(result_code) def __getattr__(self, name): return getattr(self.typedef_base_type, name) def py_type_name(self): return self.typedef_base_type.py_type_name() def can_coerce_to_pyobject(self, env): return self.typedef_base_type.can_coerce_to_pyobject(env) def can_coerce_from_pyobject(self, env): return self.typedef_base_type.can_coerce_from_pyobject(env) class MemoryViewSliceType(PyrexType): is_memoryviewslice = 1 has_attributes = 1 scope = None # These are special cased in Defnode from_py_function = None to_py_function = None exception_value = None exception_check = True subtypes = ['dtype'] def __init__(self, base_dtype, axes): """ MemoryViewSliceType(base, axes) Base is the C base type; axes is a list of (access, packing) strings, where access is one of 'full', 'direct' or 'ptr' and packing is one of 'contig', 'strided' or 'follow'. There is one (access, packing) tuple for each dimension. the access specifiers determine whether the array data contains pointers that need to be dereferenced along that axis when retrieving/setting: 'direct' -- No pointers stored in this dimension. 'ptr' -- Pointer stored in this dimension. 'full' -- Check along this dimension, don't assume either. the packing specifiers specify how the array elements are layed-out in memory. 'contig' -- The data are contiguous in memory along this dimension. 
At most one dimension may be specified as 'contig'. 'strided' -- The data aren't contiguous along this dimenison. 'follow' -- Used for C/Fortran contiguous arrays, a 'follow' dimension has its stride automatically computed from extents of the other dimensions to ensure C or Fortran memory layout. C-contiguous memory has 'direct' as the access spec, 'contig' as the *last* axis' packing spec and 'follow' for all other packing specs. Fortran-contiguous memory has 'direct' as the access spec, 'contig' as the *first* axis' packing spec and 'follow' for all other packing specs. """ from . import Buffer, MemoryView self.dtype = base_dtype self.axes = axes self.ndim = len(axes) self.flags = MemoryView.get_buf_flags(self.axes) self.is_c_contig, self.is_f_contig = MemoryView.is_cf_contig(self.axes) assert not (self.is_c_contig and self.is_f_contig) self.mode = MemoryView.get_mode(axes) self.writable_needed = False if not self.dtype.is_fused: self.dtype_name = Buffer.mangle_dtype_name(self.dtype) def __hash__(self): return hash(self.__class__) ^ hash(self.dtype) ^ hash(tuple(self.axes)) def __eq__(self, other): if isinstance(other, BaseType): return self.same_as_resolved_type(other) else: return False def same_as_resolved_type(self, other_type): return ((other_type.is_memoryviewslice and self.dtype.same_as(other_type.dtype) and self.axes == other_type.axes) or other_type is error_type) def needs_nonecheck(self): return True def is_complete(self): # incomplete since the underlying struct doesn't have a cython.memoryview object. return 0 def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): # XXX: we put these guards in for now... assert not pyrex assert not dll_linkage from . import MemoryView return self.base_declaration_code( MemoryView.memviewslice_cname, entity_code) def attributes_known(self): if self.scope is None: from . import Symtab self.scope = scope = Symtab.CClassScope( 'mvs_class_'+self.specialization_suffix(), None, visibility='extern') scope.parent_type = self scope.directives = {} scope.declare_var('_data', c_char_ptr_type, None, cname='data', is_cdef=1) return True def declare_attribute(self, attribute, env, pos): from . 
import MemoryView, Options scope = self.scope if attribute == 'shape': scope.declare_var('shape', c_array_type(c_py_ssize_t_type, Options.buffer_max_dims), pos, cname='shape', is_cdef=1) elif attribute == 'strides': scope.declare_var('strides', c_array_type(c_py_ssize_t_type, Options.buffer_max_dims), pos, cname='strides', is_cdef=1) elif attribute == 'suboffsets': scope.declare_var('suboffsets', c_array_type(c_py_ssize_t_type, Options.buffer_max_dims), pos, cname='suboffsets', is_cdef=1) elif attribute in ("copy", "copy_fortran"): ndim = len(self.axes) follow_dim = [('direct', 'follow')] contig_dim = [('direct', 'contig')] to_axes_c = follow_dim * (ndim - 1) + contig_dim to_axes_f = contig_dim + follow_dim * (ndim -1) to_memview_c = MemoryViewSliceType(self.dtype, to_axes_c) to_memview_f = MemoryViewSliceType(self.dtype, to_axes_f) for to_memview, cython_name in [(to_memview_c, "copy"), (to_memview_f, "copy_fortran")]: copy_func_type = CFuncType( to_memview, [CFuncTypeArg("memviewslice", self, None)]) copy_cname = MemoryView.copy_c_or_fortran_cname(to_memview) entry = scope.declare_cfunction( cython_name, copy_func_type, pos=pos, defining=1, cname=copy_cname) utility = MemoryView.get_copy_new_utility(pos, self, to_memview) env.use_utility_code(utility) MemoryView.use_cython_array_utility_code(env) elif attribute in ("is_c_contig", "is_f_contig"): # is_c_contig and is_f_contig functions for (c_or_f, cython_name) in (('c', 'is_c_contig'), ('f', 'is_f_contig')): is_contig_name = \ MemoryView.get_is_contig_func_name(c_or_f, self.ndim) cfunctype = CFuncType( return_type=c_bint_type, args=[CFuncTypeArg("memviewslice", self, None)], exception_value="-1", ) entry = scope.declare_cfunction(cython_name, cfunctype, pos=pos, defining=1, cname=is_contig_name) entry.utility_code_definition = MemoryView.get_is_contig_utility( attribute == 'is_c_contig', self.ndim) return True def get_entry(self, node, cname=None, type=None): from . import MemoryView, Symtab if cname is None: assert node.is_simple() or node.is_temp or node.is_elemental cname = node.result() if type is None: type = node.type entry = Symtab.Entry(cname, cname, type, node.pos) return MemoryView.MemoryViewSliceBufferEntry(entry) def conforms_to(self, dst, broadcast=False, copying=False): """ Returns True if src conforms to dst, False otherwise. If conformable, the types are the same, the ndims are equal, and each axis spec is conformable. Any packing/access spec is conformable to itself. 'direct' and 'ptr' are conformable to 'full'. 'contig' and 'follow' are conformable to 'strided'. Any other combo is not conformable. """ from . import MemoryView src = self if src.dtype != dst.dtype: return False if src.ndim != dst.ndim: if broadcast: src, dst = MemoryView.broadcast_types(src, dst) else: return False for src_spec, dst_spec in zip(src.axes, dst.axes): src_access, src_packing = src_spec dst_access, dst_packing = dst_spec if src_access != dst_access and dst_access != 'full': return False if src_packing != dst_packing and dst_packing != 'strided' and not copying: return False return True def valid_dtype(self, dtype, i=0): """ Return whether type dtype can be used as the base type of a memoryview slice. 
We support structs, numeric types and objects """ if dtype.is_complex and dtype.real_type.is_int: return False if dtype.is_struct and dtype.kind == 'struct': for member in dtype.scope.var_entries: if not self.valid_dtype(member.type): return False return True return ( dtype.is_error or # Pointers are not valid (yet) # (dtype.is_ptr and valid_memslice_dtype(dtype.base_type)) or (dtype.is_array and i < 8 and self.valid_dtype(dtype.base_type, i + 1)) or dtype.is_numeric or dtype.is_pyobject or dtype.is_fused or # accept this as it will be replaced by specializations later (dtype.is_typedef and self.valid_dtype(dtype.typedef_base_type)) ) def validate_memslice_dtype(self, pos): if not self.valid_dtype(self.dtype): error(pos, "Invalid base type for memoryview slice: %s" % self.dtype) def assert_direct_dims(self, pos): for access, packing in self.axes: if access != 'direct': error(pos, "All dimensions must be direct") return False return True def transpose(self, pos): if not self.assert_direct_dims(pos): return error_type return MemoryViewSliceType(self.dtype, self.axes[::-1]) def specialization_name(self): return '%s_%s' % ( super(MemoryViewSliceType,self).specialization_name(), self.specialization_suffix()) def specialization_suffix(self): return "%s_%s" % (self.axes_to_name(), self.dtype_name) def can_coerce_to_pyobject(self, env): return True def can_coerce_from_pyobject(self, env): return True def check_for_null_code(self, cname): return cname + '.memview' def create_from_py_utility_code(self, env): from . import MemoryView, Buffer # We don't have 'code', so use a LazyUtilityCode with a callback. def lazy_utility_callback(code): context['dtype_typeinfo'] = Buffer.get_type_information_cname(code, self.dtype) return TempitaUtilityCode.load( "ObjectToMemviewSlice", "MemoryView_C.c", context=context) env.use_utility_code(Buffer.acquire_utility_code) env.use_utility_code(MemoryView.memviewslice_init_code) env.use_utility_code(LazyUtilityCode(lazy_utility_callback)) if self.is_c_contig: c_or_f_flag = "__Pyx_IS_C_CONTIG" elif self.is_f_contig: c_or_f_flag = "__Pyx_IS_F_CONTIG" else: c_or_f_flag = "0" suffix = self.specialization_suffix() funcname = "__Pyx_PyObject_to_MemoryviewSlice_" + suffix context = dict( MemoryView.context, buf_flag = self.flags, ndim = self.ndim, axes_specs = ', '.join(self.axes_to_code()), dtype_typedecl = self.dtype.empty_declaration_code(), struct_nesting_depth = self.dtype.struct_nesting_depth(), c_or_f_flag = c_or_f_flag, funcname = funcname, ) self.from_py_function = funcname return True def from_py_call_code(self, source_code, result_code, error_pos, code, from_py_function=None, error_condition=None): return '%s = %s(%s); %s' % ( result_code, from_py_function or self.from_py_function, source_code, code.error_goto_if(error_condition or self.error_condition(result_code), error_pos)) def create_to_py_utility_code(self, env): self._dtype_to_py_func, self._dtype_from_py_func = self.dtype_object_conversion_funcs(env) return True def to_py_call_code(self, source_code, result_code, result_type, to_py_function=None): assert self._dtype_to_py_func assert self._dtype_from_py_func to_py_func = "(PyObject *(*)(char *)) " + self._dtype_to_py_func from_py_func = "(int (*)(char *, PyObject *)) " + self._dtype_from_py_func tup = (result_code, source_code, self.ndim, to_py_func, from_py_func, self.dtype.is_pyobject) return "%s = __pyx_memoryview_fromslice(%s, %s, %s, %s, %d);" % tup def dtype_object_conversion_funcs(self, env): get_function = "__pyx_memview_get_%s" % self.dtype_name 
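# Illustration (a hedged sketch, assuming Buffer.mangle_dtype_name() yields
# "double" for a C double dtype): the element conversion helpers assembled in
# dtype_object_conversion_funcs() here would then be
#     get_function = "__pyx_memview_get_double"
#     set_function = "__pyx_memview_set_double"
# instantiated from the "MemviewDtypeToObject" utility in MemoryView_C.c.
# Dtypes with no to/from-Python conversion fall back to ("NULL", "NULL"), and
# object dtypes use the "MemviewObjectToObject" utility instead.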
set_function = "__pyx_memview_set_%s" % self.dtype_name context = dict( get_function = get_function, set_function = set_function, ) if self.dtype.is_pyobject: utility_name = "MemviewObjectToObject" else: to_py = self.dtype.create_to_py_utility_code(env) from_py = self.dtype.create_from_py_utility_code(env) if not (to_py or from_py): return "NULL", "NULL" if not self.dtype.to_py_function: get_function = "NULL" if not self.dtype.from_py_function: set_function = "NULL" utility_name = "MemviewDtypeToObject" error_condition = (self.dtype.error_condition('value') or 'PyErr_Occurred()') context.update( to_py_function = self.dtype.to_py_function, from_py_function = self.dtype.from_py_function, dtype = self.dtype.empty_declaration_code(), error_condition = error_condition, ) utility = TempitaUtilityCode.load_cached( utility_name, "MemoryView_C.c", context=context) env.use_utility_code(utility) return get_function, set_function def axes_to_code(self): """Return a list of code constants for each axis""" from . import MemoryView d = MemoryView._spec_to_const return ["(%s | %s)" % (d[a], d[p]) for a, p in self.axes] def axes_to_name(self): """Return an abbreviated name for our axes""" from . import MemoryView d = MemoryView._spec_to_abbrev return "".join(["%s%s" % (d[a], d[p]) for a, p in self.axes]) def error_condition(self, result_code): return "!%s.memview" % result_code def __str__(self): from . import MemoryView axes_code_list = [] for idx, (access, packing) in enumerate(self.axes): flag = MemoryView.get_memoryview_flag(access, packing) if flag == "strided": axes_code_list.append(":") else: if flag == 'contiguous': have_follow = [p for a, p in self.axes[idx - 1:idx + 2] if p == 'follow'] if have_follow or self.ndim == 1: flag = '1' axes_code_list.append("::" + flag) if self.dtype.is_pyobject: dtype_name = self.dtype.name else: dtype_name = self.dtype return "%s[%s]" % (dtype_name, ", ".join(axes_code_list)) def specialize(self, values): """This does not validate the base type!!""" dtype = self.dtype.specialize(values) if dtype is not self.dtype: return MemoryViewSliceType(dtype, self.axes) return self def cast_code(self, expr_code): return expr_code class BufferType(BaseType): # # Delegates most attribute lookups to the base type. # (Anything not defined here or in the BaseType is delegated.) # # dtype PyrexType # ndim int # mode str # negative_indices bool # cast bool # is_buffer bool # writable bool is_buffer = 1 writable = True subtypes = ['dtype'] def __init__(self, base, dtype, ndim, mode, negative_indices, cast): self.base = base self.dtype = dtype self.ndim = ndim self.buffer_ptr_type = CPtrType(dtype) self.mode = mode self.negative_indices = negative_indices self.cast = cast self.is_numpy_buffer = self.base.name == "ndarray" def can_coerce_to_pyobject(self,env): return True def can_coerce_from_pyobject(self,env): return True def as_argument_type(self): return self def specialize(self, values): dtype = self.dtype.specialize(values) if dtype is not self.dtype: return BufferType(self.base, dtype, self.ndim, self.mode, self.negative_indices, self.cast) return self def get_entry(self, node): from . 
import Buffer assert node.is_name return Buffer.BufferEntry(node.entry) def __getattr__(self, name): return getattr(self.base, name) def __repr__(self): return "" % self.base def __str__(self): # avoid ', ', as fused functions split the signature string on ', ' cast_str = '' if self.cast: cast_str = ',cast=True' return "%s[%s,ndim=%d%s]" % (self.base, self.dtype, self.ndim, cast_str) def assignable_from(self, other_type): if other_type.is_buffer: return (self.same_as(other_type, compare_base=False) and self.base.assignable_from(other_type.base)) return self.base.assignable_from(other_type) def same_as(self, other_type, compare_base=True): if not other_type.is_buffer: return other_type.same_as(self.base) return (self.dtype.same_as(other_type.dtype) and self.ndim == other_type.ndim and self.mode == other_type.mode and self.cast == other_type.cast and (not compare_base or self.base.same_as(other_type.base))) class PyObjectType(PyrexType): # # Base class for all Python object types (reference-counted). # # buffer_defaults dict or None Default options for bu name = "object" is_pyobject = 1 default_value = "0" buffer_defaults = None is_extern = False is_subclassed = False is_gc_simple = False def __str__(self): return "Python object" def __repr__(self): return "" def can_coerce_to_pyobject(self, env): return True def can_coerce_from_pyobject(self, env): return True def default_coerced_ctype(self): """The default C type that this Python type coerces to, or None.""" return None def assignable_from(self, src_type): # except for pointers, conversion will be attempted return not src_type.is_ptr or src_type.is_string or src_type.is_pyunicode_ptr def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = "object" else: base_code = public_decl("PyObject", dll_linkage) entity_code = "*%s" % entity_code return self.base_declaration_code(base_code, entity_code) def as_pyobject(self, cname): if (not self.is_complete()) or self.is_extension_type: return "(PyObject *)" + cname else: return cname def py_type_name(self): return "object" def __lt__(self, other): """ Make sure we sort highest, as instance checking on py_type_name ('object') is always true """ return False def global_init_code(self, entry, code): code.put_init_var_to_py_none(entry, nanny=False) def check_for_null_code(self, cname): return cname builtin_types_that_cannot_create_refcycles = set([ 'bool', 'int', 'long', 'float', 'complex', 'bytearray', 'bytes', 'unicode', 'str', 'basestring' ]) class BuiltinObjectType(PyObjectType): # objstruct_cname string Name of PyObject struct is_builtin_type = 1 has_attributes = 1 base_type = None module_name = '__builtin__' require_exact = 1 # fields that let it look like an extension type vtabslot_cname = None vtabstruct_cname = None vtabptr_cname = None typedef_flag = True is_external = True decl_type = 'PyObject' def __init__(self, name, cname, objstruct_cname=None): self.name = name self.cname = cname self.typeptr_cname = "(&%s)" % cname self.objstruct_cname = objstruct_cname self.is_gc_simple = name in builtin_types_that_cannot_create_refcycles if name == 'type': # Special case the type type, as many C API calls (and other # libraries) actually expect a PyTypeObject* for type arguments. 
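# Illustration (sketch, assuming the builtin 'type' is registered with
# objstruct_cname "PyTypeObject"): for that one builtin, decl_type below becomes
# "PyTypeObject", so declaration_code() emits "PyTypeObject *" and as_pyobject()
# adds an explicit "(PyObject *)" cast; for ordinary builtins decl_type stays
# "PyObject" and no cast is needed.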
self.decl_type = objstruct_cname if name == 'Exception': self.require_exact = 0 def set_scope(self, scope): self.scope = scope if scope: scope.parent_type = self def __str__(self): return "%s object" % self.name def __repr__(self): return "<%s>"% self.cname def default_coerced_ctype(self): if self.name in ('bytes', 'bytearray'): return c_char_ptr_type elif self.name == 'bool': return c_bint_type elif self.name == 'float': return c_double_type return None def assignable_from(self, src_type): if isinstance(src_type, BuiltinObjectType): if self.name == 'basestring': return src_type.name in ('str', 'unicode', 'basestring') else: return src_type.name == self.name elif src_type.is_extension_type: # FIXME: This is an ugly special case that we currently # keep supporting. It allows users to specify builtin # types as external extension types, while keeping them # compatible with the real builtin types. We already # generate a warning for it. Big TODO: remove! return (src_type.module_name == '__builtin__' and src_type.name == self.name) else: return True def typeobj_is_available(self): return True def attributes_known(self): return True def subtype_of(self, type): return type.is_pyobject and type.assignable_from(self) def type_check_function(self, exact=True): type_name = self.name if type_name == 'str': type_check = 'PyString_Check' elif type_name == 'basestring': type_check = '__Pyx_PyBaseString_Check' elif type_name == 'Exception': type_check = '__Pyx_PyException_Check' elif type_name == 'bytearray': type_check = 'PyByteArray_Check' elif type_name == 'frozenset': type_check = 'PyFrozenSet_Check' else: type_check = 'Py%s_Check' % type_name.capitalize() if exact and type_name not in ('bool', 'slice', 'Exception'): type_check += 'Exact' return type_check def isinstance_code(self, arg): return '%s(%s)' % (self.type_check_function(exact=False), arg) def type_test_code(self, arg, notnone=False, exact=True): type_check = self.type_check_function(exact=exact) check = 'likely(%s(%s))' % (type_check, arg) if not notnone: check += '||((%s) == Py_None)' % arg if self.name == 'basestring': name = '(PY_MAJOR_VERSION < 3 ? "basestring" : "str")' space_for_name = 16 else: name = '"%s"' % self.name # avoid wasting too much space but limit number of different format strings space_for_name = (len(self.name) // 16 + 1) * 16 error = '(PyErr_Format(PyExc_TypeError, "Expected %%.%ds, got %%.200s", %s, Py_TYPE(%s)->tp_name), 0)' % ( space_for_name, name, arg) return check + '||' + error def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = self.name else: base_code = public_decl(self.decl_type, dll_linkage) entity_code = "*%s" % entity_code return self.base_declaration_code(base_code, entity_code) def as_pyobject(self, cname): if self.decl_type == 'PyObject': return cname else: return "(PyObject *)" + cname def cast_code(self, expr_code, to_object_struct = False): return "((%s*)%s)" % ( to_object_struct and self.objstruct_cname or self.decl_type, # self.objstruct_cname may be None expr_code) def py_type_name(self): return self.name class PyExtensionType(PyObjectType): # # A Python extension type. 
# # name string # scope CClassScope Attribute namespace # visibility string # typedef_flag boolean # base_type PyExtensionType or None # module_name string or None Qualified name of defining module # objstruct_cname string Name of PyObject struct # objtypedef_cname string Name of PyObject struct typedef # typeobj_cname string or None C code fragment referring to type object # typeptr_cname string or None Name of pointer to external type object # vtabslot_cname string Name of C method table member # vtabstruct_cname string Name of C method table struct # vtabptr_cname string Name of pointer to C method table # vtable_cname string Name of C method table definition # defered_declarations [thunk] Used to declare class hierarchies in order is_extension_type = 1 has_attributes = 1 objtypedef_cname = None def __init__(self, name, typedef_flag, base_type, is_external=0): self.name = name self.scope = None self.typedef_flag = typedef_flag if base_type is not None: base_type.is_subclassed = True self.base_type = base_type self.module_name = None self.objstruct_cname = None self.typeobj_cname = None self.typeptr_cname = None self.vtabslot_cname = None self.vtabstruct_cname = None self.vtabptr_cname = None self.vtable_cname = None self.is_external = is_external self.defered_declarations = [] def set_scope(self, scope): self.scope = scope if scope: scope.parent_type = self def needs_nonecheck(self): return True def subtype_of_resolved_type(self, other_type): if other_type.is_extension_type or other_type.is_builtin_type: return self is other_type or ( self.base_type and self.base_type.subtype_of(other_type)) else: return other_type is py_object_type def typeobj_is_available(self): # Do we have a pointer to the type object? return self.typeptr_cname def typeobj_is_imported(self): # If we don't know the C name of the type object but we do # know which module it's defined in, it will be imported. return self.typeobj_cname is None and self.module_name is not None def assignable_from(self, src_type): if self == src_type: return True if isinstance(src_type, PyExtensionType): if src_type.base_type is not None: return self.assignable_from(src_type.base_type) if isinstance(src_type, BuiltinObjectType): # FIXME: This is an ugly special case that we currently # keep supporting. It allows users to specify builtin # types as external extension types, while keeping them # compatible with the real builtin types. We already # generate a warning for it. Big TODO: remove! 
return (self.module_name == '__builtin__' and self.name == src_type.name) return False def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0, deref = 0): if pyrex or for_display: base_code = self.name else: if self.typedef_flag: objstruct = self.objstruct_cname else: objstruct = "struct %s" % self.objstruct_cname base_code = public_decl(objstruct, dll_linkage) if deref: assert not entity_code else: entity_code = "*%s" % entity_code return self.base_declaration_code(base_code, entity_code) def type_test_code(self, py_arg, notnone=False): none_check = "((%s) == Py_None)" % py_arg type_check = "likely(__Pyx_TypeTest(%s, %s))" % ( py_arg, self.typeptr_cname) if notnone: return type_check else: return "likely(%s || %s)" % (none_check, type_check) def attributes_known(self): return self.scope is not None def __str__(self): return self.name def __repr__(self): return "" % (self.scope.class_name, ("", " typedef")[self.typedef_flag]) def py_type_name(self): if not self.module_name: return self.name return "__import__(%r, None, None, ['']).%s" % (self.module_name, self.name) class CType(PyrexType): # # Base class for all C types (non-reference-counted). # # to_py_function string C function for converting to Python object # from_py_function string C function for constructing from Python object # to_py_function = None from_py_function = None exception_value = None exception_check = 1 def create_to_py_utility_code(self, env): return self.to_py_function is not None def create_from_py_utility_code(self, env): return self.from_py_function is not None def can_coerce_to_pyobject(self, env): return self.create_to_py_utility_code(env) def can_coerce_from_pyobject(self, env): return self.create_from_py_utility_code(env) def error_condition(self, result_code): conds = [] if self.is_string or self.is_pyunicode_ptr: conds.append("(!%s)" % result_code) elif self.exception_value is not None: conds.append("(%s == (%s)%s)" % (result_code, self.sign_and_name(), self.exception_value)) if self.exception_check: conds.append("PyErr_Occurred()") if len(conds) > 0: return " && ".join(conds) else: return 0 def to_py_call_code(self, source_code, result_code, result_type, to_py_function=None): func = self.to_py_function if to_py_function is None else to_py_function assert func if self.is_string or self.is_cpp_string: if result_type.is_builtin_type: result_type_name = result_type.name if result_type_name in ('bytes', 'str', 'unicode'): func = func.replace("Object", result_type_name.title(), 1) elif result_type_name == 'bytearray': func = func.replace("Object", "ByteArray", 1) return '%s = %s(%s)' % ( result_code, func, source_code or 'NULL') def from_py_call_code(self, source_code, result_code, error_pos, code, from_py_function=None, error_condition=None): return '%s = %s(%s); %s' % ( result_code, from_py_function or self.from_py_function, source_code, code.error_goto_if(error_condition or self.error_condition(result_code), error_pos)) class PythranExpr(CType): # Pythran object of a given type to_py_function = "to_python_from_expr" is_pythran_expr = True writable = True has_attributes = 1 def __init__(self, pythran_type, org_buffer=None): self.org_buffer = org_buffer self.pythran_type = pythran_type self.name = self.pythran_type self.cname = self.pythran_type self.from_py_function = "from_python<%s>" % (self.pythran_type) self.scope = None def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): assert pyrex == 0 return "%s %s" % (self.name, entity_code) def 
attributes_known(self): if self.scope is None: from . import Symtab self.scope = scope = Symtab.CClassScope( '', None, visibility="extern") scope.parent_type = self scope.directives = {} # rank 3 == long scope.declare_var("shape", CPtrType(CIntType(3)), None, cname="_shape", is_cdef=True) scope.declare_var("ndim", CIntType(3), None, cname="value", is_cdef=True) return True class CConstType(BaseType): is_const = 1 def __init__(self, const_base_type): self.const_base_type = const_base_type if const_base_type.has_attributes and const_base_type.scope is not None: from . import Symtab self.scope = Symtab.CConstScope(const_base_type.scope) def __repr__(self): return "" % repr(self.const_base_type) def __str__(self): return self.declaration_code("", for_display=1) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if for_display or pyrex: return "const " + self.const_base_type.declaration_code(entity_code, for_display, dll_linkage, pyrex) else: return self.const_base_type.declaration_code("const %s" % entity_code, for_display, dll_linkage, pyrex) def specialize(self, values): base_type = self.const_base_type.specialize(values) if base_type == self.const_base_type: return self else: return CConstType(base_type) def deduce_template_params(self, actual): return self.const_base_type.deduce_template_params(actual) def can_coerce_to_pyobject(self, env): return self.const_base_type.can_coerce_to_pyobject(env) def can_coerce_from_pyobject(self, env): return self.const_base_type.can_coerce_from_pyobject(env) def create_to_py_utility_code(self, env): if self.const_base_type.create_to_py_utility_code(env): self.to_py_function = self.const_base_type.to_py_function return True def __getattr__(self, name): return getattr(self.const_base_type, name) class FusedType(CType): """ Represents a Fused Type. All it needs to do is keep track of the types it aggregates, as it will be replaced with its specific version wherever needed. See http://wiki.cython.org/enhancements/fusedtypes types [PyrexType] is the list of types to be fused name str the name of the ctypedef """ is_fused = 1 exception_check = 0 def __init__(self, types, name=None): # Use list rather than set to preserve order (list should be short). flattened_types = [] for t in types: if t.is_fused: # recursively merge in subtypes for subtype in t.types: if subtype not in flattened_types: flattened_types.append(subtype) elif t not in flattened_types: flattened_types.append(t) self.types = flattened_types self.name = name def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: return self.name raise Exception("This may never happen, please report a bug") def __repr__(self): return 'FusedType(name=%r)' % self.name def specialize(self, values): return values[self] def get_fused_types(self, result=None, seen=None): if result is None: return [self] if self not in seen: result.append(self) seen.add(self) class CVoidType(CType): # # C "void" type # is_void = 1 to_py_function = "__Pyx_void_to_None" def __repr__(self): return "" def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = "void" else: base_code = public_decl("void", dll_linkage) return self.base_declaration_code(base_code, entity_code) def is_complete(self): return 0 class InvisibleVoidType(CVoidType): # # For use with C++ constructors and destructors return types. # Acts like void, but does not print out a declaration. 
# def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = "[void]" else: base_code = public_decl("", dll_linkage) return self.base_declaration_code(base_code, entity_code) class CNumericType(CType): # # Base class for all C numeric types. # # rank integer Relative size # signed integer 0 = unsigned, 1 = unspecified, 2 = explicitly signed # is_numeric = 1 default_value = "0" has_attributes = True scope = None sign_words = ("unsigned ", "", "signed ") def __init__(self, rank, signed = 1): self.rank = rank if rank > 0 and signed == SIGNED: # Signed is meaningless for anything but char, and complicates # type promotion. signed = 1 self.signed = signed def sign_and_name(self): s = self.sign_words[self.signed] n = rank_to_type_name[self.rank] return s + n def __repr__(self): return "" % self.sign_and_name() def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): type_name = self.sign_and_name() if pyrex or for_display: base_code = type_name.replace('PY_LONG_LONG', 'long long') else: base_code = public_decl(type_name, dll_linkage) return self.base_declaration_code(base_code, entity_code) def attributes_known(self): if self.scope is None: from . import Symtab self.scope = scope = Symtab.CClassScope( '', None, visibility="extern") scope.parent_type = self scope.directives = {} scope.declare_cfunction( "conjugate", CFuncType(self, [CFuncTypeArg("self", self, None)], nogil=True), pos=None, defining=1, cname=" ") return True def __lt__(self, other): """Sort based on rank, preferring signed over unsigned""" if other.is_numeric: return self.rank > other.rank and self.signed >= other.signed # Prefer numeric types over others return True def py_type_name(self): if self.rank <= 4: return "(int, long)" return "float" class ForbidUseClass: def __repr__(self): raise RuntimeError() def __str__(self): raise RuntimeError() ForbidUse = ForbidUseClass() class CIntType(CNumericType): is_int = 1 typedef_flag = 0 to_py_function = None from_py_function = None to_pyunicode_utility = None default_format_spec = 'd' exception_value = -1 def can_coerce_to_pyobject(self, env): return True def can_coerce_from_pyobject(self, env): return True @staticmethod def _parse_format(format_spec): padding = ' ' if not format_spec: return ('d', 0, padding) format_type = format_spec[-1] if format_type in ('o', 'd', 'x', 'X'): prefix = format_spec[:-1] elif format_type.isdigit(): format_type = 'd' prefix = format_spec else: return (None, 0, padding) if not prefix: return (format_type, 0, padding) if prefix[0] == '-': prefix = prefix[1:] if prefix and prefix[0] == '0': padding = '0' prefix = prefix.lstrip('0') if prefix.isdigit(): return (format_type, int(prefix), padding) return (None, 0, padding) def can_coerce_to_pystring(self, env, format_spec=None): format_type, width, padding = self._parse_format(format_spec) return format_type is not None and width <= 2**30 def convert_to_pystring(self, cvalue, code, format_spec=None): if self.to_pyunicode_utility is None: utility_code_name = "__Pyx_PyUnicode_From_" + self.specialization_name() to_pyunicode_utility = TempitaUtilityCode.load_cached( "CIntToPyUnicode", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "TO_PY_FUNCTION": utility_code_name}) self.to_pyunicode_utility = (utility_code_name, to_pyunicode_utility) else: utility_code_name, to_pyunicode_utility = self.to_pyunicode_utility code.globalstate.use_utility_code(to_pyunicode_utility) format_type, width, 
padding_char = self._parse_format(format_spec) return "%s(%s, %d, '%s', '%s')" % (utility_code_name, cvalue, width, padding_char, format_type) def create_to_py_utility_code(self, env): if type(self).to_py_function is None: self.to_py_function = "__Pyx_PyInt_From_" + self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntToPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "TO_PY_FUNCTION": self.to_py_function})) return True def create_from_py_utility_code(self, env): if type(self).from_py_function is None: self.from_py_function = "__Pyx_PyInt_As_" + self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntFromPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "FROM_PY_FUNCTION": self.from_py_function})) return True def get_to_py_type_conversion(self): if self.rank < list(rank_to_type_name).index('int'): # This assumes sizeof(short) < sizeof(int) return "PyInt_FromLong" else: # Py{Int|Long}_From[Unsigned]Long[Long] Prefix = "Int" SignWord = "" TypeName = "Long" if not self.signed: Prefix = "Long" SignWord = "Unsigned" if self.rank >= list(rank_to_type_name).index('PY_LONG_LONG'): Prefix = "Long" TypeName = "LongLong" return "Py%s_From%s%s" % (Prefix, SignWord, TypeName) def assignable_from_resolved_type(self, src_type): return src_type.is_int or src_type.is_enum or src_type is error_type def invalid_value(self): if rank_to_type_name[int(self.rank)] == 'char': return "'?'" else: # We do not really know the size of the type, so return # a 32-bit literal and rely on casting to final type. It will # be negative for signed ints, which is good. return "0xbad0bad0" def overflow_check_binop(self, binop, env, const_rhs=False): env.use_utility_code(UtilityCode.load("Common", "Overflow.c")) type = self.empty_declaration_code() name = self.specialization_name() if binop == "lshift": env.use_utility_code(TempitaUtilityCode.load_cached( "LeftShift", "Overflow.c", context={'TYPE': type, 'NAME': name, 'SIGNED': self.signed})) else: if const_rhs: binop += "_const" if type in ('int', 'long', 'long long'): env.use_utility_code(TempitaUtilityCode.load_cached( "BaseCaseSigned", "Overflow.c", context={'INT': type, 'NAME': name})) elif type in ('unsigned int', 'unsigned long', 'unsigned long long'): env.use_utility_code(TempitaUtilityCode.load_cached( "BaseCaseUnsigned", "Overflow.c", context={'UINT': type, 'NAME': name})) elif self.rank <= 1: # sizeof(short) < sizeof(int) return "__Pyx_%s_%s_no_overflow" % (binop, name) else: _load_overflow_base(env) env.use_utility_code(TempitaUtilityCode.load_cached( "SizeCheck", "Overflow.c", context={'TYPE': type, 'NAME': name})) env.use_utility_code(TempitaUtilityCode.load_cached( "Binop", "Overflow.c", context={'TYPE': type, 'NAME': name, 'BINOP': binop})) return "__Pyx_%s_%s_checking_overflow" % (binop, name) def _load_overflow_base(env): env.use_utility_code(UtilityCode.load("Common", "Overflow.c")) for type in ('int', 'long', 'long long'): env.use_utility_code(TempitaUtilityCode.load_cached( "BaseCaseSigned", "Overflow.c", context={'INT': type, 'NAME': type.replace(' ', '_')})) for type in ('unsigned int', 'unsigned long', 'unsigned long long'): env.use_utility_code(TempitaUtilityCode.load_cached( "BaseCaseUnsigned", "Overflow.c", context={'UINT': type, 'NAME': type.replace(' ', '_')})) class CAnonEnumType(CIntType): is_enum = 1 def sign_and_name(self): return 'int' class CReturnCodeType(CIntType): to_py_function = "__Pyx_Owned_Py_None" is_returncode = True 
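# Illustration (sketch) of CIntType._parse_format() defined above; it is a
# staticmethod, so these calls stand on their own:
#     >>> CIntType._parse_format("05d")
#     ('d', 5, '0')
#     >>> CIntType._parse_format("x")
#     ('x', 0, ' ')
#     >>> CIntType._parse_format("")
#     ('d', 0, ' ')
#     >>> CIntType._parse_format(">10")    # unsupported spec -> rejected
#     (None, 0, ' ')
# The (format_type, width, padding) triple feeds convert_to_pystring(), which
# emits a call roughly like "__Pyx_PyUnicode_From_<specialization>(value, 5, '0', 'd')".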
exception_check = False default_format_spec = '' def can_coerce_to_pystring(self, env, format_spec=None): return not format_spec def convert_to_pystring(self, cvalue, code, format_spec=None): return "__Pyx_NewRef(%s)" % code.globalstate.get_py_string_const(StringEncoding.EncodedString("None")).cname class CBIntType(CIntType): to_py_function = "__Pyx_PyBool_FromLong" from_py_function = "__Pyx_PyObject_IsTrue" exception_check = 1 # for C++ bool default_format_spec = '' def can_coerce_to_pystring(self, env, format_spec=None): return not format_spec or super(CBIntType, self).can_coerce_to_pystring(env, format_spec) def convert_to_pystring(self, cvalue, code, format_spec=None): if format_spec: return super(CBIntType, self).convert_to_pystring(cvalue, code, format_spec) # NOTE: no caching here as the string constant cnames depend on the current module utility_code_name = "__Pyx_PyUnicode_FromBInt_" + self.specialization_name() to_pyunicode_utility = TempitaUtilityCode.load_cached( "CBIntToPyUnicode", "TypeConversion.c", context={ "TRUE_CONST": code.globalstate.get_py_string_const(StringEncoding.EncodedString("True")).cname, "FALSE_CONST": code.globalstate.get_py_string_const(StringEncoding.EncodedString("False")).cname, "TO_PY_FUNCTION": utility_code_name, }) code.globalstate.use_utility_code(to_pyunicode_utility) return "%s(%s)" % (utility_code_name, cvalue) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if for_display: base_code = 'bool' elif pyrex: base_code = 'bint' else: base_code = public_decl('int', dll_linkage) return self.base_declaration_code(base_code, entity_code) def __repr__(self): return "" def __str__(self): return 'bint' def py_type_name(self): return "bool" class CPyUCS4IntType(CIntType): # Py_UCS4 is_unicode_char = True # Py_UCS4 coerces from and to single character unicode strings (or # at most two characters on 16bit Unicode builds), but we also # allow Python integers as input. The value range for Py_UCS4 # is 0..1114111, which is checked when converting from an integer # value. to_py_function = "PyUnicode_FromOrdinal" from_py_function = "__Pyx_PyObject_AsPy_UCS4" def can_coerce_to_pystring(self, env, format_spec=None): return False # does the right thing anyway def create_from_py_utility_code(self, env): env.use_utility_code(UtilityCode.load_cached("ObjectAsUCS4", "TypeConversion.c")) return True def sign_and_name(self): return "Py_UCS4" class CPyUnicodeIntType(CIntType): # Py_UNICODE is_unicode_char = True # Py_UNICODE coerces from and to single character unicode strings, # but we also allow Python integers as input. The value range for # Py_UNICODE is 0..1114111, which is checked when converting from # an integer value. 
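# Illustration (hypothetical usage sketch): both Py_UCS4 above and Py_UNICODE
# below accept either a one-character unicode string or a Python integer; the
# 0..1114111 range is enforced by the __Pyx_PyObject_AsPy_UCS4 /
# __Pyx_PyObject_AsPy_UNICODE helpers when converting from an integer, e.g.
#     cdef Py_UCS4 c1 = u"x"       # accepted
#     cdef Py_UCS4 c2 = 0x20AC     # accepted, U+20AC
#     cdef Py_UCS4 c3 = 2000000    # should be rejected by the conversion helper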
to_py_function = "PyUnicode_FromOrdinal" from_py_function = "__Pyx_PyObject_AsPy_UNICODE" def can_coerce_to_pystring(self, env, format_spec=None): return False # does the right thing anyway def create_from_py_utility_code(self, env): env.use_utility_code(UtilityCode.load_cached("ObjectAsPyUnicode", "TypeConversion.c")) return True def sign_and_name(self): return "Py_UNICODE" class CPyHashTType(CIntType): to_py_function = "__Pyx_PyInt_FromHash_t" from_py_function = "__Pyx_PyInt_AsHash_t" def sign_and_name(self): return "Py_hash_t" class CPySSizeTType(CIntType): to_py_function = "PyInt_FromSsize_t" from_py_function = "__Pyx_PyIndex_AsSsize_t" def sign_and_name(self): return "Py_ssize_t" class CSSizeTType(CIntType): to_py_function = "PyInt_FromSsize_t" from_py_function = "PyInt_AsSsize_t" def sign_and_name(self): return "Py_ssize_t" class CSizeTType(CIntType): to_py_function = "__Pyx_PyInt_FromSize_t" def sign_and_name(self): return "size_t" class CPtrdiffTType(CIntType): def sign_and_name(self): return "ptrdiff_t" class CFloatType(CNumericType): is_float = 1 to_py_function = "PyFloat_FromDouble" from_py_function = "__pyx_PyFloat_AsDouble" exception_value = -1 def __init__(self, rank, math_h_modifier = ''): CNumericType.__init__(self, rank, 1) self.math_h_modifier = math_h_modifier if rank == RANK_FLOAT: self.from_py_function = "__pyx_PyFloat_AsFloat" def assignable_from_resolved_type(self, src_type): return (src_type.is_numeric and not src_type.is_complex) or src_type is error_type def invalid_value(self): return Naming.PYX_NAN class CComplexType(CNumericType): is_complex = 1 to_py_function = "__pyx_PyComplex_FromComplex" has_attributes = 1 scope = None def __init__(self, real_type): while real_type.is_typedef and not real_type.typedef_is_external: real_type = real_type.typedef_base_type self.funcsuffix = "_%s" % real_type.specialization_name() if real_type.is_float: self.math_h_modifier = real_type.math_h_modifier else: self.math_h_modifier = "_UNUSED" self.real_type = real_type CNumericType.__init__(self, real_type.rank + 0.5, real_type.signed) self.binops = {} self.from_parts = "%s_from_parts" % self.specialization_name() self.default_value = "%s(0, 0)" % self.from_parts def __eq__(self, other): if isinstance(self, CComplexType) and isinstance(other, CComplexType): return self.real_type == other.real_type else: return False def __ne__(self, other): if isinstance(self, CComplexType) and isinstance(other, CComplexType): return self.real_type != other.real_type else: return True def __lt__(self, other): if isinstance(self, CComplexType) and isinstance(other, CComplexType): return self.real_type < other.real_type else: # this is arbitrary, but it makes sure we always have # *some* kind of order return False def __hash__(self): return ~hash(self.real_type) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: real_code = self.real_type.declaration_code("", for_display, dll_linkage, pyrex) base_code = "%s complex" % real_code else: base_code = public_decl(self.sign_and_name(), dll_linkage) return self.base_declaration_code(base_code, entity_code) def sign_and_name(self): real_type_name = self.real_type.specialization_name() real_type_name = real_type_name.replace('long__double','long_double') real_type_name = real_type_name.replace('PY_LONG_LONG','long_long') return Naming.type_prefix + real_type_name + "_complex" def assignable_from(self, src_type): # Temporary hack/feature disabling, see #441 if (not src_type.is_complex and 
src_type.is_numeric and src_type.is_typedef and src_type.typedef_is_external): return False elif src_type.is_pyobject: return True else: return super(CComplexType, self).assignable_from(src_type) def assignable_from_resolved_type(self, src_type): return (src_type.is_complex and self.real_type.assignable_from_resolved_type(src_type.real_type) or src_type.is_numeric and self.real_type.assignable_from_resolved_type(src_type) or src_type is error_type) def attributes_known(self): if self.scope is None: from . import Symtab self.scope = scope = Symtab.CClassScope( '', None, visibility="extern") scope.parent_type = self scope.directives = {} scope.declare_var("real", self.real_type, None, cname="real", is_cdef=True) scope.declare_var("imag", self.real_type, None, cname="imag", is_cdef=True) scope.declare_cfunction( "conjugate", CFuncType(self, [CFuncTypeArg("self", self, None)], nogil=True), pos=None, defining=1, cname="__Pyx_c_conj%s" % self.funcsuffix) return True def _utility_code_context(self): return { 'type': self.empty_declaration_code(), 'type_name': self.specialization_name(), 'real_type': self.real_type.empty_declaration_code(), 'func_suffix': self.funcsuffix, 'm': self.math_h_modifier, 'is_float': int(self.real_type.is_float) } def create_declaration_utility_code(self, env): # This must always be run, because a single CComplexType instance can be shared # across multiple compilations (the one created in the module scope) env.use_utility_code(UtilityCode.load_cached('Header', 'Complex.c')) env.use_utility_code(UtilityCode.load_cached('RealImag', 'Complex.c')) env.use_utility_code(TempitaUtilityCode.load_cached( 'Declarations', 'Complex.c', self._utility_code_context())) env.use_utility_code(TempitaUtilityCode.load_cached( 'Arithmetic', 'Complex.c', self._utility_code_context())) return True def can_coerce_to_pyobject(self, env): return True def can_coerce_from_pyobject(self, env): return True def create_to_py_utility_code(self, env): env.use_utility_code(UtilityCode.load_cached('ToPy', 'Complex.c')) return True def create_from_py_utility_code(self, env): env.use_utility_code(TempitaUtilityCode.load_cached( 'FromPy', 'Complex.c', self._utility_code_context())) self.from_py_function = "__Pyx_PyComplex_As_" + self.specialization_name() return True def lookup_op(self, nargs, op): try: return self.binops[nargs, op] except KeyError: pass try: op_name = complex_ops[nargs, op] self.binops[nargs, op] = func_name = "__Pyx_c_%s%s" % (op_name, self.funcsuffix) return func_name except KeyError: return None def unary_op(self, op): return self.lookup_op(1, op) def binary_op(self, op): return self.lookup_op(2, op) def py_type_name(self): return "complex" def cast_code(self, expr_code): return expr_code complex_ops = { (1, '-'): 'neg', (1, 'zero'): 'is_zero', (2, '+'): 'sum', (2, '-'): 'diff', (2, '*'): 'prod', (2, '/'): 'quot', (2, '**'): 'pow', (2, '=='): 'eq', } class CPointerBaseType(CType): # common base type for pointer/array types # # base_type CType Reference type subtypes = ['base_type'] def __init__(self, base_type): self.base_type = base_type if base_type.is_const: base_type = base_type.const_base_type for char_type in (c_char_type, c_uchar_type, c_schar_type): if base_type.same_as(char_type): self.is_string = 1 break else: if base_type.same_as(c_py_unicode_type): self.is_pyunicode_ptr = 1 if self.is_string and not base_type.is_error: if base_type.signed == 2: self.to_py_function = "__Pyx_PyObject_FromCString" if self.is_ptr: self.from_py_function = "__Pyx_PyObject_As%sSString" elif 
base_type.signed: self.to_py_function = "__Pyx_PyObject_FromString" if self.is_ptr: self.from_py_function = "__Pyx_PyObject_As%sString" else: self.to_py_function = "__Pyx_PyObject_FromCString" if self.is_ptr: self.from_py_function = "__Pyx_PyObject_As%sUString" if self.is_ptr: self.from_py_function %= '' if self.base_type.is_const else 'Writable' self.exception_value = "NULL" elif self.is_pyunicode_ptr and not base_type.is_error: self.to_py_function = "__Pyx_PyUnicode_FromUnicode" if self.is_ptr: self.from_py_function = "__Pyx_PyUnicode_AsUnicode" self.exception_value = "NULL" def py_type_name(self): if self.is_string: return "bytes" elif self.is_pyunicode_ptr: return "unicode" else: return super(CPointerBaseType, self).py_type_name() def literal_code(self, value): if self.is_string: assert isinstance(value, str) return '"%s"' % StringEncoding.escape_byte_string(value) class CArrayType(CPointerBaseType): # base_type CType Element type # size integer or None Number of elements is_array = 1 to_tuple_function = None def __init__(self, base_type, size): super(CArrayType, self).__init__(base_type) self.size = size def __eq__(self, other): if isinstance(other, CType) and other.is_array and self.size == other.size: return self.base_type.same_as(other.base_type) return False def __hash__(self): return hash(self.base_type) + 28 # arbitrarily chosen offset def __repr__(self): return "" % (self.size, repr(self.base_type)) def same_as_resolved_type(self, other_type): return ((other_type.is_array and self.base_type.same_as(other_type.base_type)) or other_type is error_type) def assignable_from_resolved_type(self, src_type): # C arrays are assigned by value, either Python containers or C arrays/pointers if src_type.is_pyobject: return True if src_type.is_ptr or src_type.is_array: return self.base_type.assignable_from(src_type.base_type) return False def element_ptr_type(self): return c_ptr_type(self.base_type) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if self.size is not None: dimension_code = self.size else: dimension_code = "" if entity_code.startswith("*"): entity_code = "(%s)" % entity_code return self.base_type.declaration_code( "%s[%s]" % (entity_code, dimension_code), for_display, dll_linkage, pyrex) def as_argument_type(self): return c_ptr_type(self.base_type) def is_complete(self): return self.size is not None def specialize(self, values): base_type = self.base_type.specialize(values) if base_type == self.base_type: return self else: return CArrayType(base_type, self.size) def deduce_template_params(self, actual): if isinstance(actual, CArrayType): return self.base_type.deduce_template_params(actual.base_type) else: return {} def can_coerce_to_pyobject(self, env): return self.base_type.can_coerce_to_pyobject(env) def can_coerce_from_pyobject(self, env): return self.base_type.can_coerce_from_pyobject(env) def create_to_py_utility_code(self, env): if self.to_py_function is not None: return self.to_py_function if not self.base_type.create_to_py_utility_code(env): return False safe_typename = self.base_type.specialization_name() to_py_function = "__Pyx_carray_to_py_%s" % safe_typename to_tuple_function = "__Pyx_carray_to_tuple_%s" % safe_typename from .UtilityCode import CythonUtilityCode context = { 'cname': to_py_function, 'to_tuple_cname': to_tuple_function, 'base_type': self.base_type, } env.use_utility_code(CythonUtilityCode.load( "carray.to_py", "CConvert.pyx", outer_module_scope=env.global_scope(), # need access to types declared in module 
context=context, compiler_directives=dict(env.global_scope().directives))) self.to_tuple_function = to_tuple_function self.to_py_function = to_py_function return True def to_py_call_code(self, source_code, result_code, result_type, to_py_function=None): func = self.to_py_function if to_py_function is None else to_py_function if self.is_string or self.is_pyunicode_ptr: return '%s = %s(%s)' % ( result_code, func, source_code) target_is_tuple = result_type.is_builtin_type and result_type.name == 'tuple' return '%s = %s(%s, %s)' % ( result_code, self.to_tuple_function if target_is_tuple else func, source_code, self.size) def create_from_py_utility_code(self, env): if self.from_py_function is not None: return self.from_py_function if not self.base_type.create_from_py_utility_code(env): return False from_py_function = "__Pyx_carray_from_py_%s" % self.base_type.specialization_name() from .UtilityCode import CythonUtilityCode context = { 'cname': from_py_function, 'base_type': self.base_type, } env.use_utility_code(CythonUtilityCode.load( "carray.from_py", "CConvert.pyx", outer_module_scope=env.global_scope(), # need access to types declared in module context=context, compiler_directives=dict(env.global_scope().directives))) self.from_py_function = from_py_function return True def from_py_call_code(self, source_code, result_code, error_pos, code, from_py_function=None, error_condition=None): call_code = "%s(%s, %s, %s)" % ( from_py_function or self.from_py_function, source_code, result_code, self.size) return code.error_goto_if_neg(call_code, error_pos) class CPtrType(CPointerBaseType): # base_type CType Reference type is_ptr = 1 default_value = "0" def __hash__(self): return hash(self.base_type) + 27 # arbitrarily chosen offset def __eq__(self, other): if isinstance(other, CType) and other.is_ptr: return self.base_type.same_as(other.base_type) return False def __ne__(self, other): return not (self == other) def __repr__(self): return "" % repr(self.base_type) def same_as_resolved_type(self, other_type): return ((other_type.is_ptr and self.base_type.same_as(other_type.base_type)) or other_type is error_type) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): #print "CPtrType.declaration_code: pointer to", self.base_type ### return self.base_type.declaration_code( "*%s" % entity_code, for_display, dll_linkage, pyrex) def assignable_from_resolved_type(self, other_type): if other_type is error_type: return 1 if other_type.is_null_ptr: return 1 if self.base_type.is_const: self = CPtrType(self.base_type.const_base_type) if self.base_type.is_cfunction: if other_type.is_ptr: other_type = other_type.base_type.resolve() if other_type.is_cfunction: return self.base_type.pointer_assignable_from_resolved_type(other_type) else: return 0 if (self.base_type.is_cpp_class and other_type.is_ptr and other_type.base_type.is_cpp_class and other_type.base_type.is_subclass(self.base_type)): return 1 if other_type.is_array or other_type.is_ptr: return self.base_type.is_void or self.base_type.same_as(other_type.base_type) return 0 def specialize(self, values): base_type = self.base_type.specialize(values) if base_type == self.base_type: return self else: return CPtrType(base_type) def deduce_template_params(self, actual): if isinstance(actual, CPtrType): return self.base_type.deduce_template_params(actual.base_type) else: return {} def invalid_value(self): return "1" def find_cpp_operation_type(self, operator, operand_type=None): if self.base_type.is_cpp_class: return 
self.base_type.find_cpp_operation_type(operator, operand_type) return None class CNullPtrType(CPtrType): is_null_ptr = 1 class CReferenceType(BaseType): is_reference = 1 is_fake_reference = 0 def __init__(self, base_type): self.ref_base_type = base_type def __repr__(self): return "" % repr(self.ref_base_type) def __str__(self): return "%s &" % self.ref_base_type def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): #print "CReferenceType.declaration_code: pointer to", self.base_type ### return self.ref_base_type.declaration_code( "&%s" % entity_code, for_display, dll_linkage, pyrex) def specialize(self, values): base_type = self.ref_base_type.specialize(values) if base_type == self.ref_base_type: return self else: return type(self)(base_type) def deduce_template_params(self, actual): return self.ref_base_type.deduce_template_params(actual) def __getattr__(self, name): return getattr(self.ref_base_type, name) class CFakeReferenceType(CReferenceType): is_fake_reference = 1 def __repr__(self): return "" % repr(self.ref_base_type) def __str__(self): return "%s [&]" % self.ref_base_type def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): #print "CReferenceType.declaration_code: pointer to", self.base_type ### return "__Pyx_FakeReference<%s> %s" % (self.ref_base_type.empty_declaration_code(), entity_code) class CFuncType(CType): # return_type CType # args [CFuncTypeArg] # has_varargs boolean # exception_value string # exception_check boolean True if PyErr_Occurred check needed # calling_convention string Function calling convention # nogil boolean Can be called without gil # with_gil boolean Acquire gil around function body # templates [string] or None # cached_specialized_types [CFuncType] cached specialized versions of the CFuncType if defined in a pxd # from_fused boolean Indicates whether this is a specialized # C function # is_strict_signature boolean function refuses to accept coerced arguments # (used for optimisation overrides) # is_const_method boolean # is_static_method boolean is_cfunction = 1 original_sig = None cached_specialized_types = None from_fused = False is_const_method = False subtypes = ['return_type', 'args'] def __init__(self, return_type, args, has_varargs = 0, exception_value = None, exception_check = 0, calling_convention = "", nogil = 0, with_gil = 0, is_overridable = 0, optional_arg_count = 0, is_const_method = False, is_static_method=False, templates = None, is_strict_signature = False): self.return_type = return_type self.args = args self.has_varargs = has_varargs self.optional_arg_count = optional_arg_count self.exception_value = exception_value self.exception_check = exception_check self.calling_convention = calling_convention self.nogil = nogil self.with_gil = with_gil self.is_overridable = is_overridable self.is_const_method = is_const_method self.is_static_method = is_static_method self.templates = templates self.is_strict_signature = is_strict_signature def __repr__(self): arg_reprs = list(map(repr, self.args)) if self.has_varargs: arg_reprs.append("...") if self.exception_value: except_clause = " %r" % self.exception_value else: except_clause = "" if self.exception_check: except_clause += "?" 
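# Illustration (sketch): for a function declared in Cython as
#     cdef int f(double x) except? -1
# exception_value is -1 and exception_check is true, so the except clause built
# here renders as " -1?", and declaration_code() further below spells it out as
# " except? -1" when regenerating a Cython-level signature.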
return "" % ( repr(self.return_type), self.calling_convention_prefix(), ",".join(arg_reprs), except_clause) def with_with_gil(self, with_gil): if with_gil == self.with_gil: return self else: return CFuncType( self.return_type, self.args, self.has_varargs, self.exception_value, self.exception_check, self.calling_convention, self.nogil, with_gil, self.is_overridable, self.optional_arg_count, self.is_const_method, self.is_static_method, self.templates, self.is_strict_signature) def calling_convention_prefix(self): cc = self.calling_convention if cc: return cc + " " else: return "" def as_argument_type(self): return c_ptr_type(self) def same_c_signature_as(self, other_type, as_cmethod = 0): return self.same_c_signature_as_resolved_type( other_type.resolve(), as_cmethod) def same_c_signature_as_resolved_type(self, other_type, as_cmethod = 0, as_pxd_definition = 0): #print "CFuncType.same_c_signature_as_resolved_type:", \ # self, other_type, "as_cmethod =", as_cmethod ### if other_type is error_type: return 1 if not other_type.is_cfunction: return 0 if self.is_overridable != other_type.is_overridable: return 0 nargs = len(self.args) if nargs != len(other_type.args): return 0 # When comparing C method signatures, the first argument # is exempt from compatibility checking (the proper check # is performed elsewhere). for i in range(as_cmethod, nargs): if not self.args[i].type.same_as(other_type.args[i].type): return 0 if self.has_varargs != other_type.has_varargs: return 0 if self.optional_arg_count != other_type.optional_arg_count: return 0 if as_pxd_definition: # A narrowing of the return type declared in the pxd is allowed. if not self.return_type.subtype_of_resolved_type(other_type.return_type): return 0 else: if not self.return_type.same_as(other_type.return_type): return 0 if not self.same_calling_convention_as(other_type): return 0 if self.exception_check != other_type.exception_check: return 0 if not self._same_exception_value(other_type.exception_value): return 0 return 1 def _same_exception_value(self, other_exc_value): if self.exception_value == other_exc_value: return 1 if self.exception_check != '+': return 0 if not self.exception_value or not other_exc_value: return 0 if self.exception_value.type != other_exc_value.type: return 0 if self.exception_value.entry and other_exc_value.entry: if self.exception_value.entry.cname != other_exc_value.entry.cname: return 0 if self.exception_value.name != other_exc_value.name: return 0 return 1 def compatible_signature_with(self, other_type, as_cmethod = 0): return self.compatible_signature_with_resolved_type(other_type.resolve(), as_cmethod) def compatible_signature_with_resolved_type(self, other_type, as_cmethod): #print "CFuncType.same_c_signature_as_resolved_type:", \ # self, other_type, "as_cmethod =", as_cmethod ### if other_type is error_type: return 1 if not other_type.is_cfunction: return 0 if not self.is_overridable and other_type.is_overridable: return 0 nargs = len(self.args) if nargs - self.optional_arg_count != len(other_type.args) - other_type.optional_arg_count: return 0 if self.optional_arg_count < other_type.optional_arg_count: return 0 # When comparing C method signatures, the first argument # is exempt from compatibility checking (the proper check # is performed elsewhere). 
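# Illustration (sketch) of the checks in compatible_signature_with_resolved_type()
# around this point, with self as the candidate signature and other_type as the
# one it must satisfy:
#     list f(int x, int y=0)   is compatible with   object f(int x)
#         (same required argument types, extra optional args on self are
#          allowed, and the return type may be narrowed via
#          subtype_of_resolved_type),
# while changing nogil, changing the calling convention, or dropping a declared
# exception check makes the signatures incompatible.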
for i in range(as_cmethod, len(other_type.args)): if not self.args[i].type.same_as( other_type.args[i].type): return 0 if self.has_varargs != other_type.has_varargs: return 0 if not self.return_type.subtype_of_resolved_type(other_type.return_type): return 0 if not self.same_calling_convention_as(other_type): return 0 if self.nogil != other_type.nogil: return 0 if not self.exception_check and other_type.exception_check: # a redundant exception check doesn't make functions incompatible, but a missing one does return 0 if not self._same_exception_value(other_type.exception_value): return 0 self.original_sig = other_type.original_sig or other_type return 1 def narrower_c_signature_than(self, other_type, as_cmethod = 0): return self.narrower_c_signature_than_resolved_type(other_type.resolve(), as_cmethod) def narrower_c_signature_than_resolved_type(self, other_type, as_cmethod): if other_type is error_type: return 1 if not other_type.is_cfunction: return 0 nargs = len(self.args) if nargs != len(other_type.args): return 0 for i in range(as_cmethod, nargs): if not self.args[i].type.subtype_of_resolved_type(other_type.args[i].type): return 0 else: self.args[i].needs_type_test = other_type.args[i].needs_type_test \ or not self.args[i].type.same_as(other_type.args[i].type) if self.has_varargs != other_type.has_varargs: return 0 if self.optional_arg_count != other_type.optional_arg_count: return 0 if not self.return_type.subtype_of_resolved_type(other_type.return_type): return 0 if not self.exception_check and other_type.exception_check: # a redundant exception check doesn't make functions incompatible, but a missing one does return 0 if not self._same_exception_value(other_type.exception_value): return 0 return 1 def same_calling_convention_as(self, other): ## XXX Under discussion ... ## callspec_words = ("__stdcall", "__cdecl", "__fastcall") ## cs1 = self.calling_convention ## cs2 = other.calling_convention ## if (cs1 in callspec_words or ## cs2 in callspec_words): ## return cs1 == cs2 ## else: ## return True sc1 = self.calling_convention == '__stdcall' sc2 = other.calling_convention == '__stdcall' return sc1 == sc2 def same_as_resolved_type(self, other_type, as_cmethod = 0): return self.same_c_signature_as_resolved_type(other_type, as_cmethod) \ and self.nogil == other_type.nogil def pointer_assignable_from_resolved_type(self, other_type): return self.same_c_signature_as_resolved_type(other_type) \ and not (self.nogil and not other_type.nogil) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0, with_calling_convention = 1): arg_decl_list = [] for arg in self.args[:len(self.args)-self.optional_arg_count]: arg_decl_list.append( arg.type.declaration_code("", for_display, pyrex = pyrex)) if self.is_overridable: arg_decl_list.append("int %s" % Naming.skip_dispatch_cname) if self.optional_arg_count: arg_decl_list.append(self.op_arg_struct.declaration_code(Naming.optional_args_cname)) if self.has_varargs: arg_decl_list.append("...") arg_decl_code = ", ".join(arg_decl_list) if not arg_decl_code and not pyrex: arg_decl_code = "void" trailer = "" if (pyrex or for_display) and not self.return_type.is_pyobject: if self.exception_value and self.exception_check: trailer = " except? 
%s" % self.exception_value elif self.exception_value: trailer = " except %s" % self.exception_value elif self.exception_check == '+': trailer = " except +" elif self.exception_check and for_display: # not spelled out by default, unless for human eyes trailer = " except *" if self.nogil: trailer += " nogil" if not with_calling_convention: cc = '' else: cc = self.calling_convention_prefix() if (not entity_code and cc) or entity_code.startswith("*"): entity_code = "(%s%s)" % (cc, entity_code) cc = "" if self.is_const_method: trailer += " const" return self.return_type.declaration_code( "%s%s(%s)%s" % (cc, entity_code, arg_decl_code, trailer), for_display, dll_linkage, pyrex) def function_header_code(self, func_name, arg_code): if self.is_const_method: trailer = " const" else: trailer = "" return "%s%s(%s)%s" % (self.calling_convention_prefix(), func_name, arg_code, trailer) def signature_string(self): s = self.empty_declaration_code() return s def signature_cast_string(self): s = self.declaration_code("(*)", with_calling_convention=False) return '(%s)' % s def specialize(self, values): result = CFuncType(self.return_type.specialize(values), [arg.specialize(values) for arg in self.args], has_varargs = self.has_varargs, exception_value = self.exception_value, exception_check = self.exception_check, calling_convention = self.calling_convention, nogil = self.nogil, with_gil = self.with_gil, is_overridable = self.is_overridable, optional_arg_count = self.optional_arg_count, is_const_method = self.is_const_method, is_static_method = self.is_static_method, templates = self.templates) result.from_fused = self.is_fused return result def opt_arg_cname(self, arg_name): return self.op_arg_struct.base_type.scope.lookup(arg_name).cname # Methods that deal with Fused Types # All but map_with_specific_entries should be called only on functions # with fused types (and not on their corresponding specific versions). def get_all_specialized_permutations(self, fused_types=None): """ Permute all the types. For every specific instance of a fused type, we want all other specific instances of all other fused types. It returns an iterable of two-tuples of the cname that should prefix the cname of the function, and a dict mapping any fused types to their respective specific types. """ assert self.is_fused if fused_types is None: fused_types = self.get_fused_types() return get_all_specialized_permutations(fused_types) def get_all_specialized_function_types(self): """ Get all the specific function types of this one. 
""" assert self.is_fused if self.entry.fused_cfunction: return [n.type for n in self.entry.fused_cfunction.nodes] elif self.cached_specialized_types is not None: return self.cached_specialized_types cfunc_entries = self.entry.scope.cfunc_entries cfunc_entries.remove(self.entry) result = [] permutations = self.get_all_specialized_permutations() for cname, fused_to_specific in permutations: new_func_type = self.entry.type.specialize(fused_to_specific) if self.optional_arg_count: # Remember, this method is set by CFuncDeclaratorNode self.declare_opt_arg_struct(new_func_type, cname) new_entry = copy.deepcopy(self.entry) new_func_type.specialize_entry(new_entry, cname) new_entry.type = new_func_type new_func_type.entry = new_entry result.append(new_func_type) cfunc_entries.append(new_entry) self.cached_specialized_types = result return result def get_fused_types(self, result=None, seen=None, subtypes=None): """Return fused types in the order they appear as parameter types""" return super(CFuncType, self).get_fused_types(result, seen, subtypes=['args']) def specialize_entry(self, entry, cname): assert not self.is_fused specialize_entry(entry, cname) def can_coerce_to_pyobject(self, env): # duplicating the decisions from create_to_py_utility_code() here avoids writing out unused code if self.has_varargs or self.optional_arg_count: return False if self.to_py_function is not None: return self.to_py_function for arg in self.args: if not arg.type.is_pyobject and not arg.type.can_coerce_to_pyobject(env): return False if not self.return_type.is_pyobject and not self.return_type.can_coerce_to_pyobject(env): return False return True def create_to_py_utility_code(self, env): # FIXME: it seems we're trying to coerce in more cases than we should if self.to_py_function is not None: return self.to_py_function if not self.can_coerce_to_pyobject(env): return False from .UtilityCode import CythonUtilityCode safe_typename = re.sub('[^a-zA-Z0-9]', '__', self.declaration_code("", pyrex=1)) to_py_function = "__Pyx_CFunc_%s_to_py" % safe_typename for arg in self.args: if not arg.type.is_pyobject and not arg.type.create_from_py_utility_code(env): return False if not self.return_type.is_pyobject and not self.return_type.create_to_py_utility_code(env): return False def declared_type(ctype): type_displayname = str(ctype.declaration_code("", for_display=True)) if ctype.is_pyobject: arg_ctype = type_name = type_displayname if ctype.is_builtin_type: arg_ctype = ctype.name elif not ctype.is_extension_type: type_name = 'object' type_displayname = None else: type_displayname = repr(type_displayname) elif ctype is c_bint_type: type_name = arg_ctype = 'bint' else: type_name = arg_ctype = type_displayname if ctype is c_double_type: type_displayname = 'float' else: type_displayname = repr(type_displayname) return type_name, arg_ctype, type_displayname class Arg(object): def __init__(self, arg_name, arg_type): self.name = arg_name self.type = arg_type self.type_cname, self.ctype, self.type_displayname = declared_type(arg_type) if self.return_type.is_void: except_clause = 'except *' elif self.return_type.is_pyobject: except_clause = '' elif self.exception_value: except_clause = ('except? 
%s' if self.exception_check else 'except %s') % self.exception_value else: except_clause = 'except *' context = { 'cname': to_py_function, 'args': [Arg(arg.name or 'arg%s' % ix, arg.type) for ix, arg in enumerate(self.args)], 'return_type': Arg('return', self.return_type), 'except_clause': except_clause, } # FIXME: directives come from first defining environment and do not adapt for reuse env.use_utility_code(CythonUtilityCode.load( "cfunc.to_py", "CConvert.pyx", outer_module_scope=env.global_scope(), # need access to types declared in module context=context, compiler_directives=dict(env.global_scope().directives))) self.to_py_function = to_py_function return True def specialize_entry(entry, cname): """ Specialize an entry of a copied fused function or method """ entry.is_fused_specialized = True entry.name = get_fused_cname(cname, entry.name) if entry.is_cmethod: entry.cname = entry.name if entry.is_inherited: entry.cname = StringEncoding.EncodedString( "%s.%s" % (Naming.obj_base_cname, entry.cname)) else: entry.cname = get_fused_cname(cname, entry.cname) if entry.func_cname: entry.func_cname = get_fused_cname(cname, entry.func_cname) def get_fused_cname(fused_cname, orig_cname): """ Given the fused cname id and an original cname, return a specialized cname """ assert fused_cname and orig_cname return StringEncoding.EncodedString('%s%s%s' % (Naming.fused_func_prefix, fused_cname, orig_cname)) def unique(somelist): seen = set() result = [] for obj in somelist: if obj not in seen: result.append(obj) seen.add(obj) return result def get_all_specialized_permutations(fused_types): return _get_all_specialized_permutations(unique(fused_types)) def _get_all_specialized_permutations(fused_types, id="", f2s=()): fused_type, = fused_types[0].get_fused_types() result = [] for newid, specific_type in enumerate(fused_type.types): # f2s = dict(f2s, **{ fused_type: specific_type }) f2s = dict(f2s) f2s.update({ fused_type: specific_type }) if id: cname = '%s_%s' % (id, newid) else: cname = str(newid) if len(fused_types) > 1: result.extend(_get_all_specialized_permutations( fused_types[1:], cname, f2s)) else: result.append((cname, f2s)) return result def specialization_signature_string(fused_compound_type, fused_to_specific): """ Return the signature for a specialization of a fused type. e.g. floating[:] -> 'float' or 'double' cdef fused ft: float[:] double[:] ft -> 'float[:]' or 'double[:]' integral func(floating) -> 'int (*func)(float)' or ... """ fused_types = fused_compound_type.get_fused_types() if len(fused_types) == 1: fused_type = fused_types[0] else: fused_type = fused_compound_type return fused_type.specialize(fused_to_specific).typeof_name() def get_specialized_types(type): """ Return a list of specialized types in their declared order. """ assert type.is_fused if isinstance(type, FusedType): result = list(type.types) for specialized_type in result: specialized_type.specialization_string = specialized_type.typeof_name() else: result = [] for cname, f2s in get_all_specialized_permutations(type.get_fused_types()): specialized_type = type.specialize(f2s) specialized_type.specialization_string = ( specialization_signature_string(type, f2s)) result.append(specialized_type) return result class CFuncTypeArg(BaseType): # name string # cname string # type PyrexType # pos source file position # FIXME: is this the right setup? should None be allowed here? 
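# A minimal standalone sketch (not used by this module) of the permutation
# scheme implemented by _get_all_specialized_permutations above: every fused
# type contributes one axis, each permutation gets a numeric cname id, and the
# result pairs that id with a fused-to-specific mapping.  The fused types here
# are hypothetical stand-ins, given as plain (name, [specific names]) tuples.
def _sketch_permutations(fused, prefix=""):
    (name, specifics), rest = fused[0], fused[1:]
    result = []
    for i, specific in enumerate(specifics):
        cname = "%s_%s" % (prefix, i) if prefix else str(i)
        if rest:
            for sub_cname, mapping in _sketch_permutations(rest, cname):
                mapping = dict(mapping, **{name: specific})
                result.append((sub_cname, mapping))
        else:
            result.append((cname, {name: specific}))
    return result

# _sketch_permutations([("integral", ["short", "int"]),
#                       ("floating", ["float", "double"])])
# yields cnames '0_0', '0_1', '1_0', '1_1', each paired with one
# {"integral": ..., "floating": ...} mapping, mirroring the ids that prefix
# the cnames of the specialized functions.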
not_none = False or_none = False accept_none = True accept_builtin_subtypes = False subtypes = ['type'] def __init__(self, name, type, pos, cname=None): self.name = name if cname is not None: self.cname = cname else: self.cname = Naming.var_prefix + name self.type = type self.pos = pos self.needs_type_test = False # TODO: should these defaults be set in analyse_types()? def __repr__(self): return "%s:%s" % (self.name, repr(self.type)) def declaration_code(self, for_display = 0): return self.type.declaration_code(self.cname, for_display) def specialize(self, values): return CFuncTypeArg(self.name, self.type.specialize(values), self.pos, self.cname) class ToPyStructUtilityCode(object): requires = None def __init__(self, type, forward_decl, env): self.type = type self.header = "static PyObject* %s(%s)" % (type.to_py_function, type.declaration_code('s')) self.forward_decl = forward_decl self.env = env def __eq__(self, other): return isinstance(other, ToPyStructUtilityCode) and self.header == other.header def __hash__(self): return hash(self.header) def get_tree(self, **kwargs): pass def put_code(self, output): code = output['utility_code_def'] proto = output['utility_code_proto'] code.putln("%s {" % self.header) code.putln("PyObject* res;") code.putln("PyObject* member;") code.putln("res = PyDict_New(); if (unlikely(!res)) return NULL;") for member in self.type.scope.var_entries: nameconst_cname = code.get_py_string_const(member.name, identifier=True) code.putln("%s; if (unlikely(!member)) goto bad;" % ( member.type.to_py_call_code('s.%s' % member.cname, 'member', member.type))) code.putln("if (unlikely(PyDict_SetItem(res, %s, member) < 0)) goto bad;" % nameconst_cname) code.putln("Py_DECREF(member);") code.putln("return res;") code.putln("bad:") code.putln("Py_XDECREF(member);") code.putln("Py_DECREF(res);") code.putln("return NULL;") code.putln("}") # This is a bit of a hack, we need a forward declaration # due to the way things are ordered in the module... 
if self.forward_decl: proto.putln(self.type.empty_declaration_code() + ';') proto.putln(self.header + ";") def inject_tree_and_scope_into(self, module_node): pass class CStructOrUnionType(CType): # name string # cname string # kind string "struct" or "union" # scope StructOrUnionScope, or None if incomplete # typedef_flag boolean # packed boolean # entry Entry is_struct_or_union = 1 has_attributes = 1 exception_check = True def __init__(self, name, kind, scope, typedef_flag, cname, packed=False): self.name = name self.cname = cname self.kind = kind self.scope = scope self.typedef_flag = typedef_flag self.is_struct = kind == 'struct' self.to_py_function = "%s_to_py_%s" % ( Naming.convert_func_prefix, self.specialization_name()) self.from_py_function = "%s_from_py_%s" % ( Naming.convert_func_prefix, self.specialization_name()) self.exception_check = True self._convert_to_py_code = None self._convert_from_py_code = None self.packed = packed def can_coerce_to_pyobject(self, env): if self._convert_to_py_code is False: return None # tri-state-ish if env.outer_scope is None: return False if self._convert_to_py_code is None: is_union = not self.is_struct unsafe_union_types = set() safe_union_types = set() for member in self.scope.var_entries: member_type = member.type if not member_type.can_coerce_to_pyobject(env): self.to_py_function = None self._convert_to_py_code = False return False if is_union: if member_type.is_ptr or member_type.is_cpp_class: unsafe_union_types.add(member_type) else: safe_union_types.add(member_type) if unsafe_union_types and (safe_union_types or len(unsafe_union_types) > 1): # unsafe mix of safe and unsafe to convert types self.from_py_function = None self._convert_from_py_code = False return False return True def create_to_py_utility_code(self, env): if not self.can_coerce_to_pyobject(env): return False if self._convert_to_py_code is None: for member in self.scope.var_entries: member.type.create_to_py_utility_code(env) forward_decl = self.entry.visibility != 'extern' and not self.typedef_flag self._convert_to_py_code = ToPyStructUtilityCode(self, forward_decl, env) env.use_utility_code(self._convert_to_py_code) return True def can_coerce_from_pyobject(self, env): if env.outer_scope is None or self._convert_from_py_code is False: return False for member in self.scope.var_entries: if not member.type.can_coerce_from_pyobject(env): return False return True def create_from_py_utility_code(self, env): if env.outer_scope is None: return False if self._convert_from_py_code is False: return None # tri-state-ish if self._convert_from_py_code is None: if not self.scope.var_entries: # There are obviously missing fields; don't allow instantiation # where absolutely no content is provided. 
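# A standalone sketch (hypothetical helper, not used above) of the union
# safety rule applied in CStructOrUnionType.can_coerce_to_pyobject: a union is
# only converted to a Python dict when its members do not mix plain value
# types with pointer or C++ members, and when at most one such unsafe member
# type is present.  Member kinds are plain strings here instead of PyrexTypes.
def _sketch_union_is_convertible(member_kinds):
    unsafe = set(k for k in member_kinds if k in ("ptr", "cpp_class"))
    safe = set(k for k in member_kinds if k not in ("ptr", "cpp_class"))
    if unsafe and (safe or len(unsafe) > 1):
        return False        # unsafe mix of safe and unsafe to convert types
    return True

# _sketch_union_is_convertible(["int", "double"])  -> True
# _sketch_union_is_convertible(["int", "ptr"])     -> False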
return False for member in self.scope.var_entries: if not member.type.create_from_py_utility_code(env): self.from_py_function = None self._convert_from_py_code = False return False context = dict( struct_type=self, var_entries=self.scope.var_entries, funcname=self.from_py_function, ) from .UtilityCode import CythonUtilityCode self._convert_from_py_code = CythonUtilityCode.load( "FromPyStructUtility" if self.is_struct else "FromPyUnionUtility", "CConvert.pyx", outer_module_scope=env.global_scope(), # need access to types declared in module context=context) env.use_utility_code(self._convert_from_py_code) return True def __repr__(self): return "" % ( self.name, self.cname, ("", " typedef")[self.typedef_flag]) def declaration_code(self, entity_code, for_display=0, dll_linkage=None, pyrex=0): if pyrex or for_display: base_code = self.name else: if self.typedef_flag: base_code = self.cname else: base_code = "%s %s" % (self.kind, self.cname) base_code = public_decl(base_code, dll_linkage) return self.base_declaration_code(base_code, entity_code) def __eq__(self, other): try: return (isinstance(other, CStructOrUnionType) and self.name == other.name) except AttributeError: return False def __lt__(self, other): try: return self.name < other.name except AttributeError: # this is arbitrary, but it makes sure we always have # *some* kind of order return False def __hash__(self): return hash(self.cname) ^ hash(self.kind) def is_complete(self): return self.scope is not None def attributes_known(self): return self.is_complete() def can_be_complex(self): # Does the struct consist of exactly two identical floats? fields = self.scope.var_entries if len(fields) != 2: return False a, b = fields return (a.type.is_float and b.type.is_float and a.type.empty_declaration_code() == b.type.empty_declaration_code()) def struct_nesting_depth(self): child_depths = [x.type.struct_nesting_depth() for x in self.scope.var_entries] return max(child_depths) + 1 def cast_code(self, expr_code): if self.is_struct: return expr_code return super(CStructOrUnionType, self).cast_code(expr_code) cpp_string_conversions = ("std::string",) builtin_cpp_conversions = { # type element template params "std::pair": 2, "std::vector": 1, "std::list": 1, "std::set": 1, "std::unordered_set": 1, "std::map": 2, "std::unordered_map": 2, "std::complex": 1, } class CppClassType(CType): # name string # cname string # scope CppClassScope # templates [string] or None is_cpp_class = 1 has_attributes = 1 exception_check = True namespace = None # For struct-like declaration. 
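# The containers in builtin_cpp_conversions above (and the strings in
# cpp_string_conversions) get automatic Python<->C++ converters.  As a rough
# sketch of how the converter name is derived, mirroring
# CppClassType.create_from_py_utility_code below (the tag strings are
# hypothetical placeholders for T.specialization_name()):
def _sketch_from_py_cname(cpp_cname, tags):
    cls = cpp_cname[5:]   # strip the "std::" prefix, e.g. "std::map" -> "map"
    return '__pyx_convert_%s_from_py_%s' % (cls, '__and_'.join(tags))

# _sketch_from_py_cname("std::map", ["int", "double"])
# -> '__pyx_convert_map_from_py_int__and_double'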
kind = "struct" packed = False typedef_flag = False subtypes = ['templates'] def __init__(self, name, scope, cname, base_classes, templates=None, template_type=None): self.name = name self.cname = cname self.scope = scope self.base_classes = base_classes self.operators = [] self.templates = templates self.template_type = template_type self.num_optional_templates = sum(is_optional_template_param(T) for T in templates or ()) if templates: self.specializations = {tuple(zip(templates, templates)): self} else: self.specializations = {} self.is_cpp_string = cname in cpp_string_conversions def use_conversion_utility(self, from_or_to): pass def maybe_unordered(self): if 'unordered' in self.cname: return 'unordered_' else: return '' def can_coerce_from_pyobject(self, env): if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions: for ix, T in enumerate(self.templates or []): if ix >= builtin_cpp_conversions[self.cname]: break if T.is_pyobject or not T.can_coerce_from_pyobject(env): return False return True def create_from_py_utility_code(self, env): if self.from_py_function is not None: return True if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions: X = "XYZABC" tags = [] context = {} for ix, T in enumerate(self.templates or []): if ix >= builtin_cpp_conversions[self.cname]: break if T.is_pyobject or not T.create_from_py_utility_code(env): return False tags.append(T.specialization_name()) context[X[ix]] = T if self.cname in cpp_string_conversions: cls = 'string' tags = type_identifier(self), else: cls = self.cname[5:] cname = '__pyx_convert_%s_from_py_%s' % (cls, '__and_'.join(tags)) context.update({ 'cname': cname, 'maybe_unordered': self.maybe_unordered(), 'type': self.cname, }) from .UtilityCode import CythonUtilityCode env.use_utility_code(CythonUtilityCode.load( cls.replace('unordered_', '') + ".from_py", "CppConvert.pyx", context=context, compiler_directives=env.directives)) self.from_py_function = cname return True def can_coerce_to_pyobject(self, env): if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions: for ix, T in enumerate(self.templates or []): if ix >= builtin_cpp_conversions[self.cname]: break if T.is_pyobject or not T.can_coerce_to_pyobject(env): return False return True def create_to_py_utility_code(self, env): if self.to_py_function is not None: return True if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions: X = "XYZABC" tags = [] context = {} for ix, T in enumerate(self.templates or []): if ix >= builtin_cpp_conversions[self.cname]: break if not T.create_to_py_utility_code(env): return False tags.append(T.specialization_name()) context[X[ix]] = T if self.cname in cpp_string_conversions: cls = 'string' prefix = 'PyObject_' # gets specialised by explicit type casts in CoerceToPyTypeNode tags = type_identifier(self), else: cls = self.cname[5:] prefix = '' cname = "__pyx_convert_%s%s_to_py_%s" % (prefix, cls, "____".join(tags)) context.update({ 'cname': cname, 'maybe_unordered': self.maybe_unordered(), 'type': self.cname, }) from .UtilityCode import CythonUtilityCode env.use_utility_code(CythonUtilityCode.load( cls.replace('unordered_', '') + ".to_py", "CppConvert.pyx", context=context, compiler_directives=env.directives)) self.to_py_function = cname return True def is_template_type(self): return self.templates is not None and self.template_type is None def get_fused_types(self, result=None, seen=None): if result is None: result = [] seen = set() if self.namespace: 
self.namespace.get_fused_types(result, seen) if self.templates: for T in self.templates: T.get_fused_types(result, seen) return result def specialize_here(self, pos, template_values=None): if not self.is_template_type(): error(pos, "'%s' type is not a template" % self) return error_type if len(self.templates) - self.num_optional_templates <= len(template_values) < len(self.templates): num_defaults = len(self.templates) - len(template_values) partial_specialization = self.declaration_code('', template_params=template_values) # Most of the time we don't need to declare anything typed to these # default template arguments, but when we do there's no way in C++ # to reference this directly. However, it is common convention to # provide a typedef in the template class that resolves to each # template type. For now, allow the user to specify this name as # the template parameter. # TODO: Allow typedefs in cpp classes and search for it in this # classes scope as a concrete name we could use. template_values = template_values + [ TemplatePlaceholderType( "%s::%s" % (partial_specialization, param.name), True) for param in self.templates[-num_defaults:]] if len(self.templates) != len(template_values): error(pos, "%s templated type receives %d arguments, got %d" % (self.name, len(self.templates), len(template_values))) return error_type has_object_template_param = False for value in template_values: if value.is_pyobject: has_object_template_param = True error(pos, "Python object type '%s' cannot be used as a template argument" % value) if has_object_template_param: return error_type return self.specialize(dict(zip(self.templates, template_values))) def specialize(self, values): if not self.templates and not self.namespace: return self if self.templates is None: self.templates = [] key = tuple(values.items()) if key in self.specializations: return self.specializations[key] template_values = [t.specialize(values) for t in self.templates] specialized = self.specializations[key] = \ CppClassType(self.name, None, self.cname, [], template_values, template_type=self) # Need to do these *after* self.specializations[key] is set # to avoid infinite recursion on circular references. specialized.base_classes = [b.specialize(values) for b in self.base_classes] if self.namespace is not None: specialized.namespace = self.namespace.specialize(values) specialized.scope = self.scope.specialize(values, specialized) if self.cname == 'std::vector': # vector is special cased in the C++ standard, and its # accessors do not necessarily return references to the underlying # elements (which may be bit-packed). # http://www.cplusplus.com/reference/vector/vector-bool/ # Here we pretend that the various methods return bool values # (as the actual returned values are coercable to such, and # we don't support call expressions as lvalues). 
T = values.get(self.templates[0], None) if T and not T.is_fused and T.empty_declaration_code() == 'bool': for bit_ref_returner in ('at', 'back', 'front'): if bit_ref_returner in specialized.scope.entries: specialized.scope.entries[bit_ref_returner].type.return_type = T return specialized def deduce_template_params(self, actual): if actual.is_const: actual = actual.const_base_type if actual.is_reference: actual = actual.ref_base_type if self == actual: return {} elif actual.is_cpp_class: self_template_type = self while getattr(self_template_type, 'template_type', None): self_template_type = self_template_type.template_type def all_bases(cls): yield cls for parent in cls.base_classes: for base in all_bases(parent): yield base for actual_base in all_bases(actual): template_type = actual_base while getattr(template_type, 'template_type', None): template_type = template_type.template_type if (self_template_type.empty_declaration_code() == template_type.empty_declaration_code()): return reduce( merge_template_deductions, [formal_param.deduce_template_params(actual_param) for (formal_param, actual_param) in zip(self.templates, actual_base.templates)], {}) else: return {} def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0, template_params = None): if template_params is None: template_params = self.templates if self.templates: template_strings = [param.declaration_code('', for_display, None, pyrex) for param in template_params if not is_optional_template_param(param) and not param.is_fused] if for_display: brackets = "[%s]" else: brackets = "<%s> " templates = brackets % ",".join(template_strings) else: templates = "" if pyrex or for_display: base_code = "%s%s" % (self.name, templates) else: base_code = "%s%s" % (self.cname, templates) if self.namespace is not None: base_code = "%s::%s" % (self.namespace.empty_declaration_code(), base_code) base_code = public_decl(base_code, dll_linkage) return self.base_declaration_code(base_code, entity_code) def is_subclass(self, other_type): if self.same_as_resolved_type(other_type): return 1 for base_class in self.base_classes: if base_class.is_subclass(other_type): return 1 return 0 def subclass_dist(self, super_type): if self.same_as_resolved_type(super_type): return 0 elif not self.base_classes: return float('inf') else: return 1 + min(b.subclass_dist(super_type) for b in self.base_classes) def same_as_resolved_type(self, other_type): if other_type.is_cpp_class: if self == other_type: return 1 elif (self.cname == other_type.cname and self.template_type and other_type.template_type): if self.templates == other_type.templates: return 1 for t1, t2 in zip(self.templates, other_type.templates): if is_optional_template_param(t1) and is_optional_template_param(t2): break if not t1.same_as_resolved_type(t2): return 0 return 1 return 0 def assignable_from_resolved_type(self, other_type): # TODO: handle operator=(...) here? 
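# A standalone sketch of how template deductions are combined.  Each
# formal/actual parameter pair contributes a partial {placeholder: concrete}
# mapping via deduce_template_params above, and the partial mappings are then
# merged, with None signalling a conflict (see merge_template_deductions
# further down in this module).  The names below are hypothetical.
def _sketch_merge_deductions(a, b):
    if a is None or b is None:
        return None
    merged = dict(a)
    for param, value in b.items():
        if param in merged and merged[param] != value:
            return None     # the same placeholder was deduced to two types
        merged[param] = value
    return merged

# _sketch_merge_deductions({"T": "int"}, {"U": "double"}) -> both deductions kept
# _sketch_merge_deductions({"T": "int"}, {"T": "float"})  -> None (conflict)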
if other_type is error_type: return True elif other_type.is_cpp_class: return other_type.is_subclass(self) def attributes_known(self): return self.scope is not None def find_cpp_operation_type(self, operator, operand_type=None): operands = [self] if operand_type is not None: operands.append(operand_type) # pos == None => no errors operator_entry = self.scope.lookup_operator_for_types(None, operator, operands) if not operator_entry: return None func_type = operator_entry.type if func_type.is_ptr: func_type = func_type.base_type return func_type.return_type def check_nullary_constructor(self, pos, msg="stack allocated"): constructor = self.scope.lookup(u'') if constructor is not None and best_match([], constructor.all_alternatives()) is None: error(pos, "C++ class must have a nullary constructor to be %s" % msg) class TemplatePlaceholderType(CType): def __init__(self, name, optional=False): self.name = name self.optional = optional def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if entity_code: return self.name + " " + entity_code else: return self.name def specialize(self, values): if self in values: return values[self] else: return self def deduce_template_params(self, actual): return {self: actual} def same_as_resolved_type(self, other_type): if isinstance(other_type, TemplatePlaceholderType): return self.name == other_type.name else: return 0 def __hash__(self): return hash(self.name) def __cmp__(self, other): if isinstance(other, TemplatePlaceholderType): return cmp(self.name, other.name) else: return cmp(type(self), type(other)) def __eq__(self, other): if isinstance(other, TemplatePlaceholderType): return self.name == other.name else: return False def is_optional_template_param(type): return isinstance(type, TemplatePlaceholderType) and type.optional class CEnumType(CType): # name string # cname string or None # typedef_flag boolean # values [string], populated during declaration analysis is_enum = 1 signed = 1 rank = -1 # Ranks below any integer type def __init__(self, name, cname, typedef_flag, namespace=None): self.name = name self.cname = cname self.values = [] self.typedef_flag = typedef_flag self.namespace = namespace self.default_value = "(%s) 0" % self.empty_declaration_code() def __str__(self): return self.name def __repr__(self): return "" % (self.name, self.cname, ("", " typedef")[self.typedef_flag]) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: base_code = self.name else: if self.namespace: base_code = "%s::%s" % ( self.namespace.empty_declaration_code(), self.cname) elif self.typedef_flag: base_code = self.cname else: base_code = "enum %s" % self.cname base_code = public_decl(base_code, dll_linkage) return self.base_declaration_code(base_code, entity_code) def specialize(self, values): if self.namespace: namespace = self.namespace.specialize(values) if namespace != self.namespace: return CEnumType( self.name, self.cname, self.typedef_flag, namespace) return self def can_coerce_to_pyobject(self, env): return True def can_coerce_from_pyobject(self, env): return True def create_to_py_utility_code(self, env): self.to_py_function = "__Pyx_PyInt_From_" + self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntToPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "TO_PY_FUNCTION": self.to_py_function})) return True def create_from_py_utility_code(self, env): self.from_py_function = "__Pyx_PyInt_As_" + 
self.specialization_name() env.use_utility_code(TempitaUtilityCode.load_cached( "CIntFromPy", "TypeConversion.c", context={"TYPE": self.empty_declaration_code(), "FROM_PY_FUNCTION": self.from_py_function})) return True def from_py_call_code(self, source_code, result_code, error_pos, code, from_py_function=None, error_condition=None): rhs = "%s(%s)" % ( from_py_function or self.from_py_function, source_code) return '%s = %s;%s' % ( result_code, typecast(self, c_long_type, rhs), ' %s' % code.error_goto_if(error_condition or self.error_condition(result_code), error_pos)) def create_type_wrapper(self, env): from .UtilityCode import CythonUtilityCode env.use_utility_code(CythonUtilityCode.load( "EnumType", "CpdefEnums.pyx", context={"name": self.name, "items": tuple(self.values)}, outer_module_scope=env.global_scope())) class CTupleType(CType): # components [PyrexType] is_ctuple = True def __init__(self, cname, components): self.cname = cname self.components = components self.size = len(components) self.to_py_function = "%s_to_py_%s" % (Naming.convert_func_prefix, self.cname) self.from_py_function = "%s_from_py_%s" % (Naming.convert_func_prefix, self.cname) self.exception_check = True self._convert_to_py_code = None self._convert_from_py_code = None def __str__(self): return "(%s)" % ", ".join(str(c) for c in self.components) def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): if pyrex or for_display: return str(self) else: return self.base_declaration_code(self.cname, entity_code) def can_coerce_to_pyobject(self, env): for component in self.components: if not component.can_coerce_to_pyobject(env): return False return True def can_coerce_from_pyobject(self, env): for component in self.components: if not component.can_coerce_from_pyobject(env): return False return True def create_to_py_utility_code(self, env): if self._convert_to_py_code is False: return None # tri-state-ish if self._convert_to_py_code is None: for component in self.components: if not component.create_to_py_utility_code(env): self.to_py_function = None self._convert_to_py_code = False return False context = dict( struct_type_decl=self.empty_declaration_code(), components=self.components, funcname=self.to_py_function, size=len(self.components) ) self._convert_to_py_code = TempitaUtilityCode.load( "ToPyCTupleUtility", "TypeConversion.c", context=context) env.use_utility_code(self._convert_to_py_code) return True def create_from_py_utility_code(self, env): if self._convert_from_py_code is False: return None # tri-state-ish if self._convert_from_py_code is None: for component in self.components: if not component.create_from_py_utility_code(env): self.from_py_function = None self._convert_from_py_code = False return False context = dict( struct_type_decl=self.empty_declaration_code(), components=self.components, funcname=self.from_py_function, size=len(self.components) ) self._convert_from_py_code = TempitaUtilityCode.load( "FromPyCTupleUtility", "TypeConversion.c", context=context) env.use_utility_code(self._convert_from_py_code) return True def c_tuple_type(components): components = tuple(components) cname = Naming.ctuple_type_prefix + type_list_identifier(components) tuple_type = CTupleType(cname, components) return tuple_type class UnspecifiedType(PyrexType): # Used as a placeholder until the type can be determined. 
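# A small illustrative helper (hypothetical, not used by this module) spelling
# out the tri-state convention of the _convert_*_py_code attributes used by
# CTupleType above and CStructOrUnionType earlier: None means the conversion
# code has not been attempted yet, False means a previous attempt showed the
# type cannot be converted, and anything else is the cached utility code.
def _sketch_classify_convert_cache(cached):
    if cached is None:
        return "not attempted yet"
    if cached is False:
        return "known to be unconvertible"
    return "cached utility code, reuse it"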
is_unspecified = 1 def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): return "" def same_as_resolved_type(self, other_type): return False class ErrorType(PyrexType): # Used to prevent propagation of error messages. is_error = 1 exception_value = "0" exception_check = 0 to_py_function = "dummy" from_py_function = "dummy" def create_to_py_utility_code(self, env): return True def create_from_py_utility_code(self, env): return True def declaration_code(self, entity_code, for_display = 0, dll_linkage = None, pyrex = 0): return "" def same_as_resolved_type(self, other_type): return 1 def error_condition(self, result_code): return "dummy" rank_to_type_name = ( "char", # 0 "short", # 1 "int", # 2 "long", # 3 "PY_LONG_LONG", # 4 "float", # 5 "double", # 6 "long double", # 7 ) _rank_to_type_name = list(rank_to_type_name) RANK_INT = _rank_to_type_name.index('int') RANK_LONG = _rank_to_type_name.index('long') RANK_FLOAT = _rank_to_type_name.index('float') UNSIGNED = 0 SIGNED = 2 error_type = ErrorType() unspecified_type = UnspecifiedType() py_object_type = PyObjectType() c_void_type = CVoidType() c_uchar_type = CIntType(0, UNSIGNED) c_ushort_type = CIntType(1, UNSIGNED) c_uint_type = CIntType(2, UNSIGNED) c_ulong_type = CIntType(3, UNSIGNED) c_ulonglong_type = CIntType(4, UNSIGNED) c_char_type = CIntType(0) c_short_type = CIntType(1) c_int_type = CIntType(2) c_long_type = CIntType(3) c_longlong_type = CIntType(4) c_schar_type = CIntType(0, SIGNED) c_sshort_type = CIntType(1, SIGNED) c_sint_type = CIntType(2, SIGNED) c_slong_type = CIntType(3, SIGNED) c_slonglong_type = CIntType(4, SIGNED) c_float_type = CFloatType(5, math_h_modifier='f') c_double_type = CFloatType(6) c_longdouble_type = CFloatType(7, math_h_modifier='l') c_float_complex_type = CComplexType(c_float_type) c_double_complex_type = CComplexType(c_double_type) c_longdouble_complex_type = CComplexType(c_longdouble_type) c_anon_enum_type = CAnonEnumType(-1) c_returncode_type = CReturnCodeType(RANK_INT) c_bint_type = CBIntType(RANK_INT) c_py_unicode_type = CPyUnicodeIntType(RANK_INT-0.5, UNSIGNED) c_py_ucs4_type = CPyUCS4IntType(RANK_LONG-0.5, UNSIGNED) c_py_hash_t_type = CPyHashTType(RANK_LONG+0.5, SIGNED) c_py_ssize_t_type = CPySSizeTType(RANK_LONG+0.5, SIGNED) c_ssize_t_type = CSSizeTType(RANK_LONG+0.5, SIGNED) c_size_t_type = CSizeTType(RANK_LONG+0.5, UNSIGNED) c_ptrdiff_t_type = CPtrdiffTType(RANK_LONG+0.75, SIGNED) c_null_ptr_type = CNullPtrType(c_void_type) c_void_ptr_type = CPtrType(c_void_type) c_void_ptr_ptr_type = CPtrType(c_void_ptr_type) c_char_ptr_type = CPtrType(c_char_type) c_const_char_ptr_type = CPtrType(CConstType(c_char_type)) c_uchar_ptr_type = CPtrType(c_uchar_type) c_const_uchar_ptr_type = CPtrType(CConstType(c_uchar_type)) c_char_ptr_ptr_type = CPtrType(c_char_ptr_type) c_int_ptr_type = CPtrType(c_int_type) c_py_unicode_ptr_type = CPtrType(c_py_unicode_type) c_const_py_unicode_ptr_type = CPtrType(CConstType(c_py_unicode_type)) c_py_ssize_t_ptr_type = CPtrType(c_py_ssize_t_type) c_ssize_t_ptr_type = CPtrType(c_ssize_t_type) c_size_t_ptr_type = CPtrType(c_size_t_type) # GIL state c_gilstate_type = CEnumType("PyGILState_STATE", "PyGILState_STATE", True) c_threadstate_type = CStructOrUnionType("PyThreadState", "struct", None, 1, "PyThreadState") c_threadstate_ptr_type = CPtrType(c_threadstate_type) # the Py_buffer type is defined in Builtin.py c_py_buffer_type = CStructOrUnionType("Py_buffer", "struct", None, 1, "Py_buffer") c_py_buffer_ptr_type = CPtrType(c_py_buffer_type) # Not sure 
whether the unsigned versions and 'long long' should be in there # long long requires C99 and might be slow, and would always get preferred # when specialization happens through calling and not indexing cy_integral_type = FusedType([c_short_type, c_int_type, c_long_type], name="integral") # Omitting long double as it might be slow cy_floating_type = FusedType([c_float_type, c_double_type], name="floating") cy_numeric_type = FusedType([c_short_type, c_int_type, c_long_type, c_float_type, c_double_type, c_float_complex_type, c_double_complex_type], name="numeric") # buffer-related structs c_buf_diminfo_type = CStructOrUnionType("__Pyx_Buf_DimInfo", "struct", None, 1, "__Pyx_Buf_DimInfo") c_pyx_buffer_type = CStructOrUnionType("__Pyx_Buffer", "struct", None, 1, "__Pyx_Buffer") c_pyx_buffer_ptr_type = CPtrType(c_pyx_buffer_type) c_pyx_buffer_nd_type = CStructOrUnionType("__Pyx_LocalBuf_ND", "struct", None, 1, "__Pyx_LocalBuf_ND") cython_memoryview_type = CStructOrUnionType("__pyx_memoryview_obj", "struct", None, 0, "__pyx_memoryview_obj") memoryviewslice_type = CStructOrUnionType("memoryviewslice", "struct", None, 1, "__Pyx_memviewslice") modifiers_and_name_to_type = { #(signed, longness, name) : type (0, 0, "char"): c_uchar_type, (1, 0, "char"): c_char_type, (2, 0, "char"): c_schar_type, (0, -1, "int"): c_ushort_type, (0, 0, "int"): c_uint_type, (0, 1, "int"): c_ulong_type, (0, 2, "int"): c_ulonglong_type, (1, -1, "int"): c_short_type, (1, 0, "int"): c_int_type, (1, 1, "int"): c_long_type, (1, 2, "int"): c_longlong_type, (2, -1, "int"): c_sshort_type, (2, 0, "int"): c_sint_type, (2, 1, "int"): c_slong_type, (2, 2, "int"): c_slonglong_type, (1, 0, "float"): c_float_type, (1, 0, "double"): c_double_type, (1, 1, "double"): c_longdouble_type, (1, 0, "complex"): c_double_complex_type, # C: float, Python: double => Python wins (1, 0, "floatcomplex"): c_float_complex_type, (1, 0, "doublecomplex"): c_double_complex_type, (1, 1, "doublecomplex"): c_longdouble_complex_type, # (1, 0, "void"): c_void_type, (1, 0, "bint"): c_bint_type, (0, 0, "Py_UNICODE"): c_py_unicode_type, (0, 0, "Py_UCS4"): c_py_ucs4_type, (2, 0, "Py_hash_t"): c_py_hash_t_type, (2, 0, "Py_ssize_t"): c_py_ssize_t_type, (2, 0, "ssize_t") : c_ssize_t_type, (0, 0, "size_t") : c_size_t_type, (2, 0, "ptrdiff_t") : c_ptrdiff_t_type, (1, 0, "object"): py_object_type, } def is_promotion(src_type, dst_type): # It's hard to find a hard definition of promotion, but empirical # evidence suggests that the below is all that's allowed. if src_type.is_numeric: if dst_type.same_as(c_int_type): unsigned = (not src_type.signed) return (src_type.is_enum or (src_type.is_int and unsigned + src_type.rank < dst_type.rank)) elif dst_type.same_as(c_double_type): return src_type.is_float and src_type.rank <= dst_type.rank return False def best_match(arg_types, functions, pos=None, env=None, args=None): """ Given a list args of arguments and a list of functions, choose one to call which seems to be the "best" fit for this list of arguments. This function is used, e.g., when deciding which overloaded method to dispatch for C++ classes. We first eliminate functions based on arity, and if only one function has the correct arity, we return it. 
Otherwise, we weight functions based on how much work must be done to convert the arguments, with the following priorities: * identical types or pointers to identical types * promotions * non-Python types That is, we prefer functions where no arguments need converted, and failing that, functions where only promotions are required, and so on. If no function is deemed a good fit, or if two or more functions have the same weight, we return None (as there is no best match). If pos is not None, we also generate an error. """ # TODO: args should be a list of types, not a list of Nodes. actual_nargs = len(arg_types) candidates = [] errors = [] for func in functions: error_mesg = "" func_type = func.type if func_type.is_ptr: func_type = func_type.base_type # Check function type if not func_type.is_cfunction: if not func_type.is_error and pos is not None: error_mesg = "Calling non-function type '%s'" % func_type errors.append((func, error_mesg)) continue # Check no. of args max_nargs = len(func_type.args) min_nargs = max_nargs - func_type.optional_arg_count if actual_nargs < min_nargs or \ (not func_type.has_varargs and actual_nargs > max_nargs): if max_nargs == min_nargs and not func_type.has_varargs: expectation = max_nargs elif actual_nargs < min_nargs: expectation = "at least %s" % min_nargs else: expectation = "at most %s" % max_nargs error_mesg = "Call with wrong number of arguments (expected %s, got %s)" \ % (expectation, actual_nargs) errors.append((func, error_mesg)) continue if func_type.templates: deductions = reduce( merge_template_deductions, [pattern.type.deduce_template_params(actual) for (pattern, actual) in zip(func_type.args, arg_types)], {}) if deductions is None: errors.append((func, "Unable to deduce type parameters for %s given (%s)" % (func_type, ', '.join(map(str, arg_types))))) elif len(deductions) < len(func_type.templates): errors.append((func, "Unable to deduce type parameter %s" % ( ", ".join([param.name for param in set(func_type.templates) - set(deductions.keys())])))) else: type_list = [deductions[param] for param in func_type.templates] from .Symtab import Entry specialization = Entry( name = func.name + "[%s]" % ",".join([str(t) for t in type_list]), cname = func.cname + "<%s>" % ",".join([t.empty_declaration_code() for t in type_list]), type = func_type.specialize(deductions), pos = func.pos) candidates.append((specialization, specialization.type)) else: candidates.append((func, func_type)) # Optimize the most common case of no overloading... if len(candidates) == 1: return candidates[0][0] elif len(candidates) == 0: if pos is not None: func, errmsg = errors[0] if len(errors) == 1 or [1 for func, e in errors if e == errmsg]: error(pos, errmsg) else: error(pos, "no suitable method found") return None possibilities = [] bad_types = [] needed_coercions = {} for index, (func, func_type) in enumerate(candidates): score = [0,0,0,0,0,0,0] for i in range(min(actual_nargs, len(func_type.args))): src_type = arg_types[i] dst_type = func_type.args[i].type assignable = dst_type.assignable_from(src_type) # Now take care of unprefixed string literals. So when you call a cdef # function that takes a char *, the coercion will mean that the # type will simply become bytes. 
We need to do this coercion # manually for overloaded and fused functions if not assignable: c_src_type = None if src_type.is_pyobject: if src_type.is_builtin_type and src_type.name == 'str' and dst_type.resolve().is_string: c_src_type = dst_type.resolve() else: c_src_type = src_type.default_coerced_ctype() elif src_type.is_pythran_expr: c_src_type = src_type.org_buffer if c_src_type is not None: assignable = dst_type.assignable_from(c_src_type) if assignable: src_type = c_src_type needed_coercions[func] = (i, dst_type) if assignable: if src_type == dst_type or dst_type.same_as(src_type): pass # score 0 elif func_type.is_strict_signature: break # exact match requested but not found elif is_promotion(src_type, dst_type): score[2] += 1 elif ((src_type.is_int and dst_type.is_int) or (src_type.is_float and dst_type.is_float)): score[2] += abs(dst_type.rank + (not dst_type.signed) - (src_type.rank + (not src_type.signed))) + 1 elif dst_type.is_ptr and src_type.is_ptr: if dst_type.base_type == c_void_type: score[4] += 1 elif src_type.base_type.is_cpp_class and src_type.base_type.is_subclass(dst_type.base_type): score[6] += src_type.base_type.subclass_dist(dst_type.base_type) else: score[5] += 1 elif not src_type.is_pyobject: score[1] += 1 else: score[0] += 1 else: error_mesg = "Invalid conversion from '%s' to '%s'" % (src_type, dst_type) bad_types.append((func, error_mesg)) break else: possibilities.append((score, index, func)) # so we can sort it if possibilities: possibilities.sort() if len(possibilities) > 1: score1 = possibilities[0][0] score2 = possibilities[1][0] if score1 == score2: if pos is not None: error(pos, "ambiguous overloaded method") return None function = possibilities[0][-1] if function in needed_coercions and env: arg_i, coerce_to_type = needed_coercions[function] args[arg_i] = args[arg_i].coerce_to(coerce_to_type, env) return function if pos is not None: if len(bad_types) == 1: error(pos, bad_types[0][1]) else: error(pos, "no suitable method found") return None def merge_template_deductions(a, b): if a is None or b is None: return None all = a for param, value in b.items(): if param in all: if a[param] != b[param]: return None else: all[param] = value return all def widest_numeric_type(type1, type2): """Given two numeric types, return the narrowest type encompassing both of them. """ if type1.is_reference: type1 = type1.ref_base_type if type2.is_reference: type2 = type2.ref_base_type if type1.is_const: type1 = type1.const_base_type if type2.is_const: type2 = type2.const_base_type if type1 == type2: widest_type = type1 elif type1.is_complex or type2.is_complex: def real_type(ntype): if ntype.is_complex: return ntype.real_type return ntype widest_type = CComplexType( widest_numeric_type( real_type(type1), real_type(type2))) elif type1.is_enum and type2.is_enum: widest_type = c_int_type elif type1.rank < type2.rank: widest_type = type2 elif type1.rank > type2.rank: widest_type = type1 elif type1.signed < type2.signed: widest_type = type1 elif type1.signed > type2.signed: widest_type = type2 elif type1.is_typedef > type2.is_typedef: widest_type = type1 else: widest_type = type2 return widest_type def numeric_type_fits(small_type, large_type): return widest_numeric_type(small_type, large_type) == large_type def independent_spanning_type(type1, type2): # Return a type assignable independently from both type1 and # type2, but do not require any interoperability between the two. 
# For example, in "True * 2", it is safe to assume an integer # result type (so spanning_type() will do the right thing), # whereas "x = True or 2" must evaluate to a type that can hold # both a boolean value and an integer, so this function works # better. if type1.is_reference ^ type2.is_reference: if type1.is_reference: type1 = type1.ref_base_type else: type2 = type2.ref_base_type if type1 == type2: return type1 elif (type1 is c_bint_type or type2 is c_bint_type) and (type1.is_numeric and type2.is_numeric): # special case: if one of the results is a bint and the other # is another C integer, we must prevent returning a numeric # type so that we do not lose the ability to coerce to a # Python bool if we have to. return py_object_type span_type = _spanning_type(type1, type2) if span_type is None: return error_type return span_type def spanning_type(type1, type2): # Return a type assignable from both type1 and type2, or # py_object_type if no better type is found. Assumes that the # code that calls this will try a coercion afterwards, which will # fail if the types cannot actually coerce to a py_object_type. if type1 == type2: return type1 elif type1 is py_object_type or type2 is py_object_type: return py_object_type elif type1 is c_py_unicode_type or type2 is c_py_unicode_type: # Py_UNICODE behaves more like a string than an int return py_object_type span_type = _spanning_type(type1, type2) if span_type is None: return py_object_type return span_type def _spanning_type(type1, type2): if type1.is_numeric and type2.is_numeric: return widest_numeric_type(type1, type2) elif type1.is_builtin_type and type1.name == 'float' and type2.is_numeric: return widest_numeric_type(c_double_type, type2) elif type2.is_builtin_type and type2.name == 'float' and type1.is_numeric: return widest_numeric_type(type1, c_double_type) elif type1.is_extension_type and type2.is_extension_type: return widest_extension_type(type1, type2) elif type1.is_pyobject or type2.is_pyobject: return py_object_type elif type1.assignable_from(type2): if type1.is_extension_type and type1.typeobj_is_imported(): # external types are unsafe, so we use PyObject instead return py_object_type return type1 elif type2.assignable_from(type1): if type2.is_extension_type and type2.typeobj_is_imported(): # external types are unsafe, so we use PyObject instead return py_object_type return type2 elif type1.is_ptr and type2.is_ptr: if type1.base_type.is_cpp_class and type2.base_type.is_cpp_class: common_base = widest_cpp_type(type1.base_type, type2.base_type) if common_base: return CPtrType(common_base) # incompatible pointers, void* will do as a result return c_void_ptr_type else: return None def widest_extension_type(type1, type2): if type1.typeobj_is_imported() or type2.typeobj_is_imported(): return py_object_type while True: if type1.subtype_of(type2): return type2 elif type2.subtype_of(type1): return type1 type1, type2 = type1.base_type, type2.base_type if type1 is None or type2 is None: return py_object_type def widest_cpp_type(type1, type2): @cached_function def bases(type): all = set() for base in type.base_classes: all.add(base) all.update(bases(base)) return all common_bases = bases(type1).intersection(bases(type2)) common_bases_bases = reduce(set.union, [bases(b) for b in common_bases], set()) candidates = [b for b in common_bases if b not in common_bases_bases] if len(candidates) == 1: return candidates[0] else: # Fall back to void* for now. 
return None def simple_c_type(signed, longness, name): # Find type descriptor for simple type given name and modifiers. # Returns None if arguments don't make sense. return modifiers_and_name_to_type.get((signed, longness, name)) def parse_basic_type(name): base = None if name.startswith('p_'): base = parse_basic_type(name[2:]) elif name.startswith('p'): base = parse_basic_type(name[1:]) elif name.endswith('*'): base = parse_basic_type(name[:-1]) if base: return CPtrType(base) # basic_type = simple_c_type(1, 0, name) if basic_type: return basic_type # signed = 1 longness = 0 if name == 'Py_UNICODE': signed = 0 elif name == 'Py_UCS4': signed = 0 elif name == 'Py_hash_t': signed = 2 elif name == 'Py_ssize_t': signed = 2 elif name == 'ssize_t': signed = 2 elif name == 'size_t': signed = 0 else: if name.startswith('u'): name = name[1:] signed = 0 elif (name.startswith('s') and not name.startswith('short')): name = name[1:] signed = 2 longness = 0 while name.startswith('short'): name = name.replace('short', '', 1).strip() longness -= 1 while name.startswith('long'): name = name.replace('long', '', 1).strip() longness += 1 if longness != 0 and not name: name = 'int' return simple_c_type(signed, longness, name) def c_array_type(base_type, size): # Construct a C array type. if base_type is error_type: return error_type else: return CArrayType(base_type, size) def c_ptr_type(base_type): # Construct a C pointer type. if base_type is error_type: return error_type elif base_type.is_reference: return CPtrType(base_type.ref_base_type) else: return CPtrType(base_type) def c_ref_type(base_type): # Construct a C reference type if base_type is error_type: return error_type else: return CReferenceType(base_type) def c_const_type(base_type): # Construct a C const type. if base_type is error_type: return error_type else: return CConstType(base_type) def same_type(type1, type2): return type1.same_as(type2) def assignable_from(type1, type2): return type1.assignable_from(type2) def typecast(to_type, from_type, expr_code): # Return expr_code cast to a C type which can be # assigned to to_type, assuming its existing C type # is from_type. 
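# Hedged examples of the type-name shorthands that parse_basic_type above
# accepts (traced from the code, shown here for orientation only): a leading
# "p"/"p_" wraps the rest in a pointer, a leading "u" or "s" selects the
# signedness, repeated "short"/"long" adjust the longness, and the base name
# defaults to "int" when only modifiers remain.  For instance:
#   "ulong"      -> simple_c_type(0, 1, "int")   i.e. unsigned long
#   "sshort"     -> simple_c_type(2, -1, "int")  i.e. explicitly signed short
#   "p_longlong" -> CPtrType of simple_c_type(1, 2, "int"), a long long pointer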
if (to_type is from_type or (not to_type.is_pyobject and assignable_from(to_type, from_type))): return expr_code elif (to_type is py_object_type and from_type and from_type.is_builtin_type and from_type.name != 'type'): # no cast needed, builtins are PyObject* already return expr_code else: #print "typecast: to", to_type, "from", from_type ### return to_type.cast_code(expr_code) def type_list_identifier(types): return cap_length('__and_'.join(type_identifier(type) for type in types)) _type_identifier_cache = {} def type_identifier(type): decl = type.empty_declaration_code() safe = _type_identifier_cache.get(decl) if safe is None: safe = decl safe = re.sub(' +', ' ', safe) safe = re.sub(' ([^a-zA-Z0-9_])', r'\1', safe) safe = re.sub('([^a-zA-Z0-9_]) ', r'\1', safe) safe = (safe.replace('__', '__dunder') .replace('const ', '__const_') .replace(' ', '__space_') .replace('*', '__ptr') .replace('&', '__ref') .replace('[', '__lArr') .replace(']', '__rArr') .replace('<', '__lAng') .replace('>', '__rAng') .replace('(', '__lParen') .replace(')', '__rParen') .replace(',', '__comma_') .replace('::', '__in_')) safe = cap_length(re.sub('[^a-zA-Z0-9_]', lambda x: '__%X' % ord(x.group(0)), safe)) _type_identifier_cache[decl] = safe return safe def cap_length(s, max_prefix=63, max_len=1024): if len(s) <= max_prefix: return s else: return '%x__%s__etc' % (abs(hash(s)) % (1<<20), s[:max_len-17]) Cython-0.26.1/Cython/Compiler/FusedNode.py0000664000175000017500000010662713150045407021107 0ustar stefanstefan00000000000000from __future__ import absolute_import import copy from . import (ExprNodes, PyrexTypes, MemoryView, ParseTreeTransforms, StringEncoding, Errors) from .ExprNodes import CloneNode, ProxyNode, TupleNode from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode from ..Utils import OrderedSet class FusedCFuncDefNode(StatListNode): """ This node replaces a function with fused arguments. It deep-copies the function for every permutation of fused types, and allocates a new local scope for it. It keeps track of the original function in self.node, and the entry of the original function in the symbol table is given the 'fused_cfunction' attribute which points back to us. Then when a function lookup occurs (to e.g. call it), the call can be dispatched to the right function. node FuncDefNode the original function nodes [FuncDefNode] list of copies of node with different specific types py_func DefNode the fused python function subscriptable from Python space __signatures__ A DictNode mapping signature specialization strings to PyCFunction nodes resulting_fused_function PyCFunction for the fused DefNode that delegates to specializations fused_func_assignment Assignment of the fused function to the function name defaults_tuple TupleNode of defaults (letting PyCFunctionNode build defaults would result in many different tuples) specialized_pycfuncs List of synthesized pycfunction nodes for the specializations code_object CodeObjectNode shared by all specializations and the fused function fused_compound_types All fused (compound) types (e.g. 
floating[:]) """ __signatures__ = None resulting_fused_function = None fused_func_assignment = None defaults_tuple = None decorators = None child_attrs = StatListNode.child_attrs + [ '__signatures__', 'resulting_fused_function', 'fused_func_assignment'] def __init__(self, node, env): super(FusedCFuncDefNode, self).__init__(node.pos) self.nodes = [] self.node = node is_def = isinstance(self.node, DefNode) if is_def: # self.node.decorators = [] self.copy_def(env) else: self.copy_cdef(env) # Perform some sanity checks. If anything fails, it's a bug for n in self.nodes: assert not n.entry.type.is_fused assert not n.local_scope.return_type.is_fused if node.return_type.is_fused: assert not n.return_type.is_fused if not is_def and n.cfunc_declarator.optional_arg_count: assert n.type.op_arg_struct node.entry.fused_cfunction = self # Copy the nodes as AnalyseDeclarationsTransform will prepend # self.py_func to self.stats, as we only want specialized # CFuncDefNodes in self.nodes self.stats = self.nodes[:] def copy_def(self, env): """ Create a copy of the original def or lambda function for specialized versions. """ fused_compound_types = PyrexTypes.unique( [arg.type for arg in self.node.args if arg.type.is_fused]) fused_types = self._get_fused_base_types(fused_compound_types) permutations = PyrexTypes.get_all_specialized_permutations(fused_types) self.fused_compound_types = fused_compound_types if self.node.entry in env.pyfunc_entries: env.pyfunc_entries.remove(self.node.entry) for cname, fused_to_specific in permutations: copied_node = copy.deepcopy(self.node) # keep signature object identity for special casing in DefNode.analyse_declarations() copied_node.entry.signature = self.node.entry.signature self._specialize_function_args(copied_node.args, fused_to_specific) copied_node.return_type = self.node.return_type.specialize( fused_to_specific) copied_node.analyse_declarations(env) # copied_node.is_staticmethod = self.node.is_staticmethod # copied_node.is_classmethod = self.node.is_classmethod self.create_new_local_scope(copied_node, env, fused_to_specific) self.specialize_copied_def(copied_node, cname, self.node.entry, fused_to_specific, fused_compound_types) PyrexTypes.specialize_entry(copied_node.entry, cname) copied_node.entry.used = True env.entries[copied_node.entry.name] = copied_node.entry if not self.replace_fused_typechecks(copied_node): break self.orig_py_func = self.node self.py_func = self.make_fused_cpdef(self.node, env, is_def=True) def copy_cdef(self, env): """ Create a copy of the original c(p)def function for all specialized versions. 
""" permutations = self.node.type.get_all_specialized_permutations() # print 'Node %s has %d specializations:' % (self.node.entry.name, # len(permutations)) # import pprint; pprint.pprint([d for cname, d in permutations]) if self.node.entry in env.cfunc_entries: env.cfunc_entries.remove(self.node.entry) # Prevent copying of the python function self.orig_py_func = orig_py_func = self.node.py_func self.node.py_func = None if orig_py_func: env.pyfunc_entries.remove(orig_py_func.entry) fused_types = self.node.type.get_fused_types() self.fused_compound_types = fused_types for cname, fused_to_specific in permutations: copied_node = copy.deepcopy(self.node) # Make the types in our CFuncType specific type = copied_node.type.specialize(fused_to_specific) entry = copied_node.entry copied_node.type = type entry.type, type.entry = type, entry entry.used = (entry.used or self.node.entry.defined_in_pxd or env.is_c_class_scope or entry.is_cmethod) if self.node.cfunc_declarator.optional_arg_count: self.node.cfunc_declarator.declare_optional_arg_struct( type, env, fused_cname=cname) copied_node.return_type = type.return_type self.create_new_local_scope(copied_node, env, fused_to_specific) # Make the argument types in the CFuncDeclarator specific self._specialize_function_args(copied_node.cfunc_declarator.args, fused_to_specific) type.specialize_entry(entry, cname) env.cfunc_entries.append(entry) # If a cpdef, declare all specialized cpdefs (this # also calls analyse_declarations) copied_node.declare_cpdef_wrapper(env) if copied_node.py_func: env.pyfunc_entries.remove(copied_node.py_func.entry) self.specialize_copied_def( copied_node.py_func, cname, self.node.entry.as_variable, fused_to_specific, fused_types) if not self.replace_fused_typechecks(copied_node): break if orig_py_func: self.py_func = self.make_fused_cpdef(orig_py_func, env, is_def=False) else: self.py_func = orig_py_func def _get_fused_base_types(self, fused_compound_types): """ Get a list of unique basic fused types, from a list of (possibly) compound fused types. """ base_types = [] seen = set() for fused_type in fused_compound_types: fused_type.get_fused_types(result=base_types, seen=seen) return base_types def _specialize_function_args(self, args, fused_to_specific): for arg in args: if arg.type.is_fused: arg.type = arg.type.specialize(fused_to_specific) if arg.type.is_memoryviewslice: arg.type.validate_memslice_dtype(arg.pos) def create_new_local_scope(self, node, env, f2s): """ Create a new local scope for the copied node and append it to self.nodes. A new local scope is needed because the arguments with the fused types are aready in the local scope, and we need the specialized entries created after analyse_declarations on each specialized version of the (CFunc)DefNode. 
f2s is a dict mapping each fused type to its specialized version """ node.create_local_scope(env) node.local_scope.fused_to_specific = f2s # This is copied from the original function, set it to false to # stop recursion node.has_fused_arguments = False self.nodes.append(node) def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types): """Specialize the copy of a DefNode given the copied node, the specialization cname and the original DefNode entry""" fused_types = self._get_fused_base_types(fused_compound_types) type_strings = [ PyrexTypes.specialization_signature_string(fused_type, f2s) for fused_type in fused_types ] node.specialized_signature_string = '|'.join(type_strings) node.entry.pymethdef_cname = PyrexTypes.get_fused_cname( cname, node.entry.pymethdef_cname) node.entry.doc = py_entry.doc node.entry.doc_cname = py_entry.doc_cname def replace_fused_typechecks(self, copied_node): """ Branch-prune fused type checks like if fused_t is int: ... Returns whether an error was issued and whether we should stop in in order to prevent a flood of errors. """ num_errors = Errors.num_errors transform = ParseTreeTransforms.ReplaceFusedTypeChecks( copied_node.local_scope) transform(copied_node) if Errors.num_errors > num_errors: return False return True def _fused_instance_checks(self, normal_types, pyx_code, env): """ Genereate Cython code for instance checks, matching an object to specialized types. """ for specialized_type in normal_types: # all_numeric = all_numeric and specialized_type.is_numeric pyx_code.context.update( py_type_name=specialized_type.py_type_name(), specialized_type_name=specialized_type.specialization_string, ) pyx_code.put_chunk( u""" if isinstance(arg, {{py_type_name}}): dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break """) def _dtype_name(self, dtype): if dtype.is_typedef: return '___pyx_%s' % dtype return str(dtype).replace(' ', '_') def _dtype_type(self, dtype): if dtype.is_typedef: return self._dtype_name(dtype) return str(dtype) def _sizeof_dtype(self, dtype): if dtype.is_pyobject: return 'sizeof(void *)' else: return "sizeof(%s)" % self._dtype_type(dtype) def _buffer_check_numpy_dtype_setup_cases(self, pyx_code): "Setup some common cases to match dtypes against specializations" if pyx_code.indenter("if kind in b'iu':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_int") pyx_code.dedent() if pyx_code.indenter("elif kind == b'f':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_float") pyx_code.dedent() if pyx_code.indenter("elif kind == b'c':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_complex") pyx_code.dedent() if pyx_code.indenter("elif kind == b'O':"): pyx_code.putln("pass") pyx_code.named_insertion_point("dtype_object") pyx_code.dedent() match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'" no_match = "dest_sig[{{dest_sig_idx}}] = None" def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types): """ Match a numpy dtype object to the individual specializations. 
""" self._buffer_check_numpy_dtype_setup_cases(pyx_code) for specialized_type in pythran_types+specialized_buffer_types: final_type = specialized_type if specialized_type.is_pythran_expr: specialized_type = specialized_type.org_buffer dtype = specialized_type.dtype pyx_code.context.update( itemsize_match=self._sizeof_dtype(dtype) + " == itemsize", signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype), dtype=dtype, specialized_type_name=final_type.specialization_string) dtypes = [ (dtype.is_int, pyx_code.dtype_int), (dtype.is_float, pyx_code.dtype_float), (dtype.is_complex, pyx_code.dtype_complex) ] for dtype_category, codewriter in dtypes: if dtype_category: cond = '{{itemsize_match}} and (arg.ndim) == %d' % ( specialized_type.ndim,) if dtype.is_int: cond += ' and {{signed_match}}' if final_type.is_pythran_expr: cond += ' and arg_is_pythran_compatible' if codewriter.indenter("if %s:" % cond): #codewriter.putln("print 'buffer match found based on numpy dtype'") codewriter.putln(self.match) codewriter.putln("break") codewriter.dedent() def _buffer_parse_format_string_check(self, pyx_code, decl_code, specialized_type, env): """ For each specialized type, try to coerce the object to a memoryview slice of that type. This means obtaining a buffer and parsing the format string. TODO: separate buffer acquisition from format parsing """ dtype = specialized_type.dtype if specialized_type.is_buffer: axes = [('direct', 'strided')] * specialized_type.ndim else: axes = specialized_type.axes memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes) memslice_type.create_from_py_utility_code(env) pyx_code.context.update( coerce_from_py_func=memslice_type.from_py_function, dtype=dtype) decl_code.putln( "{{memviewslice_cname}} {{coerce_from_py_func}}(object)") pyx_code.context.update( specialized_type_name=specialized_type.specialization_string, sizeof_dtype=self._sizeof_dtype(dtype)) pyx_code.put_chunk( u""" # try {{dtype}} if itemsize == -1 or itemsize == {{sizeof_dtype}}: memslice = {{coerce_from_py_func}}(arg) if memslice.memview: __PYX_XDEC_MEMVIEW(&memslice, 1) # print 'found a match for the buffer through format parsing' %s break else: __pyx_PyErr_Clear() """ % self.match) def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env): """ Generate Cython code to match objects to buffer specializations. First try to get a numpy dtype object and match it against the individual specializations. If that fails, try naively to coerce the object to each specialization, which obtains the buffer each time and tries to match the format string. 
""" # The first thing to find a match in this loop breaks out of the loop pyx_code.put_chunk( u""" if ndarray is not None: if isinstance(arg, ndarray): dtype = arg.dtype arg_is_pythran_compatible = True elif __pyx_memoryview_check(arg): arg_base = arg.base if isinstance(arg_base, ndarray): dtype = arg_base.dtype else: dtype = None else: dtype = None itemsize = -1 if dtype is not None: itemsize = dtype.itemsize kind = ord(dtype.kind) # We only support the endianess of the current compiler byteorder = dtype.byteorder if byteorder == "<" and not __Pyx_Is_Little_Endian(): arg_is_pythran_compatible = False if byteorder == ">" and __Pyx_Is_Little_Endian(): arg_is_pythran_compatible = False dtype_signed = kind == 'i' if arg_is_pythran_compatible: cur_stride = itemsize for dim,stride in zip(reversed(arg.shape),reversed(arg.strides)): if stride != cur_stride: arg_is_pythran_compatible = False break cur_stride *= dim else: arg_is_pythran_compatible = not (arg.flags.f_contiguous and arg.ndim > 1) """) pyx_code.indent(2) pyx_code.named_insertion_point("numpy_dtype_checks") self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types) pyx_code.dedent(2) for specialized_type in buffer_types: self._buffer_parse_format_string_check( pyx_code, decl_code, specialized_type, env) def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types): """ If we have any buffer specializations, write out some variable declarations and imports. """ decl_code.put_chunk( u""" ctypedef struct {{memviewslice_cname}}: void *memview void __PYX_XDEC_MEMVIEW({{memviewslice_cname}} *, int have_gil) bint __pyx_memoryview_check(object) """) pyx_code.local_variable_declarations.put_chunk( u""" cdef {{memviewslice_cname}} memslice cdef Py_ssize_t itemsize cdef bint dtype_signed cdef char kind cdef bint arg_is_pythran_compatible itemsize = -1 arg_is_pythran_compatible = False """) pyx_code.imports.put_chunk( u""" cdef type ndarray ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable() """) seen_int_dtypes = set() for buffer_type in all_buffer_types: dtype = buffer_type.dtype if dtype.is_typedef: #decl_code.putln("ctypedef %s %s" % (dtype.resolve(), # self._dtype_name(dtype))) decl_code.putln('ctypedef %s %s "%s"' % (dtype.resolve(), self._dtype_name(dtype), dtype.empty_declaration_code())) if buffer_type.dtype.is_int: if str(dtype) not in seen_int_dtypes: seen_int_dtypes.add(str(dtype)) pyx_code.context.update(dtype_name=self._dtype_name(dtype), dtype_type=self._dtype_type(dtype)) pyx_code.local_variable_declarations.put_chunk( u""" cdef bint {{dtype_name}}_is_signed {{dtype_name}}_is_signed = <{{dtype_type}}> -1 < 0 """) def _split_fused_types(self, arg): """ Specialize fused types and split into normal types and buffer types. 
""" specialized_types = PyrexTypes.get_specialized_types(arg.type) # Prefer long over int, etc by sorting (see type classes in PyrexTypes.py) specialized_types.sort() seen_py_type_names = set() normal_types, buffer_types, pythran_types = [], [], [] has_object_fallback = False for specialized_type in specialized_types: py_type_name = specialized_type.py_type_name() if py_type_name: if py_type_name in seen_py_type_names: continue seen_py_type_names.add(py_type_name) if py_type_name == 'object': has_object_fallback = True else: normal_types.append(specialized_type) elif specialized_type.is_pythran_expr: pythran_types.append(specialized_type) elif specialized_type.is_buffer or specialized_type.is_memoryviewslice: buffer_types.append(specialized_type) return normal_types, buffer_types, pythran_types, has_object_fallback def _unpack_argument(self, pyx_code): pyx_code.put_chunk( u""" # PROCESSING ARGUMENT {{arg_tuple_idx}} if {{arg_tuple_idx}} < len(args): arg = (args)[{{arg_tuple_idx}}] elif kwargs is not None and '{{arg.name}}' in kwargs: arg = (kwargs)['{{arg.name}}'] else: {{if arg.default}} arg = (defaults)[{{default_idx}}] {{else}} {{if arg_tuple_idx < min_positional_args}} raise TypeError("Expected at least %d argument%s, got %d" % ( {{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(args))) {{else}} raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}") {{endif}} {{endif}} """) def make_fused_cpdef(self, orig_py_func, env, is_def): """ This creates the function that is indexable from Python and does runtime dispatch based on the argument types. The function gets the arg tuple and kwargs dict (or None) and the defaults tuple as arguments from the Binding Fused Function's tp_call. """ from . import TreeFragment, Code, UtilityCode fused_types = self._get_fused_base_types([ arg.type for arg in self.node.args if arg.type.is_fused]) context = { 'memviewslice_cname': MemoryView.memviewslice_cname, 'func_args': self.node.args, 'n_fused': len(fused_types), 'min_positional_args': self.node.num_required_args - self.node.num_required_kw_args if is_def else sum(1 for arg in self.node.args if arg.default is None), 'name': orig_py_func.entry.name, } pyx_code = Code.PyxCodeWriter(context=context) decl_code = Code.PyxCodeWriter(context=context) decl_code.put_chunk( u""" cdef extern from *: void __pyx_PyErr_Clear "PyErr_Clear" () type __Pyx_ImportNumPyArrayTypeIfAvailable() int __Pyx_Is_Little_Endian() """) decl_code.indent() pyx_code.put_chunk( u""" def __pyx_fused_cpdef(signatures, args, kwargs, defaults): # FIXME: use a typed signature - currently fails badly because # default arguments inherit the types we specify here! 
dest_sig = [None] * {{n_fused}} if kwargs is not None and not kwargs: kwargs = None cdef Py_ssize_t i # instance check body """) pyx_code.indent() # indent following code to function body pyx_code.named_insertion_point("imports") pyx_code.named_insertion_point("func_defs") pyx_code.named_insertion_point("local_variable_declarations") fused_index = 0 default_idx = 0 all_buffer_types = OrderedSet() seen_fused_types = set() for i, arg in enumerate(self.node.args): if arg.type.is_fused: arg_fused_types = arg.type.get_fused_types() if len(arg_fused_types) > 1: raise NotImplementedError("Determination of more than one fused base " "type per argument is not implemented.") fused_type = arg_fused_types[0] if arg.type.is_fused and fused_type not in seen_fused_types: seen_fused_types.add(fused_type) context.update( arg_tuple_idx=i, arg=arg, dest_sig_idx=fused_index, default_idx=default_idx, ) normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg) self._unpack_argument(pyx_code) # 'unrolled' loop, first match breaks out of it if pyx_code.indenter("while 1:"): if normal_types: self._fused_instance_checks(normal_types, pyx_code, env) if buffer_types or pythran_types: env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c")) self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env) if has_object_fallback: pyx_code.context.update(specialized_type_name='object') pyx_code.putln(self.match) else: pyx_code.putln(self.no_match) pyx_code.putln("break") pyx_code.dedent() fused_index += 1 all_buffer_types.update(buffer_types) all_buffer_types.update(ty.org_buffer for ty in pythran_types) if arg.default: default_idx += 1 if all_buffer_types: self._buffer_declarations(pyx_code, decl_code, all_buffer_types) env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c")) env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c")) pyx_code.put_chunk( u""" candidates = [] for sig in signatures: match_found = False src_sig = sig.strip('()').split('|') for i in range(len(dest_sig)): dst_type = dest_sig[i] if dst_type is not None: if src_sig[i] == dst_type: match_found = True else: match_found = False break if match_found: candidates.append(sig) if not candidates: raise TypeError("No matching signature found") elif len(candidates) > 1: raise TypeError("Function call with ambiguous argument types") else: return (signatures)[candidates[0]] """) fragment_code = pyx_code.getvalue() # print decl_code.getvalue() # print fragment_code from .Optimize import ConstantFolding fragment = TreeFragment.TreeFragment( fragment_code, level='module', pipeline=[ConstantFolding()]) ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root) UtilityCode.declare_declarations_in_scope( decl_code.getvalue(), env.global_scope()) ast.scope = env # FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self' ast.analyse_declarations(env) py_func = ast.stats[-1] # the DefNode self.fragment_scope = ast.scope if isinstance(self.node, DefNode): py_func.specialized_cpdefs = self.nodes[:] else: py_func.specialized_cpdefs = [n.py_func for n in self.nodes] return py_func def update_fused_defnode_entry(self, env): copy_attributes = ( 'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname', 'pymethdef_cname', 'doc', 'doc_cname', 'is_member', 'scope' ) entry = self.py_func.entry for attr in copy_attributes: setattr(entry, attr, getattr(self.orig_py_func.entry, attr)) self.py_func.name = 
self.orig_py_func.name self.py_func.doc = self.orig_py_func.doc env.entries.pop('__pyx_fused_cpdef', None) if isinstance(self.node, DefNode): env.entries[entry.name] = entry else: env.entries[entry.name].as_variable = entry env.pyfunc_entries.append(entry) self.py_func.entry.fused_cfunction = self for node in self.nodes: if isinstance(self.node, DefNode): node.fused_py_func = self.py_func else: node.py_func.fused_py_func = self.py_func node.entry.as_variable = entry self.synthesize_defnodes() self.stats.append(self.__signatures__) def analyse_expressions(self, env): """ Analyse the expressions. Take care to only evaluate default arguments once and clone the result for all specializations """ for fused_compound_type in self.fused_compound_types: for fused_type in fused_compound_type.get_fused_types(): for specialization_type in fused_type.types: if specialization_type.is_complex: specialization_type.create_declaration_utility_code(env) if self.py_func: self.__signatures__ = self.__signatures__.analyse_expressions(env) self.py_func = self.py_func.analyse_expressions(env) self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env) self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env) self.defaults = defaults = [] for arg in self.node.args: if arg.default: arg.default = arg.default.analyse_expressions(env) defaults.append(ProxyNode(arg.default)) else: defaults.append(None) for i, stat in enumerate(self.stats): stat = self.stats[i] = stat.analyse_expressions(env) if isinstance(stat, FuncDefNode): for arg, default in zip(stat.args, defaults): if default is not None: arg.default = CloneNode(default).coerce_to(arg.type, env) if self.py_func: args = [CloneNode(default) for default in defaults if default] self.defaults_tuple = TupleNode(self.pos, args=args) self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env) self.defaults_tuple = ProxyNode(self.defaults_tuple) self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object) fused_func = self.resulting_fused_function.arg fused_func.defaults_tuple = CloneNode(self.defaults_tuple) fused_func.code_object = CloneNode(self.code_object) for i, pycfunc in enumerate(self.specialized_pycfuncs): pycfunc.code_object = CloneNode(self.code_object) pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env) pycfunc.defaults_tuple = CloneNode(self.defaults_tuple) return self def synthesize_defnodes(self): """ Create the __signatures__ dict of PyCFunctionNode specializations. 
""" if isinstance(self.nodes[0], CFuncDefNode): nodes = [node.py_func for node in self.nodes] else: nodes = self.nodes signatures = [StringEncoding.EncodedString(node.specialized_signature_string) for node in nodes] keys = [ExprNodes.StringNode(node.pos, value=sig) for node, sig in zip(nodes, signatures)] values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True) for node in nodes] self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values)) self.specialized_pycfuncs = values for pycfuncnode in values: pycfuncnode.is_specialization = True def generate_function_definitions(self, env, code): if self.py_func: self.py_func.pymethdef_required = True self.fused_func_assignment.generate_function_definitions(env, code) for stat in self.stats: if isinstance(stat, FuncDefNode) and stat.entry.used: code.mark_pos(stat.pos) stat.generate_function_definitions(env, code) def generate_execution_code(self, code): # Note: all def function specialization are wrapped in PyCFunction # nodes in the self.__signatures__ dictnode. for default in self.defaults: if default is not None: default.generate_evaluation_code(code) if self.py_func: self.defaults_tuple.generate_evaluation_code(code) self.code_object.generate_evaluation_code(code) for stat in self.stats: code.mark_pos(stat.pos) if isinstance(stat, ExprNodes.ExprNode): stat.generate_evaluation_code(code) else: stat.generate_execution_code(code) if self.__signatures__: self.resulting_fused_function.generate_evaluation_code(code) code.putln( "((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" % (self.resulting_fused_function.result(), self.__signatures__.result())) code.put_giveref(self.__signatures__.result()) self.fused_func_assignment.generate_execution_code(code) # Dispose of results self.resulting_fused_function.generate_disposal_code(code) self.defaults_tuple.generate_disposal_code(code) self.code_object.generate_disposal_code(code) for default in self.defaults: if default is not None: default.generate_disposal_code(code) def annotate(self, code): for stat in self.stats: stat.annotate(code) Cython-0.26.1/Cython/Compiler/FlowControl.pxd0000664000175000017500000000564212574327400021646 0ustar stefanstefan00000000000000from __future__ import absolute_import cimport cython from .Visitor cimport CythonTransform, TreeVisitor cdef class ControlBlock: cdef public set children cdef public set parents cdef public set positions cdef public list stats cdef public dict gen cdef public set bounded cdef public dict input cdef public dict output # Big integer it bitsets cdef public object i_input cdef public object i_output cdef public object i_gen cdef public object i_kill cdef public object i_state cpdef bint empty(self) cpdef detach(self) cpdef add_child(self, block) cdef class ExitBlock(ControlBlock): cpdef bint empty(self) cdef class NameAssignment: cdef public bint is_arg cdef public bint is_deletion cdef public object lhs cdef public object rhs cdef public object entry cdef public object pos cdef public set refs cdef public object bit cdef public object inferred_type cdef class AssignmentList: cdef public object bit cdef public object mask cdef public list stats cdef class AssignmentCollector(TreeVisitor): cdef list assignments @cython.final cdef class ControlFlow: cdef public set blocks cdef public set entries cdef public list loops cdef public list exceptions cdef public ControlBlock entry_point cdef public ExitBlock exit_point cdef public ControlBlock block cdef public dict assmts cpdef newblock(self, ControlBlock parent=*) 
cpdef nextblock(self, ControlBlock parent=*) cpdef bint is_tracked(self, entry) cpdef bint is_statically_assigned(self, entry) cpdef mark_position(self, node) cpdef mark_assignment(self, lhs, rhs, entry) cpdef mark_argument(self, lhs, rhs, entry) cpdef mark_deletion(self, node, entry) cpdef mark_reference(self, node, entry) @cython.locals(block=ControlBlock, parent=ControlBlock, unreachable=set) cpdef normalize(self) @cython.locals(bit=object, assmts=AssignmentList, block=ControlBlock) cpdef initialize(self) @cython.locals(assmts=AssignmentList, assmt=NameAssignment) cpdef set map_one(self, istate, entry) @cython.locals(block=ControlBlock, parent=ControlBlock) cdef reaching_definitions(self) cdef class Uninitialized: pass cdef class Unknown: pass cdef class MessageCollection: cdef set messages @cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock, assmt=NameAssignment) cdef check_definitions(ControlFlow flow, dict compiler_directives) @cython.final cdef class ControlFlowAnalysis(CythonTransform): cdef object gv_ctx cdef object constant_folder cdef set reductions cdef list env_stack cdef list stack cdef object env cdef ControlFlow flow cdef bint in_inplace_assignment cpdef mark_assignment(self, lhs, rhs=*) cpdef mark_position(self, node) Cython-0.26.1/Cython/Compiler/Errors.py0000664000175000017500000001621313143605603020500 0ustar stefanstefan00000000000000# # Errors # from __future__ import absolute_import try: from __builtin__ import basestring as any_string_type except ImportError: any_string_type = (bytes, str) import sys from ..Utils import open_new_file from . import DebugFlags from . import Options class PyrexError(Exception): pass class PyrexWarning(Exception): pass def context(position): source = position[0] assert not (isinstance(source, any_string_type)), ( "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source) try: F = source.get_lines() except UnicodeDecodeError: # file has an encoding problem s = u"[unprintable code]\n" else: s = u''.join(F[max(0, position[1]-6):position[1]]) s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1)) s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60) return s def format_position(position): if position: return u"%s:%d:%d: " % (position[0].get_error_description(), position[1], position[2]) return u'' def format_error(message, position): if position: pos_str = format_position(position) cont = context(position) message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'') return message class CompileError(PyrexError): def __init__(self, position = None, message = u""): self.position = position self.message_only = message self.formatted_message = format_error(message, position) self.reported = False # Deprecated and withdrawn in 2.6: # self.message = message Exception.__init__(self, self.formatted_message) # Python Exception subclass pickling is broken, # see http://bugs.python.org/issue1692335 self.args = (position, message) def __str__(self): return self.formatted_message class CompileWarning(PyrexWarning): def __init__(self, position = None, message = ""): self.position = position # Deprecated and withdrawn in 2.6: # self.message = message Exception.__init__(self, format_position(position) + message) class InternalError(Exception): # If this is ever raised, there is a bug in the compiler. def __init__(self, message): self.message_only = message Exception.__init__(self, u"Internal compiler error: %s" % message) class AbortError(Exception): # Throw this to stop the compilation immediately. 
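    # Hedged note (added, not in the original source): report_error() later in
    # this module raises AbortError when Options.fast_fail is enabled, so the
    # first reported error stops the whole compilation run. Illustrative only,
    # the position tuple and message are made up:
    #
    #     from Cython.Compiler import Options, Errors
    #     Options.fast_fail = True
    #     Errors.error(pos, "some message")   # report_error() raises AbortError("fatal errors")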
def __init__(self, message): self.message_only = message Exception.__init__(self, u"Abort error: %s" % message) class CompilerCrash(CompileError): # raised when an unexpected exception occurs in a transform def __init__(self, pos, context, message, cause, stacktrace=None): if message: message = u'\n' + message else: message = u'\n' self.message_only = message if context: message = u"Compiler crash in %s%s" % (context, message) if stacktrace: import traceback message += ( u'\n\nCompiler crash traceback from this point on:\n' + u''.join(traceback.format_tb(stacktrace))) if cause: if not stacktrace: message += u'\n' message += u'%s: %s' % (cause.__class__.__name__, cause) CompileError.__init__(self, pos, message) # Python Exception subclass pickling is broken, # see http://bugs.python.org/issue1692335 self.args = (pos, context, message, cause, stacktrace) class NoElementTreeInstalledException(PyrexError): """raised when the user enabled options.gdb_debug but no ElementTree implementation was found """ listing_file = None num_errors = 0 echo_file = None def open_listing_file(path, echo_to_stderr = 1): # Begin a new error listing. If path is None, no file # is opened, the error counter is just reset. global listing_file, num_errors, echo_file if path is not None: listing_file = open_new_file(path) else: listing_file = None if echo_to_stderr: echo_file = sys.stderr else: echo_file = None num_errors = 0 def close_listing_file(): global listing_file if listing_file: listing_file.close() listing_file = None def report_error(err, use_stack=True): if error_stack and use_stack: error_stack[-1].append(err) else: global num_errors # See Main.py for why dual reporting occurs. Quick fix for now. if err.reported: return err.reported = True try: line = u"%s\n" % err except UnicodeEncodeError: # Python <= 2.5 does this for non-ASCII Unicode exceptions line = format_error(getattr(err, 'message_only', "[unprintable exception message]"), getattr(err, 'position', None)) + u'\n' if listing_file: try: listing_file.write(line) except UnicodeEncodeError: listing_file.write(line.encode('ASCII', 'replace')) if echo_file: try: echo_file.write(line) except UnicodeEncodeError: echo_file.write(line.encode('ASCII', 'replace')) num_errors += 1 if Options.fast_fail: raise AbortError("fatal errors") def error(position, message): #print("Errors.error:", repr(position), repr(message)) ### if position is None: raise InternalError(message) err = CompileError(position, message) if DebugFlags.debug_exception_on_error: raise Exception(err) # debug report_error(err) return err LEVEL = 1 # warn about all errors level 1 or higher def message(position, message, level=1): if level < LEVEL: return warn = CompileWarning(position, message) line = "note: %s\n" % warn if listing_file: listing_file.write(line) if echo_file: echo_file.write(line) return warn def warning(position, message, level=0): if level < LEVEL: return if Options.warning_errors and position: return error(position, message) warn = CompileWarning(position, message) line = "warning: %s\n" % warn if listing_file: listing_file.write(line) if echo_file: echo_file.write(line) return warn _warn_once_seen = {} def warn_once(position, message, level=0): if level < LEVEL or message in _warn_once_seen: return warn = CompileWarning(position, message) line = "warning: %s\n" % warn if listing_file: listing_file.write(line) if echo_file: echo_file.write(line) _warn_once_seen[message] = True return warn # These functions can be used to momentarily suppress errors. 
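# Hedged usage sketch (illustrative, not part of the original module): the
# hold/release helpers defined below let a caller attempt something
# speculatively and decide afterwards whether the collected errors should be
# reported or silently dropped:
#
#     hold_errors()
#     # ... run some speculative analysis that may call error() ...
#     if held_errors():
#         release_errors(ignore=True)   # discard held errors, try another strategy
#     else:
#         release_errors()              # nothing was held; report as usual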
error_stack = [] def hold_errors(): error_stack.append([]) def release_errors(ignore=False): held_errors = error_stack.pop() if not ignore: for err in held_errors: report_error(err) def held_errors(): return error_stack[-1] # this module needs a redesign to support parallel cythonisation, but # for now, the following works at least in sequential compiler runs def reset(): _warn_once_seen.clear() del error_stack[:] Cython-0.26.1/Cython/Compiler/TypeSlots.py0000664000175000017500000010577713150045407021206 0ustar stefanstefan00000000000000# # Tables describing slots in the CPython type object # and associated know-how. # from __future__ import absolute_import from . import Naming from . import PyrexTypes from .Errors import error invisible = ['__cinit__', '__dealloc__', '__richcmp__', '__nonzero__', '__bool__'] class Signature(object): # Method slot signature descriptor. # # has_dummy_arg boolean # has_generic_args boolean # fixed_arg_format string # ret_format string # error_value string # # The formats are strings made up of the following # characters: # # 'O' Python object # 'T' Python object of the type of 'self' # 'v' void # 'p' void * # 'P' void ** # 'i' int # 'b' bint # 'I' int * # 'l' long # 'f' float # 'd' double # 'h' Py_hash_t # 'z' Py_ssize_t # 'Z' Py_ssize_t * # 's' char * # 'S' char ** # 'r' int used only to signal exception # 'B' Py_buffer * # '-' dummy 'self' argument (not used) # '*' rest of args passed as generic Python # arg tuple and kw dict (must be last # char in format string) format_map = { 'O': PyrexTypes.py_object_type, 'v': PyrexTypes.c_void_type, 'p': PyrexTypes.c_void_ptr_type, 'P': PyrexTypes.c_void_ptr_ptr_type, 'i': PyrexTypes.c_int_type, 'b': PyrexTypes.c_bint_type, 'I': PyrexTypes.c_int_ptr_type, 'l': PyrexTypes.c_long_type, 'f': PyrexTypes.c_float_type, 'd': PyrexTypes.c_double_type, 'h': PyrexTypes.c_py_hash_t_type, 'z': PyrexTypes.c_py_ssize_t_type, 'Z': PyrexTypes.c_py_ssize_t_ptr_type, 's': PyrexTypes.c_char_ptr_type, 'S': PyrexTypes.c_char_ptr_ptr_type, 'r': PyrexTypes.c_returncode_type, 'B': PyrexTypes.c_py_buffer_ptr_type, # 'T', '-' and '*' are handled otherwise # and are not looked up in here } type_to_format_map = dict( (type_, format_) for format_, type_ in format_map.items()) error_value_map = { 'O': "NULL", 'T': "NULL", 'i': "-1", 'b': "-1", 'l': "-1", 'r': "-1", 'h': "-1", 'z': "-1", } def __init__(self, arg_format, ret_format): self.has_dummy_arg = 0 self.has_generic_args = 0 if arg_format[:1] == '-': self.has_dummy_arg = 1 arg_format = arg_format[1:] if arg_format[-1:] == '*': self.has_generic_args = 1 arg_format = arg_format[:-1] self.fixed_arg_format = arg_format self.ret_format = ret_format self.error_value = self.error_value_map.get(ret_format, None) self.exception_check = ret_format != 'r' and self.error_value is not None self.is_staticmethod = False def __repr__(self): return '' % ( self.ret_format, ', '.join(self.fixed_arg_format), '*' if self.has_generic_args else '') def num_fixed_args(self): return len(self.fixed_arg_format) def is_self_arg(self, i): # argument is 'self' for methods or 'class' for classmethods return self.fixed_arg_format[i] == 'T' def returns_self_type(self): # return type is same as 'self' argument type return self.ret_format == 'T' def fixed_arg_type(self, i): return self.format_map[self.fixed_arg_format[i]] def return_type(self): return self.format_map[self.ret_format] def format_from_type(self, arg_type): if arg_type.is_pyobject: arg_type = PyrexTypes.py_object_type return self.type_to_format_map[arg_type] def 
exception_value(self): return self.error_value_map.get(self.ret_format) def function_type(self, self_arg_override=None): # Construct a C function type descriptor for this signature args = [] for i in range(self.num_fixed_args()): if self_arg_override is not None and self.is_self_arg(i): assert isinstance(self_arg_override, PyrexTypes.CFuncTypeArg) args.append(self_arg_override) else: arg_type = self.fixed_arg_type(i) args.append(PyrexTypes.CFuncTypeArg("", arg_type, None)) if self_arg_override is not None and self.returns_self_type(): ret_type = self_arg_override.type else: ret_type = self.return_type() exc_value = self.exception_value() return PyrexTypes.CFuncType( ret_type, args, exception_value=exc_value, exception_check=self.exception_check) def method_flags(self): if self.ret_format == "O": full_args = self.fixed_arg_format if self.has_dummy_arg: full_args = "O" + full_args if full_args in ["O", "T"]: if self.has_generic_args: return [method_varargs, method_keywords] else: return [method_noargs] elif full_args in ["OO", "TO"] and not self.has_generic_args: return [method_onearg] if self.is_staticmethod: return [method_varargs, method_keywords] return None class SlotDescriptor(object): # Abstract base class for type slot descriptors. # # slot_name string Member name of the slot in the type object # is_initialised_dynamically Is initialised by code in the module init function # is_inherited Is inherited by subtypes (see PyType_Ready()) # py3 Indicates presence of slot in Python 3 # py2 Indicates presence of slot in Python 2 # ifdef Full #ifdef string that slot is wrapped in. Using this causes py3, py2 and flags to be ignored.) def __init__(self, slot_name, dynamic=False, inherited=False, py3=True, py2=True, ifdef=None): self.slot_name = slot_name self.is_initialised_dynamically = dynamic self.is_inherited = inherited self.ifdef = ifdef self.py3 = py3 self.py2 = py2 def preprocessor_guard_code(self): ifdef = self.ifdef py2 = self.py2 py3 = self.py3 guard = None if ifdef: guard = ("#if %s" % ifdef) elif not py3 or py3 == '': guard = ("#if PY_MAJOR_VERSION < 3") elif not py2: guard = ("#if PY_MAJOR_VERSION >= 3") return guard def generate(self, scope, code): preprocessor_guard = self.preprocessor_guard_code() if preprocessor_guard: code.putln(preprocessor_guard) end_pypy_guard = False if self.is_initialised_dynamically: value = "0" else: value = self.slot_code(scope) if value == "0" and self.is_inherited: # PyPy currently has a broken PyType_Ready() that fails to # inherit some slots. To work around this, we explicitly # set inherited slots here, but only in PyPy since CPython # handles this better than we do. inherited_value = value current_scope = scope while (inherited_value == "0" and current_scope.parent_type and current_scope.parent_type.base_type and current_scope.parent_type.base_type.scope): current_scope = current_scope.parent_type.base_type.scope inherited_value = self.slot_code(current_scope) if inherited_value != "0": code.putln("#if CYTHON_COMPILING_IN_PYPY") code.putln("%s, /*%s*/" % (inherited_value, self.slot_name)) code.putln("#else") end_pypy_guard = True code.putln("%s, /*%s*/" % (value, self.slot_name)) if end_pypy_guard: code.putln("#endif") if self.py3 == '': code.putln("#else") code.putln("0, /*reserved*/") if preprocessor_guard: code.putln("#endif") # Some C implementations have trouble statically # initialising a global with a pointer to an extern # function, so we initialise some of the type slots # in the module init function instead. 
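    # Hedged illustration (the cnames below are invented, not taken from the
    # original source): for a dynamically initialised slot such as tp_base,
    # generate_dynamic_init_code() emits a plain C assignment into the module
    # init function instead of a static struct initializer, roughly:
    #
    #     __pyx_type_example_MyType.tp_base = &SomeBaseTypeObject;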
def generate_dynamic_init_code(self, scope, code): if self.is_initialised_dynamically: value = self.slot_code(scope) if value != "0": code.putln("%s.%s = %s;" % ( scope.parent_type.typeobj_cname, self.slot_name, value ) ) class FixedSlot(SlotDescriptor): # Descriptor for a type slot with a fixed value. # # value string def __init__(self, slot_name, value, py3=True, py2=True, ifdef=None): SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2, ifdef=ifdef) self.value = value def slot_code(self, scope): return self.value class EmptySlot(FixedSlot): # Descriptor for a type slot whose value is always 0. def __init__(self, slot_name, py3=True, py2=True, ifdef=None): FixedSlot.__init__(self, slot_name, "0", py3=py3, py2=py2, ifdef=ifdef) class MethodSlot(SlotDescriptor): # Type slot descriptor for a user-definable method. # # signature Signature # method_name string The __xxx__ name of the method # alternatives [string] Alternative list of __xxx__ names for the method def __init__(self, signature, slot_name, method_name, fallback=None, py3=True, py2=True, ifdef=None, inherited=True): SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2, ifdef=ifdef, inherited=inherited) self.signature = signature self.slot_name = slot_name self.method_name = method_name self.alternatives = [] method_name_to_slot[method_name] = self # if fallback: self.alternatives.append(fallback) for alt in (self.py2, self.py3): if isinstance(alt, (tuple, list)): slot_name, method_name = alt self.alternatives.append(method_name) method_name_to_slot[method_name] = self def slot_code(self, scope): entry = scope.lookup_here(self.method_name) if entry and entry.func_cname: return entry.func_cname for method_name in self.alternatives: entry = scope.lookup_here(method_name) if entry and entry.func_cname: return entry.func_cname return "0" class InternalMethodSlot(SlotDescriptor): # Type slot descriptor for a method which is always # synthesized by Cython. # # slot_name string Member name of the slot in the type object def __init__(self, slot_name, **kargs): SlotDescriptor.__init__(self, slot_name, **kargs) def slot_code(self, scope): return scope.mangle_internal(self.slot_name) class GCDependentSlot(InternalMethodSlot): # Descriptor for a slot whose value depends on whether # the type participates in GC. def __init__(self, slot_name, **kargs): InternalMethodSlot.__init__(self, slot_name, **kargs) def slot_code(self, scope): if not scope.needs_gc(): return "0" if not scope.has_cyclic_pyobject_attrs: # if the type does not have GC relevant object attributes, it can # delegate GC methods to its parent - iff the parent functions # are defined in the same module parent_type_scope = scope.parent_type.base_type.scope if scope.parent_scope is parent_type_scope.parent_scope: entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name) if entry.visibility != 'extern': return self.slot_code(parent_type_scope) return InternalMethodSlot.slot_code(self, scope) class GCClearReferencesSlot(GCDependentSlot): def slot_code(self, scope): if scope.needs_tp_clear(): return GCDependentSlot.slot_code(self, scope) return "0" class ConstructorSlot(InternalMethodSlot): # Descriptor for tp_new and tp_dealloc. 
def __init__(self, slot_name, method, **kargs): InternalMethodSlot.__init__(self, slot_name, **kargs) self.method = method def slot_code(self, scope): if (self.slot_name != 'tp_new' and scope.parent_type.base_type and not scope.has_pyobject_attrs and not scope.has_memoryview_attrs and not scope.has_cpp_class_attrs and not scope.lookup_here(self.method)): # if the type does not have object attributes, it can # delegate GC methods to its parent - iff the parent # functions are defined in the same module parent_type_scope = scope.parent_type.base_type.scope if scope.parent_scope is parent_type_scope.parent_scope: entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name) if entry.visibility != 'extern': return self.slot_code(parent_type_scope) return InternalMethodSlot.slot_code(self, scope) class SyntheticSlot(InternalMethodSlot): # Type slot descriptor for a synthesized method which # dispatches to one or more user-defined methods depending # on its arguments. If none of the relevant methods are # defined, the method will not be synthesized and an # alternative default value will be placed in the type # slot. def __init__(self, slot_name, user_methods, default_value, **kargs): InternalMethodSlot.__init__(self, slot_name, **kargs) self.user_methods = user_methods self.default_value = default_value def slot_code(self, scope): if scope.defines_any(self.user_methods): return InternalMethodSlot.slot_code(self, scope) else: return self.default_value class TypeFlagsSlot(SlotDescriptor): # Descriptor for the type flags slot. def slot_code(self, scope): value = "Py_TPFLAGS_DEFAULT" if scope.directives['type_version_tag']: # it's not in 'Py_TPFLAGS_DEFAULT' in Py2 value += "|Py_TPFLAGS_HAVE_VERSION_TAG" else: # it's enabled in 'Py_TPFLAGS_DEFAULT' in Py3 value = "(%s&~Py_TPFLAGS_HAVE_VERSION_TAG)" % value value += "|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER" if not scope.parent_type.is_final_type: value += "|Py_TPFLAGS_BASETYPE" if scope.needs_gc(): value += "|Py_TPFLAGS_HAVE_GC" return value class DocStringSlot(SlotDescriptor): # Descriptor for the docstring slot. def slot_code(self, scope): doc = scope.doc if doc is None: return "0" if doc.is_unicode: doc = doc.as_utf8_string() return doc.as_c_string_literal() class SuiteSlot(SlotDescriptor): # Descriptor for a substructure of the type object. # # sub_slots [SlotDescriptor] def __init__(self, sub_slots, slot_type, slot_name, ifdef=None): SlotDescriptor.__init__(self, slot_name, ifdef=ifdef) self.sub_slots = sub_slots self.slot_type = slot_type substructures.append(self) def is_empty(self, scope): for slot in self.sub_slots: if slot.slot_code(scope) != "0": return False return True def substructure_cname(self, scope): return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name) def slot_code(self, scope): if not self.is_empty(scope): return "&%s" % self.substructure_cname(scope) return "0" def generate_substructure(self, scope, code): if not self.is_empty(scope): code.putln("") if self.ifdef: code.putln("#if %s" % self.ifdef) code.putln( "static %s %s = {" % ( self.slot_type, self.substructure_cname(scope))) for slot in self.sub_slots: slot.generate(scope, code) code.putln("};") if self.ifdef: code.putln("#endif") substructures = [] # List of all SuiteSlot instances class MethodTableSlot(SlotDescriptor): # Slot descriptor for the method table. 
def slot_code(self, scope): if scope.pyfunc_entries: return scope.method_table_cname else: return "0" class MemberTableSlot(SlotDescriptor): # Slot descriptor for the table of Python-accessible attributes. def slot_code(self, scope): return "0" class GetSetSlot(SlotDescriptor): # Slot descriptor for the table of attribute get & set methods. def slot_code(self, scope): if scope.property_entries: return scope.getset_table_cname else: return "0" class BaseClassSlot(SlotDescriptor): # Slot descriptor for the base class slot. def __init__(self, name): SlotDescriptor.__init__(self, name, dynamic = 1) def generate_dynamic_init_code(self, scope, code): base_type = scope.parent_type.base_type if base_type: code.putln("%s.%s = %s;" % ( scope.parent_type.typeobj_cname, self.slot_name, base_type.typeptr_cname)) class DictOffsetSlot(SlotDescriptor): # Slot descriptor for a class' dict offset, for dynamic attributes. def slot_code(self, scope): dict_entry = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None if dict_entry and dict_entry.is_variable: if getattr(dict_entry.type, 'cname', None) != 'PyDict_Type': error(dict_entry.pos, "__dict__ slot must be of type 'dict'") return "0" type = scope.parent_type if type.typedef_flag: objstruct = type.objstruct_cname else: objstruct = "struct %s" % type.objstruct_cname return ("offsetof(%s, %s)" % ( objstruct, dict_entry.cname)) else: return "0" # The following dictionary maps __xxx__ method names to slot descriptors. method_name_to_slot = {} ## The following slots are (or could be) initialised with an ## extern function pointer. # #slots_initialised_from_extern = ( # "tp_free", #) #------------------------------------------------------------------------------------------ # # Utility functions for accessing slot table data structures # #------------------------------------------------------------------------------------------ def get_special_method_signature(name): # Given a method name, if it is a special method, # return its signature, else return None. slot = method_name_to_slot.get(name) if slot: return slot.signature else: return None def get_property_accessor_signature(name): # Return signature of accessor for an extension type # property, else None. return property_accessor_signatures.get(name) def get_base_slot_function(scope, slot): # Returns the function implementing this slot in the baseclass. # This is useful for enabling the compiler to optimize calls # that recursively climb the class hierarchy. base_type = scope.parent_type.base_type if scope.parent_scope is base_type.scope.parent_scope: parent_slot = slot.slot_code(base_type.scope) if parent_slot != '0': entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name) if entry.visibility != 'extern': return parent_slot return None def get_slot_function(scope, slot): # Returns the function implementing this slot in the baseclass. # This is useful for enabling the compiler to optimize calls # that recursively climb the class hierarchy. slot_code = slot.slot_code(scope) if slot_code != '0': entry = scope.parent_scope.lookup_here(scope.parent_type.name) if entry.visibility != 'extern': return slot_code return None #------------------------------------------------------------------------------------------ # # Signatures for generic Python functions and methods. 
# #------------------------------------------------------------------------------------------ pyfunction_signature = Signature("-*", "O") pymethod_signature = Signature("T*", "O") #------------------------------------------------------------------------------------------ # # Signatures for simple Python functions. # #------------------------------------------------------------------------------------------ pyfunction_noargs = Signature("-", "O") pyfunction_onearg = Signature("-O", "O") #------------------------------------------------------------------------------------------ # # Signatures for the various kinds of function that # can appear in the type object and its substructures. # #------------------------------------------------------------------------------------------ unaryfunc = Signature("T", "O") # typedef PyObject * (*unaryfunc)(PyObject *); binaryfunc = Signature("OO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *); ibinaryfunc = Signature("TO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *); ternaryfunc = Signature("OOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *); iternaryfunc = Signature("TOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *); callfunc = Signature("T*", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *); inquiry = Signature("T", "i") # typedef int (*inquiry)(PyObject *); lenfunc = Signature("T", "z") # typedef Py_ssize_t (*lenfunc)(PyObject *); # typedef int (*coercion)(PyObject **, PyObject **); intargfunc = Signature("Ti", "O") # typedef PyObject *(*intargfunc)(PyObject *, int); ssizeargfunc = Signature("Tz", "O") # typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t); intintargfunc = Signature("Tii", "O") # typedef PyObject *(*intintargfunc)(PyObject *, int, int); ssizessizeargfunc = Signature("Tzz", "O") # typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t); intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *); ssizeobjargproc = Signature("TzO", 'r') # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *); intintobjargproc = Signature("TiiO", 'r') # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *); ssizessizeobjargproc = Signature("TzzO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *); intintargproc = Signature("Tii", 'r') ssizessizeargproc = Signature("Tzz", 'r') objargfunc = Signature("TO", "O") objobjargproc = Signature("TOO", 'r') # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *); readbufferproc = Signature("TzP", "z") # typedef Py_ssize_t (*readbufferproc)(PyObject *, Py_ssize_t, void **); writebufferproc = Signature("TzP", "z") # typedef Py_ssize_t (*writebufferproc)(PyObject *, Py_ssize_t, void **); segcountproc = Signature("TZ", "z") # typedef Py_ssize_t (*segcountproc)(PyObject *, Py_ssize_t *); charbufferproc = Signature("TzS", "z") # typedef Py_ssize_t (*charbufferproc)(PyObject *, Py_ssize_t, char **); objargproc = Signature("TO", 'r') # typedef int (*objobjproc)(PyObject *, PyObject *); # typedef int (*visitproc)(PyObject *, void *); # typedef int (*traverseproc)(PyObject *, visitproc, void *); destructor = Signature("T", "v") # typedef void (*destructor)(PyObject *); # printfunc = Signature("TFi", 'r') # typedef int (*printfunc)(PyObject *, FILE *, int); # typedef PyObject *(*getattrfunc)(PyObject *, char *); getattrofunc = Signature("TO", "O") # typedef PyObject 
*(*getattrofunc)(PyObject *, PyObject *); # typedef int (*setattrfunc)(PyObject *, char *, PyObject *); setattrofunc = Signature("TOO", 'r') # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *); delattrofunc = Signature("TO", 'r') cmpfunc = Signature("TO", "i") # typedef int (*cmpfunc)(PyObject *, PyObject *); reprfunc = Signature("T", "O") # typedef PyObject *(*reprfunc)(PyObject *); hashfunc = Signature("T", "h") # typedef Py_hash_t (*hashfunc)(PyObject *); # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int); richcmpfunc = Signature("OOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int); getiterfunc = Signature("T", "O") # typedef PyObject *(*getiterfunc) (PyObject *); iternextfunc = Signature("T", "O") # typedef PyObject *(*iternextfunc) (PyObject *); descrgetfunc = Signature("TOO", "O") # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *); descrsetfunc = Signature("TOO", 'r') # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *); descrdelfunc = Signature("TO", 'r') initproc = Signature("T*", 'r') # typedef int (*initproc)(PyObject *, PyObject *, PyObject *); # typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *); # typedef PyObject *(*allocfunc)(struct _typeobject *, int); getbufferproc = Signature("TBi", "r") # typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); releasebufferproc = Signature("TB", "v") # typedef void (*releasebufferproc)(PyObject *, Py_buffer *); #------------------------------------------------------------------------------------------ # # Signatures for accessor methods of properties. # #------------------------------------------------------------------------------------------ property_accessor_signatures = { '__get__': Signature("T", "O"), '__set__': Signature("TO", 'r'), '__del__': Signature("T", 'r') } #------------------------------------------------------------------------------------------ # # Descriptor tables for the slots of the various type object # substructures, in the order they appear in the structure. 
# #------------------------------------------------------------------------------------------ PyNumberMethods_Py3_GUARD = "PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000)" PyNumberMethods = ( MethodSlot(binaryfunc, "nb_add", "__add__"), MethodSlot(binaryfunc, "nb_subtract", "__sub__"), MethodSlot(binaryfunc, "nb_multiply", "__mul__"), MethodSlot(binaryfunc, "nb_divide", "__div__", ifdef = PyNumberMethods_Py3_GUARD), MethodSlot(binaryfunc, "nb_remainder", "__mod__"), MethodSlot(binaryfunc, "nb_divmod", "__divmod__"), MethodSlot(ternaryfunc, "nb_power", "__pow__"), MethodSlot(unaryfunc, "nb_negative", "__neg__"), MethodSlot(unaryfunc, "nb_positive", "__pos__"), MethodSlot(unaryfunc, "nb_absolute", "__abs__"), MethodSlot(inquiry, "nb_nonzero", "__nonzero__", py3 = ("nb_bool", "__bool__")), MethodSlot(unaryfunc, "nb_invert", "__invert__"), MethodSlot(binaryfunc, "nb_lshift", "__lshift__"), MethodSlot(binaryfunc, "nb_rshift", "__rshift__"), MethodSlot(binaryfunc, "nb_and", "__and__"), MethodSlot(binaryfunc, "nb_xor", "__xor__"), MethodSlot(binaryfunc, "nb_or", "__or__"), EmptySlot("nb_coerce", ifdef = PyNumberMethods_Py3_GUARD), MethodSlot(unaryfunc, "nb_int", "__int__", fallback="__long__"), MethodSlot(unaryfunc, "nb_long", "__long__", fallback="__int__", py3 = ""), MethodSlot(unaryfunc, "nb_float", "__float__"), MethodSlot(unaryfunc, "nb_oct", "__oct__", ifdef = PyNumberMethods_Py3_GUARD), MethodSlot(unaryfunc, "nb_hex", "__hex__", ifdef = PyNumberMethods_Py3_GUARD), # Added in release 2.0 MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"), MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"), MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"), MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__", ifdef = PyNumberMethods_Py3_GUARD), MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"), MethodSlot(ibinaryfunc, "nb_inplace_power", "__ipow__"), # actually ternaryfunc!!! 
MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"), MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"), MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"), MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"), MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"), # Added in release 2.2 # The following require the Py_TPFLAGS_HAVE_CLASS flag MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"), MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"), MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"), MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"), # Added in release 2.5 MethodSlot(unaryfunc, "nb_index", "__index__"), # Added in release 3.5 MethodSlot(binaryfunc, "nb_matrix_multiply", "__matmul__", ifdef="PY_VERSION_HEX >= 0x03050000"), MethodSlot(ibinaryfunc, "nb_inplace_matrix_multiply", "__imatmul__", ifdef="PY_VERSION_HEX >= 0x03050000"), ) PySequenceMethods = ( MethodSlot(lenfunc, "sq_length", "__len__"), EmptySlot("sq_concat"), # nb_add used instead EmptySlot("sq_repeat"), # nb_multiply used instead SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"), EmptySlot("sq_ass_item"), # mp_ass_subscript used instead SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"), MethodSlot(cmpfunc, "sq_contains", "__contains__"), EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead ) PyMappingMethods = ( MethodSlot(lenfunc, "mp_length", "__len__"), MethodSlot(objargfunc, "mp_subscript", "__getitem__"), SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"), ) PyBufferProcs = ( MethodSlot(readbufferproc, "bf_getreadbuffer", "__getreadbuffer__", py3 = False), MethodSlot(writebufferproc, "bf_getwritebuffer", "__getwritebuffer__", py3 = False), MethodSlot(segcountproc, "bf_getsegcount", "__getsegcount__", py3 = False), MethodSlot(charbufferproc, "bf_getcharbuffer", "__getcharbuffer__", py3 = False), MethodSlot(getbufferproc, "bf_getbuffer", "__getbuffer__"), MethodSlot(releasebufferproc, "bf_releasebuffer", "__releasebuffer__") ) PyAsyncMethods = ( MethodSlot(unaryfunc, "am_await", "__await__"), MethodSlot(unaryfunc, "am_aiter", "__aiter__"), MethodSlot(unaryfunc, "am_anext", "__anext__"), ) #------------------------------------------------------------------------------------------ # # The main slot table. This table contains descriptors for all the # top-level type slots, beginning with tp_dealloc, in the order they # appear in the type object. 
# #------------------------------------------------------------------------------------------ slot_table = ( ConstructorSlot("tp_dealloc", '__dealloc__'), EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"), EmptySlot("tp_getattr"), EmptySlot("tp_setattr"), # tp_compare (Py2) / tp_reserved (Py3<3.5) / tp_as_async (Py3.5+) is always used as tp_as_async in Py3 MethodSlot(cmpfunc, "tp_compare", "__cmp__", ifdef="PY_MAJOR_VERSION < 3"), SuiteSlot(PyAsyncMethods, "__Pyx_PyAsyncMethodsStruct", "tp_as_async", ifdef="PY_MAJOR_VERSION >= 3"), MethodSlot(reprfunc, "tp_repr", "__repr__"), SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"), SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"), SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"), MethodSlot(hashfunc, "tp_hash", "__hash__", inherited=False), # Py3 checks for __richcmp__ MethodSlot(callfunc, "tp_call", "__call__"), MethodSlot(reprfunc, "tp_str", "__str__"), SyntheticSlot("tp_getattro", ["__getattr__","__getattribute__"], "0"), #"PyObject_GenericGetAttr"), SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"), SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"), TypeFlagsSlot("tp_flags"), DocStringSlot("tp_doc"), GCDependentSlot("tp_traverse"), GCClearReferencesSlot("tp_clear"), # Later -- synthesize a method to split into separate ops? MethodSlot(richcmpfunc, "tp_richcompare", "__richcmp__", inherited=False), # Py3 checks for __hash__ EmptySlot("tp_weaklistoffset"), MethodSlot(getiterfunc, "tp_iter", "__iter__"), MethodSlot(iternextfunc, "tp_iternext", "__next__"), MethodTableSlot("tp_methods"), MemberTableSlot("tp_members"), GetSetSlot("tp_getset"), BaseClassSlot("tp_base"), #EmptySlot("tp_base"), EmptySlot("tp_dict"), SyntheticSlot("tp_descr_get", ["__get__"], "0"), SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"), DictOffsetSlot("tp_dictoffset"), MethodSlot(initproc, "tp_init", "__init__"), EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"), InternalMethodSlot("tp_new"), EmptySlot("tp_free"), EmptySlot("tp_is_gc"), EmptySlot("tp_bases"), EmptySlot("tp_mro"), EmptySlot("tp_cache"), EmptySlot("tp_subclasses"), EmptySlot("tp_weaklist"), EmptySlot("tp_del"), EmptySlot("tp_version_tag"), EmptySlot("tp_finalize", ifdef="PY_VERSION_HEX >= 0x030400a1"), ) #------------------------------------------------------------------------------------------ # # Descriptors for special methods which don't appear directly # in the type object or its substructures. These methods are # called from slot functions synthesized by Cython. # #------------------------------------------------------------------------------------------ MethodSlot(initproc, "", "__cinit__") MethodSlot(destructor, "", "__dealloc__") MethodSlot(objobjargproc, "", "__setitem__") MethodSlot(objargproc, "", "__delitem__") MethodSlot(ssizessizeobjargproc, "", "__setslice__") MethodSlot(ssizessizeargproc, "", "__delslice__") MethodSlot(getattrofunc, "", "__getattr__") MethodSlot(setattrofunc, "", "__setattr__") MethodSlot(delattrofunc, "", "__delattr__") MethodSlot(descrgetfunc, "", "__get__") MethodSlot(descrsetfunc, "", "__set__") MethodSlot(descrdelfunc, "", "__delete__") # Method flags for python-exposed methods. 
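# Hedged examples (added for illustration, not in the original file) of how
# Signature.method_flags() above selects these flag combinations:
#
#     Signature("T", "O").method_flags()    # -> ["METH_NOARGS"]
#     Signature("TO", "O").method_flags()   # -> ["METH_O"]
#     Signature("T*", "O").method_flags()   # -> ["METH_VARARGS", "METH_KEYWORDS"]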
method_noargs = "METH_NOARGS" method_onearg = "METH_O" method_varargs = "METH_VARARGS" method_keywords = "METH_KEYWORDS" method_coexist = "METH_COEXIST" Cython-0.26.1/Cython/Compiler/Scanning.pxd0000664000175000017500000000372413023021033021114 0ustar stefanstefan00000000000000from __future__ import absolute_import import cython from ..Plex.Scanners cimport Scanner cdef unicode any_string_prefix, IDENT cdef get_lexicon() cdef initial_compile_time_env() cdef class Method: cdef object name cdef dict kwargs cdef readonly object __name__ # for tracing the scanner @cython.final cdef class CompileTimeScope: cdef public dict entries cdef public CompileTimeScope outer cdef declare(self, name, value) cdef lookup_here(self, name) cpdef lookup(self, name) @cython.final cdef class PyrexScanner(Scanner): cdef public context cdef public list included_files cdef public CompileTimeScope compile_time_env cdef public bint compile_time_eval cdef public bint compile_time_expr cdef public bint parse_comments cdef public bint in_python_file cdef public source_encoding cdef set keywords cdef public list indentation_stack cdef public indentation_char cdef public int bracket_nesting_level cdef bint async_enabled cdef public sy cdef public systring cdef long current_level(self) #cpdef commentline(self, text) #cpdef open_bracket_action(self, text) #cpdef close_bracket_action(self, text) #cpdef newline_action(self, text) #cpdef begin_string_action(self, text) #cpdef end_string_action(self, text) #cpdef unclosed_string_action(self, text) @cython.locals(current_level=cython.long, new_level=cython.long) cpdef indentation_action(self, text) #cpdef eof_action(self, text) cdef next(self) cdef peek(self) #cpdef put_back(self, sy, systring) #cdef unread(self, token, value) cdef bint expect(self, what, message = *) except -2 cdef expect_keyword(self, what, message = *) cdef expected(self, what, message = *) cdef expect_indent(self) cdef expect_dedent(self) cdef expect_newline(self, message=*, bint ignore_semicolon=*) cdef int enter_async(self) except -1 cdef int exit_async(self) except -1 Cython-0.26.1/Cython/Compiler/CythonScope.py0000664000175000017500000001344213023021033021445 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Symtab import ModuleScope from .PyrexTypes import * from .UtilityCode import CythonUtilityCode from .Errors import error from .Scanning import StringSourceDescriptor from . import MemoryView class CythonScope(ModuleScope): is_cython_builtin = 1 _cythonscope_initialized = False def __init__(self, context): ModuleScope.__init__(self, u'cython', None, None) self.pxd_file_loaded = True self.populate_cython_scope() # The Main.Context object self.context = context for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type): entry = self.declare_typedef(fused_type.name, fused_type, None, cname='') entry.in_cinclude = True def lookup_type(self, name): # This function should go away when types are all first-level objects. 
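        # Illustrative note (inferred, not authoritative): parse_basic_type()
        # from PyrexTypes resolves plain C type names, so e.g. lookup_type("int")
        # can return the C int type directly; names it does not recognise fall
        # through to the regular ModuleScope.lookup_type() below.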
type = parse_basic_type(name) if type: return type return super(CythonScope, self).lookup_type(name) def lookup(self, name): entry = super(CythonScope, self).lookup(name) if entry is None and not self._cythonscope_initialized: self.load_cythonscope() entry = super(CythonScope, self).lookup(name) return entry def find_module(self, module_name, pos): error("cython.%s is not available" % module_name, pos) def find_submodule(self, module_name): entry = self.entries.get(module_name, None) if not entry: self.load_cythonscope() entry = self.entries.get(module_name, None) if entry and entry.as_module: return entry.as_module else: # TODO: fix find_submodule control flow so that we're not # expected to create a submodule here (to protect CythonScope's # possible immutability). Hack ourselves out of the situation # for now. raise error((StringSourceDescriptor(u"cython", u""), 0, 0), "cython.%s is not available" % module_name) def lookup_qualified_name(self, qname): # ExprNode.as_cython_attribute generates qnames and we untangle it here... name_path = qname.split(u'.') scope = self while len(name_path) > 1: scope = scope.lookup_here(name_path[0]) if scope: scope = scope.as_module del name_path[0] if scope is None: return None else: return scope.lookup_here(name_path[0]) def populate_cython_scope(self): # These are used to optimize isinstance in FinalOptimizePhase type_object = self.declare_typedef( 'PyTypeObject', base_type = c_void_type, pos = None, cname = 'PyTypeObject') type_object.is_void = True type_object_type = type_object.type self.declare_cfunction( 'PyObject_TypeCheck', CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None), CFuncTypeArg("t", c_ptr_type(type_object_type), None)]), pos = None, defining = 1, cname = 'PyObject_TypeCheck') def load_cythonscope(self): """ Creates some entries for testing purposes and entries for cython.array() and for cython.view.*. 
""" if self._cythonscope_initialized: return self._cythonscope_initialized = True cython_testscope_utility_code.declare_in_scope( self, cython_scope=self) cython_test_extclass_utility_code.declare_in_scope( self, cython_scope=self) # # The view sub-scope # self.viewscope = viewscope = ModuleScope(u'view', self, None) self.declare_module('view', viewscope, None).as_module = viewscope viewscope.is_cython_builtin = True viewscope.pxd_file_loaded = True cythonview_testscope_utility_code.declare_in_scope( viewscope, cython_scope=self) view_utility_scope = MemoryView.view_utility_code.declare_in_scope( self.viewscope, cython_scope=self, whitelist=MemoryView.view_utility_whitelist) # self.entries["array"] = view_utility_scope.entries.pop("array") def create_cython_scope(context): # One could in fact probably make it a singleton, # but not sure yet whether any code mutates it (which would kill reusing # it across different contexts) return CythonScope(context) # Load test utilities for the cython scope def load_testscope_utility(cy_util_name, **kwargs): return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs) undecorated_methods_protos = UtilityCode(proto=u""" /* These methods are undecorated and have therefore no prototype */ static PyObject *__pyx_TestClass_cdef_method( struct __pyx_TestClass_obj *self, int value); static PyObject *__pyx_TestClass_cpdef_method( struct __pyx_TestClass_obj *self, int value, int skip_dispatch); static PyObject *__pyx_TestClass_def_method( PyObject *self, PyObject *value); """) cython_testscope_utility_code = load_testscope_utility("TestScope") test_cython_utility_dep = load_testscope_utility("TestDep") cython_test_extclass_utility_code = \ load_testscope_utility("TestClass", name="TestClass", requires=[undecorated_methods_protos, test_cython_utility_dep]) cythonview_testscope_utility_code = load_testscope_utility("View.TestScope") Cython-0.26.1/Cython/Compiler/AnalysedTreeTransforms.py0000664000175000017500000000736212542002467023671 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Visitor import ScopeTrackingTransform from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode from .PyrexTypes import py_object_type from .StringEncoding import EncodedString from . 
import Symtab class AutoTestDictTransform(ScopeTrackingTransform): # Handles autotestdict directive blacklist = ['__cinit__', '__dealloc__', '__richcmp__', '__nonzero__', '__bool__', '__len__', '__contains__'] def visit_ModuleNode(self, node): if node.is_pxd: return node self.scope_type = 'module' self.scope_node = node if not self.current_directives['autotestdict']: return node self.all_docstrings = self.current_directives['autotestdict.all'] self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef'] assert isinstance(node.body, StatListNode) # First see if __test__ is already created if u'__test__' in node.scope.entries: # Do nothing return node pos = node.pos self.tests = [] self.testspos = node.pos test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'), py_object_type, pos, visibility='public') create_test_dict_assignment = SingleAssignmentNode(pos, lhs=NameNode(pos, name=EncodedString(u'__test__'), entry=test_dict_entry), rhs=DictNode(pos, key_value_pairs=self.tests)) self.visitchildren(node) node.body.stats.append(create_test_dict_assignment) return node def add_test(self, testpos, path, doctest): pos = self.testspos keystr = u'%s (line %d)' % (path, testpos[1]) key = UnicodeNode(pos, value=EncodedString(keystr)) value = UnicodeNode(pos, value=doctest) self.tests.append(DictItemNode(pos, key=key, value=value)) def visit_ExprNode(self, node): # expressions cannot contain functions and lambda expressions # do not have a docstring return node def visit_FuncDefNode(self, node): if not node.doc or (isinstance(node, DefNode) and node.fused_py_func): return node if not self.cdef_docstrings: if isinstance(node, CFuncDefNode) and not node.py_func: return node if not self.all_docstrings and '>>>' not in node.doc: return node pos = self.testspos if self.scope_type == 'module': path = node.entry.name elif self.scope_type in ('pyclass', 'cclass'): if isinstance(node, CFuncDefNode): if node.py_func is not None: name = node.py_func.name else: name = node.entry.name else: name = node.name if self.scope_type == 'cclass' and name in self.blacklist: return node if self.scope_type == 'pyclass': class_name = self.scope_node.name else: class_name = self.scope_node.class_name if isinstance(node.entry.scope, Symtab.PropertyScope): property_method_name = node.entry.scope.name path = "%s.%s.%s" % (class_name, node.entry.scope.name, node.entry.name) else: path = "%s.%s" % (class_name, node.entry.name) else: assert False self.add_test(node.pos, path, node.doc) return node Cython-0.26.1/Cython/Compiler/Pipeline.py0000664000175000017500000003211213143605603020765 0ustar stefanstefan00000000000000from __future__ import absolute_import import itertools from time import time from . import Errors from . import DebugFlags from . import Options from .Visitor import CythonTransform from .Errors import CompileError, InternalError, AbortError from . import Naming # # Really small pipeline stages # def dumptree(t): # For quick debugging in pipelines print(t.dump()) return t def abort_on_errors(node): # Stop the pipeline if there are any errors. 
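    # Hedged usage sketch: a pipeline is just a list of callables applied in
    # order by run_pipeline() below, so this stage can be appended wherever a
    # hard stop on accumulated errors is wanted, e.g.
    #
    #     err, result = run_pipeline([dumptree, abort_on_errors], tree)
    #
    # (dumptree and tree are stand-ins here; run_pipeline returns (error, data).)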
if Errors.num_errors != 0: raise AbortError("pipeline break") return node def parse_stage_factory(context): def parse(compsrc): source_desc = compsrc.source_desc full_module_name = compsrc.full_module_name initial_pos = (source_desc, 1, 0) saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0) Options.cimport_from_pyx = saved_cimport_from_pyx tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name) tree.compilation_source = compsrc tree.scope = scope tree.is_pxd = False return tree return parse def parse_pxd_stage_factory(context, scope, module_name): def parse(source_desc): tree = context.parse(source_desc, scope, pxd=True, full_module_name=module_name) tree.scope = scope tree.is_pxd = True return tree return parse def generate_pyx_code_stage_factory(options, result): def generate_pyx_code_stage(module_node): module_node.process_implementation(options, result) result.compilation_source = module_node.compilation_source return result return generate_pyx_code_stage def inject_pxd_code_stage_factory(context): def inject_pxd_code_stage(module_node): for name, (statlistnode, scope) in context.pxds.items(): module_node.merge_in(statlistnode, scope) return module_node return inject_pxd_code_stage def use_utility_code_definitions(scope, target, seen=None): if seen is None: seen = set() for entry in scope.entries.values(): if entry in seen: continue seen.add(entry) if entry.used and entry.utility_code_definition: target.use_utility_code(entry.utility_code_definition) for required_utility in entry.utility_code_definition.requires: target.use_utility_code(required_utility) elif entry.as_module: use_utility_code_definitions(entry.as_module, target, seen) def sort_utility_codes(utilcodes): ranks = {} def get_rank(utilcode): if utilcode not in ranks: ranks[utilcode] = 0 # prevent infinite recursion on circular dependencies original_order = len(ranks) ranks[utilcode] = 1 + min([get_rank(dep) for dep in utilcode.requires or ()] or [-1]) + original_order * 1e-8 return ranks[utilcode] for utilcode in utilcodes: get_rank(utilcode) return [utilcode for utilcode, _ in sorted(ranks.items(), key=lambda kv: kv[1])] def normalize_deps(utilcodes): deps = {} for utilcode in utilcodes: deps[utilcode] = utilcode def unify_dep(dep): if dep in deps: return deps[dep] else: deps[dep] = dep return dep for utilcode in utilcodes: utilcode.requires = [unify_dep(dep) for dep in utilcode.requires or ()] def inject_utility_code_stage_factory(context): def inject_utility_code_stage(module_node): module_node.prepare_utility_code() use_utility_code_definitions(context.cython_scope, module_node.scope) module_node.scope.utility_code_list = sort_utility_codes(module_node.scope.utility_code_list) normalize_deps(module_node.scope.utility_code_list) added = [] # Note: the list might be extended inside the loop (if some utility code # pulls in other utility code, explicitly or implicitly) for utilcode in module_node.scope.utility_code_list: if utilcode in added: continue added.append(utilcode) if utilcode.requires: for dep in utilcode.requires: if dep not in added and dep not in module_node.scope.utility_code_list: module_node.scope.utility_code_list.append(dep) tree = utilcode.get_tree(cython_scope=context.cython_scope) if tree: module_node.merge_in(tree.body, tree.scope, merge_scope=True) return module_node return inject_utility_code_stage # # Pipeline factories # def create_pipeline(context, mode, 
exclude_classes=()): assert mode in ('pyx', 'py', 'pxd') from .Visitor import PrintTree from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse from .ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform from .ParseTreeTransforms import CalculateQualifiedNamesTransform from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic from .ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions from .ParseTreeTransforms import RemoveUnreachableCode, GilCheck from .FlowControl import ControlFlowAnalysis from .AnalysedTreeTransforms import AutoTestDictTransform from .AutoDocTransforms import EmbedSignature from .Optimize import FlattenInListTransform, SwitchTransform, IterationTransform from .Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls from .Optimize import InlineDefNodeCalls from .Optimize import ConstantFolding, FinalOptimizePhase from .Optimize import DropRefcountingTransform from .Optimize import ConsolidateOverflowCheck from .Buffer import IntroduceBufferAuxiliaryVars from .ModuleNode import check_c_declarations, check_c_declarations_pxd if mode == 'pxd': _check_c_declarations = check_c_declarations_pxd _specific_post_parse = PxdPostParse(context) else: _check_c_declarations = check_c_declarations _specific_post_parse = None if mode == 'py': _align_function_definitions = AlignFunctionDefinitions(context) else: _align_function_definitions = None # NOTE: This is the "common" parts of the pipeline, which is also # code in pxd files. So it will be run multiple times in a # compilation stage. stages = [ NormalizeTree(context), PostParse(context), _specific_post_parse, TrackNumpyAttributes(context), InterpretCompilerDirectives(context, context.compiler_directives), ParallelRangeTransform(context), AdjustDefByDirectives(context), WithTransform(context), MarkClosureVisitor(context), _align_function_definitions, RemoveUnreachableCode(context), ConstantFolding(), FlattenInListTransform(), DecoratorTransform(context), ForwardDeclareTypes(context), AnalyseDeclarationsTransform(context), AutoTestDictTransform(context), EmbedSignature(context), EarlyReplaceBuiltinCalls(context), ## Necessary? TransformBuiltinMethods(context), MarkParallelAssignments(context), ControlFlowAnalysis(context), RemoveUnreachableCode(context), # MarkParallelAssignments(context), MarkOverflowingArithmetic(context), IntroduceBufferAuxiliaryVars(context), _check_c_declarations, InlineDefNodeCalls(context), AnalyseExpressionsTransform(context), FindInvalidUseOfFusedTypes(context), ExpandInplaceOperators(context), IterationTransform(context), SwitchTransform(context), OptimizeBuiltinCalls(context), ## Necessary? 
CreateClosureClasses(context), ## After all lookups and type inference CalculateQualifiedNamesTransform(context), ConsolidateOverflowCheck(context), DropRefcountingTransform(), FinalOptimizePhase(context), GilCheck(), ] filtered_stages = [] for s in stages: if s.__class__ not in exclude_classes: filtered_stages.append(s) return filtered_stages def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()): if py: mode = 'py' else: mode = 'pyx' test_support = [] if options.evaluate_tree_assertions: from ..TestUtils import TreeAssertVisitor test_support.append(TreeAssertVisitor()) if options.gdb_debug: from ..Debugger import DebugWriter # requires Py2.5+ from .ParseTreeTransforms import DebugTransform context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter( options.output_dir) debug_transform = [DebugTransform(context, options, result)] else: debug_transform = [] return list(itertools.chain( [parse_stage_factory(context)], create_pipeline(context, mode, exclude_classes=exclude_classes), test_support, [inject_pxd_code_stage_factory(context), inject_utility_code_stage_factory(context), abort_on_errors], debug_transform, [generate_pyx_code_stage_factory(options, result)])) def create_pxd_pipeline(context, scope, module_name): from .CodeGeneration import ExtractPxdCode # The pxd pipeline ends up with a CCodeWriter containing the # code of the pxd, as well as a pxd scope. return [ parse_pxd_stage_factory(context, scope, module_name) ] + create_pipeline(context, 'pxd') + [ ExtractPxdCode() ] def create_py_pipeline(context, options, result): return create_pyx_pipeline(context, options, result, py=True) def create_pyx_as_pxd_pipeline(context, result): from .ParseTreeTransforms import AlignFunctionDefinitions, \ MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform from .Optimize import ConstantFolding, FlattenInListTransform from .Nodes import StatListNode pipeline = [] pyx_pipeline = create_pyx_pipeline(context, context.options, result, exclude_classes=[ AlignFunctionDefinitions, MarkClosureVisitor, ConstantFolding, FlattenInListTransform, WithTransform ]) for stage in pyx_pipeline: pipeline.append(stage) if isinstance(stage, AnalyseDeclarationsTransform): # This is the last stage we need. break def fake_pxd(root): for entry in root.scope.entries.values(): if not entry.in_cinclude: entry.defined_in_pxd = 1 if entry.name == entry.cname and entry.visibility != 'extern': # Always mangle non-extern cimported entries. entry.cname = entry.scope.mangle(Naming.func_prefix, entry.name) return StatListNode(root.pos, stats=[]), root.scope pipeline.append(fake_pxd) return pipeline def insert_into_pipeline(pipeline, transform, before=None, after=None): """ Insert a new transform into the pipeline after or before an instance of the given class. e.g. 
pipeline = insert_into_pipeline(pipeline, transform, after=AnalyseDeclarationsTransform) """ assert before or after cls = before or after for i, t in enumerate(pipeline): if isinstance(t, cls): break if after: i += 1 return pipeline[:i] + [transform] + pipeline[i:] # # Running a pipeline # def run_pipeline(pipeline, source, printtree=True): from .Visitor import PrintTree error = None data = source try: try: for phase in pipeline: if phase is not None: if DebugFlags.debug_verbose_pipeline: t = time() print("Entering pipeline phase %r" % phase) if not printtree and isinstance(phase, PrintTree): continue data = phase(data) if DebugFlags.debug_verbose_pipeline: print(" %.3f seconds" % (time() - t)) except CompileError as err: # err is set Errors.report_error(err, use_stack=False) error = err except InternalError as err: # Only raise if there was not an earlier error if Errors.num_errors == 0: raise error = err except AbortError as err: error = err return (error, data) Cython-0.26.1/Cython/Compiler/MemoryView.py0000664000175000017500000007255513023021023021323 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Errors import CompileError, error from . import ExprNodes from .ExprNodes import IntNode, NameNode, AttributeNode from . import Options from .Code import UtilityCode, TempitaUtilityCode from .UtilityCode import CythonUtilityCode from . import Buffer from . import PyrexTypes from . import ModuleNode START_ERR = "Start must not be given." STOP_ERR = "Axis specification only allowed in the 'step' slot." STEP_ERR = "Step must be omitted, 1, or a valid specifier." BOTH_CF_ERR = "Cannot specify an array that is both C and Fortran contiguous." INVALID_ERR = "Invalid axis specification." NOT_CIMPORTED_ERR = "Variable was not cimported from cython.view" EXPR_ERR = "no expressions allowed in axis spec, only names and literals." CF_ERR = "Invalid axis specification for a C/Fortran contiguous array." 
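# For context, an illustrative (hedged) sketch of the axis-spec syntax these
# messages refer to: each axis of a typed memoryview is written as a slice, and
# only the "step" slot may carry a specifier, e.g. in Cython code
#
#     cdef double[:, ::1] a              # 2D, contiguous in the last axis (C order)
#     cdef double[::1, :] b              # 2D, contiguous in the first axis (Fortran order)
#     cdef int[::view.indirect, ::1] c   # indirect first dimension (cython.view)
#
# START_ERR / STOP_ERR above are raised when a start or stop value appears in
# such a spec (see get_axes_specs() further below).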
ERR_UNINITIALIZED = ("Cannot check if memoryview %s is initialized without the " "GIL, consider using initializedcheck(False)") def concat_flags(*flags): return "(%s)" % "|".join(flags) format_flag = "PyBUF_FORMAT" memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT | PyBUF_WRITABLE)" memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT | PyBUF_WRITABLE)" memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT | PyBUF_WRITABLE)" memview_full_access = "PyBUF_FULL" #memview_strided_access = "PyBUF_STRIDED" memview_strided_access = "PyBUF_RECORDS" MEMVIEW_DIRECT = '__Pyx_MEMVIEW_DIRECT' MEMVIEW_PTR = '__Pyx_MEMVIEW_PTR' MEMVIEW_FULL = '__Pyx_MEMVIEW_FULL' MEMVIEW_CONTIG = '__Pyx_MEMVIEW_CONTIG' MEMVIEW_STRIDED= '__Pyx_MEMVIEW_STRIDED' MEMVIEW_FOLLOW = '__Pyx_MEMVIEW_FOLLOW' _spec_to_const = { 'direct' : MEMVIEW_DIRECT, 'ptr' : MEMVIEW_PTR, 'full' : MEMVIEW_FULL, 'contig' : MEMVIEW_CONTIG, 'strided': MEMVIEW_STRIDED, 'follow' : MEMVIEW_FOLLOW, } _spec_to_abbrev = { 'direct' : 'd', 'ptr' : 'p', 'full' : 'f', 'contig' : 'c', 'strided' : 's', 'follow' : '_', } memslice_entry_init = "{ 0, 0, { 0 }, { 0 }, { 0 } }" memview_name = u'memoryview' memview_typeptr_cname = '__pyx_memoryview_type' memview_objstruct_cname = '__pyx_memoryview_obj' memviewslice_cname = u'__Pyx_memviewslice' def put_init_entry(mv_cname, code): code.putln("%s.data = NULL;" % mv_cname) code.putln("%s.memview = NULL;" % mv_cname) #def axes_to_str(axes): # return "".join([access[0].upper()+packing[0] for (access, packing) in axes]) def put_acquire_memoryviewslice(lhs_cname, lhs_type, lhs_pos, rhs, code, have_gil=False, first_assignment=True): "We can avoid decreffing the lhs if we know it is the first assignment" assert rhs.type.is_memoryviewslice pretty_rhs = rhs.result_in_temp() or rhs.is_simple() if pretty_rhs: rhstmp = rhs.result() else: rhstmp = code.funcstate.allocate_temp(lhs_type, manage_ref=False) code.putln("%s = %s;" % (rhstmp, rhs.result_as(lhs_type))) # Allow uninitialized assignment #code.putln(code.put_error_if_unbound(lhs_pos, rhs.entry)) put_assign_to_memviewslice(lhs_cname, rhs, rhstmp, lhs_type, code, have_gil=have_gil, first_assignment=first_assignment) if not pretty_rhs: code.funcstate.release_temp(rhstmp) def put_assign_to_memviewslice(lhs_cname, rhs, rhs_cname, memviewslicetype, code, have_gil=False, first_assignment=False): if not first_assignment: code.put_xdecref_memoryviewslice(lhs_cname, have_gil=have_gil) if not rhs.result_in_temp(): rhs.make_owned_memoryviewslice(code) code.putln("%s = %s;" % (lhs_cname, rhs_cname)) def get_buf_flags(specs): is_c_contig, is_f_contig = is_cf_contig(specs) if is_c_contig: return memview_c_contiguous elif is_f_contig: return memview_f_contiguous access, packing = zip(*specs) if 'full' in access or 'ptr' in access: return memview_full_access else: return memview_strided_access def insert_newaxes(memoryviewtype, n): axes = [('direct', 'strided')] * n axes.extend(memoryviewtype.axes) return PyrexTypes.MemoryViewSliceType(memoryviewtype.dtype, axes) def broadcast_types(src, dst): n = abs(src.ndim - dst.ndim) if src.ndim < dst.ndim: return insert_newaxes(src, n), dst else: return src, insert_newaxes(dst, n) def valid_memslice_dtype(dtype, i=0): """ Return whether type dtype can be used as the base type of a memoryview slice. 
We support structs, numeric types and objects """ if dtype.is_complex and dtype.real_type.is_int: return False if dtype is PyrexTypes.c_bint_type: return False if dtype.is_struct and dtype.kind == 'struct': for member in dtype.scope.var_entries: if not valid_memslice_dtype(member.type): return False return True return ( dtype.is_error or # Pointers are not valid (yet) # (dtype.is_ptr and valid_memslice_dtype(dtype.base_type)) or (dtype.is_array and i < 8 and valid_memslice_dtype(dtype.base_type, i + 1)) or dtype.is_numeric or dtype.is_pyobject or dtype.is_fused or # accept this as it will be replaced by specializations later (dtype.is_typedef and valid_memslice_dtype(dtype.typedef_base_type)) ) class MemoryViewSliceBufferEntry(Buffer.BufferEntry): """ May be used during code generation time to be queried for shape/strides/suboffsets attributes, or to perform indexing or slicing. """ def __init__(self, entry): self.entry = entry self.type = entry.type self.cname = entry.cname self.buf_ptr = "%s.data" % self.cname dtype = self.entry.type.dtype self.buf_ptr_type = PyrexTypes.CPtrType(dtype) self.init_attributes() def get_buf_suboffsetvars(self): return self._for_all_ndim("%s.suboffsets[%d]") def get_buf_stridevars(self): return self._for_all_ndim("%s.strides[%d]") def get_buf_shapevars(self): return self._for_all_ndim("%s.shape[%d]") def generate_buffer_lookup_code(self, code, index_cnames): axes = [(dim, index_cnames[dim], access, packing) for dim, (access, packing) in enumerate(self.type.axes)] return self._generate_buffer_lookup_code(code, axes) def _generate_buffer_lookup_code(self, code, axes, cast_result=True): """ Generate a single expression that indexes the memory view slice in each dimension. """ bufp = self.buf_ptr type_decl = self.type.dtype.empty_declaration_code() for dim, index, access, packing in axes: shape = "%s.shape[%d]" % (self.cname, dim) stride = "%s.strides[%d]" % (self.cname, dim) suboffset = "%s.suboffsets[%d]" % (self.cname, dim) flag = get_memoryview_flag(access, packing) if flag in ("generic", "generic_contiguous"): # Note: we cannot do cast tricks to avoid stride multiplication # for generic_contiguous, as we may have to do (dtype *) # or (dtype **) arithmetic, we won't know which unless # we check suboffsets code.globalstate.use_utility_code(memviewslice_index_helpers) bufp = ('__pyx_memviewslice_index_full(%s, %s, %s, %s)' % (bufp, index, stride, suboffset)) elif flag == "indirect": bufp = "(%s + %s * %s)" % (bufp, index, stride) bufp = ("(*((char **) %s) + %s)" % (bufp, suboffset)) elif flag == "indirect_contiguous": # Note: we do char ** arithmetic bufp = "(*((char **) %s + %s) + %s)" % (bufp, index, suboffset) elif flag == "strided": bufp = "(%s + %s * %s)" % (bufp, index, stride) else: assert flag == 'contiguous', flag bufp = '((char *) (((%s *) %s) + %s))' % (type_decl, bufp, index) bufp = '( /* dim=%d */ %s )' % (dim, bufp) if cast_result: return "((%s *) %s)" % (type_decl, bufp) return bufp def generate_buffer_slice_code(self, code, indices, dst, have_gil, have_slices, directives): """ Slice a memoryviewslice. indices - list of index nodes. If not a SliceNode, or NoneNode, then it must be coercible to Py_ssize_t Simply call __pyx_memoryview_slice_memviewslice with the right arguments, unless the dimension is omitted or a bare ':', in which case we copy over the shape/strides/suboffsets attributes directly for that dimension. 
""" src = self.cname code.putln("%(dst)s.data = %(src)s.data;" % locals()) code.putln("%(dst)s.memview = %(src)s.memview;" % locals()) code.put_incref_memoryviewslice(dst) all_dimensions_direct = all(access == 'direct' for access, packing in self.type.axes) suboffset_dim_temp = [] def get_suboffset_dim(): # create global temp variable at request if not suboffset_dim_temp: suboffset_dim = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) code.putln("%s = -1;" % suboffset_dim) suboffset_dim_temp.append(suboffset_dim) return suboffset_dim_temp[0] dim = -1 new_ndim = 0 for index in indices: if index.is_none: # newaxis for attrib, value in [('shape', 1), ('strides', 0), ('suboffsets', -1)]: code.putln("%s.%s[%d] = %d;" % (dst, attrib, new_ndim, value)) new_ndim += 1 continue dim += 1 access, packing = self.type.axes[dim] error_goto = code.error_goto(index.pos) if isinstance(index, ExprNodes.SliceNode): # slice, unspecified dimension, or part of ellipsis d = dict(locals()) for s in "start stop step".split(): idx = getattr(index, s) have_idx = d['have_' + s] = not idx.is_none d[s] = idx.result() if have_idx else "0" if not (d['have_start'] or d['have_stop'] or d['have_step']): # full slice (:), simply copy over the extent, stride # and suboffset. Also update suboffset_dim if needed d['access'] = access util_name = "SimpleSlice" else: util_name = "ToughSlice" new_ndim += 1 else: # normal index idx = index.result() indirect = access != 'direct' if indirect: generic = access == 'full' if new_ndim != 0: return error(index.pos, "All preceding dimensions must be " "indexed and not sliced") d = dict( locals(), wraparound=int(directives['wraparound']), boundscheck=int(directives['boundscheck']) ) util_name = "SliceIndex" _, impl = TempitaUtilityCode.load_as_string(util_name, "MemoryView_C.c", context=d) code.put(impl) if suboffset_dim_temp: code.funcstate.release_temp(suboffset_dim_temp[0]) def empty_slice(pos): none = ExprNodes.NoneNode(pos) return ExprNodes.SliceNode(pos, start=none, stop=none, step=none) def unellipsify(indices, ndim): result = [] seen_ellipsis = False have_slices = False newaxes = [newaxis for newaxis in indices if newaxis.is_none] n_indices = len(indices) - len(newaxes) for index in indices: if isinstance(index, ExprNodes.EllipsisNode): have_slices = True full_slice = empty_slice(index.pos) if seen_ellipsis: result.append(full_slice) else: nslices = ndim - n_indices + 1 result.extend([full_slice] * nslices) seen_ellipsis = True else: have_slices = have_slices or index.is_slice or index.is_none result.append(index) result_length = len(result) - len(newaxes) if result_length < ndim: have_slices = True nslices = ndim - result_length result.extend([empty_slice(indices[-1].pos)] * nslices) return have_slices, result, newaxes def get_memoryview_flag(access, packing): if access == 'full' and packing in ('strided', 'follow'): return 'generic' elif access == 'full' and packing == 'contig': return 'generic_contiguous' elif access == 'ptr' and packing in ('strided', 'follow'): return 'indirect' elif access == 'ptr' and packing == 'contig': return 'indirect_contiguous' elif access == 'direct' and packing in ('strided', 'follow'): return 'strided' else: assert (access, packing) == ('direct', 'contig'), (access, packing) return 'contiguous' def get_is_contig_func_name(c_or_f, ndim): return "__pyx_memviewslice_is_%s_contig%d" % (c_or_f, ndim) def get_is_contig_utility(c_contig, ndim): C = dict(context, ndim=ndim) if c_contig: utility = 
load_memview_c_utility("MemviewSliceIsCContig", C, requires=[is_contig_utility]) else: utility = load_memview_c_utility("MemviewSliceIsFContig", C, requires=[is_contig_utility]) return utility def slice_iter(slice_type, slice_result, ndim, code): if slice_type.is_c_contig or slice_type.is_f_contig: return ContigSliceIter(slice_type, slice_result, ndim, code) else: return StridedSliceIter(slice_type, slice_result, ndim, code) class SliceIter(object): def __init__(self, slice_type, slice_result, ndim, code): self.slice_type = slice_type self.slice_result = slice_result self.code = code self.ndim = ndim class ContigSliceIter(SliceIter): def start_loops(self): code = self.code code.begin_block() type_decl = self.slice_type.dtype.empty_declaration_code() total_size = ' * '.join("%s.shape[%d]" % (self.slice_result, i) for i in range(self.ndim)) code.putln("Py_ssize_t __pyx_temp_extent = %s;" % total_size) code.putln("Py_ssize_t __pyx_temp_idx;") code.putln("%s *__pyx_temp_pointer = (%s *) %s.data;" % ( type_decl, type_decl, self.slice_result)) code.putln("for (__pyx_temp_idx = 0; " "__pyx_temp_idx < __pyx_temp_extent; " "__pyx_temp_idx++) {") return "__pyx_temp_pointer" def end_loops(self): self.code.putln("__pyx_temp_pointer += 1;") self.code.putln("}") self.code.end_block() class StridedSliceIter(SliceIter): def start_loops(self): code = self.code code.begin_block() for i in range(self.ndim): t = i, self.slice_result, i code.putln("Py_ssize_t __pyx_temp_extent_%d = %s.shape[%d];" % t) code.putln("Py_ssize_t __pyx_temp_stride_%d = %s.strides[%d];" % t) code.putln("char *__pyx_temp_pointer_%d;" % i) code.putln("Py_ssize_t __pyx_temp_idx_%d;" % i) code.putln("__pyx_temp_pointer_0 = %s.data;" % self.slice_result) for i in range(self.ndim): if i > 0: code.putln("__pyx_temp_pointer_%d = __pyx_temp_pointer_%d;" % (i, i - 1)) code.putln("for (__pyx_temp_idx_%d = 0; " "__pyx_temp_idx_%d < __pyx_temp_extent_%d; " "__pyx_temp_idx_%d++) {" % (i, i, i, i)) return "__pyx_temp_pointer_%d" % (self.ndim - 1) def end_loops(self): code = self.code for i in range(self.ndim - 1, -1, -1): code.putln("__pyx_temp_pointer_%d += __pyx_temp_stride_%d;" % (i, i)) code.putln("}") code.end_block() def copy_c_or_fortran_cname(memview): if memview.is_c_contig: c_or_f = 'c' else: c_or_f = 'f' return "__pyx_memoryview_copy_slice_%s_%s" % ( memview.specialization_suffix(), c_or_f) def get_copy_new_utility(pos, from_memview, to_memview): if from_memview.dtype != to_memview.dtype: return error(pos, "dtypes must be the same!") if len(from_memview.axes) != len(to_memview.axes): return error(pos, "number of dimensions must be same") if not (to_memview.is_c_contig or to_memview.is_f_contig): return error(pos, "to_memview must be c or f contiguous.") for (access, packing) in from_memview.axes: if access != 'direct': return error( pos, "cannot handle 'full' or 'ptr' access at this time.") if to_memview.is_c_contig: mode = 'c' contig_flag = memview_c_contiguous elif to_memview.is_f_contig: mode = 'fortran' contig_flag = memview_f_contiguous return load_memview_c_utility( "CopyContentsUtility", context=dict( context, mode=mode, dtype_decl=to_memview.dtype.empty_declaration_code(), contig_flag=contig_flag, ndim=to_memview.ndim, func_cname=copy_c_or_fortran_cname(to_memview), dtype_is_object=int(to_memview.dtype.is_pyobject)), requires=[copy_contents_new_utility]) def get_axes_specs(env, axes): ''' get_axes_specs(env, axes) -> list of (access, packing) specs for each axis. 
access is one of 'full', 'ptr' or 'direct' packing is one of 'contig', 'strided' or 'follow' ''' cythonscope = env.global_scope().context.cython_scope cythonscope.load_cythonscope() viewscope = cythonscope.viewscope access_specs = tuple([viewscope.lookup(name) for name in ('full', 'direct', 'ptr')]) packing_specs = tuple([viewscope.lookup(name) for name in ('contig', 'strided', 'follow')]) is_f_contig, is_c_contig = False, False default_access, default_packing = 'direct', 'strided' cf_access, cf_packing = default_access, 'follow' axes_specs = [] # analyse all axes. for idx, axis in enumerate(axes): if not axis.start.is_none: raise CompileError(axis.start.pos, START_ERR) if not axis.stop.is_none: raise CompileError(axis.stop.pos, STOP_ERR) if axis.step.is_none: axes_specs.append((default_access, default_packing)) elif isinstance(axis.step, IntNode): # the packing for the ::1 axis is contiguous, # all others are cf_packing. if axis.step.compile_time_value(env) != 1: raise CompileError(axis.step.pos, STEP_ERR) axes_specs.append((cf_access, 'cfcontig')) elif isinstance(axis.step, (NameNode, AttributeNode)): entry = _get_resolved_spec(env, axis.step) if entry.name in view_constant_to_access_packing: axes_specs.append(view_constant_to_access_packing[entry.name]) else: raise CompileError(axis.step.pos, INVALID_ERR) else: raise CompileError(axis.step.pos, INVALID_ERR) # First, find out if we have a ::1 somewhere contig_dim = 0 is_contig = False for idx, (access, packing) in enumerate(axes_specs): if packing == 'cfcontig': if is_contig: raise CompileError(axis.step.pos, BOTH_CF_ERR) contig_dim = idx axes_specs[idx] = (access, 'contig') is_contig = True if is_contig: # We have a ::1 somewhere, see if we're C or Fortran contiguous if contig_dim == len(axes) - 1: is_c_contig = True else: is_f_contig = True if contig_dim and not axes_specs[contig_dim - 1][0] in ('full', 'ptr'): raise CompileError(axes[contig_dim].pos, "Fortran contiguous specifier must follow an indirect dimension") if is_c_contig: # Contiguous in the last dimension, find the last indirect dimension contig_dim = -1 for idx, (access, packing) in enumerate(reversed(axes_specs)): if access in ('ptr', 'full'): contig_dim = len(axes) - idx - 1 # Replace 'strided' with 'follow' for any dimension following the last # indirect dimension, the first dimension or the dimension following # the ::1. 
# int[::indirect, ::1, :, :] # ^ ^ # int[::indirect, :, :, ::1] # ^ ^ start = contig_dim + 1 stop = len(axes) - is_c_contig for idx, (access, packing) in enumerate(axes_specs[start:stop]): idx = contig_dim + 1 + idx if access != 'direct': raise CompileError(axes[idx].pos, "Indirect dimension may not follow " "Fortran contiguous dimension") if packing == 'contig': raise CompileError(axes[idx].pos, "Dimension may not be contiguous") axes_specs[idx] = (access, cf_packing) if is_c_contig: # For C contiguity, we need to fix the 'contig' dimension # after the loop a, p = axes_specs[-1] axes_specs[-1] = a, 'contig' validate_axes_specs([axis.start.pos for axis in axes], axes_specs, is_c_contig, is_f_contig) return axes_specs def validate_axes(pos, axes): if len(axes) >= Options.buffer_max_dims: error(pos, "More dimensions than the maximum number" " of buffer dimensions were used.") return False return True def is_cf_contig(specs): is_c_contig = is_f_contig = False if len(specs) == 1 and specs == [('direct', 'contig')]: is_c_contig = True elif (specs[-1] == ('direct','contig') and all(axis == ('direct','follow') for axis in specs[:-1])): # c_contiguous: 'follow', 'follow', ..., 'follow', 'contig' is_c_contig = True elif (len(specs) > 1 and specs[0] == ('direct','contig') and all(axis == ('direct','follow') for axis in specs[1:])): # f_contiguous: 'contig', 'follow', 'follow', ..., 'follow' is_f_contig = True return is_c_contig, is_f_contig def get_mode(specs): is_c_contig, is_f_contig = is_cf_contig(specs) if is_c_contig: return 'c' elif is_f_contig: return 'fortran' for access, packing in specs: if access in ('ptr', 'full'): return 'full' return 'strided' view_constant_to_access_packing = { 'generic': ('full', 'strided'), 'strided': ('direct', 'strided'), 'indirect': ('ptr', 'strided'), 'generic_contiguous': ('full', 'contig'), 'contiguous': ('direct', 'contig'), 'indirect_contiguous': ('ptr', 'contig'), } def validate_axes_specs(positions, specs, is_c_contig, is_f_contig): packing_specs = ('contig', 'strided', 'follow') access_specs = ('direct', 'ptr', 'full') # is_c_contig, is_f_contig = is_cf_contig(specs) has_contig = has_follow = has_strided = has_generic_contig = False last_indirect_dimension = -1 for idx, (access, packing) in enumerate(specs): if access == 'ptr': last_indirect_dimension = idx for idx, (pos, (access, packing)) in enumerate(zip(positions, specs)): if not (access in access_specs and packing in packing_specs): raise CompileError(pos, "Invalid axes specification.") if packing == 'strided': has_strided = True elif packing == 'contig': if has_contig: raise CompileError(pos, "Only one direct contiguous " "axis may be specified.") valid_contig_dims = last_indirect_dimension + 1, len(specs) - 1 if idx not in valid_contig_dims and access != 'ptr': if last_indirect_dimension + 1 != len(specs) - 1: dims = "dimensions %d and %d" % valid_contig_dims else: dims = "dimension %d" % valid_contig_dims[0] raise CompileError(pos, "Only %s may be contiguous and direct" % dims) has_contig = access != 'ptr' elif packing == 'follow': if has_strided: raise CompileError(pos, "A memoryview cannot have both follow and strided axis specifiers.") if not (is_c_contig or is_f_contig): raise CompileError(pos, "Invalid use of the follow specifier.") if access in ('ptr', 'full'): has_strided = False def _get_resolved_spec(env, spec): # spec must be a NameNode or an AttributeNode if isinstance(spec, NameNode): return _resolve_NameNode(env, spec) elif isinstance(spec, AttributeNode): return 
_resolve_AttributeNode(env, spec) else: raise CompileError(spec.pos, INVALID_ERR) def _resolve_NameNode(env, node): try: resolved_name = env.lookup(node.name).name except AttributeError: raise CompileError(node.pos, INVALID_ERR) viewscope = env.global_scope().context.cython_scope.viewscope entry = viewscope.lookup(resolved_name) if entry is None: raise CompileError(node.pos, NOT_CIMPORTED_ERR) return entry def _resolve_AttributeNode(env, node): path = [] while isinstance(node, AttributeNode): path.insert(0, node.attribute) node = node.obj if isinstance(node, NameNode): path.insert(0, node.name) else: raise CompileError(node.pos, EXPR_ERR) modnames = path[:-1] # must be at least 1 module name, o/w not an AttributeNode. assert modnames scope = env for modname in modnames: mod = scope.lookup(modname) if not mod or not mod.as_module: raise CompileError( node.pos, "undeclared name not builtin: %s" % modname) scope = mod.as_module entry = scope.lookup(path[-1]) if not entry: raise CompileError(node.pos, "No such attribute '%s'" % path[-1]) return entry # ### Utility loading # def load_memview_cy_utility(util_code_name, context=None, **kwargs): return CythonUtilityCode.load(util_code_name, "MemoryView.pyx", context=context, **kwargs) def load_memview_c_utility(util_code_name, context=None, **kwargs): if context is None: return UtilityCode.load(util_code_name, "MemoryView_C.c", **kwargs) else: return TempitaUtilityCode.load(util_code_name, "MemoryView_C.c", context=context, **kwargs) def use_cython_array_utility_code(env): cython_scope = env.global_scope().context.cython_scope cython_scope.load_cythonscope() cython_scope.viewscope.lookup('array_cwrapper').used = True context = { 'memview_struct_name': memview_objstruct_cname, 'max_dims': Options.buffer_max_dims, 'memviewslice_name': memviewslice_cname, 'memslice_init': memslice_entry_init, } memviewslice_declare_code = load_memview_c_utility( "MemviewSliceStruct", proto_block='utility_code_proto_before_types', context=context, requires=[]) atomic_utility = load_memview_c_utility("Atomics", context, proto_block='utility_code_proto_before_types') memviewslice_init_code = load_memview_c_utility( "MemviewSliceInit", context=dict(context, BUF_MAX_NDIMS=Options.buffer_max_dims), requires=[memviewslice_declare_code, Buffer.acquire_utility_code, atomic_utility], ) memviewslice_index_helpers = load_memview_c_utility("MemviewSliceIndex") typeinfo_to_format_code = load_memview_cy_utility( "BufferFormatFromTypeInfo", requires=[Buffer._typeinfo_to_format_code]) is_contig_utility = load_memview_c_utility("MemviewSliceIsContig", context) overlapping_utility = load_memview_c_utility("OverlappingSlices", context) copy_contents_new_utility = load_memview_c_utility( "MemviewSliceCopyTemplate", context, requires=[], # require cython_array_utility_code ) view_utility_code = load_memview_cy_utility( "View.MemoryView", context=context, requires=[Buffer.GetAndReleaseBufferUtilityCode(), Buffer.buffer_struct_declare_code, Buffer.empty_bufstruct_utility, memviewslice_init_code, is_contig_utility, overlapping_utility, copy_contents_new_utility, ModuleNode.capsule_utility_code], ) view_utility_whitelist = ('array', 'memoryview', 'array_cwrapper', 'generic', 'strided', 'indirect', 'contiguous', 'indirect_contiguous') memviewslice_declare_code.requires.append(view_utility_code) copy_contents_new_utility.requires.append(view_utility_code) Cython-0.26.1/Cython/Compiler/Parsing.pxd0000664000175000017500000002120513143605603020767 0ustar stefanstefan00000000000000# We declare all 
of these here to type the first argument. from __future__ import absolute_import cimport cython from .Scanning cimport PyrexScanner ctypedef object (*p_sub_expr_func)(PyrexScanner obj) # entry points cpdef p_module(PyrexScanner s, pxd, full_module_name, ctx=*) cpdef p_code(PyrexScanner s, level= *, ctx=*) # internal parser states cdef p_ident(PyrexScanner s, message =*) cdef p_ident_list(PyrexScanner s) cdef tuple p_binop_operator(PyrexScanner s) cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr) cdef p_lambdef(PyrexScanner s, bint allow_conditional=*) cdef p_lambdef_nocond(PyrexScanner s) cdef p_test(PyrexScanner s) cdef p_test_nocond(PyrexScanner s) cdef p_or_test(PyrexScanner s) cdef p_rassoc_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_subexpr) cdef p_and_test(PyrexScanner s) cdef p_not_test(PyrexScanner s) cdef p_comparison(PyrexScanner s) cdef p_test_or_starred_expr(PyrexScanner s) cdef p_starred_expr(PyrexScanner s) cdef p_cascaded_cmp(PyrexScanner s) cdef p_cmp_op(PyrexScanner s) cdef p_bit_expr(PyrexScanner s) cdef p_xor_expr(PyrexScanner s) cdef p_and_expr(PyrexScanner s) cdef p_shift_expr(PyrexScanner s) cdef p_arith_expr(PyrexScanner s) cdef p_term(PyrexScanner s) cdef p_factor(PyrexScanner s) cdef _p_factor(PyrexScanner s) cdef p_typecast(PyrexScanner s) cdef p_sizeof(PyrexScanner s) cdef p_yield_expression(PyrexScanner s) cdef p_yield_statement(PyrexScanner s) cdef p_async_statement(PyrexScanner s, ctx, decorators) cdef p_power(PyrexScanner s) cdef p_new_expr(PyrexScanner s) cdef p_trailer(PyrexScanner s, node1) cdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *) cdef p_call_build_packed_args(pos, positional_args, keyword_args) cdef p_call(PyrexScanner s, function) cdef p_index(PyrexScanner s, base) cdef tuple p_subscript_list(PyrexScanner s) cdef p_subscript(PyrexScanner s) cdef p_slice_element(PyrexScanner s, follow_set) cdef expect_ellipsis(PyrexScanner s) cdef make_slice_nodes(pos, subscripts) cpdef make_slice_node(pos, start, stop = *, step = *) cdef p_atom(PyrexScanner s) @cython.locals(value=unicode) cdef p_int_literal(PyrexScanner s) cdef p_name(PyrexScanner s, name) cdef wrap_compile_time_constant(pos, value) cdef p_cat_string_literal(PyrexScanner s) cdef p_opt_string_literal(PyrexScanner s, required_type=*) cdef bint check_for_non_ascii_characters(unicode string) @cython.locals(systr=unicode, is_python3_source=bint, is_raw=bint) cdef p_string_literal(PyrexScanner s, kind_override=*) cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s) @cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4) cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw) @cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, quote_char=Py_UCS4, NO_CHAR=Py_UCS4) cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw) cdef p_list_maker(PyrexScanner s) cdef p_comp_iter(PyrexScanner s, body) cdef p_comp_for(PyrexScanner s, body) cdef p_comp_if(PyrexScanner s, body) cdef p_dict_or_set_maker(PyrexScanner s) cdef p_backquote_expr(PyrexScanner s) cdef p_simple_expr_list(PyrexScanner s, expr=*) cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*) cdef p_testlist(PyrexScanner s) cdef p_testlist_star_expr(PyrexScanner s) cdef p_testlist_comp(PyrexScanner s) cdef p_genexp(PyrexScanner s, expr) #------------------------------------------------------- # # Statements # #------------------------------------------------------- cdef p_global_statement(PyrexScanner s) cdef 
p_nonlocal_statement(PyrexScanner s) cdef p_expression_or_assignment(PyrexScanner s) cdef p_print_statement(PyrexScanner s) cdef p_exec_statement(PyrexScanner s) cdef p_del_statement(PyrexScanner s) cdef p_pass_statement(PyrexScanner s, bint with_newline = *) cdef p_break_statement(PyrexScanner s) cdef p_continue_statement(PyrexScanner s) cdef p_return_statement(PyrexScanner s) cdef p_raise_statement(PyrexScanner s) cdef p_import_statement(PyrexScanner s) cdef p_from_import_statement(PyrexScanner s, bint first_statement = *) cdef p_imported_name(PyrexScanner s, bint is_cimport) cdef p_dotted_name(PyrexScanner s, bint as_allowed) cdef p_as_name(PyrexScanner s) cdef p_assert_statement(PyrexScanner s) cdef p_if_statement(PyrexScanner s) cdef p_if_clause(PyrexScanner s) cdef p_else_clause(PyrexScanner s) cdef p_while_statement(PyrexScanner s) cdef p_for_statement(PyrexScanner s, bint is_async=*) cdef dict p_for_bounds(PyrexScanner s, bint allow_testlist=*, bint is_async=*) cdef p_for_from_relation(PyrexScanner s) cdef p_for_from_step(PyrexScanner s) cdef p_target(PyrexScanner s, terminator) cdef p_for_target(PyrexScanner s) cdef p_for_iterator(PyrexScanner s, bint allow_testlist=*, bint is_async=*) cdef p_try_statement(PyrexScanner s) cdef p_except_clause(PyrexScanner s) cdef p_include_statement(PyrexScanner s, ctx) cdef p_with_statement(PyrexScanner s) cdef p_with_items(PyrexScanner s, bint is_async=*) cdef p_with_template(PyrexScanner s) cdef p_simple_statement(PyrexScanner s, bint first_statement = *) cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *) cdef p_compile_time_expr(PyrexScanner s) cdef p_DEF_statement(PyrexScanner s) cdef p_IF_statement(PyrexScanner s, ctx) cdef p_statement(PyrexScanner s, ctx, bint first_statement = *) cdef p_statement_list(PyrexScanner s, ctx, bint first_statement = *) cdef p_suite(PyrexScanner s, ctx = *) cdef tuple p_suite_with_docstring(PyrexScanner s, ctx, bint with_doc_only=*) cdef tuple _extract_docstring(node) cdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, templates = *) cpdef p_c_base_type(PyrexScanner s, bint self_flag = *, bint nonempty = *, templates = *) cdef p_calling_convention(PyrexScanner s) cdef p_c_complex_base_type(PyrexScanner s, templates = *) cdef p_c_simple_base_type(PyrexScanner s, bint self_flag, bint nonempty, templates = *) cdef p_buffer_or_template(PyrexScanner s, base_type_node, templates) cdef p_bracketed_base_type(PyrexScanner s, base_type_node, nonempty, empty) cdef is_memoryviewslice_access(PyrexScanner s) cdef p_memoryviewslice_access(PyrexScanner s, base_type_node) cdef bint looking_at_name(PyrexScanner s) except -2 cdef object looking_at_expr(PyrexScanner s)# except -2 cdef bint looking_at_base_type(PyrexScanner s) except -2 cdef bint looking_at_dotted_name(PyrexScanner s) except -2 cdef bint looking_at_call(PyrexScanner s) except -2 cdef p_sign_and_longness(PyrexScanner s) cdef p_opt_cname(PyrexScanner s) cpdef p_c_declarator(PyrexScanner s, ctx = *, bint empty = *, bint is_type = *, bint cmethod_flag = *, bint assignable = *, bint nonempty = *, bint calling_convention_allowed = *) cdef p_c_array_declarator(PyrexScanner s, base) cdef p_c_func_declarator(PyrexScanner s, pos, ctx, base, bint cmethod_flag) cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint cmethod_flag, bint assignable, bint nonempty) cdef p_nogil(PyrexScanner s) cdef p_with_gil(PyrexScanner s) cdef p_exception_value_clause(PyrexScanner s) cpdef p_c_arg_list(PyrexScanner s, ctx = *, bint 
in_pyfunc = *, bint cmethod_flag = *, bint nonempty_declarators = *, bint kw_only = *, bint annotated = *) cdef p_optional_ellipsis(PyrexScanner s) cdef p_c_arg_decl(PyrexScanner s, ctx, in_pyfunc, bint cmethod_flag = *, bint nonempty = *, bint kw_only = *, bint annotated = *) cdef p_api(PyrexScanner s) cdef p_cdef_statement(PyrexScanner s, ctx) cdef p_cdef_block(PyrexScanner s, ctx) cdef p_cdef_extern_block(PyrexScanner s, pos, ctx) cdef p_c_enum_definition(PyrexScanner s, pos, ctx) cdef p_c_enum_line(PyrexScanner s, ctx, list items) cdef p_c_enum_item(PyrexScanner s, ctx, list items) cdef p_c_struct_or_union_definition(PyrexScanner s, pos, ctx) cdef p_fused_definition(PyrexScanner s, pos, ctx) cdef p_struct_enum(PyrexScanner s, pos, ctx) cdef p_visibility(PyrexScanner s, prev_visibility) cdef p_c_modifiers(PyrexScanner s) cdef p_c_func_or_var_declaration(PyrexScanner s, pos, ctx) cdef p_ctypedef_statement(PyrexScanner s, ctx) cdef p_decorators(PyrexScanner s) cdef p_def_statement(PyrexScanner s, list decorators=*, bint is_async_def=*) cdef p_varargslist(PyrexScanner s, terminator=*, bint annotated = *) cdef p_py_arg_decl(PyrexScanner s, bint annotated = *) cdef p_class_statement(PyrexScanner s, decorators) cdef p_c_class_definition(PyrexScanner s, pos, ctx) cdef p_c_class_options(PyrexScanner s) cdef p_property_decl(PyrexScanner s) cdef p_doc_string(PyrexScanner s) cdef p_ignorable_statement(PyrexScanner s) cdef p_compiler_directive_comments(PyrexScanner s) cdef p_template_definition(PyrexScanner s) cdef p_cpp_class_definition(PyrexScanner s, pos, ctx) cdef p_cpp_class_attribute(PyrexScanner s, ctx) Cython-0.26.1/Cython/Compiler/Code.py0000664000175000017500000025707213143605603020110 0ustar stefanstefan00000000000000# cython: language_level = 2 # # Code output module # from __future__ import absolute_import import cython cython.declare(os=object, re=object, operator=object, Naming=object, Options=object, StringEncoding=object, Utils=object, SourceDescriptor=object, StringIOTree=object, DebugFlags=object, basestring=object) import os import re import shutil import sys import operator import textwrap from string import Template from functools import partial from contextlib import closing from collections import defaultdict try: import hashlib except ImportError: import md5 as hashlib from . import Naming from . import Options from . import DebugFlags from . import StringEncoding from . import Version from .. import Utils from .Scanning import SourceDescriptor from ..StringIOTree import StringIOTree try: from __builtin__ import basestring except ImportError: from builtins import str as basestring KEYWORDS_MUST_BE_BYTES = sys.version_info < (2, 7) non_portable_builtins_map = { # builtins that have different names in different Python versions 'bytes' : ('PY_MAJOR_VERSION < 3', 'str'), 'unicode' : ('PY_MAJOR_VERSION >= 3', 'str'), 'basestring' : ('PY_MAJOR_VERSION >= 3', 'str'), 'xrange' : ('PY_MAJOR_VERSION >= 3', 'range'), 'raw_input' : ('PY_MAJOR_VERSION >= 3', 'input'), } basicsize_builtins_map = { # builtins whose type has a different tp_basicsize than sizeof(...) 'PyTypeObject': 'PyHeapTypeObject', } uncachable_builtins = [ # builtin names that cannot be cached because they may or may not # be available at import time 'WindowsError', '_', # e.g. 
gettext ] special_py_methods = set([ '__cinit__', '__dealloc__', '__richcmp__', '__next__', '__await__', '__aiter__', '__anext__', '__getreadbuffer__', '__getwritebuffer__', '__getsegcount__', '__getcharbuffer__', '__getbuffer__', '__releasebuffer__' ]) modifier_output_mapper = { 'inline': 'CYTHON_INLINE' }.get is_self_assignment = re.compile(r" *(\w+) = (\1);\s*$").match def get_utility_dir(): # make this a function and not global variables: # http://trac.cython.org/cython_trac/ticket/475 Cython_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) return os.path.join(Cython_dir, "Utility") class UtilityCodeBase(object): """ Support for loading utility code from a file. Code sections in the file can be specified as follows: ##### MyUtility.proto ##### [proto declarations] ##### MyUtility.init ##### [code run at module initialization] ##### MyUtility ##### #@requires: MyOtherUtility #@substitute: naming [definitions] for prototypes and implementation respectively. For non-python or -cython files backslashes should be used instead. 5 to 30 comment characters may be used on either side. If the @cname decorator is not used and this is a CythonUtilityCode, one should pass in the 'name' keyword argument to be used for name mangling of such entries. """ is_cython_utility = False requires = None _utility_cache = {} @classmethod def _add_utility(cls, utility, type, lines, begin_lineno, tags=None): if utility is None: return code = '\n'.join(lines) if tags and 'substitute' in tags and tags['substitute'] == set(['naming']): del tags['substitute'] try: code = Template(code).substitute(vars(Naming)) except (KeyError, ValueError) as e: raise RuntimeError("Error parsing templated utility code of type '%s' at line %d: %s" % ( type, begin_lineno, e)) # remember correct line numbers at least until after templating code = '\n' * begin_lineno + code if type == 'proto': utility[0] = code elif type.startswith('proto.'): utility[0] = code utility[1] = type[6:] elif type == 'impl': utility[2] = code else: all_tags = utility[3] if KEYWORDS_MUST_BE_BYTES: type = type.encode('ASCII') all_tags[type] = code if tags: all_tags = utility[3] for name, values in tags.items(): if KEYWORDS_MUST_BE_BYTES: name = name.encode('ASCII') all_tags.setdefault(name, set()).update(values) @classmethod def load_utilities_from_file(cls, path): utilities = cls._utility_cache.get(path) if utilities: return utilities filename = os.path.join(get_utility_dir(), path) _, ext = os.path.splitext(path) if ext in ('.pyx', '.py', '.pxd', '.pxi'): comment = '#' strip_comments = partial(re.compile(r'^\s*#.*').sub, '') rstrip = StringEncoding._unicode.rstrip else: comment = '/' strip_comments = partial(re.compile(r'^\s*//.*|/\*[^*]*\*/').sub, '') rstrip = partial(re.compile(r'\s+(\\?)$').sub, r'\1') match_special = re.compile( (r'^%(C)s{5,30}\s*(?P(?:\w|\.)+)\s*%(C)s{5,30}|' r'^%(C)s+@(?P\w+)\s*:\s*(?P(?:\w|[.:])+)') % {'C': comment}).match match_type = re.compile('(.+)[.](proto(?:[.]\S+)?|impl|init|cleanup)$').match with closing(Utils.open_source_file(filename, encoding='UTF-8')) as f: all_lines = f.readlines() utilities = defaultdict(lambda: [None, None, None, {}]) lines = [] tags = defaultdict(set) utility = type = None begin_lineno = 0 for lineno, line in enumerate(all_lines): m = match_special(line) if m: if m.group('name'): cls._add_utility(utility, type, lines, begin_lineno, tags) begin_lineno = lineno + 1 del lines[:] tags.clear() name = m.group('name') mtype = match_type(name) if mtype: name, type = mtype.groups() else: type = 
'impl' utility = utilities[name] else: tags[m.group('tag')].add(m.group('value')) lines.append('') # keep line number correct else: lines.append(rstrip(strip_comments(line))) if utility is None: raise ValueError("Empty utility code file") # Don't forget to add the last utility code cls._add_utility(utility, type, lines, begin_lineno, tags) utilities = dict(utilities) # un-defaultdict-ify cls._utility_cache[path] = utilities return utilities @classmethod def load(cls, util_code_name, from_file=None, **kwargs): """ Load utility code from a file specified by from_file (relative to Cython/Utility) and name util_code_name. If from_file is not given, load it from the file util_code_name.*. There should be only one file matched by this pattern. """ if '::' in util_code_name: from_file, util_code_name = util_code_name.rsplit('::', 1) if not from_file: utility_dir = get_utility_dir() prefix = util_code_name + '.' try: listing = os.listdir(utility_dir) except OSError: # XXX the code below assumes as 'zipimport.zipimporter' instance # XXX should be easy to generalize, but too lazy right now to write it import zipfile global __loader__ loader = __loader__ archive = loader.archive with closing(zipfile.ZipFile(archive)) as fileobj: listing = [os.path.basename(name) for name in fileobj.namelist() if os.path.join(archive, name).startswith(utility_dir)] files = [filename for filename in listing if filename.startswith(prefix)] if not files: raise ValueError("No match found for utility code " + util_code_name) if len(files) > 1: raise ValueError("More than one filename match found for utility code " + util_code_name) from_file = files[0] utilities = cls.load_utilities_from_file(from_file) proto, proto_block, impl, tags = utilities[util_code_name] if tags: orig_kwargs = kwargs.copy() for name, values in tags.items(): if name in kwargs: continue # only pass lists when we have to: most argument expect one value or None if name == 'requires': if orig_kwargs: values = [cls.load(dep, from_file, **orig_kwargs) for dep in sorted(values)] else: # dependencies are rarely unique, so use load_cached() when we can values = [cls.load_cached(dep, from_file) for dep in sorted(values)] elif not values: values = None elif len(values) == 1: values = values[0] kwargs[name] = values if proto is not None: kwargs['proto'] = proto if proto_block is not None: kwargs['proto_block'] = proto_block if impl is not None: kwargs['impl'] = impl if 'name' not in kwargs: kwargs['name'] = util_code_name if 'file' not in kwargs and from_file: kwargs['file'] = from_file return cls(**kwargs) @classmethod def load_cached(cls, utility_code_name, from_file=None, __cache={}): """ Calls .load(), but using a per-type cache based on utility name and file name. """ key = (cls, from_file, utility_code_name) try: return __cache[key] except KeyError: pass code = __cache[key] = cls.load(utility_code_name, from_file) return code @classmethod def load_as_string(cls, util_code_name, from_file=None, **kwargs): """ Load a utility code as a string. Returns (proto, implementation) """ util = cls.load(util_code_name, from_file, **kwargs) proto, impl = util.proto, util.impl return util.format_code(proto), util.format_code(impl) def format_code(self, code_string, replace_empty_lines=re.compile(r'\n\n+').sub): """ Format a code section for output. 
""" if code_string: code_string = replace_empty_lines('\n', code_string.strip()) + '\n\n' return code_string def __str__(self): return "<%s(%s)>" % (type(self).__name__, self.name) def get_tree(self, **kwargs): pass class UtilityCode(UtilityCodeBase): """ Stores utility code to add during code generation. See GlobalState.put_utility_code. hashes/equals by instance proto C prototypes impl implemenation code init code to call on module initialization requires utility code dependencies proto_block the place in the resulting file where the prototype should end up name name of the utility code (or None) file filename of the utility code file this utility was loaded from (or None) """ def __init__(self, proto=None, impl=None, init=None, cleanup=None, requires=None, proto_block='utility_code_proto', name=None, file=None): # proto_block: Which code block to dump prototype in. See GlobalState. self.proto = proto self.impl = impl self.init = init self.cleanup = cleanup self.requires = requires self._cache = {} self.specialize_list = [] self.proto_block = proto_block self.name = name self.file = file def __hash__(self): return hash((self.proto, self.impl)) def __eq__(self, other): if self is other: return True self_type, other_type = type(self), type(other) if self_type is not other_type and not (isinstance(other, self_type) or isinstance(self, other_type)): return False self_proto = getattr(self, 'proto', None) other_proto = getattr(other, 'proto', None) return (self_proto, self.impl) == (other_proto, other.impl) def none_or_sub(self, s, context): """ Format a string in this utility code with context. If None, do nothing. """ if s is None: return None return s % context def specialize(self, pyrex_type=None, **data): # Dicts aren't hashable... if pyrex_type is not None: data['type'] = pyrex_type.empty_declaration_code() data['type_name'] = pyrex_type.specialization_name() key = tuple(sorted(data.items())) try: return self._cache[key] except KeyError: if self.requires is None: requires = None else: requires = [r.specialize(data) for r in self.requires] s = self._cache[key] = UtilityCode( self.none_or_sub(self.proto, data), self.none_or_sub(self.impl, data), self.none_or_sub(self.init, data), self.none_or_sub(self.cleanup, data), requires, self.proto_block) self.specialize_list.append(s) return s def inject_string_constants(self, impl, output): """Replace 'PYIDENT("xyz")' by a constant Python identifier cname. """ if 'PYIDENT(' not in impl: return False, impl replacements = {} def externalise(matchobj): name = matchobj.group(1) try: cname = replacements[name] except KeyError: cname = replacements[name] = output.get_interned_identifier( StringEncoding.EncodedString(name)).cname return cname impl = re.sub(r'PYIDENT\("([^"]+)"\)', externalise, impl) assert 'PYIDENT(' not in impl return bool(replacements), impl def inject_unbound_methods(self, impl, output): """Replace 'UNBOUND_METHOD(type, "name")' by a constant Python identifier cname. 
""" if 'CALL_UNBOUND_METHOD(' not in impl: return False, impl utility_code = set() def externalise(matchobj): type_cname, method_name, args = matchobj.groups() args = [arg.strip() for arg in args[1:].split(',')] if len(args) == 1: call = '__Pyx_CallUnboundCMethod0' utility_code.add("CallUnboundCMethod0") elif len(args) == 2: call = '__Pyx_CallUnboundCMethod1' utility_code.add("CallUnboundCMethod1") else: assert False, "CALL_UNBOUND_METHOD() requires 1 or 2 call arguments" cname = output.get_cached_unbound_method(type_cname, method_name, len(args)) return '%s(&%s, %s)' % (call, cname, ', '.join(args)) impl = re.sub(r'CALL_UNBOUND_METHOD\(([a-zA-Z_]+),\s*"([^"]+)"((?:,\s*[^),]+)+)\)', externalise, impl) assert 'CALL_UNBOUND_METHOD(' not in impl for helper in sorted(utility_code): output.use_utility_code(UtilityCode.load_cached(helper, "ObjectHandling.c")) return bool(utility_code), impl def wrap_c_strings(self, impl): """Replace CSTRING('''xyz''') by a C compatible string """ if 'CSTRING(' not in impl: return impl def split_string(matchobj): content = matchobj.group(1).replace('"', '\042') return ''.join( '"%s\\n"\n' % line if not line.endswith('\\') or line.endswith('\\\\') else '"%s"\n' % line[:-1] for line in content.splitlines()) impl = re.sub(r'CSTRING\(\s*"""([^"]*(?:"[^"]+)*)"""\s*\)', split_string, impl) assert 'CSTRING(' not in impl return impl def put_code(self, output): if self.requires: for dependency in self.requires: output.use_utility_code(dependency) if self.proto: writer = output[self.proto_block] writer.putln("/* %s.proto */" % self.name) writer.put_or_include( self.format_code(self.proto), '%s_proto' % self.name) if self.impl: impl = self.format_code(self.wrap_c_strings(self.impl)) is_specialised1, impl = self.inject_string_constants(impl, output) is_specialised2, impl = self.inject_unbound_methods(impl, output) writer = output['utility_code_def'] writer.putln("/* %s */" % self.name) if not (is_specialised1 or is_specialised2): # no module specific adaptations => can be reused writer.put_or_include(impl, '%s_impl' % self.name) else: writer.put(impl) if self.init: writer = output['init_globals'] writer.putln("/* %s.init */" % self.name) if isinstance(self.init, basestring): writer.put(self.format_code(self.init)) else: self.init(writer, output.module_pos) writer.putln(writer.error_goto_if_PyErr(output.module_pos)) writer.putln() if self.cleanup and Options.generate_cleanup_code: writer = output['cleanup_globals'] writer.putln("/* %s.cleanup */" % self.name) if isinstance(self.cleanup, basestring): writer.put_or_include( self.format_code(self.cleanup), '%s_cleanup' % self.name) else: self.cleanup(writer, output.module_pos) def sub_tempita(s, context, file=None, name=None): "Run tempita on string s with given context." 
if not s: return None if file: context['__name'] = "%s:%s" % (file, name) elif name: context['__name'] = name from ..Tempita import sub return sub(s, **context) class TempitaUtilityCode(UtilityCode): def __init__(self, name=None, proto=None, impl=None, init=None, file=None, context=None, **kwargs): if context is None: context = {} proto = sub_tempita(proto, context, file, name) impl = sub_tempita(impl, context, file, name) init = sub_tempita(init, context, file, name) super(TempitaUtilityCode, self).__init__( proto, impl, init=init, name=name, file=file, **kwargs) @classmethod def load_cached(cls, utility_code_name, from_file=None, context=None, __cache={}): context_key = tuple(sorted(context.items())) if context else None assert hash(context_key) is not None # raise TypeError if not hashable key = (cls, from_file, utility_code_name, context_key) try: return __cache[key] except KeyError: pass code = __cache[key] = cls.load(utility_code_name, from_file, context=context) return code def none_or_sub(self, s, context): """ Format a string in this utility code with context. If None, do nothing. """ if s is None: return None return sub_tempita(s, context, self.file, self.name) class LazyUtilityCode(UtilityCodeBase): """ Utility code that calls a callback with the root code writer when available. Useful when you only have 'env' but not 'code'. """ __name__ = '' def __init__(self, callback): self.callback = callback def put_code(self, globalstate): utility = self.callback(globalstate.rootwriter) globalstate.use_utility_code(utility) class FunctionState(object): # return_label string function return point label # error_label string error catch point label # continue_label string loop continue point label # break_label string loop break point label # return_from_error_cleanup_label string # label_counter integer counter for naming labels # in_try_finally boolean inside try of try...finally # exc_vars (string * 3) exception variables for reraise, or None # can_trace boolean line tracing is supported in the current context # scope Scope the scope object of the current function # Not used for now, perhaps later def __init__(self, owner, names_taken=set(), scope=None): self.names_taken = names_taken self.owner = owner self.scope = scope self.error_label = None self.label_counter = 0 self.labels_used = set() self.return_label = self.new_label() self.new_error_label() self.continue_label = None self.break_label = None self.yield_labels = [] self.in_try_finally = 0 self.exc_vars = None self.can_trace = False self.gil_owned = True self.temps_allocated = [] # of (name, type, manage_ref, static) self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status self.temps_used_type = {} # name -> (type, manage_ref) self.temp_counter = 0 self.closure_temps = None # This is used to collect temporaries, useful to find out which temps # need to be privatized in parallel sections self.collect_temps_stack = [] # This is used for the error indicator, which needs to be local to the # function. It used to be global, which relies on the GIL being held. # However, exceptions may need to be propagated through 'nogil' # sections, in which case we introduce a race condition. 
self.should_declare_error_indicator = False self.uses_error_indicator = False # labels def new_label(self, name=None): n = self.label_counter self.label_counter = n + 1 label = "%s%d" % (Naming.label_prefix, n) if name is not None: label += '_' + name return label def new_yield_label(self): label = self.new_label('resume_from_yield') num_and_label = (len(self.yield_labels) + 1, label) self.yield_labels.append(num_and_label) return num_and_label def new_error_label(self): old_err_lbl = self.error_label self.error_label = self.new_label('error') return old_err_lbl def get_loop_labels(self): return ( self.continue_label, self.break_label) def set_loop_labels(self, labels): (self.continue_label, self.break_label) = labels def new_loop_labels(self): old_labels = self.get_loop_labels() self.set_loop_labels( (self.new_label("continue"), self.new_label("break"))) return old_labels def get_all_labels(self): return ( self.continue_label, self.break_label, self.return_label, self.error_label) def set_all_labels(self, labels): (self.continue_label, self.break_label, self.return_label, self.error_label) = labels def all_new_labels(self): old_labels = self.get_all_labels() new_labels = [] for old_label, name in zip(old_labels, ['continue', 'break', 'return', 'error']): if old_label: new_labels.append(self.new_label(name)) else: new_labels.append(old_label) self.set_all_labels(new_labels) return old_labels def use_label(self, lbl): self.labels_used.add(lbl) def label_used(self, lbl): return lbl in self.labels_used # temp handling def allocate_temp(self, type, manage_ref, static=False): """ Allocates a temporary (which may create a new one or get a previously allocated and released one of the same type). Type is simply registered and handed back, but will usually be a PyrexType. If type.is_pyobject, manage_ref comes into play. If manage_ref is set to True, the temp will be decref-ed on return statements and in exception handling clauses. Otherwise the caller has to deal with any reference counting of the variable. If not type.is_pyobject, then manage_ref will be ignored, but it still has to be passed. It is recommended to pass False by convention if it is known that type will never be a Python object. static=True marks the temporary declaration with "static". This is only used when allocating backing store for a module-level C array literals. A C string referring to the variable is returned. """ if type.is_const and not type.is_reference: type = type.const_base_type elif type.is_reference and not type.is_fake_reference: type = type.ref_base_type if not type.is_pyobject and not type.is_memoryviewslice: # Make manage_ref canonical, so that manage_ref will always mean # a decref is needed. manage_ref = False freelist = self.temps_free.get((type, manage_ref)) if freelist is not None and freelist[0]: result = freelist[0].pop() freelist[1].remove(result) else: while True: self.temp_counter += 1 result = "%s%d" % (Naming.codewriter_temp_prefix, self.temp_counter) if result not in self.names_taken: break self.temps_allocated.append((result, type, manage_ref, static)) self.temps_used_type[result] = (type, manage_ref) if DebugFlags.debug_temp_code_comments: self.owner.putln("/* %s allocated (%s) */" % (result, type)) if self.collect_temps_stack: self.collect_temps_stack[-1].add((result, type)) return result def release_temp(self, name): """ Releases a temporary so that it can be reused by other code needing a temp of the same type. 
""" type, manage_ref = self.temps_used_type[name] freelist = self.temps_free.get((type, manage_ref)) if freelist is None: freelist = ([], set()) # keep order in list and make lookups in set fast self.temps_free[(type, manage_ref)] = freelist if name in freelist[1]: raise RuntimeError("Temp %s freed twice!" % name) freelist[0].append(name) freelist[1].add(name) if DebugFlags.debug_temp_code_comments: self.owner.putln("/* %s released */" % name) def temps_in_use(self): """Return a list of (cname,type,manage_ref) tuples of temp names and their type that are currently in use. """ used = [] for name, type, manage_ref, static in self.temps_allocated: freelist = self.temps_free.get((type, manage_ref)) if freelist is None or name not in freelist[1]: used.append((name, type, manage_ref and type.is_pyobject)) return used def temps_holding_reference(self): """Return a list of (cname,type) tuples of temp names and their type that are currently in use. This includes only temps of a Python object type which owns its reference. """ return [(name, type) for name, type, manage_ref in self.temps_in_use() if manage_ref and type.is_pyobject] def all_managed_temps(self): """Return a list of (cname, type) tuples of refcount-managed Python objects. """ return [(cname, type) for cname, type, manage_ref, static in self.temps_allocated if manage_ref] def all_free_managed_temps(self): """Return a list of (cname, type) tuples of refcount-managed Python objects that are not currently in use. This is used by try-except and try-finally blocks to clean up temps in the error case. """ return [(cname, type) for (type, manage_ref), freelist in self.temps_free.items() if manage_ref for cname in freelist[0]] def start_collecting_temps(self): """ Useful to find out which temps were used in a code block """ self.collect_temps_stack.append(set()) def stop_collecting_temps(self): return self.collect_temps_stack.pop() def init_closure_temps(self, scope): self.closure_temps = ClosureTempAllocator(scope) class NumConst(object): """Global info about a Python number constant held by GlobalState. cname string value string py_type string int, long, float value_code string evaluation code if different from value """ def __init__(self, cname, value, py_type, value_code=None): self.cname = cname self.value = value self.py_type = py_type self.value_code = value_code or value class PyObjectConst(object): """Global info about a generic constant held by GlobalState. """ # cname string # type PyrexType def __init__(self, cname, type): self.cname = cname self.type = type cython.declare(possible_unicode_identifier=object, possible_bytes_identifier=object, replace_identifier=object, find_alphanums=object) possible_unicode_identifier = re.compile(br"(?![0-9])\w+$".decode('ascii'), re.U).match possible_bytes_identifier = re.compile(r"(?![0-9])\w+$".encode('ASCII')).match replace_identifier = re.compile(r'[^a-zA-Z0-9_]+').sub find_alphanums = re.compile('([a-zA-Z0-9]+)').findall class StringConst(object): """Global info about a C string constant held by GlobalState. 
""" # cname string # text EncodedString or BytesLiteral # py_strings {(identifier, encoding) : PyStringConst} def __init__(self, cname, text, byte_string): self.cname = cname self.text = text self.escaped_value = StringEncoding.escape_byte_string(byte_string) self.py_strings = None self.py_versions = [] def add_py_version(self, version): if not version: self.py_versions = [2, 3] elif version not in self.py_versions: self.py_versions.append(version) def get_py_string_const(self, encoding, identifier=None, is_str=False, py3str_cstring=None): py_strings = self.py_strings text = self.text is_str = bool(identifier or is_str) is_unicode = encoding is None and not is_str if encoding is None: # unicode string encoding_key = None else: # bytes or str encoding = encoding.lower() if encoding in ('utf8', 'utf-8', 'ascii', 'usascii', 'us-ascii'): encoding = None encoding_key = None else: encoding_key = ''.join(find_alphanums(encoding)) key = (is_str, is_unicode, encoding_key, py3str_cstring) if py_strings is not None: try: return py_strings[key] except KeyError: pass else: self.py_strings = {} if identifier: intern = True elif identifier is None: if isinstance(text, bytes): intern = bool(possible_bytes_identifier(text)) else: intern = bool(possible_unicode_identifier(text)) else: intern = False if intern: prefix = Naming.interned_prefixes['str'] else: prefix = Naming.py_const_prefix if encoding_key: encoding_prefix = '_%s' % encoding_key else: encoding_prefix = '' pystring_cname = "%s%s%s_%s" % ( prefix, (is_str and 's') or (is_unicode and 'u') or 'b', encoding_prefix, self.cname[len(Naming.const_prefix):]) py_string = PyStringConst( pystring_cname, encoding, is_unicode, is_str, py3str_cstring, intern) self.py_strings[key] = py_string return py_string class PyStringConst(object): """Global info about a Python string constant held by GlobalState. """ # cname string # py3str_cstring string # encoding string # intern boolean # is_unicode boolean # is_str boolean def __init__(self, cname, encoding, is_unicode, is_str=False, py3str_cstring=None, intern=False): self.cname = cname self.py3str_cstring = py3str_cstring self.encoding = encoding self.is_str = is_str self.is_unicode = is_unicode self.intern = intern def __lt__(self, other): return self.cname < other.cname class GlobalState(object): # filename_table {string : int} for finding filename table indexes # filename_list [string] filenames in filename table order # input_file_contents dict contents (=list of lines) of any file that was used as input # to create this output C code. This is # used to annotate the comments. # # utility_codes set IDs of used utility code (to avoid reinsertion) # # declared_cnames {string:Entry} used in a transition phase to merge pxd-declared # constants etc. into the pyx-declared ones (i.e, # check if constants are already added). # In time, hopefully the literals etc. will be # supplied directly instead. # # const_cnames_used dict global counter for unique constant identifiers # # parts {string:CCodeWriter} # interned_strings # consts # interned_nums # directives set Temporary variable used to track # the current set of directives in the code generation # process. directives = {} code_layout = [ 'h_code', 'filename_table', 'utility_code_proto_before_types', 'numeric_typedefs', # Let these detailed individual parts stay!, 'complex_type_declarations', # as the proper solution is to make a full DAG... 
'type_declarations', # More coarse-grained blocks would simply hide 'utility_code_proto', # the ugliness, not fix it 'module_declarations', 'typeinfo', 'before_global_var', 'global_var', 'string_decls', 'decls', 'all_the_rest', 'pystring_table', 'cached_builtins', 'cached_constants', 'init_globals', 'init_module', 'cleanup_globals', 'cleanup_module', 'main_method', 'utility_code_def', 'end' ] def __init__(self, writer, module_node, code_config, common_utility_include_dir=None): self.filename_table = {} self.filename_list = [] self.input_file_contents = {} self.utility_codes = set() self.declared_cnames = {} self.in_utility_code_generation = False self.code_config = code_config self.common_utility_include_dir = common_utility_include_dir self.parts = {} self.module_node = module_node # because some utility code generation needs it # (generating backwards-compatible Get/ReleaseBuffer self.const_cnames_used = {} self.string_const_index = {} self.pyunicode_ptr_const_index = {} self.num_const_index = {} self.py_constants = [] self.cached_cmethods = {} writer.set_global_state(self) self.rootwriter = writer def initialize_main_c_code(self): rootwriter = self.rootwriter for part in self.code_layout: self.parts[part] = rootwriter.insertion_point() if not Options.cache_builtins: del self.parts['cached_builtins'] else: w = self.parts['cached_builtins'] w.enter_cfunc_scope() w.putln("static int __Pyx_InitCachedBuiltins(void) {") w = self.parts['cached_constants'] w.enter_cfunc_scope() w.putln("") w.putln("static int __Pyx_InitCachedConstants(void) {") w.put_declare_refcount_context() w.put_setup_refcount_context("__Pyx_InitCachedConstants") w = self.parts['init_globals'] w.enter_cfunc_scope() w.putln("") w.putln("static int __Pyx_InitGlobals(void) {") if not Options.generate_cleanup_code: del self.parts['cleanup_globals'] else: w = self.parts['cleanup_globals'] w.enter_cfunc_scope() w.putln("") w.putln("static void __Pyx_CleanupGlobals(void) {") code = self.parts['utility_code_proto'] code.putln("") code.putln("/* --- Runtime support code (head) --- */") code = self.parts['utility_code_def'] if self.code_config.emit_linenums: code.write('\n#line 1 "cython_utility"\n') code.putln("") code.putln("/* --- Runtime support code --- */") def finalize_main_c_code(self): self.close_global_decls() # # utility_code_def # code = self.parts['utility_code_def'] util = TempitaUtilityCode.load_cached("TypeConversions", "TypeConversion.c") code.put(util.format_code(util.impl)) code.putln("") def __getitem__(self, key): return self.parts[key] # # Global constants, interned objects, etc. # def close_global_decls(self): # This is called when it is known that no more global declarations will # declared. 
self.generate_const_declarations() if Options.cache_builtins: w = self.parts['cached_builtins'] w.putln("return 0;") if w.label_used(w.error_label): w.put_label(w.error_label) w.putln("return -1;") w.putln("}") w.exit_cfunc_scope() w = self.parts['cached_constants'] w.put_finish_refcount_context() w.putln("return 0;") if w.label_used(w.error_label): w.put_label(w.error_label) w.put_finish_refcount_context() w.putln("return -1;") w.putln("}") w.exit_cfunc_scope() w = self.parts['init_globals'] w.putln("return 0;") if w.label_used(w.error_label): w.put_label(w.error_label) w.putln("return -1;") w.putln("}") w.exit_cfunc_scope() if Options.generate_cleanup_code: w = self.parts['cleanup_globals'] w.putln("}") w.exit_cfunc_scope() if Options.generate_cleanup_code: w = self.parts['cleanup_module'] w.putln("}") w.exit_cfunc_scope() def put_pyobject_decl(self, entry): self['global_var'].putln("static PyObject *%s;" % entry.cname) # constant handling at code generation time def get_cached_constants_writer(self): return self.parts['cached_constants'] def get_int_const(self, str_value, longness=False): py_type = longness and 'long' or 'int' try: c = self.num_const_index[(str_value, py_type)] except KeyError: c = self.new_num_const(str_value, py_type) return c def get_float_const(self, str_value, value_code): try: c = self.num_const_index[(str_value, 'float')] except KeyError: c = self.new_num_const(str_value, 'float', value_code) return c def get_py_const(self, type, prefix='', cleanup_level=None): # create a new Python object constant const = self.new_py_const(type, prefix) if cleanup_level is not None \ and cleanup_level <= Options.generate_cleanup_code: cleanup_writer = self.parts['cleanup_globals'] cleanup_writer.putln('Py_CLEAR(%s);' % const.cname) return const def get_string_const(self, text, py_version=None): # return a C string constant, creating a new one if necessary if text.is_unicode: byte_string = text.utf8encode() else: byte_string = text.byteencode() try: c = self.string_const_index[byte_string] except KeyError: c = self.new_string_const(text, byte_string) c.add_py_version(py_version) return c def get_pyunicode_ptr_const(self, text): # return a Py_UNICODE[] constant, creating a new one if necessary assert text.is_unicode try: c = self.pyunicode_ptr_const_index[text] except KeyError: c = self.pyunicode_ptr_const_index[text] = self.new_const_cname() return c def get_py_string_const(self, text, identifier=None, is_str=False, unicode_value=None): # return a Python string constant, creating a new one if necessary py3str_cstring = None if is_str and unicode_value is not None \ and unicode_value.utf8encode() != text.byteencode(): py3str_cstring = self.get_string_const(unicode_value, py_version=3) c_string = self.get_string_const(text, py_version=2) else: c_string = self.get_string_const(text) py_string = c_string.get_py_string_const( text.encoding, identifier, is_str, py3str_cstring) return py_string def get_interned_identifier(self, text): return self.get_py_string_const(text, identifier=True) def new_string_const(self, text, byte_string): cname = self.new_string_const_cname(byte_string) c = StringConst(cname, text, byte_string) self.string_const_index[byte_string] = c return c def new_num_const(self, value, py_type, value_code=None): cname = self.new_num_const_cname(value, py_type) c = NumConst(cname, value, py_type, value_code) self.num_const_index[(value, py_type)] = c return c def new_py_const(self, type, prefix=''): cname = self.new_const_cname(prefix) c = PyObjectConst(cname, type) 
self.py_constants.append(c) return c def new_string_const_cname(self, bytes_value): # Create a new globally-unique nice name for a C string constant. value = bytes_value.decode('ASCII', 'ignore') return self.new_const_cname(value=value) def new_num_const_cname(self, value, py_type): if py_type == 'long': value += 'L' py_type = 'int' prefix = Naming.interned_prefixes[py_type] cname = "%s%s" % (prefix, value) cname = cname.replace('+', '_').replace('-', 'neg_').replace('.', '_') return cname def new_const_cname(self, prefix='', value=''): value = replace_identifier('_', value)[:32].strip('_') used = self.const_cnames_used name_suffix = value while name_suffix in used: counter = used[value] = used[value] + 1 name_suffix = '%s_%d' % (value, counter) used[name_suffix] = 1 if prefix: prefix = Naming.interned_prefixes[prefix] else: prefix = Naming.const_prefix return "%s%s" % (prefix, name_suffix) def get_cached_unbound_method(self, type_cname, method_name, args_count): key = (type_cname, method_name, args_count) try: cname = self.cached_cmethods[key] except KeyError: cname = self.cached_cmethods[key] = self.new_const_cname( 'umethod', '%s_%s' % (type_cname, method_name)) return cname def add_cached_builtin_decl(self, entry): if entry.is_builtin and entry.is_const: if self.should_declare(entry.cname, entry): self.put_pyobject_decl(entry) w = self.parts['cached_builtins'] condition = None if entry.name in non_portable_builtins_map: condition, replacement = non_portable_builtins_map[entry.name] w.putln('#if %s' % condition) self.put_cached_builtin_init( entry.pos, StringEncoding.EncodedString(replacement), entry.cname) w.putln('#else') self.put_cached_builtin_init( entry.pos, StringEncoding.EncodedString(entry.name), entry.cname) if condition: w.putln('#endif') def put_cached_builtin_init(self, pos, name, cname): w = self.parts['cached_builtins'] interned_cname = self.get_interned_identifier(name).cname self.use_utility_code( UtilityCode.load_cached("GetBuiltinName", "ObjectHandling.c")) w.putln('%s = __Pyx_GetBuiltinName(%s); if (!%s) %s' % ( cname, interned_cname, cname, w.error_goto(pos))) def generate_const_declarations(self): self.generate_cached_methods_decls() self.generate_string_constants() self.generate_num_constants() self.generate_object_constant_decls() def generate_object_constant_decls(self): consts = [(len(c.cname), c.cname, c) for c in self.py_constants] consts.sort() decls_writer = self.parts['decls'] for _, cname, c in consts: decls_writer.putln( "static %s;" % c.type.declaration_code(cname)) def generate_cached_methods_decls(self): if not self.cached_cmethods: return decl = self.parts['decls'] init = self.parts['init_globals'] cnames = [] for (type_cname, method_name, _), cname in sorted(self.cached_cmethods.items()): cnames.append(cname) method_name_cname = self.get_interned_identifier(StringEncoding.EncodedString(method_name)).cname decl.putln('static __Pyx_CachedCFunction %s = {0, &%s, 0, 0, 0};' % ( cname, method_name_cname)) # split type reference storage as it might not be static init.putln('%s.type = (PyObject*)&%s;' % ( cname, type_cname)) if Options.generate_cleanup_code: cleanup = self.parts['cleanup_globals'] for cname in cnames: cleanup.putln("Py_CLEAR(%s.method);" % cname) def generate_string_constants(self): c_consts = [(len(c.cname), c.cname, c) for c in self.string_const_index.values()] c_consts.sort() py_strings = [] decls_writer = self.parts['string_decls'] for _, cname, c in c_consts: conditional = False if c.py_versions and (2 not in c.py_versions or 3 not in 
c.py_versions): conditional = True decls_writer.putln("#if PY_MAJOR_VERSION %s 3" % ( (2 in c.py_versions) and '<' or '>=')) decls_writer.putln('static const char %s[] = "%s";' % ( cname, StringEncoding.split_string_literal(c.escaped_value))) if conditional: decls_writer.putln("#endif") if c.py_strings is not None: for py_string in c.py_strings.values(): py_strings.append((c.cname, len(py_string.cname), py_string)) for c, cname in sorted(self.pyunicode_ptr_const_index.items()): utf16_array, utf32_array = StringEncoding.encode_pyunicode_string(c) if utf16_array: # Narrow and wide representations differ decls_writer.putln("#ifdef Py_UNICODE_WIDE") decls_writer.putln("static Py_UNICODE %s[] = { %s };" % (cname, utf32_array)) if utf16_array: decls_writer.putln("#else") decls_writer.putln("static Py_UNICODE %s[] = { %s };" % (cname, utf16_array)) decls_writer.putln("#endif") if py_strings: self.use_utility_code(UtilityCode.load_cached("InitStrings", "StringTools.c")) py_strings.sort() w = self.parts['pystring_table'] w.putln("") w.putln("static __Pyx_StringTabEntry %s[] = {" % Naming.stringtab_cname) for c_cname, _, py_string in py_strings: if not py_string.is_str or not py_string.encoding or \ py_string.encoding in ('ASCII', 'USASCII', 'US-ASCII', 'UTF8', 'UTF-8'): encoding = '0' else: encoding = '"%s"' % py_string.encoding.lower() decls_writer.putln( "static PyObject *%s;" % py_string.cname) if py_string.py3str_cstring: w.putln("#if PY_MAJOR_VERSION >= 3") w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % ( py_string.cname, py_string.py3str_cstring.cname, py_string.py3str_cstring.cname, '0', 1, 0, py_string.intern )) w.putln("#else") w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % ( py_string.cname, c_cname, c_cname, encoding, py_string.is_unicode, py_string.is_str, py_string.intern )) if py_string.py3str_cstring: w.putln("#endif") w.putln("{0, 0, 0, 0, 0, 0, 0}") w.putln("};") init_globals = self.parts['init_globals'] init_globals.putln( "if (__Pyx_InitStrings(%s) < 0) %s;" % ( Naming.stringtab_cname, init_globals.error_goto(self.module_pos))) def generate_num_constants(self): consts = [(c.py_type, c.value[0] == '-', len(c.value), c.value, c.value_code, c) for c in self.num_const_index.values()] consts.sort() decls_writer = self.parts['decls'] init_globals = self.parts['init_globals'] for py_type, _, _, value, value_code, c in consts: cname = c.cname decls_writer.putln("static PyObject *%s;" % cname) if py_type == 'float': function = 'PyFloat_FromDouble(%s)' elif py_type == 'long': function = 'PyLong_FromString((char *)"%s", 0, 0)' elif Utils.long_literal(value): function = 'PyInt_FromString((char *)"%s", 0, 0)' elif len(value.lstrip('-')) > 4: function = "PyInt_FromLong(%sL)" else: function = "PyInt_FromLong(%s)" init_globals.putln('%s = %s; %s' % ( cname, function % value_code, init_globals.error_goto_if_null(cname, self.module_pos))) # The functions below are there in a transition phase only # and will be deprecated. They are called from Nodes.BlockNode. # The copy&paste duplication is intentional in order to be able # to see quickly how BlockNode worked, until this is replaced. 
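    # Editorial sketch, not in the original file: the numeric-constant cnames
    # consumed by generate_num_constants() above come from new_num_const_cname(),
    # which mangles the literal text into a valid C identifier.  A simplified,
    # standalone rendering of that mangling (toy_num_const_cname and the
    # hard-coded example prefix stand in for the real Naming.interned_prefixes
    # lookup and are not part of this module):
    #
    #     >>> def toy_num_const_cname(value, py_type, prefix='__pyx_int_'):
    #     ...     if py_type == 'long':
    #     ...         value += 'L'
    #     ...     cname = prefix + value
    #     ...     return cname.replace('+', '_').replace('-', 'neg_').replace('.', '_')
    #     >>> toy_num_const_cname('-1', 'int')
    #     '__pyx_int_neg_1'
    #     >>> toy_num_const_cname('42', 'long')
    #     '__pyx_int_42L'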
def should_declare(self, cname, entry): if cname in self.declared_cnames: other = self.declared_cnames[cname] assert str(entry.type) == str(other.type) assert entry.init == other.init return False else: self.declared_cnames[cname] = entry return True # # File name state # def lookup_filename(self, source_desc): entry = source_desc.get_filenametable_entry() try: index = self.filename_table[entry] except KeyError: index = len(self.filename_list) self.filename_list.append(source_desc) self.filename_table[entry] = index return index def commented_file_contents(self, source_desc): try: return self.input_file_contents[source_desc] except KeyError: pass source_file = source_desc.get_lines(encoding='ASCII', error_handling='ignore') try: F = [u' * ' + line.rstrip().replace( u'*/', u'*[inserted by cython to avoid comment closer]/' ).replace( u'/*', u'/[inserted by cython to avoid comment start]*' ) for line in source_file] finally: if hasattr(source_file, 'close'): source_file.close() if not F: F.append(u'') self.input_file_contents[source_desc] = F return F # # Utility code state # def use_utility_code(self, utility_code): """ Adds code to the C file. utility_code should a) implement __eq__/__hash__ for the purpose of knowing whether the same code has already been included b) implement put_code, which takes a globalstate instance See UtilityCode. """ if utility_code and utility_code not in self.utility_codes: self.utility_codes.add(utility_code) utility_code.put_code(self) def use_entry_utility_code(self, entry): if entry is None: return if entry.utility_code: self.use_utility_code(entry.utility_code) if entry.utility_code_definition: self.use_utility_code(entry.utility_code_definition) def funccontext_property(name): attribute_of = operator.attrgetter(name) def get(self): return attribute_of(self.funcstate) def set(self, value): setattr(self.funcstate, name, value) return property(get, set) class CCodeConfig(object): # emit_linenums boolean write #line pragmas? # emit_code_comments boolean copy the original code into C comments? # c_line_in_traceback boolean append the c file and line number to the traceback for exceptions? def __init__(self, emit_linenums=True, emit_code_comments=True, c_line_in_traceback=True): self.emit_code_comments = emit_code_comments self.emit_linenums = emit_linenums self.c_line_in_traceback = c_line_in_traceback class CCodeWriter(object): """ Utility class to output C code. When creating an insertion point one must care about the state that is kept: - formatting state (level, bol) is cloned and used in insertion points as well - labels, temps, exc_vars: One must construct a scope in which these can exist by calling enter_cfunc_scope/exit_cfunc_scope (these are for sanity checking and forward compatabilty). Created insertion points looses this scope and cannot access it. - marker: Not copied to insertion point - filename_table, filename_list, input_file_contents: All codewriters coming from the same root share the same instances simultaneously. """ # f file output file # buffer StringIOTree # level int indentation level # bol bool beginning of line? # marker string comment to emit before next line # funcstate FunctionState contains state local to a C function used for code # generation (labels and temps state etc.) # globalstate GlobalState contains state global for a C file (input file info, # utility code, declared constants etc.) 
# pyclass_stack list used during recursive code generation to pass information # about the current class one is in # code_config CCodeConfig configuration options for the C code writer globalstate = code_config = None def __init__(self, create_from=None, buffer=None, copy_formatting=False): if buffer is None: buffer = StringIOTree() self.buffer = buffer self.last_pos = None self.last_marked_pos = None self.pyclass_stack = [] self.funcstate = None self.level = 0 self.call_level = 0 self.bol = 1 if create_from is not None: # Use same global state self.set_global_state(create_from.globalstate) self.funcstate = create_from.funcstate # Clone formatting state if copy_formatting: self.level = create_from.level self.bol = create_from.bol self.call_level = create_from.call_level self.last_pos = create_from.last_pos self.last_marked_pos = create_from.last_marked_pos def create_new(self, create_from, buffer, copy_formatting): # polymorphic constructor -- very slightly more versatile # than using __class__ result = CCodeWriter(create_from, buffer, copy_formatting) return result def set_global_state(self, global_state): assert self.globalstate is None # prevent overwriting once it's set self.globalstate = global_state self.code_config = global_state.code_config def copyto(self, f): self.buffer.copyto(f) def getvalue(self): return self.buffer.getvalue() def write(self, s): # also put invalid markers (lineno 0), to indicate that those lines # have no Cython source code correspondence cython_lineno = self.last_marked_pos[1] if self.last_marked_pos else 0 self.buffer.markers.extend([cython_lineno] * s.count('\n')) self.buffer.write(s) def insertion_point(self): other = self.create_new(create_from=self, buffer=self.buffer.insertion_point(), copy_formatting=True) return other def new_writer(self): """ Creates a new CCodeWriter connected to the same global state, which can later be inserted using insert. """ return CCodeWriter(create_from=self) def insert(self, writer): """ Inserts the contents of another code writer (created with the same global state) in the current location. It is ok to write to the inserted writer also after insertion. 
""" assert writer.globalstate is self.globalstate self.buffer.insert(writer.buffer) # Properties delegated to function scope label_counter = funccontext_property("label_counter") return_label = funccontext_property("return_label") error_label = funccontext_property("error_label") labels_used = funccontext_property("labels_used") continue_label = funccontext_property("continue_label") break_label = funccontext_property("break_label") return_from_error_cleanup_label = funccontext_property("return_from_error_cleanup_label") yield_labels = funccontext_property("yield_labels") # Functions delegated to function scope def new_label(self, name=None): return self.funcstate.new_label(name) def new_error_label(self): return self.funcstate.new_error_label() def new_yield_label(self): return self.funcstate.new_yield_label() def get_loop_labels(self): return self.funcstate.get_loop_labels() def set_loop_labels(self, labels): return self.funcstate.set_loop_labels(labels) def new_loop_labels(self): return self.funcstate.new_loop_labels() def get_all_labels(self): return self.funcstate.get_all_labels() def set_all_labels(self, labels): return self.funcstate.set_all_labels(labels) def all_new_labels(self): return self.funcstate.all_new_labels() def use_label(self, lbl): return self.funcstate.use_label(lbl) def label_used(self, lbl): return self.funcstate.label_used(lbl) def enter_cfunc_scope(self, scope=None): self.funcstate = FunctionState(self, scope=scope) def exit_cfunc_scope(self): self.funcstate = None # constant handling def get_py_int(self, str_value, longness): return self.globalstate.get_int_const(str_value, longness).cname def get_py_float(self, str_value, value_code): return self.globalstate.get_float_const(str_value, value_code).cname def get_py_const(self, type, prefix='', cleanup_level=None): return self.globalstate.get_py_const(type, prefix, cleanup_level).cname def get_string_const(self, text): return self.globalstate.get_string_const(text).cname def get_pyunicode_ptr_const(self, text): return self.globalstate.get_pyunicode_ptr_const(text) def get_py_string_const(self, text, identifier=None, is_str=False, unicode_value=None): return self.globalstate.get_py_string_const( text, identifier, is_str, unicode_value).cname def get_argument_default_const(self, type): return self.globalstate.get_py_const(type).cname def intern(self, text): return self.get_py_string_const(text) def intern_identifier(self, text): return self.get_py_string_const(text, identifier=True) def get_cached_constants_writer(self): return self.globalstate.get_cached_constants_writer() # code generation def putln(self, code="", safe=False): if self.last_pos and self.bol: self.emit_marker() if self.code_config.emit_linenums and self.last_marked_pos: source_desc, line, _ = self.last_marked_pos self.write('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) if code: if safe: self.put_safe(code) else: self.put(code) self.write("\n") self.bol = 1 def mark_pos(self, pos, trace=True): if pos is None: return if self.last_marked_pos and self.last_marked_pos[:2] == pos[:2]: return self.last_pos = (pos, trace) def emit_marker(self): pos, trace = self.last_pos self.last_marked_pos = pos self.last_pos = None self.write("\n") if self.code_config.emit_code_comments: self.indent() self.write("/* %s */\n" % self._build_marker(pos)) if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']: self.indent() self.write('__Pyx_TraceLine(%d,%d,%s)\n' % ( pos[1], not self.funcstate.gil_owned, 
self.error_goto(pos))) def _build_marker(self, pos): source_desc, line, col = pos assert isinstance(source_desc, SourceDescriptor) contents = self.globalstate.commented_file_contents(source_desc) lines = contents[max(0, line-3):line] # line numbers start at 1 lines[-1] += u' # <<<<<<<<<<<<<<' lines += contents[line:line+2] return u'"%s":%d\n%s\n' % (source_desc.get_escaped_description(), line, u'\n'.join(lines)) def put_safe(self, code): # put code, but ignore {} self.write(code) self.bol = 0 def put_or_include(self, code, name): include_dir = self.globalstate.common_utility_include_dir if include_dir and len(code) > 1024: include_file = "%s_%s.h" % ( name, hashlib.md5(code.encode('utf8')).hexdigest()) path = os.path.join(include_dir, include_file) if not os.path.exists(path): tmp_path = '%s.tmp%s' % (path, os.getpid()) with closing(Utils.open_new_file(tmp_path)) as f: f.write(code) shutil.move(tmp_path, path) code = '#include "%s"\n' % path self.put(code) def put(self, code): if is_self_assignment(code): return fix_indent = False if "{" in code: dl = code.count("{") else: dl = 0 if "}" in code: dl -= code.count("}") if dl < 0: self.level += dl elif dl == 0 and code[0] == "}": # special cases like "} else {" need a temporary dedent fix_indent = True self.level -= 1 if self.bol: self.indent() self.write(code) self.bol = 0 if dl > 0: self.level += dl elif fix_indent: self.level += 1 def putln_tempita(self, code, **context): from ..Tempita import sub self.putln(sub(code, **context)) def put_tempita(self, code, **context): from ..Tempita import sub self.put(sub(code, **context)) def increase_indent(self): self.level += 1 def decrease_indent(self): self.level -= 1 def begin_block(self): self.putln("{") self.increase_indent() def end_block(self): self.decrease_indent() self.putln("}") def indent(self): self.write(" " * self.level) def get_py_version_hex(self, pyversion): return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4] def put_label(self, lbl): if lbl in self.funcstate.labels_used: self.putln("%s:;" % lbl) def put_goto(self, lbl): self.funcstate.use_label(lbl) self.putln("goto %s;" % lbl) def put_var_declaration(self, entry, storage_class="", dll_linkage=None, definition=True): #print "Code.put_var_declaration:", entry.name, "definition =", definition ### if entry.visibility == 'private' and not (definition or entry.defined_in_pxd): #print "...private and not definition, skipping", entry.cname ### return if entry.visibility == "private" and not entry.used: #print "...private and not used, skipping", entry.cname ### return if storage_class: self.put("%s " % storage_class) if not entry.cf_used: self.put('CYTHON_UNUSED ') self.put(entry.type.declaration_code( entry.cname, dll_linkage=dll_linkage)) if entry.init is not None: self.put_safe(" = %s" % entry.type.literal_code(entry.init)) elif entry.type.is_pyobject: self.put(" = NULL") self.putln(";") def put_temp_declarations(self, func_context): for name, type, manage_ref, static in func_context.temps_allocated: decl = type.declaration_code(name) if type.is_pyobject: self.putln("%s = NULL;" % decl) elif type.is_memoryviewslice: from . 
import MemoryView self.putln("%s = %s;" % (decl, MemoryView.memslice_entry_init)) else: self.putln("%s%s;" % (static and "static " or "", decl)) if func_context.should_declare_error_indicator: if self.funcstate.uses_error_indicator: unused = '' else: unused = 'CYTHON_UNUSED ' # Initialize these variables to silence compiler warnings self.putln("%sint %s = 0;" % (unused, Naming.lineno_cname)) self.putln("%sconst char *%s = NULL;" % (unused, Naming.filename_cname)) self.putln("%sint %s = 0;" % (unused, Naming.clineno_cname)) def put_generated_by(self): self.putln("/* Generated by Cython %s */" % Version.watermark) self.putln("") def put_h_guard(self, guard): self.putln("#ifndef %s" % guard) self.putln("#define %s" % guard) def unlikely(self, cond): if Options.gcc_branch_hints: return 'unlikely(%s)' % cond else: return cond def build_function_modifiers(self, modifiers, mapper=modifier_output_mapper): if not modifiers: return '' return '%s ' % ' '.join([mapper(m,m) for m in modifiers]) # Python objects and reference counting def entry_as_pyobject(self, entry): type = entry.type if (not entry.is_self_arg and not entry.type.is_complete() or entry.type.is_extension_type): return "(PyObject *)" + entry.cname else: return entry.cname def as_pyobject(self, cname, type): from .PyrexTypes import py_object_type, typecast return typecast(py_object_type, type, cname) def put_gotref(self, cname): self.putln("__Pyx_GOTREF(%s);" % cname) def put_giveref(self, cname): self.putln("__Pyx_GIVEREF(%s);" % cname) def put_xgiveref(self, cname): self.putln("__Pyx_XGIVEREF(%s);" % cname) def put_xgotref(self, cname): self.putln("__Pyx_XGOTREF(%s);" % cname) def put_incref(self, cname, type, nanny=True): if nanny: self.putln("__Pyx_INCREF(%s);" % self.as_pyobject(cname, type)) else: self.putln("Py_INCREF(%s);" % self.as_pyobject(cname, type)) def put_decref(self, cname, type, nanny=True): self._put_decref(cname, type, nanny, null_check=False, clear=False) def put_var_gotref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_GOTREF(%s);" % self.entry_as_pyobject(entry)) def put_var_giveref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_GIVEREF(%s);" % self.entry_as_pyobject(entry)) def put_var_xgotref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_XGOTREF(%s);" % self.entry_as_pyobject(entry)) def put_var_xgiveref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_XGIVEREF(%s);" % self.entry_as_pyobject(entry)) def put_var_incref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_INCREF(%s);" % self.entry_as_pyobject(entry)) def put_var_xincref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_XINCREF(%s);" % self.entry_as_pyobject(entry)) def put_decref_clear(self, cname, type, nanny=True, clear_before_decref=False): self._put_decref(cname, type, nanny, null_check=False, clear=True, clear_before_decref=clear_before_decref) def put_xdecref(self, cname, type, nanny=True, have_gil=True): self._put_decref(cname, type, nanny, null_check=True, have_gil=have_gil, clear=False) def put_xdecref_clear(self, cname, type, nanny=True, clear_before_decref=False): self._put_decref(cname, type, nanny, null_check=True, clear=True, clear_before_decref=clear_before_decref) def _put_decref(self, cname, type, nanny=True, null_check=False, have_gil=True, clear=False, clear_before_decref=False): if type.is_memoryviewslice: self.put_xdecref_memoryviewslice(cname, have_gil=have_gil) return prefix = nanny and '__Pyx' or 'Py' X = null_check and 'X' or '' if clear: if clear_before_decref: 
if not nanny: X = '' # CPython doesn't have a Py_XCLEAR() self.putln("%s_%sCLEAR(%s);" % (prefix, X, cname)) else: self.putln("%s_%sDECREF(%s); %s = 0;" % ( prefix, X, self.as_pyobject(cname, type), cname)) else: self.putln("%s_%sDECREF(%s);" % ( prefix, X, self.as_pyobject(cname, type))) def put_decref_set(self, cname, rhs_cname): self.putln("__Pyx_DECREF_SET(%s, %s);" % (cname, rhs_cname)) def put_xdecref_set(self, cname, rhs_cname): self.putln("__Pyx_XDECREF_SET(%s, %s);" % (cname, rhs_cname)) def put_var_decref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry)) def put_var_xdecref(self, entry): if entry.type.is_pyobject: self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry)) def put_var_decref_clear(self, entry): self._put_var_decref_clear(entry, null_check=False) def put_var_xdecref_clear(self, entry): self._put_var_decref_clear(entry, null_check=True) def _put_var_decref_clear(self, entry, null_check): if entry.type.is_pyobject: if entry.in_closure: # reset before DECREF to make sure closure state is # consistent during call to DECREF() self.putln("__Pyx_%sCLEAR(%s);" % ( null_check and 'X' or '', entry.cname)) else: self.putln("__Pyx_%sDECREF(%s); %s = 0;" % ( null_check and 'X' or '', self.entry_as_pyobject(entry), entry.cname)) def put_var_decrefs(self, entries, used_only = 0): for entry in entries: if not used_only or entry.used: if entry.xdecref_cleanup: self.put_var_xdecref(entry) else: self.put_var_decref(entry) def put_var_xdecrefs(self, entries): for entry in entries: self.put_var_xdecref(entry) def put_var_xdecrefs_clear(self, entries): for entry in entries: self.put_var_xdecref_clear(entry) def put_incref_memoryviewslice(self, slice_cname, have_gil=False): from . import MemoryView self.globalstate.use_utility_code(MemoryView.memviewslice_init_code) self.putln("__PYX_INC_MEMVIEW(&%s, %d);" % (slice_cname, int(have_gil))) def put_xdecref_memoryviewslice(self, slice_cname, have_gil=False): from . import MemoryView self.globalstate.use_utility_code(MemoryView.memviewslice_init_code) self.putln("__PYX_XDEC_MEMVIEW(&%s, %d);" % (slice_cname, int(have_gil))) def put_xgiveref_memoryviewslice(self, slice_cname): self.put_xgiveref("%s.memview" % slice_cname) def put_init_to_py_none(self, cname, type, nanny=True): from .PyrexTypes import py_object_type, typecast py_none = typecast(type, py_object_type, "Py_None") if nanny: self.putln("%s = %s; __Pyx_INCREF(Py_None);" % (cname, py_none)) else: self.putln("%s = %s; Py_INCREF(Py_None);" % (cname, py_none)) def put_init_var_to_py_none(self, entry, template = "%s", nanny=True): code = template % entry.cname #if entry.type.is_extension_type: # code = "((PyObject*)%s)" % code self.put_init_to_py_none(code, entry.type, nanny) if entry.in_closure: self.put_giveref('Py_None') def put_pymethoddef(self, entry, term, allow_skip=True): if entry.is_special or entry.name == '__getattribute__': if entry.name not in special_py_methods: if entry.name == '__getattr__' and not self.globalstate.directives['fast_getattr']: pass # Python's typeobject.c will automatically fill in our slot # in add_operators() (called by PyType_Ready) with a value # that's better than ours. 
elif allow_skip: return from .TypeSlots import method_coexist if entry.doc: doc_code = entry.doc_cname else: doc_code = 0 method_flags = entry.signature.method_flags() if method_flags: if entry.is_special: method_flags += [method_coexist] self.putln( '{"%s", (PyCFunction)%s, %s, %s}%s' % ( entry.name, entry.func_cname, "|".join(method_flags), doc_code, term)) # GIL methods def put_ensure_gil(self, declare_gilstate=True, variable=None): """ Acquire the GIL. The generated code is safe even when no PyThreadState has been allocated for this thread (for threads not initialized by using the Python API). Additionally, the code generated by this method may be called recursively. """ self.globalstate.use_utility_code( UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c")) if self.globalstate.directives['fast_gil']: self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) else: self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) self.putln("#ifdef WITH_THREAD") if not variable: variable = '__pyx_gilstate_save' if declare_gilstate: self.put("PyGILState_STATE ") self.putln("%s = __Pyx_PyGILState_Ensure();" % variable) self.putln("#endif") def put_release_ensured_gil(self, variable=None): """ Releases the GIL, corresponds to `put_ensure_gil`. """ if self.globalstate.directives['fast_gil']: self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) else: self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) if not variable: variable = '__pyx_gilstate_save' self.putln("#ifdef WITH_THREAD") self.putln("__Pyx_PyGILState_Release(%s);" % variable) self.putln("#endif") def put_acquire_gil(self, variable=None): """ Acquire the GIL. The thread's thread state must have been initialized by a previous `put_release_gil` """ if self.globalstate.directives['fast_gil']: self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) else: self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) self.putln("#ifdef WITH_THREAD") self.putln("__Pyx_FastGIL_Forget();") if variable: self.putln('_save = %s;' % variable) self.putln("Py_BLOCK_THREADS") self.putln("#endif") def put_release_gil(self, variable=None): "Release the GIL, corresponds to `put_acquire_gil`." if self.globalstate.directives['fast_gil']: self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) else: self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) self.putln("#ifdef WITH_THREAD") self.putln("PyThreadState *_save;") self.putln("Py_UNBLOCK_THREADS") if variable: self.putln('%s = _save;' % variable) self.putln("__Pyx_FastGIL_Remember();") self.putln("#endif") def declare_gilstate(self): self.putln("#ifdef WITH_THREAD") self.putln("PyGILState_STATE __pyx_gilstate_save;") self.putln("#endif") # error handling def put_error_if_neg(self, pos, value): # return self.putln("if (unlikely(%s < 0)) %s" % (value, self.error_goto(pos))) # TODO this path is almost _never_ taken, yet this macro makes is slower! return self.putln("if (%s < 0) %s" % (value, self.error_goto(pos))) def put_error_if_unbound(self, pos, entry, in_nogil_context=False): from . 
import ExprNodes if entry.from_closure: func = '__Pyx_RaiseClosureNameError' self.globalstate.use_utility_code( ExprNodes.raise_closure_name_error_utility_code) elif entry.type.is_memoryviewslice and in_nogil_context: func = '__Pyx_RaiseUnboundMemoryviewSliceNogil' self.globalstate.use_utility_code( ExprNodes.raise_unbound_memoryview_utility_code_nogil) else: func = '__Pyx_RaiseUnboundLocalError' self.globalstate.use_utility_code( ExprNodes.raise_unbound_local_error_utility_code) self.putln('if (unlikely(!%s)) { %s("%s"); %s }' % ( entry.type.check_for_null_code(entry.cname), func, entry.name, self.error_goto(pos))) def set_error_info(self, pos, used=False): self.funcstate.should_declare_error_indicator = True if used: self.funcstate.uses_error_indicator = True if self.code_config.c_line_in_traceback: cinfo = " %s = %s;" % (Naming.clineno_cname, Naming.line_c_macro) else: cinfo = "" return "%s = %s[%s]; %s = %s;%s" % ( Naming.filename_cname, Naming.filetable_cname, self.lookup_filename(pos[0]), Naming.lineno_cname, pos[1], cinfo) def error_goto(self, pos): lbl = self.funcstate.error_label self.funcstate.use_label(lbl) return "__PYX_ERR(%s, %s, %s)" % ( self.lookup_filename(pos[0]), pos[1], lbl) def error_goto_if(self, cond, pos): return "if (%s) %s" % (self.unlikely(cond), self.error_goto(pos)) def error_goto_if_null(self, cname, pos): return self.error_goto_if("!%s" % cname, pos) def error_goto_if_neg(self, cname, pos): return self.error_goto_if("%s < 0" % cname, pos) def error_goto_if_PyErr(self, pos): return self.error_goto_if("PyErr_Occurred()", pos) def lookup_filename(self, filename): return self.globalstate.lookup_filename(filename) def put_declare_refcount_context(self): self.putln('__Pyx_RefNannyDeclarations') def put_setup_refcount_context(self, name, acquire_gil=False): if acquire_gil: self.globalstate.use_utility_code( UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c")) self.putln('__Pyx_RefNannySetupContext("%s", %d);' % (name, acquire_gil and 1 or 0)) def put_finish_refcount_context(self): self.putln("__Pyx_RefNannyFinishContext();") def put_add_traceback(self, qualified_name, include_cline=True): """ Build a Python traceback for propagating exceptions. qualified_name should be the qualified name of the function. """ format_tuple = ( qualified_name, Naming.clineno_cname if include_cline else 0, Naming.lineno_cname, Naming.filename_cname, ) self.funcstate.uses_error_indicator = True self.putln('__Pyx_AddTraceback("%s", %s, %s, %s);' % format_tuple) def put_unraisable(self, qualified_name, nogil=False): """ Generate code to print a Python warning for an unraisable exception. qualified_name should be the qualified name of the function. 
""" format_tuple = ( qualified_name, Naming.clineno_cname, Naming.lineno_cname, Naming.filename_cname, self.globalstate.directives['unraisable_tracebacks'], nogil, ) self.funcstate.uses_error_indicator = True self.putln('__Pyx_WriteUnraisable("%s", %s, %s, %s, %d, %d);' % format_tuple) self.globalstate.use_utility_code( UtilityCode.load_cached("WriteUnraisableException", "Exceptions.c")) def put_trace_declarations(self): self.putln('__Pyx_TraceDeclarations') def put_trace_frame_init(self, codeobj=None): if codeobj: self.putln('__Pyx_TraceFrameInit(%s)' % codeobj) def put_trace_call(self, name, pos, nogil=False): self.putln('__Pyx_TraceCall("%s", %s[%s], %s, %d, %s);' % ( name, Naming.filetable_cname, self.lookup_filename(pos[0]), pos[1], nogil, self.error_goto(pos))) def put_trace_exception(self): self.putln("__Pyx_TraceException();") def put_trace_return(self, retvalue_cname, nogil=False): self.putln("__Pyx_TraceReturn(%s, %d);" % (retvalue_cname, nogil)) def putln_openmp(self, string): self.putln("#ifdef _OPENMP") self.putln(string) self.putln("#endif /* _OPENMP */") def undef_builtin_expect(self, cond): """ Redefine the macros likely() and unlikely to no-ops, depending on condition 'cond' """ self.putln("#if %s" % cond) self.putln(" #undef likely") self.putln(" #undef unlikely") self.putln(" #define likely(x) (x)") self.putln(" #define unlikely(x) (x)") self.putln("#endif") def redef_builtin_expect(self, cond): self.putln("#if %s" % cond) self.putln(" #undef likely") self.putln(" #undef unlikely") self.putln(" #define likely(x) __builtin_expect(!!(x), 1)") self.putln(" #define unlikely(x) __builtin_expect(!!(x), 0)") self.putln("#endif") class PyrexCodeWriter(object): # f file output file # level int indentation level def __init__(self, outfile_name): self.f = Utils.open_new_file(outfile_name) self.level = 0 def putln(self, code): self.f.write("%s%s\n" % (" " * self.level, code)) def indent(self): self.level += 1 def dedent(self): self.level -= 1 class PyxCodeWriter(object): """ Can be used for writing out some Cython code. 
To use the indenter functionality, the Cython.Compiler.Importer module will have to be used to load the code to support python 2.4 """ def __init__(self, buffer=None, indent_level=0, context=None, encoding='ascii'): self.buffer = buffer or StringIOTree() self.level = indent_level self.context = context self.encoding = encoding def indent(self, levels=1): self.level += levels return True def dedent(self, levels=1): self.level -= levels def indenter(self, line): """ Instead of with pyx_code.indenter("for i in range(10):"): pyx_code.putln("print i") write if pyx_code.indenter("for i in range(10);"): pyx_code.putln("print i") pyx_code.dedent() """ self.putln(line) self.indent() return True def getvalue(self): result = self.buffer.getvalue() if isinstance(result, bytes): result = result.decode(self.encoding) return result def putln(self, line, context=None): context = context or self.context if context: line = sub_tempita(line, context) self._putln(line) def _putln(self, line): self.buffer.write("%s%s\n" % (self.level * " ", line)) def put_chunk(self, chunk, context=None): context = context or self.context if context: chunk = sub_tempita(chunk, context) chunk = textwrap.dedent(chunk) for line in chunk.splitlines(): self._putln(line) def insertion_point(self): return PyxCodeWriter(self.buffer.insertion_point(), self.level, self.context) def named_insertion_point(self, name): setattr(self, name, self.insertion_point()) class ClosureTempAllocator(object): def __init__(self, klass): self.klass = klass self.temps_allocated = {} self.temps_free = {} self.temps_count = 0 def reset(self): for type, cnames in self.temps_allocated.items(): self.temps_free[type] = list(cnames) def allocate_temp(self, type): if type not in self.temps_allocated: self.temps_allocated[type] = [] self.temps_free[type] = [] elif self.temps_free[type]: return self.temps_free[type].pop(0) cname = '%s%d' % (Naming.codewriter_temp_prefix, self.temps_count) self.klass.declare_var(pos=None, name=cname, cname=cname, type=type, is_cdef=True) self.temps_allocated[type].append(cname) self.temps_count += 1 return cname Cython-0.26.1/Cython/Compiler/FlowControl.py0000664000175000017500000013136513023021033021464 0ustar stefanstefan00000000000000from __future__ import absolute_import import cython cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object, Builtin=object, InternalError=object, error=object, warning=object, py_object_type=object, unspecified_type=object, object_expr=object, fake_rhs_expr=object, TypedExprNode=object) from . import Builtin from . import ExprNodes from . import Nodes from . import Options from .PyrexTypes import py_object_type, unspecified_type from . import PyrexTypes from .Visitor import TreeVisitor, CythonTransform from .Errors import error, warning, InternalError from .Optimize import ConstantFolding class TypedExprNode(ExprNodes.ExprNode): # Used for declaring assignments of a specified type without a known entry. def __init__(self, type, may_be_none=None, pos=None): super(TypedExprNode, self).__init__(pos) self.type = type self._may_be_none = may_be_none def may_be_none(self): return self._may_be_none != False object_expr = TypedExprNode(py_object_type, may_be_none=True) # Fake rhs to silence "unused variable" warning fake_rhs_expr = TypedExprNode(unspecified_type) class ControlBlock(object): """Control flow graph node. Sequence of assignments and name references. 
children set of children nodes parents set of parent nodes positions set of position markers stats list of block statements gen dict of assignments generated by this block bounded set of entries that are definitely bounded in this block Example: a = 1 b = a + c # 'c' is already bounded or exception here stats = [Assignment(a), NameReference(a), NameReference(c), Assignment(b)] gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)} bounded = set([Entry(a), Entry(c)]) """ def __init__(self): self.children = set() self.parents = set() self.positions = set() self.stats = [] self.gen = {} self.bounded = set() self.i_input = 0 self.i_output = 0 self.i_gen = 0 self.i_kill = 0 self.i_state = 0 def empty(self): return (not self.stats and not self.positions) def detach(self): """Detach block from parents and children.""" for child in self.children: child.parents.remove(self) for parent in self.parents: parent.children.remove(self) self.parents.clear() self.children.clear() def add_child(self, block): self.children.add(block) block.parents.add(self) class ExitBlock(ControlBlock): """Non-empty exit point block.""" def empty(self): return False class AssignmentList(object): def __init__(self): self.stats = [] class ControlFlow(object): """Control-flow graph. entry_point ControlBlock entry point for this graph exit_point ControlBlock normal exit point block ControlBlock current block blocks set children nodes entries set tracked entries loops list stack for loop descriptors exceptions list stack for exception descriptors """ def __init__(self): self.blocks = set() self.entries = set() self.loops = [] self.exceptions = [] self.entry_point = ControlBlock() self.exit_point = ExitBlock() self.blocks.add(self.exit_point) self.block = self.entry_point def newblock(self, parent=None): """Create floating block linked to `parent` if given. NOTE: Block is NOT added to self.blocks """ block = ControlBlock() self.blocks.add(block) if parent: parent.add_child(block) return block def nextblock(self, parent=None): """Create block children block linked to current or `parent` if given. 
NOTE: Block is added to self.blocks """ block = ControlBlock() self.blocks.add(block) if parent: parent.add_child(block) elif self.block: self.block.add_child(block) self.block = block return self.block def is_tracked(self, entry): if entry.is_anonymous: return False return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or entry.from_closure or entry.in_closure or entry.error_on_uninitialized) def is_statically_assigned(self, entry): if (entry.is_local and entry.is_variable and (entry.type.is_struct_or_union or entry.type.is_complex or entry.type.is_array or entry.type.is_cpp_class)): # stack allocated structured variable => never uninitialised return True return False def mark_position(self, node): """Mark position, will be used to draw graph nodes.""" if self.block: self.block.positions.add(node.pos[:2]) def mark_assignment(self, lhs, rhs, entry): if self.block and self.is_tracked(entry): assignment = NameAssignment(lhs, rhs, entry) self.block.stats.append(assignment) self.block.gen[entry] = assignment self.entries.add(entry) def mark_argument(self, lhs, rhs, entry): if self.block and self.is_tracked(entry): assignment = Argument(lhs, rhs, entry) self.block.stats.append(assignment) self.block.gen[entry] = assignment self.entries.add(entry) def mark_deletion(self, node, entry): if self.block and self.is_tracked(entry): assignment = NameDeletion(node, entry) self.block.stats.append(assignment) self.block.gen[entry] = Uninitialized self.entries.add(entry) def mark_reference(self, node, entry): if self.block and self.is_tracked(entry): self.block.stats.append(NameReference(node, entry)) ## XXX: We don't track expression evaluation order so we can't use ## XXX: successful reference as initialization sign. ## # Local variable is definitely bound after this reference ## if not node.allow_null: ## self.block.bounded.add(entry) self.entries.add(entry) def normalize(self): """Delete unreachable and orphan blocks.""" queue = set([self.entry_point]) visited = set() while queue: root = queue.pop() visited.add(root) for child in root.children: if child not in visited: queue.add(child) unreachable = self.blocks - visited for block in unreachable: block.detach() visited.remove(self.entry_point) for block in visited: if block.empty(): for parent in block.parents: # Re-parent for child in block.children: parent.add_child(child) block.detach() unreachable.add(block) self.blocks -= unreachable def initialize(self): """Set initial state, map assignments to bits.""" self.assmts = {} bit = 1 for entry in self.entries: assmts = AssignmentList() assmts.mask = assmts.bit = bit self.assmts[entry] = assmts bit <<= 1 for block in self.blocks: for stat in block.stats: if isinstance(stat, NameAssignment): stat.bit = bit assmts = self.assmts[stat.entry] assmts.stats.append(stat) assmts.mask |= bit bit <<= 1 for block in self.blocks: for entry, stat in block.gen.items(): assmts = self.assmts[entry] if stat is Uninitialized: block.i_gen |= assmts.bit else: block.i_gen |= stat.bit block.i_kill |= assmts.mask block.i_output = block.i_gen for entry in block.bounded: block.i_kill |= self.assmts[entry].bit for assmts in self.assmts.values(): self.entry_point.i_gen |= assmts.bit self.entry_point.i_output = self.entry_point.i_gen def map_one(self, istate, entry): ret = set() assmts = self.assmts[entry] if istate & assmts.bit: if self.is_statically_assigned(entry): ret.add(StaticAssignment(entry)) elif entry.from_closure: ret.add(Unknown) else: ret.add(Uninitialized) for assmt in assmts.stats: if istate & assmt.bit: 
ret.add(assmt) return ret def reaching_definitions(self): """Per-block reaching definitions analysis.""" dirty = True while dirty: dirty = False for block in self.blocks: i_input = 0 for parent in block.parents: i_input |= parent.i_output i_output = (i_input & ~block.i_kill) | block.i_gen if i_output != block.i_output: dirty = True block.i_input = i_input block.i_output = i_output class LoopDescr(object): def __init__(self, next_block, loop_block): self.next_block = next_block self.loop_block = loop_block self.exceptions = [] class ExceptionDescr(object): """Exception handling helper. entry_point ControlBlock Exception handling entry point finally_enter ControlBlock Normal finally clause entry point finally_exit ControlBlock Normal finally clause exit point """ def __init__(self, entry_point, finally_enter=None, finally_exit=None): self.entry_point = entry_point self.finally_enter = finally_enter self.finally_exit = finally_exit class NameAssignment(object): def __init__(self, lhs, rhs, entry): if lhs.cf_state is None: lhs.cf_state = set() self.lhs = lhs self.rhs = rhs self.entry = entry self.pos = lhs.pos self.refs = set() self.is_arg = False self.is_deletion = False self.inferred_type = None def __repr__(self): return '%s(entry=%r)' % (self.__class__.__name__, self.entry) def infer_type(self): self.inferred_type = self.rhs.infer_type(self.entry.scope) return self.inferred_type def type_dependencies(self): return self.rhs.type_dependencies(self.entry.scope) @property def type(self): if not self.entry.type.is_unspecified: return self.entry.type return self.inferred_type def __getstate__(self): return (self.lhs, self.rhs, self.entry, self.pos, self.refs, self.is_arg, self.is_deletion, self.inferred_type) def __setstate__(self, state): (self.lhs, self.rhs, self.entry, self.pos, self.refs, self.is_arg, self.is_deletion, self.inferred_type) = state class StaticAssignment(NameAssignment): """Initialised at declaration time, e.g. 
stack allocation.""" def __init__(self, entry): if not entry.type.is_pyobject: may_be_none = False else: may_be_none = None # unknown lhs = TypedExprNode( entry.type, may_be_none=may_be_none, pos=entry.pos) super(StaticAssignment, self).__init__(lhs, lhs, entry) def infer_type(self): return self.entry.type def type_dependencies(self): return () class Argument(NameAssignment): def __init__(self, lhs, rhs, entry): NameAssignment.__init__(self, lhs, rhs, entry) self.is_arg = True class NameDeletion(NameAssignment): def __init__(self, lhs, entry): NameAssignment.__init__(self, lhs, lhs, entry) self.is_deletion = True def infer_type(self): inferred_type = self.rhs.infer_type(self.entry.scope) if (not inferred_type.is_pyobject and inferred_type.can_coerce_to_pyobject(self.entry.scope)): return py_object_type self.inferred_type = inferred_type return inferred_type class Uninitialized(object): """Definitely not initialised yet.""" class Unknown(object): """Coming from outer closure, might be initialised or not.""" class NameReference(object): def __init__(self, node, entry): if node.cf_state is None: node.cf_state = set() self.node = node self.entry = entry self.pos = node.pos def __repr__(self): return '%s(entry=%r)' % (self.__class__.__name__, self.entry) class ControlFlowState(list): # Keeps track of Node's entry assignments # # cf_is_null [boolean] It is uninitialized # cf_maybe_null [boolean] May be uninitialized # is_single [boolean] Has only one assignment at this point cf_maybe_null = False cf_is_null = False is_single = False def __init__(self, state): if Uninitialized in state: state.discard(Uninitialized) self.cf_maybe_null = True if not state: self.cf_is_null = True elif Unknown in state: state.discard(Unknown) self.cf_maybe_null = True else: if len(state) == 1: self.is_single = True # XXX: Remove fake_rhs_expr super(ControlFlowState, self).__init__( [i for i in state if i.rhs is not fake_rhs_expr]) def one(self): return self[0] class GVContext(object): """Graphviz subgraph object.""" def __init__(self): self.blockids = {} self.nextid = 0 self.children = [] self.sources = {} def add(self, child): self.children.append(child) def nodeid(self, block): if block not in self.blockids: self.blockids[block] = 'block%d' % self.nextid self.nextid += 1 return self.blockids[block] def extract_sources(self, block): if not block.positions: return '' start = min(block.positions) stop = max(block.positions) srcdescr = start[0] if not srcdescr in self.sources: self.sources[srcdescr] = list(srcdescr.get_lines()) lines = self.sources[srcdescr] return '\\n'.join([l.strip() for l in lines[start[1] - 1:stop[1]]]) def render(self, fp, name, annotate_defs=False): """Render graphviz dot graph""" fp.write('digraph %s {\n' % name) fp.write(' node [shape=box];\n') for child in self.children: child.render(fp, self, annotate_defs) fp.write('}\n') def escape(self, text): return text.replace('"', '\\"').replace('\n', '\\n') class GV(object): """Graphviz DOT renderer.""" def __init__(self, name, flow): self.name = name self.flow = flow def render(self, fp, ctx, annotate_defs=False): fp.write(' subgraph %s {\n' % self.name) for block in self.flow.blocks: label = ctx.extract_sources(block) if annotate_defs: for stat in block.stats: if isinstance(stat, NameAssignment): label += '\n %s [%s %s]' % ( stat.entry.name, 'deletion' if stat.is_deletion else 'definition', stat.pos[1]) elif isinstance(stat, NameReference): if stat.entry: label += '\n %s [reference %s]' % (stat.entry.name, stat.pos[1]) if not label: label = 'empty' 
pid = ctx.nodeid(block) fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label))) for block in self.flow.blocks: pid = ctx.nodeid(block) for child in block.children: fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child))) fp.write(' }\n') class MessageCollection(object): """Collect error/warnings messages first then sort""" def __init__(self): self.messages = set() def error(self, pos, message): self.messages.add((pos, True, message)) def warning(self, pos, message): self.messages.add((pos, False, message)) def report(self): for pos, is_error, message in sorted(self.messages): if is_error: error(pos, message) else: warning(pos, message, 2) def check_definitions(flow, compiler_directives): flow.initialize() flow.reaching_definitions() # Track down state assignments = set() # Node to entry map references = {} assmt_nodes = set() for block in flow.blocks: i_state = block.i_input for stat in block.stats: i_assmts = flow.assmts[stat.entry] state = flow.map_one(i_state, stat.entry) if isinstance(stat, NameAssignment): stat.lhs.cf_state.update(state) assmt_nodes.add(stat.lhs) i_state = i_state & ~i_assmts.mask if stat.is_deletion: i_state |= i_assmts.bit else: i_state |= stat.bit assignments.add(stat) if stat.rhs is not fake_rhs_expr: stat.entry.cf_assignments.append(stat) elif isinstance(stat, NameReference): references[stat.node] = stat.entry stat.entry.cf_references.append(stat) stat.node.cf_state.update(state) ## if not stat.node.allow_null: ## i_state &= ~i_assmts.bit ## # after successful read, the state is known to be initialised state.discard(Uninitialized) state.discard(Unknown) for assmt in state: assmt.refs.add(stat) # Check variable usage warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized'] warn_unused_result = compiler_directives['warn.unused_result'] warn_unused = compiler_directives['warn.unused'] warn_unused_arg = compiler_directives['warn.unused_arg'] messages = MessageCollection() # assignment hints for node in assmt_nodes: if Uninitialized in node.cf_state: node.cf_maybe_null = True if len(node.cf_state) == 1: node.cf_is_null = True else: node.cf_is_null = False elif Unknown in node.cf_state: node.cf_maybe_null = True else: node.cf_is_null = False node.cf_maybe_null = False # Find uninitialized references and cf-hints for node, entry in references.items(): if Uninitialized in node.cf_state: node.cf_maybe_null = True if not entry.from_closure and len(node.cf_state) == 1: node.cf_is_null = True if (node.allow_null or entry.from_closure or entry.is_pyclass_attr or entry.type.is_error): pass # Can be uninitialized here elif node.cf_is_null: if entry.error_on_uninitialized or ( Options.error_on_uninitialized and ( entry.type.is_pyobject or entry.type.is_unspecified)): messages.error( node.pos, "local variable '%s' referenced before assignment" % entry.name) else: messages.warning( node.pos, "local variable '%s' referenced before assignment" % entry.name) elif warn_maybe_uninitialized: messages.warning( node.pos, "local variable '%s' might be referenced before assignment" % entry.name) elif Unknown in node.cf_state: # TODO: better cross-closure analysis to know when inner functions # are being called before a variable is being set, and when # a variable is known to be set before even defining the # inner function, etc. 
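# Until then, be conservative: a closure variable in Unknown state is
# treated as possibly unbound (cf_maybe_null below), so the generated code
# keeps a runtime unbound-name check instead of reporting a compile-time
# error.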
node.cf_maybe_null = True else: node.cf_is_null = False node.cf_maybe_null = False # Unused result for assmt in assignments: if (not assmt.refs and not assmt.entry.is_pyclass_attr and not assmt.entry.in_closure): if assmt.entry.cf_references and warn_unused_result: if assmt.is_arg: messages.warning(assmt.pos, "Unused argument value '%s'" % assmt.entry.name) else: messages.warning(assmt.pos, "Unused result in '%s'" % assmt.entry.name) assmt.lhs.cf_used = False # Unused entries for entry in flow.entries: if (not entry.cf_references and not entry.is_pyclass_attr): if entry.name != '_' and not entry.name.startswith('unused'): # '_' is often used for unused variables, e.g. in loops if entry.is_arg: if warn_unused_arg: messages.warning(entry.pos, "Unused argument '%s'" % entry.name) else: if warn_unused: messages.warning(entry.pos, "Unused entry '%s'" % entry.name) entry.cf_used = False messages.report() for node in assmt_nodes: node.cf_state = ControlFlowState(node.cf_state) for node in references: node.cf_state = ControlFlowState(node.cf_state) class AssignmentCollector(TreeVisitor): def __init__(self): super(AssignmentCollector, self).__init__() self.assignments = [] def visit_Node(self): self._visitchildren(self, None) def visit_SingleAssignmentNode(self, node): self.assignments.append((node.lhs, node.rhs)) def visit_CascadedAssignmentNode(self, node): for lhs in node.lhs_list: self.assignments.append((lhs, node.rhs)) class ControlFlowAnalysis(CythonTransform): def visit_ModuleNode(self, node): self.gv_ctx = GVContext() self.constant_folder = ConstantFolding() # Set of NameNode reductions self.reductions = set() self.in_inplace_assignment = False self.env_stack = [] self.env = node.scope self.stack = [] self.flow = ControlFlow() self.visitchildren(node) check_definitions(self.flow, self.current_directives) dot_output = self.current_directives['control_flow.dot_output'] if dot_output: annotate_defs = self.current_directives['control_flow.dot_annotate_defs'] fp = open(dot_output, 'wt') try: self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs) finally: fp.close() return node def visit_FuncDefNode(self, node): for arg in node.args: if arg.default: self.visitchildren(arg) self.visitchildren(node, ('decorators',)) self.env_stack.append(self.env) self.env = node.local_scope self.stack.append(self.flow) self.flow = ControlFlow() # Collect all entries for entry in node.local_scope.entries.values(): if self.flow.is_tracked(entry): self.flow.entries.add(entry) self.mark_position(node) # Function body block self.flow.nextblock() for arg in node.args: self._visit(arg) if node.star_arg: self.flow.mark_argument(node.star_arg, TypedExprNode(Builtin.tuple_type, may_be_none=False), node.star_arg.entry) if node.starstar_arg: self.flow.mark_argument(node.starstar_arg, TypedExprNode(Builtin.dict_type, may_be_none=False), node.starstar_arg.entry) self._visit(node.body) # Workaround for generators if node.is_generator: self._visit(node.gbody.body) # Exit point if self.flow.block: self.flow.block.add_child(self.flow.exit_point) # Cleanup graph self.flow.normalize() check_definitions(self.flow, self.current_directives) self.flow.blocks.add(self.flow.entry_point) self.gv_ctx.add(GV(node.local_scope.name, self.flow)) self.flow = self.stack.pop() self.env = self.env_stack.pop() return node def visit_DefNode(self, node): node.used = True return self.visit_FuncDefNode(node) def visit_GeneratorBodyDefNode(self, node): return node def visit_CTypeDefNode(self, node): return node def mark_assignment(self, lhs, 
rhs=None): if not self.flow.block: return if self.flow.exceptions: exc_descr = self.flow.exceptions[-1] self.flow.block.add_child(exc_descr.entry_point) self.flow.nextblock() if not rhs: rhs = object_expr if lhs.is_name: if lhs.entry is not None: entry = lhs.entry else: entry = self.env.lookup(lhs.name) if entry is None: # TODO: This shouldn't happen... return self.flow.mark_assignment(lhs, rhs, entry) elif lhs.is_sequence_constructor: for i, arg in enumerate(lhs.args): if not rhs or arg.is_starred: item_node = None else: item_node = rhs.inferable_item_node(i) self.mark_assignment(arg, item_node) else: self._visit(lhs) if self.flow.exceptions: exc_descr = self.flow.exceptions[-1] self.flow.block.add_child(exc_descr.entry_point) self.flow.nextblock() def mark_position(self, node): """Mark position if DOT output is enabled.""" if self.current_directives['control_flow.dot_output']: self.flow.mark_position(node) def visit_FromImportStatNode(self, node): for name, target in node.items: if name != "*": self.mark_assignment(target) self.visitchildren(node) return node def visit_AssignmentNode(self, node): raise InternalError("Unhandled assignment node") def visit_SingleAssignmentNode(self, node): self._visit(node.rhs) self.mark_assignment(node.lhs, node.rhs) return node def visit_CascadedAssignmentNode(self, node): self._visit(node.rhs) for lhs in node.lhs_list: self.mark_assignment(lhs, node.rhs) return node def visit_ParallelAssignmentNode(self, node): collector = AssignmentCollector() collector.visitchildren(node) for lhs, rhs in collector.assignments: self._visit(rhs) for lhs, rhs in collector.assignments: self.mark_assignment(lhs, rhs) return node def visit_InPlaceAssignmentNode(self, node): self.in_inplace_assignment = True self.visitchildren(node) self.in_inplace_assignment = False self.mark_assignment(node.lhs, self.constant_folder(node.create_binop_node())) return node def visit_DelStatNode(self, node): for arg in node.args: if arg.is_name: entry = arg.entry or self.env.lookup(arg.name) if entry.in_closure or entry.from_closure: error(arg.pos, "can not delete variable '%s' " "referenced in nested scope" % entry.name) if not node.ignore_nonexisting: self._visit(arg) # mark reference self.flow.mark_deletion(arg, entry) else: self._visit(arg) return node def visit_CArgDeclNode(self, node): entry = self.env.lookup(node.name) if entry: may_be_none = not node.not_none self.flow.mark_argument( node, TypedExprNode(entry.type, may_be_none), entry) return node def visit_NameNode(self, node): if self.flow.block: entry = node.entry or self.env.lookup(node.name) if entry: self.flow.mark_reference(node, entry) if entry in self.reductions and not self.in_inplace_assignment: error(node.pos, "Cannot read reduction variable in loop body") return node def visit_StatListNode(self, node): if self.flow.block: for stat in node.stats: self._visit(stat) if not self.flow.block: stat.is_terminator = True break return node def visit_Node(self, node): self.visitchildren(node) self.mark_position(node) return node def visit_IfStatNode(self, node): next_block = self.flow.newblock() parent = self.flow.block # If clauses for clause in node.if_clauses: parent = self.flow.nextblock(parent) self._visit(clause.condition) self.flow.nextblock() self._visit(clause.body) if self.flow.block: self.flow.block.add_child(next_block) # Else clause if node.else_clause: self.flow.nextblock(parent=parent) self._visit(node.else_clause) if self.flow.block: self.flow.block.add_child(next_block) else: parent.add_child(next_block) if 
next_block.parents: self.flow.block = next_block else: self.flow.block = None return node def visit_WhileStatNode(self, node): condition_block = self.flow.nextblock() next_block = self.flow.newblock() # Condition block self.flow.loops.append(LoopDescr(next_block, condition_block)) if node.condition: self._visit(node.condition) # Body block self.flow.nextblock() self._visit(node.body) self.flow.loops.pop() # Loop it if self.flow.block: self.flow.block.add_child(condition_block) self.flow.block.add_child(next_block) # Else clause if node.else_clause: self.flow.nextblock(parent=condition_block) self._visit(node.else_clause) if self.flow.block: self.flow.block.add_child(next_block) else: condition_block.add_child(next_block) if next_block.parents: self.flow.block = next_block else: self.flow.block = None return node def mark_forloop_target(self, node): # TODO: Remove redundancy with range optimization... is_special = False sequence = node.iterator.sequence target = node.target if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: entry = self.env.lookup(function.name) if not entry or entry.is_builtin: if function.name == 'reversed' and len(sequence.args) == 1: sequence = sequence.args[0] elif function.name == 'enumerate' and len(sequence.args) == 1: if target.is_sequence_constructor and len(target.args) == 2: iterator = sequence.args[0] if iterator.is_name: iterator_type = iterator.infer_type(self.env) if iterator_type.is_builtin_type: # assume that builtin types have a length within Py_ssize_t self.mark_assignment( target.args[0], ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX', type=PyrexTypes.c_py_ssize_t_type)) target = target.args[1] sequence = sequence.args[0] if isinstance(sequence, ExprNodes.SimpleCallNode): function = sequence.function if sequence.self is None and function.is_name: entry = self.env.lookup(function.name) if not entry or entry.is_builtin: if function.name in ('range', 'xrange'): is_special = True for arg in sequence.args[:2]: self.mark_assignment(target, arg) if len(sequence.args) > 2: self.mark_assignment(target, self.constant_folder( ExprNodes.binop_node(node.pos, '+', sequence.args[0], sequence.args[2]))) if not is_special: # A for-loop basically translates to subsequent calls to # __getitem__(), so using an IndexNode here allows us to # naturally infer the base type of pointers, C arrays, # Python strings, etc., while correctly falling back to an # object type when the base type cannot be handled. 
self.mark_assignment(target, node.item) def visit_AsyncForStatNode(self, node): return self.visit_ForInStatNode(node) def visit_ForInStatNode(self, node): condition_block = self.flow.nextblock() next_block = self.flow.newblock() # Condition with iterator self.flow.loops.append(LoopDescr(next_block, condition_block)) self._visit(node.iterator) # Target assignment self.flow.nextblock() if isinstance(node, Nodes.ForInStatNode): self.mark_forloop_target(node) elif isinstance(node, Nodes.AsyncForStatNode): # not entirely correct, but good enough for now self.mark_assignment(node.target, node.item) else: # Parallel self.mark_assignment(node.target) # Body block if isinstance(node, Nodes.ParallelRangeNode): # In case of an invalid self._delete_privates(node, exclude=node.target.entry) self.flow.nextblock() self._visit(node.body) self.flow.loops.pop() # Loop it if self.flow.block: self.flow.block.add_child(condition_block) # Else clause if node.else_clause: self.flow.nextblock(parent=condition_block) self._visit(node.else_clause) if self.flow.block: self.flow.block.add_child(next_block) else: condition_block.add_child(next_block) if next_block.parents: self.flow.block = next_block else: self.flow.block = None return node def _delete_privates(self, node, exclude=None): for private_node in node.assigned_nodes: if not exclude or private_node.entry is not exclude: self.flow.mark_deletion(private_node, private_node.entry) def visit_ParallelRangeNode(self, node): reductions = self.reductions # if node.target is None or not a NameNode, an error will have # been previously issued if hasattr(node.target, 'entry'): self.reductions = set(reductions) for private_node in node.assigned_nodes: private_node.entry.error_on_uninitialized = True pos, reduction = node.assignments[private_node.entry] if reduction: self.reductions.add(private_node.entry) node = self.visit_ForInStatNode(node) self.reductions = reductions return node def visit_ParallelWithBlockNode(self, node): for private_node in node.assigned_nodes: private_node.entry.error_on_uninitialized = True self._delete_privates(node) self.visitchildren(node) self._delete_privates(node) return node def visit_ForFromStatNode(self, node): condition_block = self.flow.nextblock() next_block = self.flow.newblock() # Condition with iterator self.flow.loops.append(LoopDescr(next_block, condition_block)) self._visit(node.bound1) self._visit(node.bound2) if node.step is not None: self._visit(node.step) # Target assignment self.flow.nextblock() self.mark_assignment(node.target, node.bound1) if node.step is not None: self.mark_assignment(node.target, self.constant_folder( ExprNodes.binop_node(node.pos, '+', node.bound1, node.step))) # Body block self.flow.nextblock() self._visit(node.body) self.flow.loops.pop() # Loop it if self.flow.block: self.flow.block.add_child(condition_block) # Else clause if node.else_clause: self.flow.nextblock(parent=condition_block) self._visit(node.else_clause) if self.flow.block: self.flow.block.add_child(next_block) else: condition_block.add_child(next_block) if next_block.parents: self.flow.block = next_block else: self.flow.block = None return node def visit_LoopNode(self, node): raise InternalError("Generic loops are not supported") def visit_WithTargetAssignmentStatNode(self, node): self.mark_assignment(node.lhs, node.with_node.enter_call) return node def visit_WithStatNode(self, node): self._visit(node.manager) self._visit(node.enter_call) self._visit(node.body) return node def visit_TryExceptStatNode(self, node): # After exception 
handling next_block = self.flow.newblock() # Body block self.flow.newblock() # Exception entry point entry_point = self.flow.newblock() self.flow.exceptions.append(ExceptionDescr(entry_point)) self.flow.nextblock() ## XXX: links to exception handling point should be added by ## XXX: children nodes self.flow.block.add_child(entry_point) self.flow.nextblock() self._visit(node.body) self.flow.exceptions.pop() # After exception if self.flow.block: if node.else_clause: self.flow.nextblock() self._visit(node.else_clause) if self.flow.block: self.flow.block.add_child(next_block) for clause in node.except_clauses: self.flow.block = entry_point if clause.pattern: for pattern in clause.pattern: self._visit(pattern) else: # TODO: handle * pattern pass entry_point = self.flow.newblock(parent=self.flow.block) self.flow.nextblock() if clause.target: self.mark_assignment(clause.target) self._visit(clause.body) if self.flow.block: self.flow.block.add_child(next_block) if self.flow.exceptions: entry_point.add_child(self.flow.exceptions[-1].entry_point) if next_block.parents: self.flow.block = next_block else: self.flow.block = None return node def visit_TryFinallyStatNode(self, node): body_block = self.flow.nextblock() # Exception entry point entry_point = self.flow.newblock() self.flow.block = entry_point self._visit(node.finally_except_clause) if self.flow.block and self.flow.exceptions: self.flow.block.add_child(self.flow.exceptions[-1].entry_point) # Normal execution finally_enter = self.flow.newblock() self.flow.block = finally_enter self._visit(node.finally_clause) finally_exit = self.flow.block descr = ExceptionDescr(entry_point, finally_enter, finally_exit) self.flow.exceptions.append(descr) if self.flow.loops: self.flow.loops[-1].exceptions.append(descr) self.flow.block = body_block ## XXX: Is it still required body_block.add_child(entry_point) self.flow.nextblock() self._visit(node.body) self.flow.exceptions.pop() if self.flow.loops: self.flow.loops[-1].exceptions.pop() if self.flow.block: self.flow.block.add_child(finally_enter) if finally_exit: self.flow.block = self.flow.nextblock(parent=finally_exit) else: self.flow.block = None return node def visit_RaiseStatNode(self, node): self.mark_position(node) self.visitchildren(node) if self.flow.exceptions: self.flow.block.add_child(self.flow.exceptions[-1].entry_point) self.flow.block = None return node def visit_ReraiseStatNode(self, node): self.mark_position(node) if self.flow.exceptions: self.flow.block.add_child(self.flow.exceptions[-1].entry_point) self.flow.block = None return node def visit_ReturnStatNode(self, node): self.mark_position(node) self.visitchildren(node) for exception in self.flow.exceptions[::-1]: if exception.finally_enter: self.flow.block.add_child(exception.finally_enter) if exception.finally_exit: exception.finally_exit.add_child(self.flow.exit_point) break else: if self.flow.block: self.flow.block.add_child(self.flow.exit_point) self.flow.block = None return node def visit_BreakStatNode(self, node): if not self.flow.loops: #error(node.pos, "break statement not inside loop") return node loop = self.flow.loops[-1] self.mark_position(node) for exception in loop.exceptions[::-1]: if exception.finally_enter: self.flow.block.add_child(exception.finally_enter) if exception.finally_exit: exception.finally_exit.add_child(loop.next_block) break else: self.flow.block.add_child(loop.next_block) self.flow.block = None return node def visit_ContinueStatNode(self, node): if not self.flow.loops: #error(node.pos, "continue statement not 
inside loop") return node loop = self.flow.loops[-1] self.mark_position(node) for exception in loop.exceptions[::-1]: if exception.finally_enter: self.flow.block.add_child(exception.finally_enter) if exception.finally_exit: exception.finally_exit.add_child(loop.loop_block) break else: self.flow.block.add_child(loop.loop_block) self.flow.block = None return node def visit_ComprehensionNode(self, node): if node.expr_scope: self.env_stack.append(self.env) self.env = node.expr_scope # Skip append node here self._visit(node.loop) if node.expr_scope: self.env = self.env_stack.pop() return node def visit_ScopedExprNode(self, node): if node.expr_scope: self.env_stack.append(self.env) self.env = node.expr_scope self.visitchildren(node) if node.expr_scope: self.env = self.env_stack.pop() return node def visit_PyClassDefNode(self, node): self.visitchildren(node, ('dict', 'metaclass', 'mkw', 'bases', 'class_result')) self.flow.mark_assignment(node.target, node.classobj, self.env.lookup(node.name)) self.env_stack.append(self.env) self.env = node.scope self.flow.nextblock() self.visitchildren(node, ('body',)) self.flow.nextblock() self.env = self.env_stack.pop() return node def visit_AmpersandNode(self, node): if node.operand.is_name: # Fake assignment to silence warning self.mark_assignment(node.operand, fake_rhs_expr) self.visitchildren(node) return node Cython-0.26.1/Cython/Compiler/ParseTreeTransforms.pxd0000664000175000017500000000415113150045407023334 0ustar stefanstefan00000000000000 from __future__ import absolute_import cimport cython from .Visitor cimport ( CythonTransform, VisitorTransform, TreeVisitor, ScopeTrackingTransform, EnvTransform) cdef class NameNodeCollector(TreeVisitor): cdef list name_nodes cdef class SkipDeclarations: # (object): pass cdef class NormalizeTree(CythonTransform): cdef bint is_in_statlist cdef bint is_in_expr cpdef visit_StatNode(self, node, is_listcontainer=*) cdef class PostParse(ScopeTrackingTransform): cdef dict specialattribute_handlers cdef size_t lambda_counter cdef size_t genexpr_counter cdef _visit_assignment_node(self, node, list expr_list) #def eliminate_rhs_duplicates(list expr_list_list, list ref_node_sequence) #def sort_common_subsequences(list items) @cython.locals(starred_targets=Py_ssize_t, lhs_size=Py_ssize_t, rhs_size=Py_ssize_t) cdef flatten_parallel_assignments(list input, list output) cdef map_starred_assignment(list lhs_targets, list starred_assignments, list lhs_args, list rhs_args) #class PxdPostParse(CythonTransform, SkipDeclarations): #class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): #class WithTransform(CythonTransform, SkipDeclarations): #class DecoratorTransform(CythonTransform, SkipDeclarations): #class AnalyseDeclarationsTransform(EnvTransform): cdef class AnalyseExpressionsTransform(CythonTransform): pass cdef class ExpandInplaceOperators(EnvTransform): pass cdef class AlignFunctionDefinitions(CythonTransform): cdef dict directives cdef set imported_names cdef object scope cdef class YieldNodeCollector(TreeVisitor): cdef public list yields cdef public list returns cdef public bint has_return_value cdef class MarkClosureVisitor(CythonTransform): cdef bint needs_closure cdef class CreateClosureClasses(CythonTransform): cdef list path cdef bint in_lambda cdef module_scope cdef generator_class cdef class GilCheck(VisitorTransform): cdef list env_stack cdef bint nogil cdef bint nogil_declarator_only cdef class TransformBuiltinMethods(EnvTransform): cdef visit_cython_attribute(self, node) 
Cython-0.26.1/Cython/Build/0000775000175000017500000000000013151203436016131 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Build/IpythonMagic.py0000664000175000017500000003160713143605603021110 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- """ ===================== Cython related magics ===================== Magic command interface for interactive work with Cython .. note:: The ``Cython`` package needs to be installed separately. It can be obtained using ``easy_install`` or ``pip``. Usage ===== To enable the magics below, execute ``%load_ext cythonmagic``. ``%%cython`` {CYTHON_DOC} ``%%cython_inline`` {CYTHON_INLINE_DOC} ``%%cython_pyximport`` {CYTHON_PYXIMPORT_DOC} Author: * Brian Granger Code moved from IPython and adapted by: * Martín Gaitán Parts of this code were taken from Cython.inline. """ #----------------------------------------------------------------------------- # Copyright (C) 2010-2011, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file ipython-COPYING.rst, distributed with this software. #----------------------------------------------------------------------------- from __future__ import absolute_import, print_function import imp import io import os import re import sys import time try: reload except NameError: # Python 3 from imp import reload try: import hashlib except ImportError: import md5 as hashlib from distutils.core import Distribution, Extension from distutils.command.build_ext import build_ext from IPython.core import display from IPython.core import magic_arguments from IPython.core.magic import Magics, magics_class, cell_magic from IPython.utils import py3compat try: from IPython.paths import get_ipython_cache_dir except ImportError: # older IPython version from IPython.utils.path import get_ipython_cache_dir from IPython.utils.text import dedent from ..Shadow import __version__ as cython_version from ..Compiler.Errors import CompileError from .Inline import cython_inline from .Dependencies import cythonize @magics_class class CythonMagics(Magics): def __init__(self, shell): super(CythonMagics, self).__init__(shell) self._reloads = {} self._code_cache = {} self._pyximport_installed = False def _import_all(self, module): mdict = module.__dict__ if '__all__' in mdict: keys = mdict['__all__'] else: keys = [k for k in mdict if not k.startswith('_')] for k in keys: try: self.shell.push({k: mdict[k]}) except KeyError: msg = "'module' object has no attribute '%s'" % k raise AttributeError(msg) @cell_magic def cython_inline(self, line, cell): """Compile and run a Cython code cell using Cython.inline. This magic simply passes the body of the cell to Cython.inline and returns the result. If the variables `a` and `b` are defined in the user's namespace, here is a simple example that returns their sum:: %%cython_inline return a+b For most purposes, we recommend the usage of the `%%cython` magic. """ locs = self.shell.user_global_ns globs = self.shell.user_ns return cython_inline(cell, locals=locs, globals=globs) @cell_magic def cython_pyximport(self, line, cell): """Compile and import a Cython code cell using pyximport. The contents of the cell are written to a `.pyx` file in the current working directory, which is then imported using `pyximport`. This magic requires a module name to be passed:: %%cython_pyximport modulename def f(x): return 2.0*x The compiled module is then imported and all of its symbols are injected into the user's namespace. 
For most purposes, we recommend the usage of the `%%cython` magic. """ module_name = line.strip() if not module_name: raise ValueError('module name must be given') fname = module_name + '.pyx' with io.open(fname, 'w', encoding='utf-8') as f: f.write(cell) if 'pyximport' not in sys.modules or not self._pyximport_installed: import pyximport pyximport.install(reload_support=True) self._pyximport_installed = True if module_name in self._reloads: module = self._reloads[module_name] reload(module) else: __import__(module_name) module = sys.modules[module_name] self._reloads[module_name] = module self._import_all(module) @magic_arguments.magic_arguments() @magic_arguments.argument( '-3', dest='language_level', action='store_const', const=3, default=None, help="Select Python 3 syntax." ) @magic_arguments.argument( '-2', dest='language_level', action='store_const', const=2, default=None, help="Select Python 2 syntax." ) @magic_arguments.argument( '-c', '--compile-args', action='append', default=[], help="Extra flags to pass to compiler via the `extra_compile_args` " "Extension flag (can be specified multiple times)." ) @magic_arguments.argument( '--link-args', action='append', default=[], help="Extra flags to pass to linker via the `extra_link_args` " "Extension flag (can be specified multiple times)." ) @magic_arguments.argument( '-l', '--lib', action='append', default=[], help="Add a library to link the extension against (can be specified " "multiple times)." ) @magic_arguments.argument( '-n', '--name', help="Specify a name for the Cython module." ) @magic_arguments.argument( '-L', dest='library_dirs', metavar='dir', action='append', default=[], help="Add a path to the list of library directories (can be specified " "multiple times)." ) @magic_arguments.argument( '-I', '--include', action='append', default=[], help="Add a path to the list of include directories (can be specified " "multiple times)." ) @magic_arguments.argument( '-S', '--src', action='append', default=[], help="Add a path to the list of src files (can be specified " "multiple times)." ) @magic_arguments.argument( '-+', '--cplus', action='store_true', default=False, help="Output a C++ rather than C file." ) @magic_arguments.argument( '-f', '--force', action='store_true', default=False, help="Force the compilation of a new module, even if the source has been " "previously compiled." ) @magic_arguments.argument( '-a', '--annotate', action='store_true', default=False, help="Produce a colorized HTML version of the source." ) @cell_magic def cython(self, line, cell): """Compile and import everything from a Cython code cell. The contents of the cell are written to a `.pyx` file in the directory `IPYTHONDIR/cython` using a filename with the hash of the code. This file is then cythonized and compiled. The resulting module is imported and all of its symbols are injected into the user's namespace. The usage is similar to that of `%%cython_pyximport` but you don't have to pass a module name:: %%cython def f(x): return 2.0*x To compile OpenMP codes, pass the required `--compile-args` and `--link-args`. For example with gcc:: %%cython --compile-args=-fopenmp --link-args=-fopenmp ... 
""" args = magic_arguments.parse_argstring(self.cython, line) code = cell if cell.endswith('\n') else cell + '\n' lib_dir = os.path.join(get_ipython_cache_dir(), 'cython') quiet = True key = code, line, sys.version_info, sys.executable, cython_version if not os.path.exists(lib_dir): os.makedirs(lib_dir) if args.force: # Force a new module name by adding the current time to the # key which is hashed to determine the module name. key += time.time(), if args.name: module_name = py3compat.unicode_to_str(args.name) else: module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest() module_path = os.path.join(lib_dir, module_name + self.so_ext) have_module = os.path.isfile(module_path) need_cythonize = not have_module if args.annotate: html_file = os.path.join(lib_dir, module_name + '.html') if not os.path.isfile(html_file): need_cythonize = True if need_cythonize: c_include_dirs = args.include c_src_files = list(map(str, args.src)) if 'numpy' in code: import numpy c_include_dirs.append(numpy.get_include()) pyx_file = os.path.join(lib_dir, module_name + '.pyx') pyx_file = py3compat.cast_bytes_py2(pyx_file, encoding=sys.getfilesystemencoding()) with io.open(pyx_file, 'w', encoding='utf-8') as f: f.write(code) extension = Extension( name=module_name, sources=[pyx_file] + c_src_files, include_dirs=c_include_dirs, library_dirs=args.library_dirs, extra_compile_args=args.compile_args, extra_link_args=args.link_args, libraries=args.lib, language='c++' if args.cplus else 'c', ) build_extension = self._get_build_extension() try: opts = dict( quiet=quiet, annotate=args.annotate, force=True, ) if args.language_level is not None: assert args.language_level in (2, 3) opts['language_level'] = args.language_level elif sys.version_info[0] > 2: opts['language_level'] = 3 build_extension.extensions = cythonize([extension], **opts) except CompileError: return if not have_module: build_extension.build_temp = os.path.dirname(pyx_file) build_extension.build_lib = lib_dir build_extension.run() self._code_cache[key] = module_name module = imp.load_dynamic(module_name, module_path) self._import_all(module) if args.annotate: try: with io.open(html_file, encoding='utf-8') as f: annotated_html = f.read() except IOError as e: # File could not be opened. Most likely the user has a version # of Cython before 0.15.1 (when `cythonize` learned the # `force` keyword argument) and has already compiled this # exact source without annotation. print('Cython completed successfully but the annotated ' 'source could not be read.', file=sys.stderr) print(e, file=sys.stderr) else: return display.HTML(self.clean_annotated_html(annotated_html)) @property def so_ext(self): """The extension suffix for compiled modules.""" try: return self._so_ext except AttributeError: self._so_ext = self._get_build_extension().get_ext_filename('') return self._so_ext def _clear_distutils_mkpath_cache(self): """clear distutils mkpath cache prevents distutils from skipping re-creation of dirs that have been removed """ try: from distutils.dir_util import _path_created except ImportError: pass else: _path_created.clear() def _get_build_extension(self): self._clear_distutils_mkpath_cache() dist = Distribution() config_files = dist.find_config_files() try: config_files.remove('setup.cfg') except ValueError: pass dist.parse_config_files(config_files) build_extension = build_ext(dist) build_extension.finalize_options() return build_extension @staticmethod def clean_annotated_html(html): """Clean up the annotated HTML source. 
Strips the link to the generated C or C++ file, which we do not present to the user. """ r = re.compile('

Raw output: (.*)') html = '\n'.join(l for l in html.splitlines() if not r.match(l)) return html __doc__ = __doc__.format( # rST doesn't see the -+ flag as part of an option list, so we # hide it from the module-level docstring. CYTHON_DOC=dedent(CythonMagics.cython.__doc__\ .replace('-+, --cplus', '--cplus ')), CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__), CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__), ) Cython-0.26.1/Cython/Build/Cythonize.py0000664000175000017500000001562413023021033020454 0ustar stefanstefan00000000000000#!/usr/bin/env python from __future__ import absolute_import import os import shutil import tempfile from distutils.core import setup from .Dependencies import cythonize, extended_iglob from ..Utils import is_package_dir from ..Compiler import Options try: import multiprocessing parallel_compiles = int(multiprocessing.cpu_count() * 1.5) except ImportError: multiprocessing = None parallel_compiles = 0 class _FakePool(object): def map_async(self, func, args): from itertools import imap for _ in imap(func, args): pass def close(self): pass def terminate(self): pass def join(self): pass def parse_directives(option, name, value, parser): dest = option.dest old_directives = dict(getattr(parser.values, dest, Options.get_directive_defaults())) directives = Options.parse_directive_list( value, relaxed_bool=True, current_settings=old_directives) setattr(parser.values, dest, directives) def parse_options(option, name, value, parser): dest = option.dest options = dict(getattr(parser.values, dest, {})) for opt in value.split(','): if '=' in opt: n, v = opt.split('=', 1) v = v.lower() not in ('false', 'f', '0', 'no') else: n, v = opt, True options[n] = v setattr(parser.values, dest, options) def find_package_base(path): base_dir, package_path = os.path.split(path) while os.path.isfile(os.path.join(base_dir, '__init__.py')): base_dir, parent = os.path.split(base_dir) package_path = '%s/%s' % (parent, package_path) return base_dir, package_path def cython_compile(path_pattern, options): pool = None all_paths = map(os.path.abspath, extended_iglob(path_pattern)) try: for path in all_paths: if options.build_inplace: base_dir = path while not os.path.isdir(base_dir) or is_package_dir(base_dir): base_dir = os.path.dirname(base_dir) else: base_dir = None if os.path.isdir(path): # recursively compiling a package paths = [os.path.join(path, '**', '*.{py,pyx}')] else: # assume it's a file(-like thing) paths = [path] ext_modules = cythonize( paths, nthreads=options.parallel, exclude_failures=options.keep_going, exclude=options.excludes, compiler_directives=options.directives, force=options.force, quiet=options.quiet, **options.options) if ext_modules and options.build: if len(ext_modules) > 1 and options.parallel > 1: if pool is None: try: pool = multiprocessing.Pool(options.parallel) except OSError: pool = _FakePool() pool.map_async(run_distutils, [ (base_dir, [ext]) for ext in ext_modules]) else: run_distutils((base_dir, ext_modules)) except: if pool is not None: pool.terminate() raise else: if pool is not None: pool.close() pool.join() def run_distutils(args): base_dir, ext_modules = args script_args = ['build_ext', '-i'] cwd = os.getcwd() temp_dir = None try: if base_dir: os.chdir(base_dir) temp_dir = tempfile.mkdtemp(dir=base_dir) script_args.extend(['--build-temp', temp_dir]) setup( script_name='setup.py', script_args=script_args, ext_modules=ext_modules, ) finally: if base_dir: os.chdir(cwd) if temp_dir and os.path.isdir(temp_dir): 
shutil.rmtree(temp_dir) def parse_args(args): from optparse import OptionParser parser = OptionParser(usage='%prog [options] [sources and packages]+') parser.add_option('-X', '--directive', metavar='NAME=VALUE,...', dest='directives', type=str, action='callback', callback=parse_directives, default={}, help='set a compiler directive') parser.add_option('-s', '--option', metavar='NAME=VALUE', dest='options', type=str, action='callback', callback=parse_options, default={}, help='set a cythonize option') parser.add_option('-3', dest='python3_mode', action='store_true', help='use Python 3 syntax mode by default') parser.add_option('-a', '--annotate', dest='annotate', action='store_true', help='generate annotated HTML page for source files') parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes', action='append', default=[], help='exclude certain file patterns from the compilation') parser.add_option('-b', '--build', dest='build', action='store_true', help='build extension modules using distutils') parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true', help='build extension modules in place using distutils (implies -b)') parser.add_option('-j', '--parallel', dest='parallel', metavar='N', type=int, default=parallel_compiles, help=('run builds in N parallel jobs (default: %d)' % parallel_compiles or 1)) parser.add_option('-f', '--force', dest='force', action='store_true', help='force recompilation') parser.add_option('-q', '--quiet', dest='quiet', action='store_true', help='be less verbose during compilation') parser.add_option('--lenient', dest='lenient', action='store_true', help='increase Python compatibility by ignoring some compile time errors') parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true', help='compile as much as possible, ignore compilation failures') options, args = parser.parse_args(args) if not args: parser.error("no source files provided") if options.build_inplace: options.build = True if multiprocessing is None: options.parallel = 0 if options.python3_mode: options.options['language_level'] = 3 return options, args def main(args=None): options, paths = parse_args(args) if options.lenient: # increase Python compatibility by ignoring compile time errors Options.error_on_unknown_names = False Options.error_on_uninitialized = False if options.annotate: Options.annotate = True for path in paths: cython_compile(path, options) if __name__ == '__main__': main() Cython-0.26.1/Cython/Build/Tests/0000775000175000017500000000000013151203436017233 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Build/Tests/TestInline.py0000664000175000017500000000403513023021033021652 0ustar stefanstefan00000000000000import os, tempfile from Cython.Shadow import inline from Cython.Build.Inline import safe_type from Cython.TestUtils import CythonTest try: import numpy has_numpy = True except: has_numpy = False test_kwds = dict(force=True, quiet=True) global_value = 100 class TestInline(CythonTest): def setUp(self): CythonTest.setUp(self) self.test_kwds = dict(test_kwds) if os.path.isdir('TEST_TMP'): lib_dir = os.path.join('TEST_TMP','inline') else: lib_dir = tempfile.mkdtemp(prefix='cython_inline_') self.test_kwds['lib_dir'] = lib_dir def test_simple(self): self.assertEquals(inline("return 1+2", **self.test_kwds), 3) def test_types(self): self.assertEquals(inline(""" cimport cython return cython.typeof(a), cython.typeof(b) """, a=1.0, b=[], **self.test_kwds), ('double', 'list object')) def test_locals(self): a = 1 b = 2 
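# No 'a' or 'b' is passed explicitly: inline() is expected to pick them up
# from the caller's namespace (test_globals below relies on the same for
# module-level names).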
self.assertEquals(inline("return a+b", **self.test_kwds), 3) def test_globals(self): self.assertEquals(inline("return global_value + 1", **self.test_kwds), global_value + 1) def test_no_return(self): self.assertEquals(inline(""" a = 1 cdef double b = 2 cdef c = [] """, **self.test_kwds), dict(a=1, b=2.0, c=[])) def test_def_node(self): foo = inline("def foo(x): return x * x", **self.test_kwds)['foo'] self.assertEquals(foo(7), 49) def test_pure(self): import cython as cy b = inline(""" b = cy.declare(float, a) c = cy.declare(cy.pointer(cy.float), &b) return b """, a=3, **self.test_kwds) self.assertEquals(type(b), float) if has_numpy: def test_numpy(self): import numpy a = numpy.ndarray((10, 20)) a[0,0] = 10 self.assertEquals(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]') self.assertEquals(inline("return a[0,0]", a=a, **self.test_kwds), 10.0) Cython-0.26.1/Cython/Build/Tests/TestIpythonMagic.py0000664000175000017500000000744313150045407023051 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- # tag: ipython """Tests for the Cython magics extension.""" import os import sys try: from IPython.testing.globalipapp import get_ipython from IPython.utils import py3compat except: __test__ = False try: # disable IPython history thread to avoid having to clean it up from IPython.core.history import HistoryManager HistoryManager.enabled = False except ImportError: pass from Cython.TestUtils import CythonTest ip = get_ipython() code = py3compat.str_to_unicode("""\ def f(x): return 2*x """) cython3_code = py3compat.str_to_unicode("""\ def f(int x): return 2 / x def call(x): return f(*(x,)) """) if sys.platform == 'win32': # not using IPython's decorators here because they depend on "nose" try: from unittest import skip as skip_win32 except ImportError: # poor dev's silent @unittest.skip() def skip_win32(dummy): def _skip_win32(func): return None return _skip_win32 else: def skip_win32(dummy): def _skip_win32(func): def wrapper(*args, **kwargs): func(*args, **kwargs) return wrapper return _skip_win32 class TestIPythonMagic(CythonTest): def setUp(self): CythonTest.setUp(self) ip.extension_manager.load_extension('cython') def test_cython_inline(self): ip.ex('a=10; b=20') result = ip.run_cell_magic('cython_inline', '', 'return a+b') self.assertEqual(result, 30) @skip_win32('Skip on Windows') def test_cython_pyximport(self): module_name = '_test_cython_pyximport' ip.run_cell_magic('cython_pyximport', module_name, code) ip.ex('g = f(10)') self.assertEqual(ip.user_ns['g'], 20.0) ip.run_cell_magic('cython_pyximport', module_name, code) ip.ex('h = f(-10)') self.assertEqual(ip.user_ns['h'], -20.0) try: os.remove(module_name + '.pyx') except OSError: pass def test_cython(self): ip.run_cell_magic('cython', '', code) ip.ex('g = f(10)') self.assertEqual(ip.user_ns['g'], 20.0) def test_cython_name(self): # The Cython module named 'mymodule' defines the function f. ip.run_cell_magic('cython', '--name=mymodule', code) # This module can now be imported in the interactive namespace. ip.ex('import mymodule; g = mymodule.f(10)') self.assertEqual(ip.user_ns['g'], 20.0) def test_cython_language_level(self): # The Cython cell defines the functions f() and call(). 
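# In an interactive IPython/Jupyter session the run_cell_magic() calls used
# in these tests correspond to (illustrative):
#
#     %load_ext cython
#
#     %%cython --name=mymodule
#     def f(int x):
#         return 2 * x
#
# Without an explicit -2/-3 flag the cell below is compiled with the language
# level of the Python version running the test, which is what this test checks.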
ip.run_cell_magic('cython', '', cython3_code) ip.ex('g = f(10); h = call(10)') if sys.version_info[0] < 3: self.assertEqual(ip.user_ns['g'], 2 // 10) self.assertEqual(ip.user_ns['h'], 2 // 10) else: self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) self.assertEqual(ip.user_ns['h'], 2.0 / 10.0) def test_cython3(self): # The Cython cell defines the functions f() and call(). ip.run_cell_magic('cython', '-3', cython3_code) ip.ex('g = f(10); h = call(10)') self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) self.assertEqual(ip.user_ns['h'], 2.0 / 10.0) def test_cython2(self): # The Cython cell defines the functions f() and call(). ip.run_cell_magic('cython', '-2', cython3_code) ip.ex('g = f(10); h = call(10)') self.assertEqual(ip.user_ns['g'], 2 // 10) self.assertEqual(ip.user_ns['h'], 2 // 10) @skip_win32('Skip on Windows') def test_extlibs(self): code = py3compat.str_to_unicode(""" from libc.math cimport sin x = sin(0.0) """) ip.user_ns['x'] = 1 ip.run_cell_magic('cython', '-l m', code) self.assertEqual(ip.user_ns['x'], 0) Cython-0.26.1/Cython/Build/Tests/__init__.py0000664000175000017500000000001512542002467021344 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Build/Tests/TestStripLiterals.py0000664000175000017500000000302012542002467023245 0ustar stefanstefan00000000000000from Cython.Build.Dependencies import strip_string_literals from Cython.TestUtils import CythonTest class TestStripLiterals(CythonTest): def t(self, before, expected): actual, literals = strip_string_literals(before, prefix="_L") self.assertEquals(expected, actual) for key, value in literals.items(): actual = actual.replace(key, value) self.assertEquals(before, actual) def test_empty(self): self.t("", "") def test_single_quote(self): self.t("'x'", "'_L1_'") def test_double_quote(self): self.t('"x"', '"_L1_"') def test_nested_quotes(self): self.t(""" '"' "'" """, """ '_L1_' "_L2_" """) def test_triple_quote(self): self.t(" '''a\n''' ", " '''_L1_''' ") def test_backslash(self): self.t(r"'a\'b'", "'_L1_'") self.t(r"'a\\'", "'_L1_'") self.t(r"'a\\\'b'", "'_L1_'") def test_unicode(self): self.t("u'abc'", "u'_L1_'") def test_raw(self): self.t(r"r'abc\\'", "r'_L1_'") def test_raw_unicode(self): self.t(r"ru'abc\\'", "ru'_L1_'") def test_comment(self): self.t("abc # foo", "abc #_L1_") def test_comment_and_quote(self): self.t("abc # 'x'", "abc #_L1_") self.t("'abc#'", "'_L1_'") def test_include(self): self.t("include 'a.pxi' # something here", "include '_L1_' #_L2_") def test_extern(self): self.t("cdef extern from 'a.h': # comment", "cdef extern from '_L1_': #_L2_") Cython-0.26.1/Cython/Build/BuildExecutable.py0000664000175000017500000001033613143605603021552 0ustar stefanstefan00000000000000""" Compile a Python script into an executable that embeds CPython and run it. Requires CPython to be built as a shared library ('libpythonX.Y'). 
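The helpers can also be driven programmatically; a minimal sketch, assuming a
script named hello.py in the current directory:

    from Cython.Build.BuildExecutable import build, exec_file

    exe = build('hello.py')       # cythonize with --embed, compile and link
    exec_file(exe, ['--demo'])    # run the resulting executable
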
Basic usage: python cythonrun somefile.py [ARGS] """ from __future__ import absolute_import DEBUG = True import sys import os from distutils import sysconfig def get_config_var(name, default=''): return sysconfig.get_config_var(name) or default INCDIR = sysconfig.get_python_inc() LIBDIR1 = get_config_var('LIBDIR') LIBDIR2 = get_config_var('LIBPL') PYLIB = get_config_var('LIBRARY') PYLIB_DYN = get_config_var('LDLIBRARY') if PYLIB_DYN == PYLIB: # no shared library PYLIB_DYN = '' else: PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ CC = get_config_var('CC', os.environ.get('CC', '')) CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '') LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC)) LINKFORSHARED = get_config_var('LINKFORSHARED') LIBS = get_config_var('LIBS') SYSLIBS = get_config_var('SYSLIBS') EXE_EXT = sysconfig.get_config_var('EXE') def _debug(msg, *args): if DEBUG: if args: msg = msg % args sys.stderr.write(msg + '\n') def dump_config(): _debug('INCDIR: %s', INCDIR) _debug('LIBDIR1: %s', LIBDIR1) _debug('LIBDIR2: %s', LIBDIR2) _debug('PYLIB: %s', PYLIB) _debug('PYLIB_DYN: %s', PYLIB_DYN) _debug('CC: %s', CC) _debug('CFLAGS: %s', CFLAGS) _debug('LINKCC: %s', LINKCC) _debug('LINKFORSHARED: %s', LINKFORSHARED) _debug('LIBS: %s', LIBS) _debug('SYSLIBS: %s', SYSLIBS) _debug('EXE_EXT: %s', EXE_EXT) def runcmd(cmd, shell=True): if shell: cmd = ' '.join(cmd) _debug(cmd) else: _debug(' '.join(cmd)) try: import subprocess except ImportError: # Python 2.3 ... returncode = os.system(cmd) else: returncode = subprocess.call(cmd, shell=shell) if returncode: sys.exit(returncode) def clink(basename): runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2] + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)] + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split()) def ccompile(basename): runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split()) def cycompile(input_file, options=()): from ..Compiler import Version, CmdLine, Main options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file]) _debug('Using Cython %s to compile %s', Version.version, input_file) result = Main.compile(sources, options) if result.num_errors > 0: sys.exit(1) def exec_file(program_name, args=()): runcmd([os.path.abspath(program_name)] + list(args), shell=False) def build(input_file, compiler_args=(), force=False): """ Build an executable program from a Cython module. Returns the name of the executable file. """ basename = os.path.splitext(input_file)[0] exe_file = basename + EXE_EXT if not force and os.path.abspath(exe_file) == os.path.abspath(input_file): raise ValueError("Input and output file names are the same, refusing to overwrite") if (not force and os.path.exists(exe_file) and os.path.exists(input_file) and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)): _debug("File is up to date, not regenerating %s", exe_file) return exe_file cycompile(input_file, compiler_args) ccompile(basename) clink(basename) return exe_file def build_and_run(args): """ Build an executable program from a Cython module and runs it. Arguments after the module name will be passed verbatimely to the program. 
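For example (illustrative), build_and_run(['-X', 'boundscheck=False',
'prog.py', '--verbose']) compiles prog.py with the given directive and then
runs the resulting executable with '--verbose' as its only argument.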
""" cy_args = [] last_arg = None for i, arg in enumerate(args): if arg.startswith('-'): cy_args.append(arg) elif last_arg in ('-X', '--directive'): cy_args.append(arg) else: input_file = arg args = args[i+1:] break last_arg = arg else: raise ValueError('no input file provided') program_name = build(input_file, cy_args) exec_file(program_name, args) if __name__ == '__main__': build_and_run(sys.argv[1:]) Cython-0.26.1/Cython/Build/Inline.py0000664000175000017500000003004013023021033017703 0ustar stefanstefan00000000000000from __future__ import absolute_import import sys, os, re, inspect import imp try: import hashlib except ImportError: import md5 as hashlib from distutils.core import Distribution, Extension from distutils.command.build_ext import build_ext import Cython from ..Compiler.Main import Context, CompilationOptions, default_options from ..Compiler.ParseTreeTransforms import (CythonTransform, SkipDeclarations, AnalyseDeclarationsTransform, EnvTransform) from ..Compiler.TreeFragment import parse_from_strings from ..Compiler.StringEncoding import _unicode from .Dependencies import strip_string_literals, cythonize, cached_function from ..Compiler import Pipeline, Nodes from ..Utils import get_cython_cache_dir import cython as cython_module IS_PY3 = sys.version_info >= (3, 0) # A utility function to convert user-supplied ASCII strings to unicode. if sys.version_info[0] < 3: def to_unicode(s): if isinstance(s, bytes): return s.decode('ascii') else: return s else: to_unicode = lambda x: x class UnboundSymbols(EnvTransform, SkipDeclarations): def __init__(self): CythonTransform.__init__(self, None) self.unbound = set() def visit_NameNode(self, node): if not self.current_env().lookup(node.name): self.unbound.add(node.name) return node def __call__(self, node): super(UnboundSymbols, self).__call__(node) return self.unbound @cached_function def unbound_symbols(code, context=None): code = to_unicode(code) if context is None: context = Context([], default_options) from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform tree = parse_from_strings('(tree fragment)', code) for phase in Pipeline.create_pipeline(context, 'pyx'): if phase is None: continue tree = phase(tree) if isinstance(phase, AnalyseDeclarationsTransform): break try: import builtins except ImportError: import __builtin__ as builtins return tuple(UnboundSymbols()(tree) - set(dir(builtins))) def unsafe_type(arg, context=None): py_type = type(arg) if py_type is int: return 'long' else: return safe_type(arg, context) def safe_type(arg, context=None): py_type = type(arg) if py_type in (list, tuple, dict, str): return py_type.__name__ elif py_type is complex: return 'double complex' elif py_type is float: return 'double' elif py_type is bool: return 'bint' elif 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray): return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim) else: for base_type in py_type.mro(): if base_type.__module__ in ('__builtin__', 'builtins'): return 'object' module = context.find_module(base_type.__module__, need_pxd=False) if module: entry = module.lookup(base_type.__name__) if entry.is_type: return '%s.%s' % (base_type.__module__, base_type.__name__) return 'object' def _get_build_extension(): dist = Distribution() # Ensure the build respects distutils configuration by parsing # the configuration files config_files = dist.find_config_files() dist.parse_config_files(config_files) build_extension = build_ext(dist) build_extension.finalize_options() return 
build_extension @cached_function def _create_context(cython_include_dirs): return Context(list(cython_include_dirs), default_options) _cython_inline_cache = {} _cython_inline_default_context = _create_context(('.',)) def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None): for symbol in unbound_symbols: if symbol not in kwds: if locals is None or globals is None: calling_frame = inspect.currentframe().f_back.f_back.f_back if locals is None: locals = calling_frame.f_locals if globals is None: globals = calling_frame.f_globals if symbol in locals: kwds[symbol] = locals[symbol] elif symbol in globals: kwds[symbol] = globals[symbol] else: print("Couldn't find %r" % symbol) def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'), cython_include_dirs=None, force=False, quiet=False, locals=None, globals=None, **kwds): if get_type is None: get_type = lambda x: 'object' ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context # Fast path if this has been called in this session. _unbound_symbols = _cython_inline_cache.get(code) if _unbound_symbols is not None: _populate_unbound(kwds, _unbound_symbols, locals, globals) args = sorted(kwds.items()) arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args]) invoke = _cython_inline_cache.get((code, arg_sigs)) if invoke is not None: arg_list = [arg[1] for arg in args] return invoke(*arg_list) orig_code = code code = to_unicode(code) code, literals = strip_string_literals(code) code = strip_common_indent(code) if locals is None: locals = inspect.currentframe().f_back.f_back.f_locals if globals is None: globals = inspect.currentframe().f_back.f_back.f_globals try: _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code) _populate_unbound(kwds, _unbound_symbols, locals, globals) except AssertionError: if not quiet: # Parsing from strings not fully supported (e.g. cimports). 
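# In that case unbound symbols cannot be discovered automatically and any
# names used by the snippet must be passed to cython_inline() explicitly as
# keyword arguments.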
print("Could not parse code as a string (to extract unbound symbols).") cimports = [] for name, arg in list(kwds.items()): if arg is cython_module: cimports.append('\ncimport cython as %s' % name) del kwds[name] arg_names = sorted(kwds) arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names]) key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__ module_name = "_cython_inline_" + hashlib.md5(_unicode(key).encode('utf-8')).hexdigest() if module_name in sys.modules: module = sys.modules[module_name] else: build_extension = None if cython_inline.so_ext is None: # Figure out and cache current extension suffix build_extension = _get_build_extension() cython_inline.so_ext = build_extension.get_ext_filename('') module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext) if not os.path.exists(lib_dir): os.makedirs(lib_dir) if force or not os.path.isfile(module_path): cflags = [] c_include_dirs = [] qualified = re.compile(r'([.\w]+)[.]') for type, _ in arg_sigs: m = qualified.match(type) if m: cimports.append('\ncimport %s' % m.groups()[0]) # one special case if m.groups()[0] == 'numpy': import numpy c_include_dirs.append(numpy.get_include()) # cflags.append('-Wno-unused') module_body, func_body = extract_func_code(code) params = ', '.join(['%s %s' % a for a in arg_sigs]) module_code = """ %(module_body)s %(cimports)s def __invoke(%(params)s): %(func_body)s return locals() """ % {'cimports': '\n'.join(cimports), 'module_body': module_body, 'params': params, 'func_body': func_body } for key, value in literals.items(): module_code = module_code.replace(key, value) pyx_file = os.path.join(lib_dir, module_name + '.pyx') fh = open(pyx_file, 'w') try: fh.write(module_code) finally: fh.close() extension = Extension( name = module_name, sources = [pyx_file], include_dirs = c_include_dirs, extra_compile_args = cflags) if build_extension is None: build_extension = _get_build_extension() build_extension.extensions = cythonize([extension], include_path=cython_include_dirs or ['.'], quiet=quiet) build_extension.build_temp = os.path.dirname(pyx_file) build_extension.build_lib = lib_dir build_extension.run() module = imp.load_dynamic(module_name, module_path) _cython_inline_cache[orig_code, arg_sigs] = module.__invoke arg_list = [kwds[arg] for arg in arg_names] return module.__invoke(*arg_list) # Cached suffix used by cython_inline above. 
None should get # overridden with actual value upon the first cython_inline invocation cython_inline.so_ext = None _find_non_space = re.compile('[^ ]').search def strip_common_indent(code): min_indent = None lines = code.splitlines() for line in lines: match = _find_non_space(line) if not match: continue # blank indent = match.start() if line[indent] == '#': continue # comment if min_indent is None or min_indent > indent: min_indent = indent for ix, line in enumerate(lines): match = _find_non_space(line) if not match or not line or line[indent:indent+1] == '#': continue lines[ix] = line[min_indent:] return '\n'.join(lines) module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))') def extract_func_code(code): module = [] function = [] current = function code = code.replace('\t', ' ') lines = code.split('\n') for line in lines: if not line.startswith(' '): if module_statement.match(line): current = module else: current = function current.append(line) return '\n'.join(module), ' ' + '\n '.join(function) try: from inspect import getcallargs except ImportError: def getcallargs(func, *arg_values, **kwd_values): all = {} args, varargs, kwds, defaults = inspect.getargspec(func) if varargs is not None: all[varargs] = arg_values[len(args):] for name, value in zip(args, arg_values): all[name] = value for name, value in list(kwd_values.items()): if name in args: if name in all: raise TypeError("Duplicate argument %s" % name) all[name] = kwd_values.pop(name) if kwds is not None: all[kwds] = kwd_values elif kwd_values: raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values)) if defaults is None: defaults = () first_default = len(args) - len(defaults) for ix, name in enumerate(args): if name not in all: if ix >= first_default: all[name] = defaults[ix - first_default] else: raise TypeError("Missing argument: %s" % name) return all def get_body(source): ix = source.index(':') if source[:5] == 'lambda': return "return %s" % source[ix+1:] else: return source[ix+1:] # Lots to be done here... It would be especially cool if compiled functions # could invoke each other quickly. class RuntimeCompiledFunction(object): def __init__(self, f): self._f = f self._body = get_body(inspect.getsource(f)) def __call__(self, *args, **kwds): all = getcallargs(self._f, *args, **kwds) if IS_PY3: return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all) else: return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all) Cython-0.26.1/Cython/Build/Distutils.py0000664000175000017500000000006113023021033020451 0ustar stefanstefan00000000000000from Cython.Distutils.build_ext import build_ext Cython-0.26.1/Cython/Build/__init__.py0000664000175000017500000000010513023021033020223 0ustar stefanstefan00000000000000from .Dependencies import cythonize from .Distutils import build_ext Cython-0.26.1/Cython/Build/Dependencies.py0000664000175000017500000012546113150045407021103 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import cython from .. 
import __version__ import collections import re, os, sys, time from glob import iglob try: import gzip gzip_open = gzip.open gzip_ext = '.gz' except ImportError: gzip_open = open gzip_ext = '' import shutil import subprocess import os try: import hashlib except ImportError: import md5 as hashlib try: from io import open as io_open except ImportError: from codecs import open as io_open try: from os.path import relpath as _relpath except ImportError: # Py<2.6 def _relpath(path, start=os.path.curdir): if not path: raise ValueError("no path specified") start_list = os.path.abspath(start).split(os.path.sep) path_list = os.path.abspath(path).split(os.path.sep) i = len(os.path.commonprefix([start_list, path_list])) rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:] if not rel_list: return os.path.curdir return os.path.join(*rel_list) try: import pythran PythranAvailable = True except: PythranAvailable = False from distutils.extension import Extension from distutils.util import strtobool from .. import Utils from ..Utils import (cached_function, cached_method, path_exists, safe_makedirs, copy_file_to_dir_if_newer, is_package_dir) from ..Compiler.Main import Context, CompilationOptions, default_options join_path = cached_function(os.path.join) copy_once_if_newer = cached_function(copy_file_to_dir_if_newer) safe_makedirs_once = cached_function(safe_makedirs) if sys.version_info[0] < 3: # stupid Py2 distutils enforces str type in list of sources _fs_encoding = sys.getfilesystemencoding() if _fs_encoding is None: _fs_encoding = sys.getdefaultencoding() def encode_filename_in_py2(filename): if not isinstance(filename, bytes): return filename.encode(_fs_encoding) return filename else: def encode_filename_in_py2(filename): return filename basestring = str def _make_relative(file_paths, base=None): if not base: base = os.getcwd() if base[-1] != os.path.sep: base += os.path.sep return [_relpath(path, base) if path.startswith(base) else path for path in file_paths] def extended_iglob(pattern): if '{' in pattern: m = re.match('(.*){([^}]+)}(.*)', pattern) if m: before, switch, after = m.groups() for case in switch.split(','): for path in extended_iglob(before + case + after): yield path return if '**/' in pattern: seen = set() first, rest = pattern.split('**/', 1) if first: first = iglob(first+'/') else: first = [''] for root in first: for path in extended_iglob(join_path(root, rest)): if path not in seen: seen.add(path) yield path for path in extended_iglob(join_path(root, '*', '**/' + rest)): if path not in seen: seen.add(path) yield path else: for path in iglob(pattern): yield path def nonempty(it, error_msg="expected non-empty iterator"): empty = True for value in it: empty = False yield value if empty: raise ValueError(error_msg) @cached_function def file_hash(filename): path = os.path.normpath(filename.encode("UTF-8")) prefix = (str(len(path)) + ":").encode("UTF-8") m = hashlib.md5(prefix) m.update(path) f = open(filename, 'rb') try: data = f.read(65000) while data: m.update(data) data = f.read(65000) finally: f.close() return m.hexdigest() def parse_list(s): """ >>> parse_list("") [] >>> parse_list("a") ['a'] >>> parse_list("a b c") ['a', 'b', 'c'] >>> parse_list("[a, b, c]") ['a', 'b', 'c'] >>> parse_list('a " " b') ['a', ' ', 'b'] >>> parse_list('[a, ",a", "a,", ",", ]') ['a', ',a', 'a,', ','] """ if len(s) >= 2 and s[0] == '[' and s[-1] == ']': s = s[1:-1] delimiter = ',' else: delimiter = ' ' s, literals = strip_string_literals(s) def unquote(literal): literal = literal.strip() if 
literal[0] in "'\"": return literals[literal[1:-1]] else: return literal return [unquote(item) for item in s.split(delimiter) if item.strip()] transitive_str = object() transitive_list = object() bool_or = object() distutils_settings = { 'name': str, 'sources': list, 'define_macros': list, 'undef_macros': list, 'libraries': transitive_list, 'library_dirs': transitive_list, 'runtime_library_dirs': transitive_list, 'include_dirs': transitive_list, 'extra_objects': list, 'extra_compile_args': transitive_list, 'extra_link_args': transitive_list, 'export_symbols': list, 'depends': transitive_list, 'language': transitive_str, 'np_pythran': bool_or } @cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t) def line_iter(source): if isinstance(source, basestring): start = 0 while True: end = source.find('\n', start) if end == -1: yield source[start:] return yield source[start:end] start = end+1 else: for line in source: yield line class DistutilsInfo(object): def __init__(self, source=None, exn=None): self.values = {} if source is not None: for line in line_iter(source): line = line.lstrip() if not line: continue if line[0] != '#': break line = line[1:].lstrip() kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None) if not kind is None: key, _, value = [s.strip() for s in line[len(kind):].partition('=')] type = distutils_settings.get(key, None) if line.startswith("cython:") and type is None: continue if type in (list, transitive_list): value = parse_list(value) if key == 'define_macros': value = [tuple(macro.split('=', 1)) if '=' in macro else (macro, None) for macro in value] if type is bool_or: value = strtobool(value) self.values[key] = value elif exn is not None: for key in distutils_settings: if key in ('name', 'sources','np_pythran'): continue value = getattr(exn, key, None) if value: self.values[key] = value def merge(self, other): if other is None: return self for key, value in other.values.items(): type = distutils_settings[key] if type is transitive_str and key not in self.values: self.values[key] = value elif type is transitive_list: if key in self.values: # Change a *copy* of the list (Trac #845) all = self.values[key][:] for v in value: if v not in all: all.append(v) value = all self.values[key] = value elif type is bool_or: self.values[key] = self.values.get(key, False) | value return self def subs(self, aliases): if aliases is None: return self resolved = DistutilsInfo() for key, value in self.values.items(): type = distutils_settings[key] if type in [list, transitive_list]: new_value_list = [] for v in value: if v in aliases: v = aliases[v] if isinstance(v, list): new_value_list += v else: new_value_list.append(v) value = new_value_list else: if value in aliases: value = aliases[value] resolved.values[key] = value return resolved def apply(self, extension): for key, value in self.values.items(): type = distutils_settings[key] if type in [list, transitive_list]: value = getattr(extension, key) + list(value) setattr(extension, key, value) @cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t, single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t, hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t, k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t) def strip_string_literals(code, prefix='__Pyx_L'): """ Normalizes every string literal to be of the form '__Pyx_Lxxx', returning the normalized code and a mapping of labels to string literals. 
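For example (default prefix '__Pyx_L'):

    >>> code, literals = strip_string_literals("print('hi')  # comment")
    >>> code
    "print('__Pyx_L1_')  #__Pyx_L2_"
    >>> literals['__Pyx_L1_']
    'hi'
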
""" new_code = [] literals = {} counter = 0 start = q = 0 in_quote = False hash_mark = single_q = double_q = -1 code_len = len(code) quote_type = quote_len = None while True: if hash_mark < q: hash_mark = code.find('#', q) if single_q < q: single_q = code.find("'", q) if double_q < q: double_q = code.find('"', q) q = min(single_q, double_q) if q == -1: q = max(single_q, double_q) # We're done. if q == -1 and hash_mark == -1: new_code.append(code[start:]) break # Try to close the quote. elif in_quote: if code[q-1] == u'\\': k = 2 while q >= k and code[q-k] == u'\\': k += 1 if k % 2 == 0: q += 1 continue if code[q] == quote_type and ( quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])): counter += 1 label = "%s%s_" % (prefix, counter) literals[label] = code[start+quote_len:q] full_quote = code[q:q+quote_len] new_code.append(full_quote) new_code.append(label) new_code.append(full_quote) q += quote_len in_quote = False start = q else: q += 1 # Process comment. elif -1 != hash_mark and (hash_mark < q or q == -1): new_code.append(code[start:hash_mark+1]) end = code.find('\n', hash_mark) counter += 1 label = "%s%s_" % (prefix, counter) if end == -1: end_or_none = None else: end_or_none = end literals[label] = code[hash_mark+1:end_or_none] new_code.append(label) if end == -1: break start = q = end # Open the quote. else: if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]): quote_len = 3 else: quote_len = 1 in_quote = True quote_type = code[q] new_code.append(code[start:q]) start = q q += quote_len return "".join(new_code), literals # We need to allow spaces to allow for conditional compilation like # IF ...: # cimport ... dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|" r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|" r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|" r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M) def normalize_existing(base_path, rel_paths): return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths))) @cached_function def normalize_existing0(base_dir, rel_paths): """ Given some base directory ``base_dir`` and a list of path names ``rel_paths``, normalize each relative path name ``rel`` by replacing it by ``os.path.join(base, rel)`` if that file exists. Return a couple ``(normalized, needed_base)`` where ``normalized`` if the list of normalized file names and ``needed_base`` is ``base_dir`` if we actually needed ``base_dir``. If no paths were changed (for example, if all paths were already absolute), then ``needed_base`` is ``None``. 
""" normalized = [] needed_base = None for rel in rel_paths: if os.path.isabs(rel): normalized.append(rel) continue path = join_path(base_dir, rel) if path_exists(path): normalized.append(os.path.normpath(path)) needed_base = base_dir else: normalized.append(rel) return (normalized, needed_base) def resolve_depends(depends, include_dirs): include_dirs = tuple(include_dirs) resolved = [] for depend in depends: path = resolve_depend(depend, include_dirs) if path is not None: resolved.append(path) return resolved @cached_function def resolve_depend(depend, include_dirs): if depend[0] == '<' and depend[-1] == '>': return None for dir in include_dirs: path = join_path(dir, depend) if path_exists(path): return os.path.normpath(path) return None @cached_function def package(filename): dir = os.path.dirname(os.path.abspath(str(filename))) if dir != filename and is_package_dir(dir): return package(dir) + (os.path.basename(dir),) else: return () @cached_function def fully_qualified_name(filename): module = os.path.splitext(os.path.basename(filename))[0] return '.'.join(package(filename) + (module,)) @cached_function def parse_dependencies(source_filename): # Actual parsing is way too slow, so we use regular expressions. # The only catch is that we must strip comments and string # literals ahead of time. fh = Utils.open_source_file(source_filename, error_handling='ignore') try: source = fh.read() finally: fh.close() distutils_info = DistutilsInfo(source) source, literals = strip_string_literals(source) source = source.replace('\\\n', ' ').replace('\t', ' ') # TODO: pure mode cimports = [] includes = [] externs = [] for m in dependency_regex.finditer(source): cimport_from, cimport_list, extern, include = m.groups() if cimport_from: cimports.append(cimport_from) elif cimport_list: cimports.extend(x.strip() for x in cimport_list.split(",")) elif extern: externs.append(literals[extern]) else: includes.append(literals[include]) return cimports, includes, externs, distutils_info class DependencyTree(object): def __init__(self, context, quiet=False): self.context = context self.quiet = quiet self._transitive_cache = {} def parse_dependencies(self, source_filename): if path_exists(source_filename): source_filename = os.path.normpath(source_filename) return parse_dependencies(source_filename) @cached_method def included_files(self, filename): # This is messy because included files are textually included, resolving # cimports (but not includes) relative to the including file. all = set() for include in self.parse_dependencies(filename)[1]: include_path = join_path(os.path.dirname(filename), include) if not path_exists(include_path): include_path = self.context.find_include_file(include, None) if include_path: if '.' + os.path.sep in include_path: include_path = os.path.normpath(include_path) all.add(include_path) all.update(self.included_files(include_path)) elif not self.quiet: print("Unable to locate '%s' referenced from '%s'" % (filename, include)) return all @cached_method def cimports_externs_incdirs(self, filename): # This is really ugly. Nested cimports are resolved with respect to the # includer, but includes are resolved with respect to the includee. 
cimports, includes, externs = self.parse_dependencies(filename)[:3] cimports = set(cimports) externs = set(externs) incdirs = set() for include in self.included_files(filename): included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include) cimports.update(included_cimports) externs.update(included_externs) incdirs.update(included_incdirs) externs, incdir = normalize_existing(filename, externs) if incdir: incdirs.add(incdir) return tuple(cimports), externs, incdirs def cimports(self, filename): return self.cimports_externs_incdirs(filename)[0] def package(self, filename): return package(filename) def fully_qualified_name(self, filename): return fully_qualified_name(filename) @cached_method def find_pxd(self, module, filename=None): is_relative = module[0] == '.' if is_relative and not filename: raise NotImplementedError("New relative imports.") if filename is not None: module_path = module.split('.') if is_relative: module_path.pop(0) # just explicitly relative package_path = list(self.package(filename)) while module_path and not module_path[0]: try: package_path.pop() except IndexError: return None # FIXME: error? module_path.pop(0) relative = '.'.join(package_path + module_path) pxd = self.context.find_pxd_file(relative, None) if pxd: return pxd if is_relative: return None # FIXME: error? return self.context.find_pxd_file(module, None) @cached_method def cimported_files(self, filename): if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'): pxd_list = [filename[:-4] + '.pxd'] else: pxd_list = [] for module in self.cimports(filename): if module[:7] == 'cython.' or module == 'cython': continue pxd_file = self.find_pxd(module, filename) if pxd_file is not None: pxd_list.append(pxd_file) elif not self.quiet: print("%s: cannot find cimported module '%s'" % (filename, module)) return tuple(pxd_list) @cached_method def immediate_dependencies(self, filename): all = set([filename]) all.update(self.cimported_files(filename)) all.update(self.included_files(filename)) return all def all_dependencies(self, filename): return self.transitive_merge(filename, self.immediate_dependencies, set.union) @cached_method def timestamp(self, filename): return os.path.getmtime(filename) def extract_timestamp(self, filename): return self.timestamp(filename), filename def newest_dependency(self, filename): return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)]) def transitive_fingerprint(self, filename, extra=None): try: m = hashlib.md5(__version__.encode('UTF-8')) m.update(file_hash(filename).encode('UTF-8')) for x in sorted(self.all_dependencies(filename)): if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'): m.update(file_hash(x).encode('UTF-8')) if extra is not None: m.update(str(extra).encode('UTF-8')) return m.hexdigest() except IOError: return None def distutils_info0(self, filename): info = self.parse_dependencies(filename)[3] kwds = info.values cimports, externs, incdirs = self.cimports_externs_incdirs(filename) basedir = os.getcwd() # Add dependencies on "cdef extern from ..." files if externs: externs = _make_relative(externs, basedir) if 'depends' in kwds: kwds['depends'] = list(set(kwds['depends']).union(externs)) else: kwds['depends'] = list(externs) # Add include_dirs to ensure that the C compiler will find the # "cdef extern from ..." 
files if incdirs: include_dirs = list(kwds.get('include_dirs', [])) for inc in _make_relative(incdirs, basedir): if inc not in include_dirs: include_dirs.append(inc) kwds['include_dirs'] = include_dirs return info def distutils_info(self, filename, aliases=None, base=None): return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge) .subs(aliases) .merge(base)) def transitive_merge(self, node, extract, merge): try: seen = self._transitive_cache[extract, merge] except KeyError: seen = self._transitive_cache[extract, merge] = {} return self.transitive_merge_helper( node, extract, merge, seen, {}, self.cimported_files)[0] def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing): if node in seen: return seen[node], None deps = extract(node) if node in stack: return deps, node try: stack[node] = len(stack) loop = None for next in outgoing(node): sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing) if sub_loop is not None: if loop is not None and stack[loop] < stack[sub_loop]: pass else: loop = sub_loop deps = merge(deps, sub_deps) if loop == node: loop = None if loop is None: seen[node] = deps return deps, loop finally: del stack[node] _dep_tree = None def create_dependency_tree(ctx=None, quiet=False): global _dep_tree if _dep_tree is None: if ctx is None: ctx = Context(["."], CompilationOptions(default_options)) _dep_tree = DependencyTree(ctx, quiet=quiet) return _dep_tree # If this changes, change also docs/src/reference/compilation.rst # which mentions this function def default_create_extension(template, kwds): if 'depends' in kwds: include_dirs = kwds.get('include_dirs', []) + ["."] depends = resolve_depends(kwds['depends'], include_dirs) kwds['depends'] = sorted(set(depends + template.depends)) t = template.__class__ ext = t(**kwds) metadata = dict(distutils=kwds, module_name=kwds['name']) return (ext, metadata) # This may be useful for advanced users? def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None, exclude_failures=False): if language is not None: print('Please put "# distutils: language=%s" in your .pyx or .pxd file(s)' % language) if exclude is None: exclude = [] if patterns is None: return [], {} elif isinstance(patterns, basestring) or not isinstance(patterns, collections.Iterable): patterns = [patterns] explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)]) seen = set() deps = create_dependency_tree(ctx, quiet=quiet) to_exclude = set() if not isinstance(exclude, list): exclude = [exclude] for pattern in exclude: to_exclude.update(map(os.path.abspath, extended_iglob(pattern))) module_list = [] module_metadata = {} # workaround for setuptools if 'setuptools' in sys.modules: Extension_distutils = sys.modules['setuptools.extension']._Extension Extension_setuptools = sys.modules['setuptools'].Extension else: # dummy class, in case we do not have setuptools Extension_distutils = Extension class Extension_setuptools(Extension): pass # if no create_extension() function is defined, use a simple # default function. 
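# A user-supplied hook has the same signature as default_create_extension()
# above; a minimal sketch (illustrative, never called here):
#
#     def my_create_extension(template, kwds):
#         kwds['include_dirs'] = kwds.get('include_dirs', []) + ['include']
#         return default_create_extension(template, kwds)
#
# When ctx.options.create_extension is set to such a hook, it is used instead
# of the default, as the assignment just below shows.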
create_extension = ctx.options.create_extension or default_create_extension for pattern in patterns: if isinstance(pattern, str): filepattern = pattern template = Extension(pattern, []) # Fake Extension without sources name = '*' base = None ext_language = language elif isinstance(pattern, (Extension_distutils, Extension_setuptools)): cython_sources = [s for s in pattern.sources if os.path.splitext(s)[1] in ('.py', '.pyx')] if cython_sources: filepattern = cython_sources[0] if len(cython_sources) > 1: print("Warning: Multiple cython sources found for extension '%s': %s\n" "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html " "for sharing declarations among Cython files." % (pattern.name, cython_sources)) else: # ignore non-cython modules module_list.append(pattern) continue template = pattern name = template.name base = DistutilsInfo(exn=template) ext_language = None # do not override whatever the Extension says else: msg = str("pattern is not of type str nor subclass of Extension (%s)" " but of type %s and class %s" % (repr(Extension), type(pattern), pattern.__class__)) raise TypeError(msg) for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern): if os.path.abspath(file) in to_exclude: continue pkg = deps.package(file) module_name = deps.fully_qualified_name(file) if '*' in name: if module_name in explicit_modules: continue elif name != module_name: print("Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'" % ( name, module_name, file)) module_name = name if module_name not in seen: try: kwds = deps.distutils_info(file, aliases, base).values except Exception: if exclude_failures: continue raise if base is not None: for key, value in base.values.items(): if key not in kwds: kwds[key] = value kwds['name'] = module_name sources = [file] + [m for m in template.sources if m != filepattern] if 'sources' in kwds: # allow users to add .c files etc. for source in kwds['sources']: source = encode_filename_in_py2(source) if source not in sources: sources.append(source) kwds['sources'] = sources if ext_language and 'language' not in kwds: kwds['language'] = ext_language np_pythran = kwds.pop('np_pythran', False) # Create the new extension m, metadata = create_extension(template, kwds) if np_pythran: if not PythranAvailable: raise RuntimeError("You first need to install Pythran to use the np_pythran directive.") pythran_ext = pythran.config.make_extension() m.include_dirs.extend(pythran_ext['include_dirs']) m.extra_compile_args.extend(pythran_ext['extra_compile_args']) m.extra_link_args.extend(pythran_ext['extra_link_args']) m.define_macros.extend(pythran_ext['define_macros']) m.undef_macros.extend(pythran_ext['undef_macros']) m.library_dirs.extend(pythran_ext['library_dirs']) m.libraries.extend(pythran_ext['libraries']) # These options are not compatible with the way normal Cython extensions work try: m.extra_compile_args.remove("-fwhole-program") except ValueError: pass try: m.extra_compile_args.remove("-fvisibility=hidden") except ValueError: pass m.language = 'c++' m.np_pythran = np_pythran module_list.append(m) # Store metadata (this will be written as JSON in the # generated C file but otherwise has no purpose) module_metadata[module_name] = metadata if file not in m.sources: # Old setuptools unconditionally replaces .pyx with .c m.sources.remove(file.rsplit('.')[0] + '.c') m.sources.insert(0, file) seen.add(name) return module_list, module_metadata # This is the user-exposed entry point. 
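# A typical setup.py invocation of the entry point defined below looks like
# this (illustrative sketch; 'mypkg' is a placeholder pattern and the helper
# is not called anywhere):
def _example_setup_py():
    from distutils.core import setup
    setup(
        name='mypkg',
        ext_modules=cythonize(
            "mypkg/*.pyx",
            nthreads=2,
            compiler_directives={'boundscheck': False},
        ),
    )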
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None, exclude_failures=False, **options): """ Compile a set of source modules into C/C++ files and return a list of distutils Extension objects for them. As module list, pass either a glob pattern, a list of glob patterns or a list of Extension objects. The latter allows you to configure the extensions separately through the normal distutils options. When using glob patterns, you can exclude certain module names explicitly by passing them into the 'exclude' option. To globally enable C++ mode, you can pass language='c++'. Otherwise, this will be determined at a per-file level based on compiler directives. This affects only modules found based on file names. Extension instances passed into cythonize() will not be changed. For parallel compilation, set the 'nthreads' option to the number of concurrent builds. For a broad 'try to compile' mode that ignores compilation failures and simply excludes the failed extensions, pass 'exclude_failures=True'. Note that this only really makes sense for compiling .py files which can also be used without compilation. Additional compilation options can be passed as keyword arguments. """ if exclude is None: exclude = [] if 'include_path' not in options: options['include_path'] = ['.'] if 'common_utility_include_dir' in options: if options.get('cache'): raise NotImplementedError("common_utility_include_dir does not yet work with caching") safe_makedirs(options['common_utility_include_dir']) if PythranAvailable: pythran_options = CompilationOptions(**options); pythran_options.cplus = True pythran_options.np_pythran = True pythran_include_dir = os.path.dirname(pythran.__file__) c_options = CompilationOptions(**options) cpp_options = CompilationOptions(**options); cpp_options.cplus = True ctx = c_options.create_context() options = c_options module_list, module_metadata = create_extension_list( module_list, exclude=exclude, ctx=ctx, quiet=quiet, exclude_failures=exclude_failures, language=language, aliases=aliases) deps = create_dependency_tree(ctx, quiet=quiet) build_dir = getattr(options, 'build_dir', None) modules_by_cfile = {} to_compile = [] for m in module_list: if build_dir: root = os.getcwd() # distutil extension depends are relative to cwd def copy_to_build_dir(filepath, root=root): filepath_abs = os.path.abspath(filepath) if os.path.isabs(filepath): filepath = filepath_abs if filepath_abs.startswith(root): mod_dir = join_path(build_dir, os.path.dirname(_relpath(filepath, root))) copy_once_if_newer(filepath_abs, mod_dir) for dep in m.depends: copy_to_build_dir(dep) new_sources = [] for source in m.sources: base, ext = os.path.splitext(source) if ext in ('.pyx', '.py'): if m.np_pythran: c_file = base + '.cpp' options = pythran_options elif m.language == 'c++': c_file = base + '.cpp' options = cpp_options else: c_file = base + '.c' options = c_options # setup for out of place build directory if enabled if build_dir: c_file = os.path.join(build_dir, c_file) dir = os.path.dirname(c_file) safe_makedirs_once(dir) if os.path.exists(c_file): c_timestamp = os.path.getmtime(c_file) else: c_timestamp = -1 # Priority goes first to modified files, second to direct # dependents, and finally to indirect dependents. 
if c_timestamp < deps.timestamp(source): dep_timestamp, dep = deps.timestamp(source), source priority = 0 else: dep_timestamp, dep = deps.newest_dependency(source) priority = 2 - (dep in deps.immediate_dependencies(source)) if force or c_timestamp < dep_timestamp: if not quiet and not force: if source == dep: print("Compiling %s because it changed." % source) else: print("Compiling %s because it depends on %s." % (source, dep)) if not force and options.cache: extra = m.language fingerprint = deps.transitive_fingerprint(source, extra) else: fingerprint = None to_compile.append((priority, source, c_file, fingerprint, quiet, options, not exclude_failures, module_metadata.get(m.name))) new_sources.append(c_file) if c_file not in modules_by_cfile: modules_by_cfile[c_file] = [m] else: modules_by_cfile[c_file].append(m) else: new_sources.append(source) if build_dir: copy_to_build_dir(source) m.sources = new_sources if options.cache: if not os.path.exists(options.cache): os.makedirs(options.cache) to_compile.sort() # Drop "priority" component of "to_compile" entries and add a # simple progress indicator. N = len(to_compile) progress_fmt = "[{0:%d}/{1}] " % len(str(N)) for i in range(N): progress = progress_fmt.format(i+1, N) to_compile[i] = to_compile[i][1:] + (progress,) if N <= 1: nthreads = 0 if nthreads: # Requires multiprocessing (or Python >= 2.6) try: import multiprocessing pool = multiprocessing.Pool( nthreads, initializer=_init_multiprocessing_helper) except (ImportError, OSError): print("multiprocessing required for parallel cythonization") nthreads = 0 else: # This is a bit more involved than it should be, because KeyboardInterrupts # break the multiprocessing workers when using a normal pool.map(). # See, for example: # http://noswap.com/blog/python-multiprocessing-keyboardinterrupt try: result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1) pool.close() while not result.ready(): try: result.get(99999) # seconds except multiprocessing.TimeoutError: pass except KeyboardInterrupt: pool.terminate() raise pool.join() if not nthreads: for args in to_compile: cythonize_one(*args) if exclude_failures: failed_modules = set() for c_file, modules in modules_by_cfile.items(): if not os.path.exists(c_file): failed_modules.update(modules) elif os.path.getsize(c_file) < 200: f = io_open(c_file, 'r', encoding='iso8859-1') try: if f.read(len('#error ')) == '#error ': # dead compilation result failed_modules.update(modules) finally: f.close() if failed_modules: for module in failed_modules: module_list.remove(module) print("Failed compilations: %s" % ', '.join(sorted([ module.name for module in failed_modules]))) if options.cache: cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100)) # cythonize() is often followed by the (non-Python-buffered) # compiler output, flush now to avoid interleaving output. sys.stdout.flush() return module_list if os.environ.get('XML_RESULTS'): compile_result_dir = os.environ['XML_RESULTS'] def record_results(func): def with_record(*args): t = time.time() success = True try: try: func(*args) except: success = False finally: t = time.time() - t module = fully_qualified_name(args[0]) name = "cythonize." + module failures = 1 - success if success: failure_item = "" else: failure_item = "failure" output = open(os.path.join(compile_result_dir, name + ".xml"), "w") output.write(""" %(failure_item)s """.strip() % locals()) output.close() return with_record else: def record_results(func): return func # TODO: Share context? 
Issue: pyx processing leaks into pxd module @record_results def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_failure=True, embedded_metadata=None, progress=""): from ..Compiler.Main import compile, default_options from ..Compiler.Errors import CompileError, PyrexError if fingerprint: if not os.path.exists(options.cache): try: os.mkdir(options.cache) except: if not os.path.exists(options.cache): raise # Cython-generated c files are highly compressible. # (E.g. a compression ratio of about 10 for Sage). fingerprint_file = join_path( options.cache, "%s-%s%s" % (os.path.basename(c_file), fingerprint, gzip_ext)) if os.path.exists(fingerprint_file): if not quiet: print("%sFound compiled %s in cache" % (progress, pyx_file)) os.utime(fingerprint_file, None) g = gzip_open(fingerprint_file, 'rb') try: f = open(c_file, 'wb') try: shutil.copyfileobj(g, f) finally: f.close() finally: g.close() return if not quiet: print("%sCythonizing %s" % (progress, pyx_file)) if options is None: options = CompilationOptions(default_options) options.output_file = c_file options.embedded_metadata = embedded_metadata any_failures = 0 try: result = compile([pyx_file], options) if result.num_errors > 0: any_failures = 1 except (EnvironmentError, PyrexError) as e: sys.stderr.write('%s\n' % e) any_failures = 1 # XXX import traceback traceback.print_exc() except Exception: if raise_on_failure: raise import traceback traceback.print_exc() any_failures = 1 if any_failures: if raise_on_failure: raise CompileError(None, pyx_file) elif os.path.exists(c_file): os.remove(c_file) elif fingerprint: f = open(c_file, 'rb') try: g = gzip_open(fingerprint_file, 'wb') try: shutil.copyfileobj(f, g) finally: g.close() finally: f.close() def cythonize_one_helper(m): import traceback try: return cythonize_one(*m) except Exception: traceback.print_exc() raise def _init_multiprocessing_helper(): # KeyboardInterrupt kills workers, so don't let them get it import signal signal.signal(signal.SIGINT, signal.SIG_IGN) def cleanup_cache(cache, target_size, ratio=.85): try: p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE) res = p.wait() if res == 0: total_size = 1024 * int(p.stdout.read().strip().split()[0]) if total_size < target_size: return except (OSError, ValueError): pass total_size = 0 all = [] for file in os.listdir(cache): path = join_path(cache, file) s = os.stat(path) total_size += s.st_size all.append((s.st_atime, s.st_size, path)) if total_size > target_size: for time, size, file in reversed(sorted(all)): os.unlink(file) total_size -= size if total_size < target_size * ratio: break Cython-0.26.1/Cython/Coverage.py0000664000175000017500000002713113023021033017170 0ustar stefanstefan00000000000000""" A Cython plugin for coverage.py Requires the coverage package at least in version 4.0 (which added the plugin API). """ from __future__ import absolute_import import re import os.path import sys from collections import defaultdict from coverage.plugin import CoveragePlugin, FileTracer, FileReporter # requires coverage.py 4.0+ from .Utils import find_root_package_dir, is_package_dir, open_source_file from . 
import __version__ def _find_c_source(base_path): if os.path.exists(base_path + '.c'): c_file = base_path + '.c' elif os.path.exists(base_path + '.cpp'): c_file = base_path + '.cpp' else: c_file = None return c_file def _find_dep_file_path(main_file, file_path): abs_path = os.path.abspath(file_path) if file_path.endswith('.pxi') and not os.path.exists(abs_path): # include files are looked up relative to the main source file pxi_file_path = os.path.join(os.path.dirname(main_file), file_path) if os.path.exists(pxi_file_path): abs_path = os.path.abspath(pxi_file_path) # search sys.path for external locations if a valid file hasn't been found if not os.path.exists(abs_path): for sys_path in sys.path: test_path = os.path.realpath(os.path.join(sys_path, file_path)) if os.path.exists(test_path): return test_path return abs_path class Plugin(CoveragePlugin): # map from traced file paths to absolute file paths _file_path_map = None # map from traced file paths to corresponding C files _c_files_map = None # map from parsed C files to their content _parsed_c_files = None def sys_info(self): return [('Cython version', __version__)] def file_tracer(self, filename): """ Try to find a C source file for a file path found by the tracer. """ if filename.startswith('<') or filename.startswith('memory:'): return None c_file = py_file = None filename = os.path.abspath(filename) if self._c_files_map and filename in self._c_files_map: c_file = self._c_files_map[filename][0] if c_file is None: c_file, py_file = self._find_source_files(filename) if not c_file: return None # parse all source file paths and lines from C file # to learn about all relevant source files right away (pyx/pxi/pxd) # FIXME: this might already be too late if the first executed line # is not from the main .pyx file but a file with a different # name than the .c file (which prevents us from finding the # .c file) self._parse_lines(c_file, filename) if self._file_path_map is None: self._file_path_map = {} return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map) def file_reporter(self, filename): # TODO: let coverage.py handle .py files itself #ext = os.path.splitext(filename)[1].lower() #if ext == '.py': # from coverage.python import PythonFileReporter # return PythonFileReporter(filename) filename = os.path.abspath(filename) if self._c_files_map and filename in self._c_files_map: c_file, rel_file_path, code = self._c_files_map[filename] else: c_file, _ = self._find_source_files(filename) if not c_file: return None # unknown file rel_file_path, code = self._parse_lines(c_file, filename) return CythonModuleReporter(c_file, filename, rel_file_path, code) def _find_source_files(self, filename): basename, ext = os.path.splitext(filename) ext = ext.lower() if ext in ('.py', '.pyx', '.pxd', '.c', '.cpp'): pass elif ext in ('.so', '.pyd'): platform_suffix = re.search(r'[.]cpython-[0-9]+[a-z]*$', basename, re.I) if platform_suffix: basename = basename[:platform_suffix.start()] elif ext == '.pxi': # if we get here, it means that the first traced line of a Cython module was # not in the main module but in an include file, so try a little harder to # find the main source file self._find_c_source_files(os.path.dirname(filename), filename) if filename in self._c_files_map: return self._c_files_map[filename][0], None else: # none of our business return None, None c_file = filename if ext in ('.c', '.cpp') else _find_c_source(basename) if c_file is None: # a module "pkg/mod.so" can have a source file "pkg/pkg.mod.c" 
package_root = find_root_package_dir.uncached(filename) package_path = os.path.relpath(basename, package_root).split(os.path.sep) if len(package_path) > 1: test_basepath = os.path.join(os.path.dirname(filename), '.'.join(package_path)) c_file = _find_c_source(test_basepath) py_source_file = None if c_file: py_source_file = os.path.splitext(c_file)[0] + '.py' if not os.path.exists(py_source_file): py_source_file = None try: with open(c_file, 'rb') as f: if b'/* Generated by Cython ' not in f.read(30): return None, None # not a Cython file except (IOError, OSError): c_file = None return c_file, py_source_file def _find_c_source_files(self, dir_path, source_file): """ Desperately parse all C files in the directory or its package parents (not re-descending) to find the (included) source file in one of them. """ if not os.path.isdir(dir_path): return splitext = os.path.splitext for filename in os.listdir(dir_path): ext = splitext(filename)[1].lower() if ext in ('.c', '.cpp'): self._parse_lines(os.path.join(dir_path, filename), source_file) if source_file in self._c_files_map: return # not found? then try one package up if is_package_dir(dir_path): self._find_c_source_files(os.path.dirname(dir_path), source_file) def _parse_lines(self, c_file, sourcefile): """ Parse a Cython generated C/C++ source file and find the executable lines. Each executable line starts with a comment header that states source file and line number, as well as the surrounding range of source code lines. """ if self._parsed_c_files is None: self._parsed_c_files = {} if c_file in self._parsed_c_files: code_lines = self._parsed_c_files[c_file] else: match_source_path_line = re.compile(r' */[*] +"(.*)":([0-9]+)$').match match_current_code_line = re.compile(r' *[*] (.*) # <<<<<<+$').match match_comment_end = re.compile(r' *[*]/$').match not_executable = re.compile( r'\s*c(?:type)?def\s+' r'(?:(?:public|external)\s+)?' r'(?:struct|union|enum|class)' r'(\s+[^:]+|)\s*:' ).match code_lines = defaultdict(dict) filenames = set() with open(c_file) as lines: lines = iter(lines) for line in lines: match = match_source_path_line(line) if not match: continue filename, lineno = match.groups() filenames.add(filename) lineno = int(lineno) for comment_line in lines: match = match_current_code_line(comment_line) if match: code_line = match.group(1).rstrip() if not_executable(code_line): break code_lines[filename][lineno] = code_line break elif match_comment_end(comment_line): # unexpected comment format - false positive? break self._parsed_c_files[c_file] = code_lines if self._c_files_map is None: self._c_files_map = {} for filename, code in code_lines.items(): abs_path = _find_dep_file_path(c_file, filename) self._c_files_map[abs_path] = (c_file, filename, code) if sourcefile not in self._c_files_map: return (None,) * 2 # e.g. shared library file return self._c_files_map[sourcefile][1:] class CythonModuleTracer(FileTracer): """ Find the Python/Cython source file for a Cython module. """ def __init__(self, module_file, py_file, c_file, c_files_map, file_path_map): super(CythonModuleTracer, self).__init__() self.module_file = module_file self.py_file = py_file self.c_file = c_file self._c_files_map = c_files_map self._file_path_map = file_path_map def has_dynamic_source_filename(self): return True def dynamic_source_filename(self, filename, frame): """ Determine source file path. Called by the function call tracer. 
""" source_file = frame.f_code.co_filename try: return self._file_path_map[source_file] except KeyError: pass abs_path = _find_dep_file_path(filename, source_file) if self.py_file and source_file[-3:].lower() == '.py': # always let coverage.py handle this case itself self._file_path_map[source_file] = self.py_file return self.py_file assert self._c_files_map is not None if abs_path not in self._c_files_map: self._c_files_map[abs_path] = (self.c_file, source_file, None) self._file_path_map[source_file] = abs_path return abs_path class CythonModuleReporter(FileReporter): """ Provide detailed trace information for one source file to coverage.py. """ def __init__(self, c_file, source_file, rel_file_path, code): super(CythonModuleReporter, self).__init__(source_file) self.name = rel_file_path self.c_file = c_file self._code = code def lines(self): """ Return set of line numbers that are possibly executable. """ return set(self._code) def _iter_source_tokens(self): current_line = 1 for line_no, code_line in sorted(self._code.items()): while line_no > current_line: yield [] current_line += 1 yield [('txt', code_line)] current_line += 1 def source(self): """ Return the source code of the file as a string. """ if os.path.exists(self.filename): with open_source_file(self.filename) as f: return f.read() else: return '\n'.join( (tokens[0][1] if tokens else '') for tokens in self._iter_source_tokens()) def source_token_lines(self): """ Iterate over the source code tokens. """ if os.path.exists(self.filename): with open_source_file(self.filename) as f: for line in f: yield [('txt', line.rstrip('\n'))] else: for line in self._iter_source_tokens(): yield [('txt', line)] def coverage_init(reg, options): reg.add_file_tracer(Plugin()) Cython-0.26.1/Cython/Includes/0000775000175000017500000000000013151203436016640 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/posix/0000775000175000017500000000000013151203436020002 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/posix/types.pxd0000664000175000017500000000221213143605603021663 0ustar stefanstefan00000000000000# Note that the actual size of these types is system-dependent, and # can't be detected at C compile time. However, the generated C code # will correctly use the actual size of these types *except* for # determining promotion in binary arithmetic expressions involving # mixed types. In this case, operands are promoted to the declared # larger type, with a bias towards typedef types. Thus, with the # declarations below, long + time_t will result in a time_t whereas # long long + time_t will result in a long long which should be # acceptable for either 32-bit or 64-bit signed time_t (though admittedly # the POSIX standard doesn't even specify that time_t must be an integral # type). 
cdef extern from "": ctypedef long blkcnt_t ctypedef long blksize_t ctypedef long clockid_t ctypedef long dev_t ctypedef long gid_t ctypedef long id_t ctypedef unsigned long ino_t ctypedef long mode_t ctypedef long nlink_t ctypedef long off_t ctypedef long pid_t ctypedef struct sigset_t: pass ctypedef long suseconds_t ctypedef long time_t ctypedef long timer_t ctypedef long uid_t Cython-0.26.1/Cython/Includes/posix/signal.pxd0000664000175000017500000000327713023021033021772 0ustar stefanstefan00000000000000# 7.14 Signal handling from posix.types cimport pid_t, sigset_t, uid_t cdef extern from "" nogil: cdef union sigval: int sival_int void *sival_ptr cdef struct sigevent: int sigev_notify int sigev_signo sigval sigev_value void sigev_notify_function(sigval) ctypedef struct siginfo_t: int si_signo int si_code int si_errno pid_t si_pid uid_t si_uid void *si_addr int si_status long si_band sigval si_value cdef struct sigaction_t "sigaction": void sa_handler(int) void sa_sigaction(int, siginfo_t *, void *) sigset_t sa_mask int sa_flags enum: SA_NOCLDSTOP enum: SIG_BLOCK enum: SIG_UNBLOCK enum: SIG_SETMASK enum: SA_ONSTACK enum: SA_RESETHAND enum: SA_RESTART enum: SA_SIGINFO enum: SA_NOCLDWAIT enum: SA_NODEFER enum: SS_ONSTACK enum: SS_DISABLE enum: MINSIGSTKSZ enum: SIGSTKSZ enum: SIGEV_NONE enum: SIGEV_SIGNAL enum: SIGEV_THREAD enum: SIGEV_THREAD_ID int kill (pid_t, int) int killpg (pid_t, int) int sigaction (int, const sigaction_t *, sigaction_t *) int sigpending (sigset_t *) int sigprocmask (int, const sigset_t *, sigset_t *) int sigsuspend (const sigset_t *) int sigaddset (sigset_t *, int) int sigdelset (sigset_t *, int) int sigemptyset (sigset_t *) int sigfillset (sigset_t *) int sigismember (const sigset_t *) Cython-0.26.1/Cython/Includes/posix/strings.pxd0000664000175000017500000000056613023021033022204 0ustar stefanstefan00000000000000cdef extern from "" nogil: int bcmp(const void *, const void *, size_t) void bcopy(const void *, void *, size_t) void bzero(void *, size_t) int ffs(int) char *index(const char *, int) char *rindex(const char *, int) int strcasecmp(const char *, const char *) int strncasecmp(const char *, const char *, size_t) Cython-0.26.1/Cython/Includes/posix/fcntl.pxd0000664000175000017500000000222713023021033021615 0ustar stefanstefan00000000000000# http://www.opengroup.org/onlinepubs/009695399/basedefs/fcntl.h.html cdef extern from "" nogil: enum: F_DUPFD enum: F_GETFD enum: F_SETFD enum: F_GETFL enum: F_SETFL enum: F_GETLK enum: F_SETLK enum: F_SETLKW enum: F_GETOWN enum: F_SETOWN enum: FD_CLOEXEC enum: F_RDLCK enum: F_UNLCK enum: F_WRLCK enum: SEEK_SET enum: SEEK_CUR enum: SEEK_END enum: O_CREAT enum: O_EXCL enum: O_NOCTTY enum: O_TRUNC enum: O_APPEND enum: O_DSYNC enum: O_NONBLOCK enum: O_RSYNC enum: O_SYNC enum: O_ACCMODE # O_RDONLY|O_WRONLY|O_RDWR enum: O_RDONLY enum: O_WRONLY enum: O_RDWR enum: S_IFMT enum: S_IFBLK enum: S_IFCHR enum: S_IFIFO enum: S_IFREG enum: S_IFDIR enum: S_IFLNK enum: S_IFSOCK ctypedef int mode_t ctypedef signed pid_t ctypedef signed off_t struct flock: short l_type short l_whence off_t l_start off_t l_len pid_t l_pid int creat(char *, mode_t) int fcntl(int, int, ...) int open(char *, int, ...) 
#int open (char *, int, mode_t) Cython-0.26.1/Cython/Includes/posix/stat.pxd0000664000175000017500000000273513023021033021466 0ustar stefanstefan00000000000000from posix.types cimport (blkcnt_t, blksize_t, dev_t, gid_t, ino_t, mode_t, nlink_t, off_t, time_t, uid_t) cdef extern from "" nogil: cdef struct struct_stat "stat": dev_t st_dev ino_t st_ino mode_t st_mode nlink_t st_nlink uid_t st_uid gid_t st_gid dev_t st_rdev off_t st_size blksize_t st_blksize blkcnt_t st_blocks time_t st_atime time_t st_mtime time_t st_ctime # POSIX prescribes including both and for these cdef extern from "" nogil: int fchmod(int, mode_t) int chmod(const char *, mode_t) int fstat(int, struct_stat *) int lstat(const char *, struct_stat *) int stat(const char *, struct_stat *) # Macros for st_mode mode_t S_ISREG(mode_t) mode_t S_ISDIR(mode_t) mode_t S_ISCHR(mode_t) mode_t S_ISBLK(mode_t) mode_t S_ISFIFO(mode_t) mode_t S_ISLNK(mode_t) mode_t S_ISSOCK(mode_t) mode_t S_IFMT mode_t S_IFREG mode_t S_IFDIR mode_t S_IFCHR mode_t S_IFBLK mode_t S_IFIFO mode_t S_IFLNK mode_t S_IFSOCK # Permissions mode_t S_ISUID mode_t S_ISGID mode_t S_ISVTX mode_t S_IRWXU mode_t S_IRUSR mode_t S_IWUSR mode_t S_IXUSR mode_t S_IRWXG mode_t S_IRGRP mode_t S_IWGRP mode_t S_IXGRP mode_t S_IRWXO mode_t S_IROTH mode_t S_IWOTH mode_t S_IXOTH Cython-0.26.1/Cython/Includes/posix/mman.pxd0000664000175000017500000000531413023021033021437 0ustar stefanstefan00000000000000# http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/mman.h.html from posix.types cimport off_t, mode_t cdef extern from "" nogil: enum: PROT_EXEC # protection bits for mmap/mprotect enum: PROT_READ enum: PROT_WRITE enum: PROT_NONE enum: MAP_PRIVATE # flag bits for mmap enum: MAP_SHARED enum: MAP_FIXED enum: MAP_ANON # These three are not in POSIX, but are enum: MAP_ANONYMOUS # fairly common in spelling/semantics enum: MAP_STACK enum: MAP_LOCKED # Typically available only on Linux enum: MAP_HUGETLB enum: MAP_POPULATE enum: MAP_NORESERVE enum: MAP_GROWSDOWN enum: MAP_NOCORE # Typically available only on BSD enum: MAP_NOSYNC void *mmap(void *addr, size_t Len, int prot, int flags, int fd, off_t off) int munmap(void *addr, size_t Len) int mprotect(void *addr, size_t Len, int prot) enum: MS_ASYNC enum: MS_SYNC enum: MS_INVALIDATE int msync(void *addr, size_t Len, int flags) enum: POSIX_MADV_NORMAL # POSIX advice flags enum: POSIX_MADV_SEQUENTIAL enum: POSIX_MADV_RANDOM enum: POSIX_MADV_WILLNEED enum: POSIX_MADV_DONTNEED int posix_madvise(void *addr, size_t Len, int advice) enum: MCL_CURRENT enum: MCL_FUTURE int mlock(const void *addr, size_t Len) int munlock(const void *addr, size_t Len) int mlockall(int flags) int munlockall() int shm_open(const char *name, int oflag, mode_t mode) int shm_unlink(const char *name) # often available enum: MADV_REMOVE # pre-POSIX advice flags; often available enum: MADV_DONTFORK enum: MADV_DOFORK enum: MADV_HWPOISON enum: MADV_MERGEABLE, enum: MADV_UNMERGEABLE int madvise(void *addr, size_t Len, int advice) # sometimes available int mincore(void *addr, size_t Len, unsigned char *vec) # These two are Linux specific but sometimes very efficient void *mremap(void *old_addr, size_t old_len, size_t new_len, int flags, ...) 
int remap_file_pages(void *addr, size_t Len, int prot, size_t pgoff, int flags) # The rare but standardized typed memory option enum: POSIX_TYPED_MEM_ALLOCATE enum: POSIX_TYPED_MEM_ALLOCATE_CONTIG enum: POSIX_TYPED_MEM_MAP_ALLOCATABLE int posix_typed_mem_open(const char *name, int oflag, int tflag) int posix_mem_offset(const void *addr, size_t Len, off_t *off, size_t *contig_len, int *fildes) cdef struct posix_typed_mem_info: size_t posix_tmi_length int posix_typed_mem_get_info(int fildes, posix_typed_mem_info *info) Cython-0.26.1/Cython/Includes/posix/__init__.pxd0000664000175000017500000000001512542002467022256 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Includes/posix/dlfcn.pxd0000664000175000017500000000054313023021033021574 0ustar stefanstefan00000000000000# POSIX dynamic linking/loading interface. # http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/dlfcn.h.html cdef extern from "" nogil: void *dlopen(const char *, int) char *dlerror() void *dlsym(void *, const char *) int dlclose(void *) enum: RTLD_LAZY RTLD_NOW RTLD_GLOBAL RTLD_LOCAL Cython-0.26.1/Cython/Includes/posix/time.pxd0000664000175000017500000000400213023021033021436 0ustar stefanstefan00000000000000# http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/time.h.html from posix.types cimport suseconds_t, time_t, clockid_t, timer_t from posix.signal cimport sigevent cdef extern from "" nogil: enum: CLOCK_PROCESS_CPUTIME_ID enum: CLOCK_THREAD_CPUTIME_ID enum: CLOCK_REALTIME enum: TIMER_ABSTIME enum: CLOCK_MONOTONIC # FreeBSD-specific clocks enum: CLOCK_UPTIME enum: CLOCK_UPTIME_PRECISE enum: CLOCK_UPTIME_FAST enum: CLOCK_REALTIME_PRECISE enum: CLOCK_REALTIME_FAST enum: CLOCK_MONOTONIC_PRECISE enum: CLOCK_MONOTONIC_FAST enum: CLOCK_SECOND # Linux-specific clocks enum: CLOCK_PROCESS_CPUTIME_ID enum: CLOCK_THREAD_CPUTIME_ID enum: CLOCK_MONOTONIC_RAW enum: CLOCK_REALTIME_COARSE enum: CLOCK_MONOTONIC_COARSE enum: CLOCK_BOOTTIME enum: CLOCK_REALTIME_ALARM enum: CLOCK_BOOTTIME_ALARM enum: ITIMER_REAL enum: ITIMER_VIRTUAL enum: ITIMER_PROF cdef struct timezone: int tz_minuteswest int dsttime cdef struct timeval: time_t tv_sec suseconds_t tv_usec cdef struct timespec: time_t tv_sec long tv_nsec cdef struct itimerval: timeval it_interval timeval it_value cdef struct itimerspec: timespec it_interval timespec it_value int nanosleep(const timespec *, timespec *) int getitimer(int, itimerval *) int gettimeofday(timeval *tp, timezone *tzp) int setitimer(int, const itimerval *, itimerval *) int clock_getcpuclockid(pid_t, clockid_t *) int clock_getres(clockid_t, timespec *) int clock_gettime(clockid_t, timespec *) int clock_nanosleep(clockid_t, int, const timespec *, timespec *) int clock_settime(clockid_t, const timespec *) int timer_create(clockid_t, sigevent *, timer_t *) int timer_delete(timer_t) int timer_gettime(timer_t, itimerspec *) int timer_getoverrun(timer_t) int timer_settime(timer_t, int, const itimerspec *, itimerspec *) Cython-0.26.1/Cython/Includes/posix/unistd.pxd0000664000175000017500000001757513023021033022031 0ustar stefanstefan00000000000000# http://www.opengroup.org/onlinepubs/009695399/basedefs/unistd.h.html from posix.types cimport gid_t, pid_t, off_t, uid_t cdef extern from "" nogil: #:NULL enum: R_OK enum: W_OK enum: X_OK enum: F_OK enum: _CS_PATH enum: _CS_POSIX_V6_ILP32_OFF32_CFLAGS enum: _CS_POSIX_V6_ILP32_OFF32_LDFLAGS enum: _CS_POSIX_V6_ILP32_OFF32_LIBS enum: _CS_POSIX_V6_ILP32_OFFBIG_CFLAGS enum: _CS_POSIX_V6_ILP32_OFFBIG_LDFLAGS enum: _CS_POSIX_V6_ILP32_OFFBIG_LIBS enum: 
_CS_POSIX_V6_LP64_OFF64_CFLAGS enum: _CS_POSIX_V6_LP64_OFF64_LDFLAGS enum: _CS_POSIX_V6_LP64_OFF64_LIBS enum: _CS_POSIX_V6_LPBIG_OFFBIG_CFLAGS enum: _CS_POSIX_V6_LPBIG_OFFBIG_LDFLAGS enum: _CS_POSIX_V6_LPBIG_OFFBIG_LIBS enum: _CS_POSIX_V6_WIDTH_RESTRICTED_ENVS enum: SEEK_SET enum: SEEK_CUR enum: SEEK_END enum: F_LOCK enum: F_TEST enum: F_TLOCK enum: F_ULOCK enum: _PC_2_SYMLINKS enum: _PC_ALLOC_SIZE_MIN enum: _PC_ASYNC_IO enum: _PC_CHOWN_RESTRICTED enum: _PC_FILESIZEBITS enum: _PC_LINK_MAX enum: _PC_MAX_CANON enum: _PC_MAX_INPUT enum: _PC_NAME_MAX enum: _PC_NO_TRUNC enum: _PC_PATH_MAX enum: _PC_PIPE_BUF enum: _PC_PRIO_IO enum: _PC_REC_INCR_XFER_SIZE enum: _PC_REC_MIN_XFER_SIZE enum: _PC_REC_XFER_ALIGN enum: _PC_SYMLINK_MAX enum: _PC_SYNC_IO enum: _PC_VDISABLE enum: _SC_2_C_BIND enum: _SC_2_C_DEV enum: _SC_2_CHAR_TERM enum: _SC_2_FORT_DEV enum: _SC_2_FORT_RUN enum: _SC_2_LOCALEDEF enum: _SC_2_PBS enum: _SC_2_PBS_ACCOUNTING enum: _SC_2_PBS_CHECKPOINT enum: _SC_2_PBS_LOCATE enum: _SC_2_PBS_MESSAGE enum: _SC_2_PBS_TRACK enum: _SC_2_SW_DEV enum: _SC_2_UPE enum: _SC_2_VERSION enum: _SC_ADVISORY_INFO enum: _SC_AIO_LISTIO_MAX enum: _SC_AIO_MAX enum: _SC_AIO_PRIO_DELTA_MAX enum: _SC_ARG_MAX enum: _SC_ASYNCHRONOUS_IO enum: _SC_ATEXIT_MAX enum: _SC_BARRIERS enum: _SC_BC_BASE_MAX enum: _SC_BC_DIM_MAX enum: _SC_BC_SCALE_MAX enum: _SC_BC_STRING_MAX enum: _SC_CHILD_MAX enum: _SC_CLK_TCK enum: _SC_CLOCK_SELECTION enum: _SC_COLL_WEIGHTS_MAX enum: _SC_CPUTIME enum: _SC_DELAYTIMER_MAX enum: _SC_EXPR_NEST_MAX enum: _SC_FSYNC enum: _SC_GETGR_R_SIZE_MAX enum: _SC_GETPW_R_SIZE_MAX enum: _SC_HOST_NAME_MAX enum: _SC_IOV_MAX enum: _SC_IPV6 enum: _SC_JOB_CONTROL enum: _SC_LINE_MAX enum: _SC_LOGIN_NAME_MAX enum: _SC_MAPPED_FILES enum: _SC_MEMLOCK enum: _SC_MEMLOCK_RANGE enum: _SC_MEMORY_PROTECTION enum: _SC_MESSAGE_PASSING enum: _SC_MONOTONIC_CLOCK enum: _SC_MQ_OPEN_MAX enum: _SC_MQ_PRIO_MAX enum: _SC_NGROUPS_MAX enum: _SC_OPEN_MAX enum: _SC_PAGE_SIZE enum: _SC_PAGESIZE enum: _SC_PRIORITIZED_IO enum: _SC_PRIORITY_SCHEDULING enum: _SC_RAW_SOCKETS enum: _SC_RE_DUP_MAX enum: _SC_READER_WRITER_LOCKS enum: _SC_REALTIME_SIGNALS enum: _SC_REGEXP enum: _SC_RTSIG_MAX enum: _SC_SAVED_IDS enum: _SC_SEM_NSEMS_MAX enum: _SC_SEM_VALUE_MAX enum: _SC_SEMAPHORES enum: _SC_SHARED_MEMORY_OBJECTS enum: _SC_SHELL enum: _SC_SIGQUEUE_MAX enum: _SC_SPAWN enum: _SC_SPIN_LOCKS enum: _SC_SPORADIC_SERVER enum: _SC_SS_REPL_MAX enum: _SC_STREAM_MAX enum: _SC_SYMLOOP_MAX enum: _SC_SYNCHRONIZED_IO enum: _SC_THREAD_ATTR_STACKADDR enum: _SC_THREAD_ATTR_STACKSIZE enum: _SC_THREAD_CPUTIME enum: _SC_THREAD_DESTRUCTOR_ITERATIONS enum: _SC_THREAD_KEYS_MAX enum: _SC_THREAD_PRIO_INHERIT enum: _SC_THREAD_PRIO_PROTECT enum: _SC_THREAD_PRIORITY_SCHEDULING enum: _SC_THREAD_PROCESS_SHARED enum: _SC_THREAD_SAFE_FUNCTIONS enum: _SC_THREAD_SPORADIC_SERVER enum: _SC_THREAD_STACK_MIN enum: _SC_THREAD_THREADS_MAX enum: _SC_THREADS enum: _SC_TIMEOUTS enum: _SC_TIMER_MAX enum: _SC_TIMERS enum: _SC_TRACE enum: _SC_TRACE_EVENT_FILTER enum: _SC_TRACE_EVENT_NAME_MAX enum: _SC_TRACE_INHERIT enum: _SC_TRACE_LOG enum: _SC_TRACE_NAME_MAX enum: _SC_TRACE_SYS_MAX enum: _SC_TRACE_USER_EVENT_MAX enum: _SC_TTY_NAME_MAX enum: _SC_TYPED_MEMORY_OBJECTS enum: _SC_TZNAME_MAX enum: _SC_V6_ILP32_OFF32 enum: _SC_V6_ILP32_OFFBIG enum: _SC_V6_LP64_OFF64 enum: _SC_V6_LPBIG_OFFBIG enum: _SC_VERSION enum: _SC_XBS5_ILP32_OFF32 enum: _SC_XBS5_ILP32_OFFBIG enum: _SC_XBS5_LP64_OFF64 enum: _SC_XBS5_LPBIG_OFFBIG enum: _SC_XOPEN_CRYPT enum: _SC_XOPEN_ENH_I18N enum: _SC_XOPEN_LEGACY enum: 
_SC_XOPEN_REALTIME enum: _SC_XOPEN_REALTIME_THREADS enum: _SC_XOPEN_SHM enum: _SC_XOPEN_STREAMS enum: _SC_XOPEN_UNIX enum: _SC_XOPEN_VERSION enum: STDIN_FILENO #0 enum: STDOUT_FILENO #1 enum: STDERR_FILENO #2 ctypedef unsigned useconds_t int access(const char *, int) unsigned alarm(unsigned) int chdir(const char *) int chown(const char *, uid_t, gid_t) int close(int) size_t confstr(int, char *, size_t) char *crypt(const char *, const char *) char *ctermid(char *) int dup(int) int dup2(int, int) void encrypt(char[64], int) int execl(const char *, const char *, ...) int execle(const char *, const char *, ...) int execlp(const char *, const char *, ...) int execv(const char *, char *[]) int execve(const char *, char *[], char *[]) int execvp(const char *, char *[]) void _exit(int) int fchown(int, uid_t, gid_t) int fchdir(int) int fdatasync(int) pid_t fork() long fpathconf(int, int) int fsync(int) int ftruncate(int, off_t) char *getcwd(char *, size_t) gid_t getegid() uid_t geteuid() gid_t getgid() int getgroups(int, gid_t []) long gethostid() int gethostname(char *, size_t) char *getlogin() int getlogin_r(char *, size_t) int getopt(int, char * [], const char *) pid_t getpgid(pid_t) pid_t getpgrp() pid_t getpid() pid_t getppid() pid_t getsid(pid_t) uid_t getuid() char *getwd(char *) int isatty(int) int lchown(const char *, uid_t, gid_t) int link(const char *, const char *) int lockf(int, int, off_t) off_t lseek(int, off_t, int) int nice(int) long pathconf(char *, int) int pause() int pipe(int [2]) ssize_t pread(int, void *, size_t, off_t) ssize_t pwrite(int, const void *, size_t, off_t) ssize_t read(int, void *, size_t) ssize_t readlink(const char *, char *, size_t) int rmdir(const char *) int setegid(gid_t) int seteuid(uid_t) int setgid(gid_t) int setpgid(pid_t, pid_t) pid_t setpgrp() int setregid(gid_t, gid_t) int setreuid(uid_t, uid_t) pid_t setsid() int setuid(uid_t) unsigned sleep(unsigned) void swab(const void *, void *, ssize_t) int symlink(const char *, const char *) void sync() long sysconf(int) pid_t tcgetpgrp(int) int tcsetpgrp(int, pid_t) int truncate(const char *, off_t) char *ttyname(int) int ttyname_r(int, char *, size_t) useconds_t ualarm(useconds_t, useconds_t) int unlink(const char *) int usleep(useconds_t) pid_t vfork() ssize_t write(int, const void *, size_t) char *optarg int optind int opterr int optopt Cython-0.26.1/Cython/Includes/posix/ioctl.pxd0000664000175000017500000000014313023021033021614 0ustar stefanstefan00000000000000cdef extern from "" nogil: enum: FIONBIO int ioctl(int fd, int request, ...) 
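The declarations in posix/unistd.pxd and posix/ioctl.pxd above are plain C prototypes; they only become useful when cimported from a .pyx module. A minimal usage sketch, assuming a POSIX platform (the module and function names below are illustrative, not part of the distribution):

from posix.unistd cimport getpid, read

def read_chunk(int fd):
    # Read at most 1024 bytes from an already-open file descriptor.
    cdef char buf[1024]
    cdef ssize_t got = read(fd, buf, sizeof(buf))
    if got < 0:
        raise OSError("read() failed in process %d" % getpid())
    return buf[:got]    # slicing a char buffer by its length yields a bytes object

Because the extern block above is declared nogil, the read() call itself can also be issued inside a "with nogil:" section, which is the usual reason to prefer these declarations over Python-level I/O.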
Cython-0.26.1/Cython/Includes/posix/resource.pxd0000664000175000017500000000234613023021033022340 0ustar stefanstefan00000000000000# http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/resource.h.html from posix.time cimport timeval from posix.types cimport id_t cdef extern from "" nogil: enum: PRIO_PROCESS enum: PRIO_PGRP enum: PRIO_USER enum: RLIM_INFINITY enum: RLIM_SAVED_MAX enum: RLIM_SAVED_CUR enum: RUSAGE_SELF enum: RUSAGE_CHILDREN enum: RLIMIT_CORE enum: RLIMIT_CPU enum: RLIMIT_DATA enum: RLIMIT_FSIZE enum: RLIMIT_NOFILE enum: RLIMIT_STACK enum: RLIMIT_AS ctypedef unsigned long rlim_t cdef struct rlimit: rlim_t rlim_cur rlim_t rlim_max cdef struct rusage: timeval ru_utime timeval ru_stime long ru_maxrss long ru_ixrss long ru_idrss long ru_isrss long ru_minflt long ru_majflt long ru_nswap long ru_inblock long ru_oublock long ru_msgsnd long ru_msgrcv long ru_nsignals long ru_nvcsw long ru_nivcsw int getpriority(int, id_t) int getrlimit(int, rlimit *) int getrusage(int, rusage *) int setpriority(int, id_t, int) int setrlimit(int, const rlimit *) Cython-0.26.1/Cython/Includes/posix/wait.pxd0000664000175000017500000000233413023021033021452 0ustar stefanstefan00000000000000# http://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/wait.h.html from posix.types cimport pid_t, id_t from posix.signal cimport siginfo_t from posix.resource cimport rusage cdef extern from "" nogil: enum: WNOHANG enum: WUNTRACED enum: WCONTINUED enum: WEXITED enum: WSTOPPED enum: WNOWAIT int WEXITSTATUS(int status) int WIFCONTINUED(int status) int WIFEXITED(int status) int WIFSIGNALED(int status) int WIFSTOPPED(int status) int WSTOPSIG(int status) int WTERMSIG(int status) ctypedef int idtype_t enum: P_ALL # idtype_t values enum: P_PID enum: P_PGID pid_t wait(int *stat_loc) pid_t waitpid(pid_t pid, int *status, int options) int waitid(idtype_t idtype, id_t id, siginfo_t *infop, int options) # wait3 was in POSIX until 2008 while wait4 was never standardized. # Even so, these calls are in almost every Unix, always in sys/wait.h. # Hence, posix.wait is the least surprising place to declare them for Cython. # libc may require _XXX_SOURCE to be defined at C-compile time to provide them. pid_t wait3(int *status, int options, rusage *rusage) pid_t wait4(pid_t pid, int *status, int options, rusage *rusage) Cython-0.26.1/Cython/Includes/posix/stdlib.pxd0000664000175000017500000000164613023021033021774 0ustar stefanstefan00000000000000# POSIX additions to # http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/stdlib.h.html cdef extern from "" nogil: void _Exit(int) double drand48() double erand48(unsigned short *) int getsubopt(char **, char *const *, char **) void lcong48(unsigned short *) long lrand() char *mkdtemp(char *) int mkstemp(char *) long mrand() long nrand48(unsigned short *) int posix_memalign(void **, size_t, size_t) int posix_openpt(int) char *ptsname(int) int putenv(char *) int rand_r(unsigned *) long random() char *realpath(const char *, char *) unsigned short *seed48(unsigned short *) int setenv(const char *, const char *, int) void setkey(const char *) char *setstate(char *) void srand48(long) void srandom(unsigned) int unlockpt(int) int unsetenv(const char *) Cython-0.26.1/Cython/Includes/posix/stdio.pxd0000664000175000017500000000203613023021033021627 0ustar stefanstefan00000000000000# POSIX additions to . 
# http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/stdio.h.html from libc.stdio cimport FILE from libc.stddef cimport wchar_t from posix.types cimport off_t cdef extern from "" nogil: # File descriptors FILE *fdopen(int, const char *) int fileno(FILE *) # Pipes FILE *popen(const char *, const char *) int pclose(FILE *) # Memory streams (POSIX.2008) FILE *fmemopen(void *, size_t, const char *) FILE *open_memstream(char **, size_t *) FILE *open_wmemstream(wchar_t **, size_t *) # Seek and tell with off_t int fseeko(FILE *, off_t, int) off_t ftello(FILE *) # Locking (for multithreading) void flockfile(FILE *) int ftrylockfile(FILE *) void funlockfile(FILE *) int getc_unlocked(FILE *) int getchar_unlocked() int putc_unlocked(int, FILE *) int putchar_unlocked(int) # Reading lines and records (POSIX.2008) ssize_t getline(char **, size_t *, FILE *) ssize_t getdelim(char **, size_t *, int, FILE *) Cython-0.26.1/Cython/Includes/posix/select.pxd0000664000175000017500000000104213023021033021760 0ustar stefanstefan00000000000000from .types cimport sigset_t from .time cimport timeval, timespec cdef extern from "" nogil: ctypedef struct fd_set: pass int FD_SETSIZE void FD_SET(int, fd_set*) void FD_CLR(int, fd_set*) bint FD_ISSET(int, fd_set*) void FD_ZERO(fd_set*) int select(int nfds, fd_set *readfds, fd_set *writefds, fd_set *exceptfds, const timeval *timeout) int pselect(int nfds, fd_set *readfds, fd_set *writefds, fd_set *exceptfds, const timespec *timeout, const sigset_t *sigmask) Cython-0.26.1/Cython/Includes/Deprecated/0000775000175000017500000000000013151203436020700 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/Deprecated/python_unicode.pxd0000664000175000017500000000010513023021033024425 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.unicode cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_version.pxd0000664000175000017500000000010513023021033024464 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.version cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_function.pxd0000664000175000017500000000010613023021033024625 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.function cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_module.pxd0000664000175000017500000000010413023021033024263 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.module cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_set.pxd0000664000175000017500000000010113023021033023566 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.set cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_tuple.pxd0000664000175000017500000000010313023021033024126 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.tuple cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_mapping.pxd0000664000175000017500000000010513023021033024432 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.mapping cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_type.pxd0000664000175000017500000000010213023021033023755 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.type cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_exc.pxd0000664000175000017500000000010113023021033023552 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.exc cimport * 
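Each of the Deprecated/*.pxd files in this directory is a one-line shim that re-exports a newer cpython.* package for backwards compatibility. A hypothetical migration sketch, assuming the usual cpython.exc declarations of PyErr_Occurred and PyErr_Clear (the helper function below is illustrative, not from the distribution):

# old spelling, still available through the python_exc shim above:
#     from python_exc cimport PyErr_Occurred
# preferred modern spelling:
from cpython.exc cimport PyErr_Occurred, PyErr_Clear

def clear_pending_error():
    # Report whether a C-level exception was pending, clearing it if so.
    if PyErr_Occurred() != NULL:
        PyErr_Clear()
        return True
    return False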
Cython-0.26.1/Cython/Includes/Deprecated/python_buffer.pxd0000664000175000017500000000010413023021033024247 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.buffer cimport * Cython-0.26.1/Cython/Includes/Deprecated/python2.5.pxd0000664000175000017500000011566412542002467023204 0ustar stefanstefan00000000000000# From: Eric Huss # # Here is my latest copy. It does not cover 100% of the API. It should be # current up to 2.5. # # -Eric # XXX: # - Need to support "long long" definitions that are different for different platforms. # - Support unicode platform dependencies. # - Add unicode calls. # - Add setobject calls. cdef extern from "stdio.h": ctypedef struct FILE: pass cdef extern from "Python.h": # XXX: This is platform dependent. ctypedef unsigned short Py_UNICODE ctypedef struct PyTypeObject: pass ctypedef struct PyObject: Py_ssize_t ob_refcnt PyTypeObject * ob_type ############################################################################################### # bool ############################################################################################### PyObject * Py_False PyObject * Py_True PyTypeObject PyBool_Type int PyBool_Check (object) # Always succeeds. object PyBool_FromLong (long) ############################################################################################### # buffer ############################################################################################### PyTypeObject PyBuffer_Type int Py_END_OF_BUFFER int PyBuffer_Check (object) # Always succeeds. object PyBuffer_FromMemory (void *, Py_ssize_t) object PyBuffer_FromObject (object, Py_ssize_t, Py_ssize_t) object PyBuffer_FromReadWriteMemory (void *, Py_ssize_t) object PyBuffer_FromReadWriteObject (object, Py_ssize_t, Py_ssize_t) object PyBuffer_New (Py_ssize_t) int PyObject_AsCharBuffer (object, char **, Py_ssize_t *) except -1 int PyObject_AsReadBuffer (object, void **, Py_ssize_t *) except -1 int PyObject_AsWriteBuffer (object, void **, Py_ssize_t *) except -1 int PyObject_CheckReadBuffer (object) # Always succeeds. ############################################################################################### # cobject ############################################################################################### PyTypeObject PyCObject_Type int PyCObject_Check(object) # Always succeeds. object PyCObject_FromVoidPtr(void *, void (*)(void*)) object PyCObject_FromVoidPtrAndDesc(void *, void *, void (*)(void*,void*)) void * PyCObject_AsVoidPtr(object) except NULL void * PyCObject_GetDesc(object) except NULL void * PyCObject_Import(char *, char *) except NULL ############################################################################################### # compile ############################################################################################### ctypedef struct PyCodeObject: int co_argcount int co_nlocals int co_stacksize int co_flags PyObject *co_code PyObject *co_consts PyObject *co_names PyObject *co_varnames PyObject *co_freevars PyObject *co_cellvars PyObject *co_filename PyObject *co_name int co_firstlineno PyObject *co_lnotab int PyCode_Addr2Line(PyCodeObject *, int) ############################################################################################### # complex ############################################################################################### ctypedef struct Py_complex: double real double imag PyTypeObject PyComplex_Type Py_complex PyComplex_AsCComplex (object) # Always succeeds. 
int PyComplex_Check (object) # Always succeeds. int PyComplex_CheckExact (object) # Always succeeds. object PyComplex_FromCComplex (Py_complex) object PyComplex_FromDoubles (double, double) double PyComplex_ImagAsDouble (object) except? -1 double PyComplex_RealAsDouble (object) except? -1 Py_complex _Py_c_diff (Py_complex, Py_complex) Py_complex _Py_c_neg (Py_complex) Py_complex _Py_c_pow (Py_complex, Py_complex) Py_complex _Py_c_prod (Py_complex, Py_complex) Py_complex _Py_c_quot (Py_complex, Py_complex) Py_complex _Py_c_sum (Py_complex, Py_complex) ############################################################################################### # dict ############################################################################################### PyTypeObject PyDict_Type int PyDict_Check (object) # Always succeeds. int PyDict_CheckExact (object) # Always succeeds. void PyDict_Clear (object) int PyDict_Contains (object, object) except -1 object PyDict_Copy (object) int PyDict_DelItem (object, object) except -1 int PyDict_DelItemString (object, char *) except -1 object PyDict_Items (object) object PyDict_Keys (object) int PyDict_Merge (object, object, int) except -1 int PyDict_MergeFromSeq2 (object, object, int) except -1 object PyDict_New () # XXX: Pyrex doesn't support pointer to a python object? #int PyDict_Next (object, Py_ssize_t *, object *, object *) # Always succeeds. int PyDict_SetItem (object, object, object) except -1 int PyDict_SetItemString (object, char *, object) except -1 Py_ssize_t PyDict_Size (object) except -1 int PyDict_Update (object, object) except -1 object PyDict_Values (object) # XXX: Borrowed reference. No exception on NULL. #object PyDict_GetItem (object, object) # XXX: Borrowed reference. No exception on NULL #object PyDict_GetItemString (object, char *) ############################################################################################### # float ############################################################################################### PyTypeObject PyFloat_Type int _PyFloat_Pack4 (double, unsigned char *, int) except -1 int _PyFloat_Pack8 (double, unsigned char *, int) except -1 double _PyFloat_Unpack4 (unsigned char *, int) except? -1 double _PyFloat_Unpack8 (unsigned char *, int) except? -1 double PyFloat_AS_DOUBLE (object) double PyFloat_AsDouble (object) except? -1 void PyFloat_AsReprString (char*, object) void PyFloat_AsString (char*, object) int PyFloat_Check (object) # Always succeeds. int PyFloat_CheckExact (object) # Always succeeds. object PyFloat_FromDouble (double) object PyFloat_FromString (object, char**) ############################################################################################### # frame ############################################################################################### ctypedef struct PyFrameObject: PyFrameObject *f_back PyCodeObject *f_code PyObject *f_builtins PyObject *f_globals PyObject *f_locals PyObject *f_trace PyObject *f_exc_type PyObject *f_exc_value PyObject *f_exc_traceback int f_lasti int f_lineno int f_restricted int f_iblock int f_nlocals int f_ncells int f_nfreevars int f_stacksize ############################################################################################### # int ############################################################################################### PyTypeObject PyInt_Type long PyInt_AS_LONG (object) # Always succeeds. long PyInt_AsLong (object) except? -1 Py_ssize_t PyInt_AsSsize_t (object) except? 
-1 unsigned long long PyInt_AsUnsignedLongLongMask (object) except? -1 unsigned long PyInt_AsUnsignedLongMask (object) except? -1 int PyInt_Check (object) # Always succeeds. int PyInt_CheckExact (object) # Always succeeds. object PyInt_FromLong (long) object PyInt_FromSsize_t (Py_ssize_t) object PyInt_FromString (char*, char**, int) object PyInt_FromUnicode (Py_UNICODE*, Py_ssize_t, int) long PyInt_GetMax () # Always succeeds. ############################################################################################### # iterator ############################################################################################### int PyIter_Check (object) # Always succeeds. object PyIter_Next (object) ############################################################################################### # list ############################################################################################### PyTypeObject PyList_Type int PyList_Append (object, object) except -1 object PyList_AsTuple (object) int PyList_Check (object) # Always succeeds. int PyList_CheckExact (object) # Always succeeds. int PyList_GET_SIZE (object) # Always suceeds. object PyList_GetSlice (object, Py_ssize_t, Py_ssize_t) int PyList_Insert (object, Py_ssize_t, object) except -1 object PyList_New (Py_ssize_t) int PyList_Reverse (object) except -1 int PyList_SetSlice (object, Py_ssize_t, Py_ssize_t, object) except -1 Py_ssize_t PyList_Size (object) except -1 int PyList_Sort (object) except -1 ############################################################################################### # long ############################################################################################### PyTypeObject PyLong_Type int _PyLong_AsByteArray (object, unsigned char *, size_t, int, int) except -1 object _PyLong_FromByteArray (unsigned char *, size_t, int, int) size_t _PyLong_NumBits (object) except -1 int _PyLong_Sign (object) # No error. long PyLong_AsLong (object) except? -1 long long PyLong_AsLongLong (object) except? -1 unsigned long PyLong_AsUnsignedLong (object) except? -1 unsigned long PyLong_AsUnsignedLongMask (object) except? -1 unsigned long long PyLong_AsUnsignedLongLong (object) except? -1 unsigned long long PyLong_AsUnsignedLongLongMask (object) except? -1 int PyLong_Check (object) # Always succeeds. int PyLong_CheckExact (object) # Always succeeds. object PyLong_FromDouble (double) object PyLong_FromLong (long) object PyLong_FromLongLong (long long) object PyLong_FromUnsignedLong (unsigned long) object PyLong_FromUnsignedLongLong (unsigned long long) double PyLong_AsDouble (object) except? -1 object PyLong_FromVoidPtr (void *) void * PyLong_AsVoidPtr (object) except NULL object PyLong_FromString (char *, char **, int) object PyLong_FromUnicode (Py_UNICODE*, Py_ssize_t, int) ############################################################################################### # mapping ############################################################################################### int PyMapping_Check (object) # Always succeeds. int PyMapping_DelItem (object, object) except -1 int PyMapping_DelItemString (object, char *) except -1 object PyMapping_GetItemString (object, char *) int PyMapping_HasKey (object, object) # Always succeeds. int PyMapping_HasKeyString (object, char *) # Always succeeds. 
object PyMapping_Items (object) object PyMapping_Keys (object) Py_ssize_t PyMapping_Length (object) except -1 int PyMapping_SetItemString (object, char *, object) except -1 Py_ssize_t PyMapping_Size (object) except -1 object PyMapping_Values (object) ############################################################################################### # mem ############################################################################################### void PyMem_Free (void * p) void * PyMem_Malloc (size_t n) void * PyMem_Realloc (void *, size_t) ############################################################################################### # modsupport ############################################################################################### object Py_BuildValue (char *, ...) object Py_VaBuildValue (char *, va_list) ############################################################################################### # number ############################################################################################### object PyNumber_Absolute (object) object PyNumber_Add (object, object) object PyNumber_And (object, object) Py_ssize_t PyNumber_AsSsize_t (object, object) except? -1 int PyNumber_Check (object) # Always succeeds. # XXX: Pyrex doesn't support pointer to python object? #int PyNumber_Coerce (object*, object*) except -1 object PyNumber_Divide (object, object) object PyNumber_Divmod (object, object) object PyNumber_Float (object) object PyNumber_FloorDivide (object, object) object PyNumber_InPlaceAdd (object, object) object PyNumber_InPlaceAnd (object, object) object PyNumber_InPlaceDivide (object, object) object PyNumber_InPlaceFloorDivide (object, object) object PyNumber_InPlaceLshift (object, object) object PyNumber_InPlaceMultiply (object, object) object PyNumber_InPlaceOr (object, object) object PyNumber_InPlacePower (object, object, object) object PyNumber_InPlaceRemainder (object, object) object PyNumber_InPlaceRshift (object, object) object PyNumber_InPlaceSubtract (object, object) object PyNumber_InPlaceTrueDivide (object, object) object PyNumber_InPlaceXor (object, object) object PyNumber_Int (object) object PyNumber_Invert (object) object PyNumber_Long (object) object PyNumber_Lshift (object, object) object PyNumber_Multiply (object, object) object PyNumber_Negative (object) object PyNumber_Or (object, object) object PyNumber_Positive (object) object PyNumber_Power (object, object, object) object PyNumber_Remainder (object, object) object PyNumber_Rshift (object, object) object PyNumber_Subtract (object, object) object PyNumber_TrueDivide (object, object) object PyNumber_Xor (object, object) ############################################################################################### # object ############################################################################################### int PyCallable_Check (object) # Always succeeds. int PyObject_AsFileDescriptor (object) except -1 object PyObject_Call (object, object, object) object PyObject_CallFunction (object, char *, ...) object PyObject_CallFunctionObjArgs (object, ...) object PyObject_CallMethod (object, char *, char *, ...) object PyObject_CallMethodObjArgs (object, object, ...) object PyObject_CallObject (object, object) int PyObject_Cmp (object, object, int *result) except -1 # Use PyObject_Cmp instead. 
#int PyObject_Compare (object, object) int PyObject_DelAttr (object, object) except -1 int PyObject_DelAttrString (object, char *) except -1 int PyObject_DelItem (object, object) except -1 int PyObject_DelItemString (object, char *) except -1 object PyObject_Dir (object) object PyObject_GetAttr (object, object) object PyObject_GetAttrString (object, char *) object PyObject_GetItem (object, object) object PyObject_GetIter (object) int PyObject_HasAttr (object, object) # Always succeeds. int PyObject_HasAttrString (object, char *) # Always succeeds. long PyObject_Hash (object) except -1 int PyObject_IsInstance (object, object) except -1 int PyObject_IsSubclass (object, object) except -1 int PyObject_IsTrue (object) except -1 Py_ssize_t PyObject_Length (object) except -1 int PyObject_Not (object) except -1 int PyObject_Print (object, FILE *, int) except -1 object PyObject_Repr (object) object PyObject_RichCompare (object, object, int) int PyObject_RichCompareBool (object, object, int) except -1 int PyObject_SetAttr (object, object, object) except -1 int PyObject_SetAttrString (object, char *, object) except -1 int PyObject_SetItem (object, object, object) except -1 Py_ssize_t PyObject_Size (object) except -1 object PyObject_Str (object) object PyObject_Type (object) int PyObject_TypeCheck (object, object) # Always succeeds. object PyObject_Unicode (object) ############################################################################################### # pyerrors ############################################################################################### int PyErr_BadArgument () void PyErr_BadInternalCall () int PyErr_CheckSignals () void PyErr_Clear () int PyErr_ExceptionMatches (object) object PyErr_Format (object, char *, ...) int PyErr_GivenExceptionMatches (object, object) object PyErr_NoMemory () object PyErr_Occurred () void PyErr_Restore (object, object, object) object PyErr_SetFromErrno (object) object PyErr_SetFromErrnoWithFilename (object, char *) object PyErr_SetFromErrnoWithFilenameObject (object, object) void PyErr_SetInterrupt () void PyErr_SetNone (object) void PyErr_SetObject (object, object) void PyErr_SetString (object, char *) int PyErr_Warn (object, char *) int PyErr_WarnExplicit (object, char *, char *, int, char *, object) void PyErr_WriteUnraisable (object) ############################################################################################### # pyeval # Be extremely careful with these functions. ############################################################################################### ctypedef struct PyThreadState: PyFrameObject * frame int recursion_depth void * curexc_type, * curexc_value, * curexc_traceback void * exc_type, * exc_value, * exc_traceback void PyEval_AcquireLock () void PyEval_ReleaseLock () void PyEval_AcquireThread (PyThreadState *) void PyEval_ReleaseThread (PyThreadState *) PyThreadState* PyEval_SaveThread () void PyEval_RestoreThread (PyThreadState *) ############################################################################################### # pystate # Be extremely careful with these functions. Read PEP 311 for more detail. 
############################################################################################### ctypedef int PyGILState_STATE PyGILState_STATE PyGILState_Ensure () void PyGILState_Release (PyGILState_STATE) ctypedef struct PyInterpreterState: pass PyThreadState* PyThreadState_New (PyInterpreterState *) void PyThreadState_Clear (PyThreadState *) void PyThreadState_Delete (PyThreadState *) PyThreadState* PyThreadState_Get () PyThreadState* PyThreadState_Swap (PyThreadState *tstate) # XXX: Borrowed reference. #object PyThreadState_GetDict () ############################################################################################### # run # Functions for embedded interpreters are not included. ############################################################################################### ctypedef struct PyCompilerFlags: int cf_flags ctypedef struct _node: pass ctypedef void (*PyOS_sighandler_t)(int) void PyErr_Display (object, object, object) void PyErr_Print () void PyErr_PrintEx (int) char * PyOS_Readline (FILE *, FILE *, char *) PyOS_sighandler_t PyOS_getsig (int) PyOS_sighandler_t PyOS_setsig (int, PyOS_sighandler_t) _node * PyParser_SimpleParseFile (FILE *, char *, int) except NULL _node * PyParser_SimpleParseFileFlags (FILE *, char *, int, int) except NULL _node * PyParser_SimpleParseString (char *, int) except NULL _node * PyParser_SimpleParseStringFlagsFilename(char *, char *, int, int) except NULL _node * PyParser_SimpleParseStringFlags (char *, int, int) except NULL int PyRun_AnyFile (FILE *, char *) except -1 int PyRun_AnyFileEx (FILE *, char *, int) except -1 int PyRun_AnyFileExFlags (FILE *, char *, int, PyCompilerFlags *) except -1 int PyRun_AnyFileFlags (FILE *, char *, PyCompilerFlags *) except -1 object PyRun_File (FILE *, char *, int, object, object) object PyRun_FileEx (FILE *, char *, int, object, object, int) object PyRun_FileExFlags (FILE *, char *, int, object, object, int, PyCompilerFlags *) object PyRun_FileFlags (FILE *, char *, int, object, object, PyCompilerFlags *) int PyRun_InteractiveLoop (FILE *, char *) except -1 int PyRun_InteractiveLoopFlags (FILE *, char *, PyCompilerFlags *) except -1 int PyRun_InteractiveOne (FILE *, char *) except -1 int PyRun_InteractiveOneFlags (FILE *, char *, PyCompilerFlags *) except -1 int PyRun_SimpleFile (FILE *, char *) except -1 int PyRun_SimpleFileEx (FILE *, char *, int) except -1 int PyRun_SimpleFileExFlags (FILE *, char *, int, PyCompilerFlags *) except -1 int PyRun_SimpleString (char *) except -1 int PyRun_SimpleStringFlags (char *, PyCompilerFlags *) except -1 object PyRun_String (char *, int, object, object) object PyRun_StringFlags (char *, int, object, object, PyCompilerFlags *) int Py_AtExit (void (*func)()) object Py_CompileString (char *, char *, int) object Py_CompileStringFlags (char *, char *, int, PyCompilerFlags *) void Py_Exit (int) int Py_FdIsInteractive (FILE *, char *) # Always succeeds. char * Py_GetBuildInfo () char * Py_GetCompiler () char * Py_GetCopyright () char * Py_GetExecPrefix () char * Py_GetPath () char * Py_GetPlatform () char * Py_GetPrefix () char * Py_GetProgramFullPath () char * Py_GetProgramName () char * Py_GetPythonHome () char * Py_GetVersion () ############################################################################################### # sequence ############################################################################################### int PySequence_Check (object) # Always succeeds. 
object PySequence_Concat (object, object) int PySequence_Contains (object, object) except -1 Py_ssize_t PySequence_Count (object, object) except -1 int PySequence_DelItem (object, Py_ssize_t) except -1 int PySequence_DelSlice (object, Py_ssize_t, Py_ssize_t) except -1 object PySequence_Fast (object, char *) int PySequence_Fast_GET_SIZE (object) object PySequence_GetItem (object, Py_ssize_t) object PySequence_GetSlice (object, Py_ssize_t, Py_ssize_t) object PySequence_ITEM (object, int) int PySequence_In (object, object) except -1 object PySequence_InPlaceConcat (object, object) object PySequence_InPlaceRepeat (object, Py_ssize_t) Py_ssize_t PySequence_Index (object, object) except -1 Py_ssize_t PySequence_Length (object) except -1 object PySequence_List (object) object PySequence_Repeat (object, Py_ssize_t) int PySequence_SetItem (object, Py_ssize_t, object) except -1 int PySequence_SetSlice (object, Py_ssize_t, Py_ssize_t, object) except -1 Py_ssize_t PySequence_Size (object) except -1 object PySequence_Tuple (object) ############################################################################################### # string ############################################################################################### PyTypeObject PyString_Type # Pyrex cannot support resizing because you have no choice but to use # realloc which may call free() on the object, and there's no way to tell # Pyrex to "forget" reference counting for the object. #int _PyString_Resize (object *, Py_ssize_t) except -1 char * PyString_AS_STRING (object) # Always succeeds. object PyString_AsDecodedObject (object, char *, char *) object PyString_AsEncodedObject (object, char *, char *) object PyString_AsEncodedString (object, char *, char *) char * PyString_AsString (object) except NULL int PyString_AsStringAndSize (object, char **, Py_ssize_t *) except -1 int PyString_Check (object) # Always succeeds. int PyString_CHECK_INTERNED (object) # Always succeeds. int PyString_CheckExact (object) # Always succeeds. # XXX: Pyrex doesn't support pointer to a python object? #void PyString_Concat (object *, object) # XXX: Pyrex doesn't support pointer to a python object? #void PyString_ConcatAndDel (object *, object) object PyString_Decode (char *, int, char *, char *) object PyString_DecodeEscape (char *, int, char *, int, char *) object PyString_Encode (char *, int, char *, char *) object PyString_Format (object, object) object PyString_FromFormat (char*, ...) object PyString_FromFormatV (char*, va_list) object PyString_FromString (char *) object PyString_FromStringAndSize (char *, Py_ssize_t) Py_ssize_t PyString_GET_SIZE (object) # Always succeeds. object PyString_InternFromString (char *) # XXX: Pyrex doesn't support pointer to a python object? #void PyString_InternImmortal (object*) # XXX: Pyrex doesn't support pointer to a python object? #void PyString_InternInPlace (object*) object PyString_Repr (object, int) Py_ssize_t PyString_Size (object) except -1 # Disgusting hack to access internal object values. ctypedef struct PyStringObject: int ob_refcnt PyTypeObject * ob_type int ob_size long ob_shash int ob_sstate char * ob_sval ############################################################################################### # tuple ############################################################################################### PyTypeObject PyTuple_Type # See PyString_Resize note about resizing. #int _PyTuple_Resize (object*, Py_ssize_t) except -1 int PyTuple_Check (object) # Always succeeds. 
int PyTuple_CheckExact (object) # Always succeeds. Py_ssize_t PyTuple_GET_SIZE (object) # Always succeeds. object PyTuple_GetSlice (object, Py_ssize_t, Py_ssize_t) object PyTuple_New (Py_ssize_t) object PyTuple_Pack (Py_ssize_t, ...) Py_ssize_t PyTuple_Size (object) except -1 ############################################################################################### # Dangerous things! # Do not use these unless you really, really know what you are doing. ############################################################################################### void Py_INCREF (object) void Py_XINCREF (object) void Py_DECREF (object) void Py_XDECREF (object) void Py_CLEAR (object) # XXX: Stolen reference. void PyTuple_SET_ITEM (object, Py_ssize_t, value) # XXX: Borrowed reference. object PyTuple_GET_ITEM (object, Py_ssize_t) # XXX: Borrowed reference. object PyTuple_GetItem (object, Py_ssize_t) # XXX: Stolen reference. int PyTuple_SetItem (object, Py_ssize_t, object) except -1 # XXX: Steals reference. int PyList_SetItem (object, Py_ssize_t, object) except -1 # XXX: Borrowed reference object PyList_GetItem (object, Py_ssize_t) # XXX: Borrowed reference, no NULL on error. object PyList_GET_ITEM (object, Py_ssize_t) # XXX: Stolen reference. void PyList_SET_ITEM (object, Py_ssize_t, object) # XXX: Borrowed reference. object PySequence_Fast_GET_ITEM (object, Py_ssize_t) # First parameter _must_ be a PyStringObject. object _PyString_Join (object, object) Cython-0.26.1/Cython/Includes/Deprecated/python_bool.pxd0000664000175000017500000000010213023021033023727 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.bool cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_string.pxd0000664000175000017500000000010413023021033024304 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.string cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_sequence.pxd0000664000175000017500000000010613023021033024610 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.sequence cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_mem.pxd0000664000175000017500000000010113023021033023551 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.mem cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_object.pxd0000664000175000017500000000010413023021033024244 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.object cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_instance.pxd0000664000175000017500000000010613023021033024604 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.instance cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_cobject.pxd0000664000175000017500000000010513023021033024410 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.cobject cimport * Cython-0.26.1/Cython/Includes/Deprecated/python.pxd0000664000175000017500000000007513023021033022725 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_ref.pxd0000664000175000017500000000010113023021033023547 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.ref cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_long.pxd0000664000175000017500000000010213023021033023733 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.long cimport * 
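The "Dangerous things!" section at the end of python2.5.pxd above exposes reference-stealing calls such as PyTuple_SET_ITEM. The same calls are available from the maintained cpython.* packages; a sketch of the usual safe pattern, assuming the standard cpython.tuple and cpython.ref declarations (the helper below is illustrative, not part of the distribution):

from cpython.ref cimport Py_INCREF
from cpython.tuple cimport PyTuple_New, PyTuple_SET_ITEM

def pair(a, b):
    # PyTuple_SET_ITEM steals a reference, so each value is incref'd first.
    cdef object t = PyTuple_New(2)
    Py_INCREF(a)
    PyTuple_SET_ITEM(t, 0, a)
    Py_INCREF(b)
    PyTuple_SET_ITEM(t, 1, b)
    return t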
Cython-0.26.1/Cython/Includes/Deprecated/stl.pxd0000664000175000017500000000421312542002467022223 0ustar stefanstefan00000000000000cdef extern from "" namespace std: cdef cppclass vector[TYPE]: #constructors __init__() __init__(vector&) __init__(int) __init__(int, TYPE&) __init__(iterator, iterator) #operators TYPE& __getitem__(int) TYPE& __setitem__(int, TYPE&) vector __new__(vector&) bool __eq__(vector&, vector&) bool __ne__(vector&, vector&) bool __lt__(vector&, vector&) bool __gt__(vector&, vector&) bool __le__(vector&, vector&) bool __ge__(vector&, vector&) #others void assign(int, TYPE) #void assign(iterator, iterator) TYPE& at(int) TYPE& back() iterator begin() int capacity() void clear() bool empty() iterator end() iterator erase(iterator) iterator erase(iterator, iterator) TYPE& front() iterator insert(iterator, TYPE&) void insert(iterator, int, TYPE&) void insert(iterator, iterator) int max_size() void pop_back() void push_back(TYPE&) iterator rbegin() iterator rend() void reserve(int) void resize(int) void resize(int, TYPE&) #void resize(size_type num, const TYPE& = TYPE()) int size() void swap(container&) cdef extern from "" namespace std: cdef cppclass deque[TYPE]: #constructors __init__() __init__(deque&) __init__(int) __init__(int, TYPE&) __init__(iterator, iterator) #operators TYPE& operator[]( size_type index ); const TYPE& operator[]( size_type index ) const; deque __new__(deque&); bool __eq__(deque&, deque&); bool __ne__(deque&, deque&); bool __lt__(deque&, deque&); bool __gt__(deque&, deque&); bool __le__(deque&, deque&); bool __ge__(deque&, deque&); #others void assign(int, TYPE&) void assign(iterator, iterator) TYPE& at(int) TYPE& back() iterator begin() void clear() bool empty() iterator end() iterator erase(iterator) iterator erase(iterator, iterator) TYPE& front() iterator insert(iterator, TYPE&) void insert(iterator, int, TYPE&) void insert(iterator, iterator, iterator) int max_size() void pop_back() void pop_front() void push_back(TYPE&) void push_front(TYPE&) iterator rbegin() iterator rend() void resize(int) void resize(int, TYPE&) int size() void swap(container&) Cython-0.26.1/Cython/Includes/Deprecated/python_getargs.pxd0000664000175000017500000000010513023021033024433 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.getargs cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_int.pxd0000664000175000017500000000010113023021033023565 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.int cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_complex.pxd0000664000175000017500000000010513023021033024446 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.complex cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_dict.pxd0000664000175000017500000000010213023021033023717 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.dict cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_oldbuffer.pxd0000664000175000017500000000010713023021033024751 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.oldbuffer cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_pycapsule.pxd0000664000175000017500000000010713023021033025006 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.pycapsule cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_weakref.pxd0000664000175000017500000000010513023021033024423 0ustar stefanstefan00000000000000# Present for 
backwards compatibility from cpython.weakref cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_float.pxd0000664000175000017500000000010313023021033024102 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.float cimport * Cython-0.26.1/Cython/Includes/Deprecated/stdlib.pxd0000664000175000017500000000010113023021033022653 0ustar stefanstefan00000000000000# Present for backwards compatibility from libc.stdlib cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_number.pxd0000664000175000017500000000010413023021033024266 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.number cimport * Cython-0.26.1/Cython/Includes/Deprecated/stdio.pxd0000664000175000017500000000010013023021033022513 0ustar stefanstefan00000000000000# Present for backwards compatibility from libc.stdio cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_list.pxd0000664000175000017500000000010213023021033023747 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.list cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_method.pxd0000664000175000017500000000010413023021033024256 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.method cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_iterator.pxd0000664000175000017500000000010613023021033024631 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.iterator cimport * Cython-0.26.1/Cython/Includes/Deprecated/python_bytes.pxd0000664000175000017500000000010313023021033024123 0ustar stefanstefan00000000000000# Present for backwards compatibility from cpython.bytes cimport * Cython-0.26.1/Cython/Includes/libc/0000775000175000017500000000000013151203436017551 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/libc/signal.pxd0000664000175000017500000000232713023021033021534 0ustar stefanstefan00000000000000# 7.14 Signal handling ctypedef void (*sighandler_t)(int SIGNUM) nogil cdef extern from "" nogil: ctypedef int sig_atomic_t enum: SIGABRT enum: SIGFPE enum: SIGILL enum: SIGINT enum: SIGSEGV enum: SIGTERM sighandler_t SIG_DFL sighandler_t SIG_IGN sighandler_t SIG_ERR sighandler_t signal (int signum, sighandler_t action) int raise_"raise" (int signum) cdef extern from "" nogil: # Program Error enum: SIGFPE enum: SIGILL enum: SIGSEGV enum: SIGBUS enum: SIGABRT enum: SIGIOT enum: SIGTRAP enum: SIGEMT enum: SIGSYS # Termination enum: SIGTERM enum: SIGINT enum: SIGQUIT enum: SIGKILL enum: SIGHUP # Alarm enum: SIGALRM enum: SIGVTALRM enum: SIGPROF # Asynchronous I/O enum: SIGIO enum: SIGURG enum: SIGPOLL # Job Control enum: SIGCHLD enum: SIGCLD enum: SIGCONT enum: SIGSTOP enum: SIGTSTP enum: SIGTTIN enum: SIGTTOU # Operation Error enum: SIGPIPE enum: SIGLOST enum: SIGXCPU enum: SIGXFSZ # Miscellaneous enum: SIGUSR1 enum: SIGUSR2 enum: SIGWINCH enum: SIGINFO Cython-0.26.1/Cython/Includes/libc/__init__.pxd0000664000175000017500000000001512542002467022025 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Includes/libc/locale.pxd0000664000175000017500000000216413023021033021515 0ustar stefanstefan00000000000000# 7.11 Localization # deprecated cimport for backwards compatibility: from libc.string cimport const_char cdef extern from "" nogil: struct lconv: char *decimal_point char *thousands_sep char *grouping char *mon_decimal_point char *mon_thousands_sep char *mon_grouping char *positive_sign char *negative_sign char *currency_symbol char frac_digits char p_cs_precedes 
char n_cs_precedes char p_sep_by_space char n_sep_by_space char p_sign_posn char n_sign_posn char *int_curr_symbol char int_frac_digits char int_p_cs_precedes char int_n_cs_precedes char int_p_sep_by_space char int_n_sep_by_space char int_p_sign_posn char int_n_sign_posn enum: LC_ALL enum: LC_COLLATE enum: LC_CTYPE enum: LC_MONETARY enum: LC_NUMERIC enum: LC_TIME # 7.11.1 Locale control char *setlocale (int category, const char *locale) # 7.11.2 Numeric formatting convention inquiry lconv *localeconv () Cython-0.26.1/Cython/Includes/libc/time.pxd0000664000175000017500000000244513023021033021216 0ustar stefanstefan00000000000000# http://en.wikipedia.org/wiki/C_date_and_time_functions from libc.stddef cimport wchar_t cdef extern from "" nogil: ctypedef long clock_t ctypedef long time_t enum: CLOCKS_PER_SEC clock_t clock() # CPU time time_t time(time_t *) # wall clock time since Unix epoch cdef struct tm: int tm_sec int tm_min int tm_hour int tm_mday int tm_mon int tm_year int tm_wday int tm_yday int tm_isdst char *tm_zone long tm_gmtoff int daylight # global state long timezone char *tzname[2] void tzset() char *asctime(const tm *) char *asctime_r(const tm *, char *) char *ctime(const time_t *) char *ctime_r(const time_t *, char *) double difftime(time_t, time_t) tm *getdate(const char *) tm *gmtime(const time_t *) tm *gmtime_r(const time_t *, tm *) tm *localtime(const time_t *) tm *localtime_r(const time_t *, tm *) time_t mktime(tm *) size_t strftime(char *, size_t, const char *, const tm *) size_t wcsftime(wchar_t *str, size_t cnt, const wchar_t *fmt, tm *time) # POSIX not stdC char *strptime(const char *, const char *, tm *) Cython-0.26.1/Cython/Includes/libc/stdint.pxd0000664000175000017500000000657113023021033021571 0ustar stefanstefan00000000000000# Longness only used for type promotion. # Actual compile time size used for conversions. 
# 7.18 Integer types cdef extern from "" nogil: # 7.18.1 Integer types # 7.18.1.1 Exact-width integer types ctypedef signed char int8_t ctypedef signed short int16_t ctypedef signed int int32_t ctypedef signed long int64_t ctypedef unsigned char uint8_t ctypedef unsigned short uint16_t ctypedef unsigned int uint32_t ctypedef unsigned long long uint64_t # 7.18.1.2 Minimum-width integer types ctypedef signed char int_least8_t ctypedef signed short int_least16_t ctypedef signed int int_least32_t ctypedef signed long int_least64_t ctypedef unsigned char uint_least8_t ctypedef unsigned short uint_least16_t ctypedef unsigned int uint_least32_t ctypedef unsigned long long uint_least64_t # 7.18.1.3 Fastest minimum-width integer types ctypedef signed char int_fast8_t ctypedef signed short int_fast16_t ctypedef signed int int_fast32_t ctypedef signed long int_fast64_t ctypedef unsigned char uint_fast8_t ctypedef unsigned short uint_fast16_t ctypedef unsigned int uint_fast32_t ctypedef unsigned long long uint_fast64_t # 7.18.1.4 Integer types capable of holding object pointers ctypedef ssize_t intptr_t ctypedef size_t uintptr_t # 7.18.1.5 Greatest-width integer types ctypedef signed long long intmax_t ctypedef unsigned long long uintmax_t # 7.18.2 Limits of specified-width integer types # 7.18.2.1 Limits of exact-width integer types int8_t INT8_MIN int16_t INT16_MIN int32_t INT32_MIN int64_t INT64_MIN int8_t INT8_MAX int16_t INT16_MAX int32_t INT32_MAX int64_t INT64_MAX uint8_t UINT8_MAX uint16_t UINT16_MAX uint32_t UINT32_MAX uint64_t UINT64_MAX #7.18.2.2 Limits of minimum-width integer types int_least8_t INT_LEAST8_MIN int_least16_t INT_LEAST16_MIN int_least32_t INT_LEAST32_MIN int_least64_t INT_LEAST64_MIN int_least8_t INT_LEAST8_MAX int_least16_t INT_LEAST16_MAX int_least32_t INT_LEAST32_MAX int_least64_t INT_LEAST64_MAX uint_least8_t UINT_LEAST8_MAX uint_least16_t UINT_LEAST16_MAX uint_least32_t UINT_LEAST32_MAX uint_least64_t UINT_LEAST64_MAX #7.18.2.3 Limits of fastest minimum-width integer types int_fast8_t INT_FAST8_MIN int_fast16_t INT_FAST16_MIN int_fast32_t INT_FAST32_MIN int_fast64_t INT_FAST64_MIN int_fast8_t INT_FAST8_MAX int_fast16_t INT_FAST16_MAX int_fast32_t INT_FAST32_MAX int_fast64_t INT_FAST64_MAX uint_fast8_t UINT_FAST8_MAX uint_fast16_t UINT_FAST16_MAX uint_fast32_t UINT_FAST32_MAX uint_fast64_t UINT_FAST64_MAX #7.18.2.4 Limits of integer types capable of holding object pointers enum: INTPTR_MIN enum: INTPTR_MAX enum: UINTPTR_MAX # 7.18.2.5 Limits of greatest-width integer types enum: INTMAX_MAX enum: INTMAX_MIN enum: UINTMAX_MAX # 7.18.3 Limits of other integer types # ptrdiff_t enum: PTRDIFF_MIN enum: PTRDIFF_MAX # sig_atomic_t enum: SIG_ATOMIC_MIN enum: SIG_ATOMIC_MAX # size_t size_t SIZE_MAX # wchar_t enum: WCHAR_MIN enum: WCHAR_MAX # wint_t enum: WINT_MIN enum: WINT_MAX Cython-0.26.1/Cython/Includes/libc/errno.pxd0000664000175000017500000000400213023021033021374 0ustar stefanstefan00000000000000# 7.5 Errors cdef extern from "" nogil: enum: EPERM ENOENT ESRCH EINTR EIO ENXIO E2BIG ENOEXEC EBADF ECHILD EAGAIN ENOMEM EACCES EFAULT ENOTBLK EBUSY EEXIST EXDEV ENODEV ENOTDIR EISDIR EINVAL ENFILE EMFILE ENOTTY ETXTBSY EFBIG ENOSPC ESPIPE EROFS EMLINK EPIPE EDOM ERANGE EDEADLOCK ENAMETOOLONG ENOLCK ENOSYS ENOTEMPTY ELOOP ENOMSG EIDRM ECHRNG EL2NSYNC EL3HLT EL3RST ELNRNG EUNATCH ENOCSI EL2HLT EBADE EBADR EXFULL ENOANO EBADRQC EBADSLT EBFONT ENOSTR ENODATA ENOATTR ETIME ENOSR ENONET ENOPKG EREMOTE ENOLINK EADV ESRMNT ECOMM EPROTO EMULTIHOP EDOTDOT EBADMSG EOVERFLOW ENOTUNIQ EBADFD 
EREMCHG ELIBACC ELIBBAD ELIBSCN ELIBMAX ELIBEXEC EILSEQ ERESTART ESTRPIPE EUSERS ENOTSOCK EDESTADDRREQ EMSGSIZE EPROTOTYPE ENOPROTOOPT EPROTONOSUPPORT ESOCKTNOSUPPORT EOPNOTSUPP EPFNOSUPPORT EAFNOSUPPORT EADDRINUSE EADDRNOTAVAIL ENETDOWN ENETUNREACH ENETRESET ECONNABORTED ECONNRESET ENOBUFS EISCONN ENOTCONN ESHUTDOWN ETOOMANYREFS ETIMEDOUT ECONNREFUSED EHOSTDOWN EHOSTUNREACH EALREADY EINPROGRESS ESTALE EUCLEAN ENOTNAM ENAVAIL EISNAM EREMOTEIO EDQUOT int errno Cython-0.26.1/Cython/Includes/libc/setjmp.pxd0000664000175000017500000000045113023021033021555 0ustar stefanstefan00000000000000cdef extern from "" nogil: ctypedef struct jmp_buf: pass int setjmp(jmp_buf state) void longjmp(jmp_buf state, int value) ctypedef struct sigjmp_buf: pass int sigsetjmp(sigjmp_buf state, int savesigs) void siglongjmp(sigjmp_buf state, int value) Cython-0.26.1/Cython/Includes/libc/stddef.pxd0000664000175000017500000000024413023021033021524 0ustar stefanstefan00000000000000# 7.17 Common definitions cdef extern from "": ctypedef signed int ptrdiff_t ctypedef unsigned int size_t ctypedef int wchar_t Cython-0.26.1/Cython/Includes/libc/limits.pxd0000664000175000017500000000073513023021033021561 0ustar stefanstefan00000000000000# 5.2.4.2.1 Sizes of integer types cdef extern from "": enum: CHAR_BIT enum: MB_LEN_MAX enum: CHAR_MIN enum: CHAR_MAX enum: SCHAR_MIN enum: SCHAR_MAX enum: UCHAR_MAX enum: SHRT_MIN enum: SHRT_MAX enum: USHRT_MAX enum: INT_MIN enum: INT_MAX enum: UINT_MAX enum: LONG_MIN enum: LONG_MAX enum: ULONG_MAX enum: LLONG_MIN enum: LLONG_MAX enum: ULLONG_MAX Cython-0.26.1/Cython/Includes/libc/float.pxd0000664000175000017500000000170613023021033021364 0ustar stefanstefan00000000000000# 5.2.4.2.2 Characteristics of floating types cdef extern from "": const float FLT_RADIX const float FLT_MANT_DIG const double DBL_MANT_DIG const long double LDBL_MANT_DIG const double DECIMAL_DIG const float FLT_DIG const double DBL_DIG const long double LDBL_DIG const float FLT_MIN_EXP const double DBL_MIN_EXP const long double LDBL_MIN_EXP const float FLT_MIN_10_EXP const double DBL_MIN_10_EXP const long double LDBL_MIN_10_EXP const float FLT_MAX_EXP const double DBL_MAX_EXP const long double LDBL_MAX_EXP const float FLT_MAX_10_EXP const double DBL_MAX_10_EXP const long double LDBL_MAX_10_EXP const float FLT_MAX const double DBL_MAX const long double LDBL_MAX const float FLT_EPSILON const double DBL_EPSILON const long double LDBL_EPSILON const float FLT_MIN const double DBL_MIN const long double LDBL_MIN Cython-0.26.1/Cython/Includes/libc/stdlib.pxd0000664000175000017500000000461413023021033021541 0ustar stefanstefan00000000000000# 7.20 General utilities # deprecated cimports for backwards compatibility: from libc.string cimport const_char, const_void cdef extern from "" nogil: # 7.20.1 Numeric conversion functions int atoi (const char *string) long atol (const char *string) long long atoll (const char *string) double atof (const char *string) long strtol (const char *string, char **tailptr, int base) unsigned long int strtoul (const char *string, char **tailptr, int base) long long int strtoll (const char *string, char **tailptr, int base) unsigned long long int strtoull (const char *string, char **tailptr, int base) float strtof (const char *string, char **tailptr) double strtod (const char *string, char **tailptr) long double strtold (const char *string, char **tailptr) # 7.20.2 Pseudo-random sequence generation functions enum: RAND_MAX int rand () void srand (unsigned int seed) # 7.20.3 Memory management functions 
void *calloc (size_t count, size_t eltsize) void free (void *ptr) void *malloc (size_t size) void *realloc (void *ptr, size_t newsize) # 7.20.4 Communication with the environment enum: EXIT_FAILURE enum: EXIT_SUCCESS void exit (int status) void _exit (int status) int atexit (void (*function) ()) void abort () char *getenv (const char *name) int system (const char *command) #7.20.5 Searching and sorting utilities void *bsearch (const void *key, const void *array, size_t count, size_t size, int (*compare)(const void *, const void *)) void qsort (void *array, size_t count, size_t size, int (*compare)(const void *, const void *)) # 7.20.6 Integer arithmetic functions int abs (int number) long int labs (long int number) long long int llabs (long long int number) ctypedef struct div_t: int quot int rem div_t div (int numerator, int denominator) ctypedef struct ldiv_t: long int quot long int rem ldiv_t ldiv (long int numerator, long int denominator) ctypedef struct lldiv_t: long long int quot long long int rem lldiv_t lldiv (long long int numerator, long long int denominator) # 7.20.7 Multibyte/wide character conversion functions # XXX TODO # 7.20.8 Multibyte/wide string conversion functions # XXX TODO Cython-0.26.1/Cython/Includes/libc/stdio.pxd0000664000175000017500000000465413023021033021406 0ustar stefanstefan00000000000000# 7.19 Input/output # deprecated cimports for backwards compatibility: from libc.string cimport const_char, const_void cdef extern from "" nogil: ctypedef struct FILE cdef FILE *stdin cdef FILE *stdout cdef FILE *stderr enum: FOPEN_MAX enum: FILENAME_MAX FILE *fopen (const char *filename, const char *opentype) FILE *freopen (const char *filename, const char *opentype, FILE *stream) FILE *fdopen (int fdescriptor, const char *opentype) int fclose (FILE *stream) int remove (const char *filename) int rename (const char *oldname, const char *newname) FILE *tmpfile () int remove (const char *pathname) int rename (const char *oldpath, const char *newpath) enum: _IOFBF enum: _IOLBF enum: _IONBF int setvbuf (FILE *stream, char *buf, int mode, size_t size) enum: BUFSIZ void setbuf (FILE *stream, char *buf) size_t fread (void *data, size_t size, size_t count, FILE *stream) size_t fwrite (const void *data, size_t size, size_t count, FILE *stream) int fflush (FILE *stream) enum: EOF void clearerr (FILE *stream) int feof (FILE *stream) int ferror (FILE *stream) enum: SEEK_SET enum: SEEK_CUR enum: SEEK_END int fseek (FILE *stream, long int offset, int whence) void rewind (FILE *stream) long int ftell (FILE *stream) ctypedef struct fpos_t ctypedef const fpos_t const_fpos_t "const fpos_t" int fgetpos (FILE *stream, fpos_t *position) int fsetpos (FILE *stream, const fpos_t *position) int scanf (const char *template, ...) int sscanf (const char *s, const char *template, ...) int fscanf (FILE *stream, const char *template, ...) int printf (const char *template, ...) int sprintf (char *s, const char *template, ...) int snprintf (char *s, size_t size, const char *template, ...) int fprintf (FILE *stream, const char *template, ...) 
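# --- Illustrative usage sketch (not part of the original declarations) ---
# A .pyx module might combine the libc.stdlib memory-management functions
# declared above like this.  The helper name `repeat_byte` is made up for
# the example.
from libc.stdlib cimport malloc, free

def repeat_byte(unsigned char c, size_t n):
    # Allocate a temporary C buffer, fill it, and copy it into a Python
    # bytes object before releasing the memory again.
    cdef char *buf = <char*> malloc(n + 1)
    cdef size_t i
    if buf == NULL:
        raise MemoryError()
    try:
        for i in range(n):
            buf[i] = c
        buf[n] = 0
        return buf[:n]        # slicing a char* copies the data into a new bytes object
    finally:
        free(buf)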
void perror (const char *message) char *gets (char *s) char *fgets (char *s, int count, FILE *stream) int getchar () int fgetc (FILE *stream) int getc (FILE *stream) int ungetc (int c, FILE *stream) int puts (const char *s) int fputs (const char *s, FILE *stream) int putchar (int c) int fputc (int c, FILE *stream) int putc (int c, FILE *stream) size_t getline(char **lineptr, size_t *n, FILE *stream) Cython-0.26.1/Cython/Includes/libc/string.pxd0000664000175000017500000000376613023021033021575 0ustar stefanstefan00000000000000# 7.21 String handling cdef extern from *: # deprecated backwards compatibility declarations ctypedef const char const_char "const char" ctypedef const signed char const_schar "const signed char" ctypedef const unsigned char const_uchar "const unsigned char" ctypedef const void const_void "const void" cdef extern from "" nogil: void *memcpy (void *pto, const void *pfrom, size_t size) void *memmove (void *pto, const void *pfrom, size_t size) void *memset (void *block, int c, size_t size) int memcmp (const void *a1, const void *a2, size_t size) void *memchr (const void *block, int c, size_t size) void *memchr (const void *block, int c, size_t size) void *memrchr (const void *block, int c, size_t size) size_t strlen (const char *s) char *strcpy (char *pto, const char *pfrom) char *strncpy (char *pto, const char *pfrom, size_t size) char *strdup (const char *s) char *strndup (const char *s, size_t size) char *strcat (char *pto, const char *pfrom) char *strncat (char *pto, const char *pfrom, size_t size) int strcmp (const char *s1, const char *s2) int strcasecmp (const char *s1, const char *s2) int strncmp (const char *s1, const char *s2, size_t size) int strncasecmp (const char *s1, const char *s2, size_t n) int strcoll (const char *s1, const char *s2) size_t strxfrm (char *pto, const char *pfrom, size_t size) char *strerror (int errnum) char *strchr (const char *string, int c) char *strrchr (const char *string, int c) char *strstr (const char *haystack, const char *needle) char *strcasestr (const char *haystack, const char *needle) size_t strcspn (const char *string, const char *stopset) size_t strspn (const char *string, const char *set) char * strpbrk (const char *string, const char *stopset) char *strtok (char *newstring, const char *delimiters) char *strsep (char **string_ptr, const char *delimiter) Cython-0.26.1/Cython/Includes/libc/math.pxd0000664000175000017500000000516413023021033021212 0ustar stefanstefan00000000000000cdef extern from "" nogil: double M_E double e "M_E" # as in Python's math module double M_LOG2E double M_LOG10E double M_LN2 double M_LN10 double M_PI double pi "M_PI" # as in Python's math module double M_PI_2 double M_PI_4 double M_1_PI double M_2_PI double M_2_SQRTPI double M_SQRT2 double M_SQRT1_2 # C99 constants float INFINITY float NAN # note: not providing "nan" and "inf" aliases here as nan() is a function in C double HUGE_VAL float HUGE_VALF long double HUGE_VALL double acos(double x) double asin(double x) double atan(double x) double atan2(double y, double x) double cos(double x) double sin(double x) double tan(double x) double cosh(double x) double sinh(double x) double tanh(double x) double acosh(double x) double asinh(double x) double atanh(double x) double hypot(double x, double y) double exp(double x) double exp2(double x) double expm1(double x) double log(double x) double logb(double x) double log2(double x) double log10(double x) double log1p(double x) int ilogb(double x) double lgamma(double x) double tgamma(double x) double 
frexp(double x, int* exponent) double ldexp(double x, int exponent) double modf(double x, double* iptr) double fmod(double x, double y) double remainder(double x, double y) double remquo(double x, double y, int *quot) double pow(double x, double y) double sqrt(double x) double cbrt(double x) double fabs(double x) double ceil(double x) double floor(double x) double trunc(double x) double rint(double x) double round(double x) double nearbyint(double x) double nextafter(double, double) double nexttoward(double, long double) long long llrint(double) long lrint(double) long long llround(double) long lround(double) double copysign(double, double) float copysignf(float, float) long double copysignl(long double, long double) double erf(double) float erff(float) long double erfl(long double) double erfc(double) float erfcf(float) long double erfcl(long double) double fdim(double x, double y) double fma(double x, double y, double z) double fmax(double x, double y) double fmin(double x, double y) double scalbln(double x, long n) double scalbn(double x, int n) double nan(const char*) int isinf(long double) # -1 / 0 / 1 bint isfinite(long double) bint isnan(long double) bint isnormal(long double) bint signbit(long double) Cython-0.26.1/Cython/Includes/cpython/0000775000175000017500000000000013151203436020324 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/cpython/pycapsule.pxd0000664000175000017500000001313112542002467023051 0ustar stefanstefan00000000000000 # available since Python 3.1! # note all char* in the below functions are actually const char* cdef extern from "Python.h": ctypedef struct PyCapsule_Type # This subtype of PyObject represents an opaque value, useful for # C extension modules who need to pass an opaque value (as a void* # pointer) through Python code to other C code. It is often used # to make a C function pointer defined in one module available to # other modules, so the regular import mechanism can be used to # access C APIs defined in dynamically loaded modules. ctypedef void (*PyCapsule_Destructor)(object o) # The type of a destructor callback for a capsule. # # See PyCapsule_New() for the semantics of PyCapsule_Destructor # callbacks. bint PyCapsule_CheckExact(object o) # Return true if its argument is a PyCapsule. object PyCapsule_New(void *pointer, char *name, PyCapsule_Destructor destructor) # Return value: New reference. # # Create a PyCapsule encapsulating the pointer. The pointer # argument may not be NULL. # # On failure, set an exception and return NULL. # # The name string may either be NULL or a pointer to a valid C # string. If non-NULL, this string must outlive the # capsule. (Though it is permitted to free it inside the # destructor.) # # If the destructor argument is not NULL, it will be called with # the capsule as its argument when it is destroyed. # # If this capsule will be stored as an attribute of a module, the # name should be specified as modulename.attributename. This will # enable other modules to import the capsule using # PyCapsule_Import(). void* PyCapsule_GetPointer(object capsule, char *name) except? NULL # Retrieve the pointer stored in the capsule. On failure, set an # exception and return NULL. # # The name parameter must compare exactly to the name stored in # the capsule. If the name stored in the capsule is NULL, the name # passed in must also be NULL. Python uses the C function strcmp() # to compare capsule names. PyCapsule_Destructor PyCapsule_GetDestructor(object capsule) except? 
NULL # Return the current destructor stored in the capsule. On failure, # set an exception and return NULL. # # It is legal for a capsule to have a NULL destructor. This makes # a NULL return code somewhat ambiguous; use PyCapsule_IsValid() # or PyErr_Occurred() to disambiguate. char* PyCapsule_GetName(object capsule) except? NULL # Return the current name stored in the capsule. On failure, set # an exception and return NULL. # # It is legal for a capsule to have a NULL name. This makes a NULL # return code somewhat ambiguous; use PyCapsule_IsValid() or # PyErr_Occurred() to disambiguate. void* PyCapsule_GetContext(object capsule) except? NULL # Return the current context stored in the capsule. On failure, # set an exception and return NULL. # # It is legal for a capsule to have a NULL context. This makes a # NULL return code somewhat ambiguous; use PyCapsule_IsValid() or # PyErr_Occurred() to disambiguate. bint PyCapsule_IsValid(object capsule, char *name) # Determines whether or not capsule is a valid capsule. A valid # capsule is non-NULL, passes PyCapsule_CheckExact(), has a # non-NULL pointer stored in it, and its internal name matches the # name parameter. (See PyCapsule_GetPointer() for information on # how capsule names are compared.) # # In other words, if PyCapsule_IsValid() returns a true value, # calls to any of the accessors (any function starting with # PyCapsule_Get()) are guaranteed to succeed. # # Return a nonzero value if the object is valid and matches the # name passed in. Return 0 otherwise. This function will not fail. int PyCapsule_SetPointer(object capsule, void *pointer) except -1 # Set the void pointer inside capsule to pointer. The pointer may # not be NULL. # # Return 0 on success. Return nonzero and set an exception on # failure. int PyCapsule_SetDestructor(object capsule, PyCapsule_Destructor destructor) except -1 # Set the destructor inside capsule to destructor. # # Return 0 on success. Return nonzero and set an exception on # failure. int PyCapsule_SetName(object capsule, char *name) except -1 # Set the name inside capsule to name. If non-NULL, the name must # outlive the capsule. If the previous name stored in the capsule # was not NULL, no attempt is made to free it. # # Return 0 on success. Return nonzero and set an exception on # failure. int PyCapsule_SetContext(object capsule, void *context) except -1 # Set the context pointer inside capsule to context. Return 0 on # success. Return nonzero and set an exception on failure. void* PyCapsule_Import(char *name, int no_block) except? NULL # Import a pointer to a C object from a capsule attribute in a # module. The name parameter should specify the full name to the # attribute, as in module.attribute. The name stored in the # capsule must match this string exactly. If no_block is true, # import the module without blocking (using # PyImport_ImportModuleNoBlock()). If no_block is false, import # the module conventionally (using PyImport_ImportModule()). # # Return the capsule’s internal pointer on success. On failure, # set an exception and return NULL. However, if PyCapsule_Import() # failed to import the module, and no_block was true, no exception # is set. Cython-0.26.1/Cython/Includes/cpython/version.pxd0000664000175000017500000000151712542002467022536 0ustar stefanstefan00000000000000# Python version constants # # It's better to evaluate these at runtime (i.e. 
C compile time) using # # if PY_MAJOR_VERSION >= 3: # do_stuff_in_Py3_0_and_later() # if PY_VERSION_HEX >= 0x02070000: # do_stuff_in_Py2_7_and_later() # # than using the IF/DEF statements, which are evaluated at Cython # compile time. This will keep your C code portable. cdef extern from *: # the complete version, e.g. 0x010502B2 == 1.5.2b2 int PY_VERSION_HEX # the individual sections as plain numbers int PY_MAJOR_VERSION int PY_MINOR_VERSION int PY_MICRO_VERSION int PY_RELEASE_LEVEL int PY_RELEASE_SERIAL # Note: PY_RELEASE_LEVEL is one of # 0xA (alpha) # 0xB (beta) # 0xC (release candidate) # 0xF (final) char PY_VERSION[] char PY_PATCHLEVEL_REVISION[] Cython-0.26.1/Cython/Includes/cpython/list.pxd0000664000175000017500000000776412542002467022036 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # Lists ############################################################################ list PyList_New(Py_ssize_t len) # Return a new list of length len on success, or NULL on failure. # # Note: If length is greater than zero, the returned list object's # items are set to NULL. Thus you cannot use abstract API # functions such as PySequence_SetItem() or expose the object to # Python code before setting all items to a real object with # PyList_SetItem(). bint PyList_Check(object p) # Return true if p is a list object or an instance of a subtype of # the list type. bint PyList_CheckExact(object p) # Return true if p is a list object, but not an instance of a # subtype of the list type. Py_ssize_t PyList_Size(object list) except -1 # Return the length of the list object in list; this is equivalent # to "len(list)" on a list object. Py_ssize_t PyList_GET_SIZE(object list) # Macro form of PyList_Size() without error checking. PyObject* PyList_GetItem(object list, Py_ssize_t index) except NULL # Return value: Borrowed reference. # Return the object at position pos in the list pointed to by # p. The position must be positive, indexing from the end of the # list is not supported. If pos is out of bounds, return NULL and # set an IndexError exception. PyObject* PyList_GET_ITEM(object list, Py_ssize_t i) # Return value: Borrowed reference. # Macro form of PyList_GetItem() without error checking. int PyList_SetItem(object list, Py_ssize_t index, object item) except -1 # Set the item at index index in list to item. Return 0 on success # or -1 on failure. Note: This function ``steals'' a reference to # item and discards a reference to an item already in the list at # the affected position. void PyList_SET_ITEM(object list, Py_ssize_t i, object o) # Macro form of PyList_SetItem() without error checking. This is # normally only used to fill in new lists where there is no # previous content. Note: This function ``steals'' a reference to # item, and, unlike PyList_SetItem(), does not discard a reference # to any item that it being replaced; any reference in list at # position i will be *leaked*. int PyList_Insert(object list, Py_ssize_t index, object item) except -1 # Insert the item item into list list in front of index # index. Return 0 if successful; return -1 and set an exception if # unsuccessful. Analogous to list.insert(index, item). int PyList_Append(object list, object item) except -1 # Append the object item at the end of list list. Return 0 if # successful; return -1 and set an exception if # unsuccessful. Analogous to list.append(item). 
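# --- Illustrative usage sketch (not part of the original declarations) ---
# Filling a pre-sized list with PyList_New()/PyList_SET_ITEM() as described
# in the comments above: every slot is set before the list is exposed, and
# Py_INCREF compensates for the reference that PyList_SET_ITEM steals.
# The function name `squares` is made up for the example.
from cpython.list cimport PyList_New, PyList_SET_ITEM
from cpython.ref cimport Py_INCREF

def squares(Py_ssize_t n):
    cdef list result = PyList_New(n)
    cdef Py_ssize_t i
    for i in range(n):
        item = i * i                  # a new Python int
        Py_INCREF(item)               # PyList_SET_ITEM steals this reference
        PyList_SET_ITEM(result, i, item)
    return result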
list PyList_GetSlice(object list, Py_ssize_t low, Py_ssize_t high) # Return value: New reference. # Return a list of the objects in list containing the objects # between low and high. Return NULL and set an exception if # unsuccessful. Analogous to list[low:high]. int PyList_SetSlice(object list, Py_ssize_t low, Py_ssize_t high, object itemlist) except -1 # Set the slice of list between low and high to the contents of # itemlist. Analogous to list[low:high] = itemlist. The itemlist # may be NULL, indicating the assignment of an empty list (slice # deletion). Return 0 on success, -1 on failure. int PyList_Sort(object list) except -1 # Sort the items of list in place. Return 0 on success, -1 on # failure. This is equivalent to "list.sort()". int PyList_Reverse(object list) except -1 # Reverse the items of list in place. Return 0 on success, -1 on # failure. This is the equivalent of "list.reverse()". tuple PyList_AsTuple(object list) # Return value: New reference. # Return a new tuple object containing the contents of list; # equivalent to "tuple(list)". Cython-0.26.1/Cython/Includes/cpython/dict.pxd0000664000175000017500000001531312542002467021773 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # 7.4.1 Dictionary Objects ############################################################################ # PyDictObject # # This subtype of PyObject represents a Python dictionary object # (i.e. the 'dict' type). # PyTypeObject PyDict_Type # # This instance of PyTypeObject represents the Python dictionary # type. This is exposed to Python programs as dict and # types.DictType. bint PyDict_Check(object p) # Return true if p is a dict object or an instance of a subtype of # the dict type. bint PyDict_CheckExact(object p) # Return true if p is a dict object, but not an instance of a # subtype of the dict type. dict PyDict_New() # Return value: New reference. # Return a new empty dictionary, or NULL on failure. object PyDictProxy_New(object dict) # Return value: New reference. # Return a proxy object for a mapping which enforces read-only # behavior. This is normally used to create a proxy to prevent # modification of the dictionary for non-dynamic class types. void PyDict_Clear(object p) # Empty an existing dictionary of all key-value pairs. int PyDict_Contains(object p, object key) except -1 # Determine if dictionary p contains key. If an item in p is # matches key, return 1, otherwise return 0. On error, return # -1. This is equivalent to the Python expression "key in p". dict PyDict_Copy(object p) # Return value: New reference. # Return a new dictionary that contains the same key-value pairs as p. int PyDict_SetItem(object p, object key, object val) except -1 # Insert value into the dictionary p with a key of key. key must # be hashable; if it isn't, TypeError will be raised. Return 0 on # success or -1 on failure. int PyDict_SetItemString(object p, char *key, object val) except -1 # Insert value into the dictionary p using key as a key. key # should be a char*. The key object is created using # PyString_FromString(key). Return 0 on success or -1 on failure. int PyDict_DelItem(object p, object key) except -1 # Remove the entry in dictionary p with key key. key must be # hashable; if it isn't, TypeError is raised. Return 0 on success # or -1 on failure. 
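# --- Illustrative usage sketch (not part of the original declarations) ---
# Basic use of the dictionary functions declared above from a .pyx module.
# The function name `count_words` is made up for the example.
from cpython.dict cimport PyDict_New, PyDict_SetItem, PyDict_Contains

def count_words(words):
    cdef dict counts = PyDict_New()
    for w in words:
        if PyDict_Contains(counts, w):
            counts[w] += 1            # ordinary subscripting still works
        else:
            PyDict_SetItem(counts, w, 1)
    return counts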
int PyDict_DelItemString(object p, char *key) except -1 # Remove the entry in dictionary p which has a key specified by # the string key. Return 0 on success or -1 on failure. PyObject* PyDict_GetItem(object p, object key) # Return value: Borrowed reference. # Return the object from dictionary p which has a key key. Return # NULL if the key key is not present, but without setting an # exception. PyObject* PyDict_GetItemString(object p, char *key) # Return value: Borrowed reference. # This is the same as PyDict_GetItem(), but key is specified as a # char*, rather than a PyObject*. list PyDict_Items(object p) # Return value: New reference. # Return a PyListObject containing all the items from the # dictionary, as in the dictionary method items() (see the Python # Library Reference). list PyDict_Keys(object p) # Return value: New reference. # Return a PyListObject containing all the keys from the # dictionary, as in the dictionary method keys() (see the Python # Library Reference). list PyDict_Values(object p) # Return value: New reference. # Return a PyListObject containing all the values from the # dictionary p, as in the dictionary method values() (see the # Python Library Reference). Py_ssize_t PyDict_Size(object p) except -1 # Return the number of items in the dictionary. This is equivalent # to "len(p)" on a dictionary. int PyDict_Next(object p, Py_ssize_t *ppos, PyObject* *pkey, PyObject* *pvalue) # Iterate over all key-value pairs in the dictionary p. The int # referred to by ppos must be initialized to 0 prior to the first # call to this function to start the iteration; the function # returns true for each pair in the dictionary, and false once all # pairs have been reported. The parameters pkey and pvalue should # either point to PyObject* variables that will be filled in with # each key and value, respectively, or may be NULL. Any references # returned through them are borrowed. ppos should not be altered # during iteration. Its value represents offsets within the # internal dictionary structure, and since the structure is # sparse, the offsets are not consecutive. # For example: # #object key, *value; #int pos = 0; # #while (PyDict_Next(self->dict, &pos, &key, &value)) { # /* do something interesting with the values... */ # ... #} # The dictionary p should not be mutated during iteration. It is # safe (since Python 2.1) to modify the values of the keys as you # iterate over the dictionary, but only so long as the set of keys # does not change. For example: # object key, *value; # int pos = 0; # while (PyDict_Next(self->dict, &pos, &key, &value)) { # int i = PyInt_AS_LONG(value) + 1; # object o = PyInt_FromLong(i); # if (o == NULL) # return -1; # if (PyDict_SetItem(self->dict, key, o) < 0) { # Py_DECREF(o); # return -1; # } # Py_DECREF(o); # } int PyDict_Merge(object a, object b, int override) except -1 # Iterate over mapping object b adding key-value pairs to # dictionary a. b may be a dictionary, or any object supporting # PyMapping_Keys() and PyObject_GetItem(). If override is true, # existing pairs in a will be replaced if a matching key is found # in b, otherwise pairs will only be added if there is not a # matching key in a. Return 0 on success or -1 if an exception was # raised. int PyDict_Update(object a, object b) except -1 # This is the same as PyDict_Merge(a, b, 1) in C, or a.update(b) # in Python. Return 0 on success or -1 if an exception was raised. 
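# --- Illustrative usage sketch (not part of the original declarations) ---
# The PyDict_Next() loop shown in C above, written as it might appear in a
# .pyx module.  The borrowed PyObject* is cast to <object>, which makes
# Cython take its own reference.  The function name `sum_values` is made up.
from cpython.dict cimport PyDict_Next
from cpython.ref cimport PyObject

def sum_values(dict d):
    cdef Py_ssize_t pos = 0
    cdef PyObject *key
    cdef PyObject *value
    total = 0
    while PyDict_Next(d, &pos, &key, &value):
        total += <object>value        # borrowed reference; cast before use
    return total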
int PyDict_MergeFromSeq2(object a, object seq2, int override) except -1 # Update or merge into dictionary a, from the key-value pairs in # seq2. seq2 must be an iterable object producing iterable objects # of length 2, viewed as key-value pairs. In case of duplicate # keys, the last wins if override is true, else the first # wins. Return 0 on success or -1 if an exception was # raised. Equivalent Python (except for the return value): # #def PyDict_MergeFromSeq2(a, seq2, override): # for key, value in seq2: # if override or key not in a: # a[key] = value Cython-0.26.1/Cython/Includes/cpython/bytes.pxd0000664000175000017500000002326212542002467022200 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ctypedef struct va_list ############################################################################ # 7.3.1 String Objects ############################################################################ # These functions raise TypeError when expecting a string # parameter and are called with a non-string parameter. # PyStringObject # This subtype of PyObject represents a Python bytes object. # PyTypeObject PyBytes_Type # This instance of PyTypeObject represents the Python bytes type; # it is the same object as bytes and types.BytesType in the Python # layer. bint PyBytes_Check(object o) # Return true if the object o is a string object or an instance of # a subtype of the string type. bint PyBytes_CheckExact(object o) # Return true if the object o is a string object, but not an instance of a subtype of the string type. bytes PyBytes_FromString(char *v) # Return value: New reference. # Return a new string object with the value v on success, and NULL # on failure. The parameter v must not be NULL; it will not be # checked. bytes PyBytes_FromStringAndSize(char *v, Py_ssize_t len) # Return value: New reference. # Return a new string object with the value v and length len on # success, and NULL on failure. If v is NULL, the contents of the # string are uninitialized. bytes PyBytes_FromFormat(char *format, ...) # Return value: New reference. # Take a C printf()-style format string and a variable number of # arguments, calculate the size of the resulting Python string and # return a string with the values formatted into it. The variable # arguments must be C types and must correspond exactly to the # format characters in the format string. The following format # characters are allowed: # Format Characters Type Comment # %% n/a The literal % character. # %c int A single character, represented as an C int. # %d int Exactly equivalent to printf("%d"). # %u unsigned int Exactly equivalent to printf("%u"). # %ld long Exactly equivalent to printf("%ld"). # %lu unsigned long Exactly equivalent to printf("%lu"). # %zd Py_ssize_t Exactly equivalent to printf("%zd"). # %zu size_t Exactly equivalent to printf("%zu"). # %i int Exactly equivalent to printf("%i"). # %x int Exactly equivalent to printf("%x"). # %s char* A null-terminated C character array. # %p void* The hex representation of a C pointer. # Mostly equivalent to printf("%p") except that it is guaranteed to # start with the literal 0x regardless of what the platform's printf # yields. # An unrecognized format character causes all the rest of the # format string to be copied as-is to the result string, and any # extra arguments discarded. bytes PyBytes_FromFormatV(char *format, va_list vargs) # Return value: New reference. # Identical to PyBytes_FromFormat() except that it takes exactly two arguments. 
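# --- Illustrative usage sketch (not part of the original declarations) ---
# Creating a bytes object directly from a C buffer with
# PyBytes_FromStringAndSize(), as declared above.  The function name
# `first_half` is made up for the example.
from cpython.bytes cimport PyBytes_FromStringAndSize

def first_half(bytes data):
    # Copies len(data) // 2 bytes out of the existing buffer into a new object.
    return PyBytes_FromStringAndSize(data, len(data) // 2)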
Py_ssize_t PyBytes_Size(object string) except -1 # Return the length of the string in string object string. Py_ssize_t PyBytes_GET_SIZE(object string) # Macro form of PyBytes_Size() but without error checking. char* PyBytes_AsString(object string) except NULL # Return a NUL-terminated representation of the contents of # string. The pointer refers to the internal buffer of string, not # a copy. The data must not be modified in any way, unless the # string was just created using PyBytes_FromStringAndSize(NULL, # size). It must not be deallocated. If string is a Unicode # object, this function computes the default encoding of string # and operates on that. If string is not a string object at all, # PyBytes_AsString() returns NULL and raises TypeError. char* PyBytes_AS_STRING(object string) # Macro form of PyBytes_AsString() but without error # checking. Only string objects are supported; no Unicode objects # should be passed. int PyBytes_AsStringAndSize(object obj, char **buffer, Py_ssize_t *length) except -1 # Return a NULL-terminated representation of the contents of the # object obj through the output variables buffer and length. # # The function accepts both string and Unicode objects as # input. For Unicode objects it returns the default encoded # version of the object. If length is NULL, the resulting buffer # may not contain NUL characters; if it does, the function returns # -1 and a TypeError is raised. # The buffer refers to an internal string buffer of obj, not a # copy. The data must not be modified in any way, unless the # string was just created using PyBytes_FromStringAndSize(NULL, # size). It must not be deallocated. If string is a Unicode # object, this function computes the default encoding of string # and operates on that. If string is not a string object at all, # PyBytes_AsStringAndSize() returns -1 and raises TypeError. void PyBytes_Concat(PyObject **string, object newpart) # Create a new string object in *string containing the contents of # newpart appended to string; the caller will own the new # reference. The reference to the old value of string will be # stolen. If the new string cannot be created, the old reference # to string will still be discarded and the value of *string will # be set to NULL; the appropriate exception will be set. void PyBytes_ConcatAndDel(PyObject **string, object newpart) # Create a new string object in *string containing the contents of # newpart appended to string. This version decrements the # reference count of newpart. int _PyBytes_Resize(PyObject **string, Py_ssize_t newsize) except -1 # A way to resize a string object even though it is # ``immutable''. Only use this to build up a brand new string # object; don't use this if the string may already be known in # other parts of the code. It is an error to call this function if # the refcount on the input string object is not one. Pass the # address of an existing string object as an lvalue (it may be # written into), and the new size desired. On success, *string # holds the resized string object and 0 is returned; the address # in *string may differ from its input value. If the reallocation # fails, the original string object at *string is deallocated, # *string is set to NULL, a memory exception is set, and -1 is # returned. bytes PyBytes_Format(object format, object args) # Return value: New reference. Return a new string object from # format and args. Analogous to format % args. The args argument # must be a tuple. 
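# --- Illustrative usage sketch (not part of the original declarations) ---
# Reading the internal buffer of a bytes object with
# PyBytes_AsStringAndSize(), as described above.  The function name
# `byte_sum` is made up for the example.
from cpython.bytes cimport PyBytes_AsStringAndSize

def byte_sum(bytes data):
    cdef char *buf = NULL
    cdef Py_ssize_t length = 0
    cdef Py_ssize_t i
    cdef unsigned long total = 0
    # A failure raises TypeError, propagated via the `except -1` declaration.
    PyBytes_AsStringAndSize(data, &buf, &length)
    for i in range(length):
        total += <unsigned char>buf[i]
    return total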
void PyBytes_InternInPlace(PyObject **string) # Intern the argument *string in place. The argument must be the # address of a pointer variable pointing to a Python string # object. If there is an existing interned string that is the same # as *string, it sets *string to it (decrementing the reference # count of the old string object and incrementing the reference # count of the interned string object), otherwise it leaves # *string alone and interns it (incrementing its reference # count). (Clarification: even though there is a lot of talk about # reference counts, think of this function as # reference-count-neutral; you own the object after the call if # and only if you owned it before the call.) bytes PyBytes_InternFromString(char *v) # Return value: New reference. # A combination of PyBytes_FromString() and # PyBytes_InternInPlace(), returning either a new string object # that has been interned, or a new (``owned'') reference to an # earlier interned string object with the same value. object PyBytes_Decode(char *s, Py_ssize_t size, char *encoding, char *errors) # Return value: New reference. # Create an object by decoding size bytes of the encoded buffer s # using the codec registered for encoding. encoding and errors # have the same meaning as the parameters of the same name in the # unicode() built-in function. The codec to be used is looked up # using the Python codec registry. Return NULL if an exception was # raised by the codec. object PyBytes_AsDecodedObject(object str, char *encoding, char *errors) # Return value: New reference. # Decode a string object by passing it to the codec registered for # encoding and return the result as Python object. encoding and # errors have the same meaning as the parameters of the same name # in the string encode() method. The codec to be used is looked up # using the Python codec registry. Return NULL if an exception was # raised by the codec. object PyBytes_Encode(char *s, Py_ssize_t size, char *encoding, char *errors) # Return value: New reference. # Encode the char buffer of the given size by passing it to the # codec registered for encoding and return a Python # object. encoding and errors have the same meaning as the # parameters of the same name in the string encode() method. The # codec to be used is looked up using the Python codec # registry. Return NULL if an exception was raised by the codec. object PyBytes_AsEncodedObject(object str, char *encoding, char *errors) # Return value: New reference. # Encode a string object using the codec registered for encoding # and return the result as Python object. encoding and errors have # the same meaning as the parameters of the same name in the # string encode() method. The codec to be used is looked up using # the Python codec registry. Return NULL if an exception was # raised by the codec. Cython-0.26.1/Cython/Includes/cpython/cobject.pxd0000664000175000017500000000276412542002467022467 0ustar stefanstefan00000000000000 cdef extern from "Python.h": ########################################################################### # Warning: # # The CObject API is deprecated as of Python 3.1. Please switch to # the new Capsules API. ########################################################################### int PyCObject_Check(object p) # Return true if its argument is a PyCObject. object PyCObject_FromVoidPtr(void* cobj, void (*destr)(void *)) # Return value: New reference. # # Create a PyCObject from the void * cobj. The destr function will # be called when the object is reclaimed, unless it is NULL. 
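# --- Illustrative usage sketch (not part of the original declarations) ---
# The deprecation note above recommends the capsule API; this is roughly how
# the PyCapsule functions declared earlier in cpython/pycapsule.pxd replace a
# PyCObject for passing a C pointer through Python code.  The capsule name
# "demo.value" and both helper functions are made up for the example.
from cpython.pycapsule cimport PyCapsule_New, PyCapsule_GetPointer

cdef double _shared_value = 3.14

def export_pointer():
    # Wrap a C pointer in a capsule; NULL means "no destructor".
    return PyCapsule_New(<void*>&_shared_value, b"demo.value", NULL)

def read_pointer(capsule):
    # Retrieve the pointer again; the name must match exactly.
    cdef double *p = <double*> PyCapsule_GetPointer(capsule, b"demo.value")
    return p[0]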
object PyCObject_FromVoidPtrAndDesc(void* cobj, void* desc, void (*destr)(void *, void *)) # Return value: New reference. # # Create a PyCObject from the void * cobj. The destr function will # be called when the object is reclaimed. The desc argument can be # used to pass extra callback data for the destructor function. void* PyCObject_AsVoidPtr(object self) except? NULL # Return the object void * that the PyCObject self was created with. void* PyCObject_GetDesc(object self) except? NULL # Return the description void * that the PyCObject self was created with. int PyCObject_SetVoidPtr(object self, void* cobj) except 0 # Set the void pointer inside self to cobj. The PyCObject must not # have an associated destructor. Return true on success, false on # failure. Cython-0.26.1/Cython/Includes/cpython/module.pxd0000664000175000017500000002173213143605603022336 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ctypedef struct _inittab ##################################################################### # 5.3 Importing Modules ##################################################################### object PyImport_ImportModule(char *name) # Return value: New reference. # This is a simplified interface to PyImport_ImportModuleEx() # below, leaving the globals and locals arguments set to # NULL. When the name argument contains a dot (when it specifies a # submodule of a package), the fromlist argument is set to the # list ['*'] so that the return value is the named module rather # than the top-level package containing it as would otherwise be # the case. (Unfortunately, this has an additional side effect # when name in fact specifies a subpackage instead of a submodule: # the submodules specified in the package's __all__ variable are # loaded.) Return a new reference to the imported module, or NULL # with an exception set on failure. object PyImport_ImportModuleEx(char *name, object globals, object locals, object fromlist) # Return value: New reference. # Import a module. This is best described by referring to the # built-in Python function __import__(), as the standard # __import__() function calls this function directly. # The return value is a new reference to the imported module or # top-level package, or NULL with an exception set on failure # (before Python 2.4, the module may still be created in this # case). Like for __import__(), the return value when a submodule # of a package was requested is normally the top-level package, # unless a non-empty fromlist was given. Changed in version 2.4: # failing imports remove incomplete module objects. object PyImport_ImportModuleLevel(char *name, object globals, object locals, object fromlist, int level) # Return value: New reference. # Import a module. This is best described by referring to the # built-in Python function __import__(), as the standard # __import__() function calls this function directly. # The return value is a new reference to the imported module or # top-level package, or NULL with an exception set on failure. Like # for __import__(), the return value when a submodule of a package # was requested is normally the top-level package, unless a # non-empty fromlist was given. object PyImport_Import(object name) # Return value: New reference. # This is a higher-level interface that calls the current ``import # hook function''. It invokes the __import__() function from the # __builtins__ of the current globals. 
This means that the import # is done using whatever import hooks are installed in the current # environment, e.g. by rexec or ihooks. object PyImport_ReloadModule(object m) # Return value: New reference. # Reload a module. This is best described by referring to the # built-in Python function reload(), as the standard reload() # function calls this function directly. Return a new reference to # the reloaded module, or NULL with an exception set on failure # (the module still exists in this case). PyObject* PyImport_AddModule(char *name) except NULL # Return value: Borrowed reference. # Return the module object corresponding to a module name. The # name argument may be of the form package.module. First check the # modules dictionary if there's one there, and if not, create a # new one and insert it in the modules dictionary. Return NULL # with an exception set on failure. Note: This function does not # load or import the module; if the module wasn't already loaded, # you will get an empty module object. Use PyImport_ImportModule() # or one of its variants to import a module. Package structures # implied by a dotted name for name are not created if not already # present. object PyImport_ExecCodeModule(char *name, object co) # Return value: New reference. # Given a module name (possibly of the form package.module) and a # code object read from a Python bytecode file or obtained from # the built-in function compile(), load the module. Return a new # reference to the module object, or NULL with an exception set if # an error occurred. Name is removed from sys.modules in error # cases, and even if name was already in sys.modules on entry to # PyImport_ExecCodeModule(). Leaving incompletely initialized # modules in sys.modules is dangerous, as imports of such modules # have no way to know that the module object is an unknown (and # probably damaged with respect to the module author's intents) # state. # This function will reload the module if it was already # imported. See PyImport_ReloadModule() for the intended way to # reload a module. # If name points to a dotted name of the form package.module, any # package structures not already created will still not be # created. long PyImport_GetMagicNumber() # Return the magic number for Python bytecode files (a.k.a. .pyc # and .pyo files). The magic number should be present in the first # four bytes of the bytecode file, in little-endian byte order. PyObject* PyImport_GetModuleDict() except NULL # Return value: Borrowed reference. # Return the dictionary used for the module administration # (a.k.a. sys.modules). Note that this is a per-interpreter # variable. int PyImport_ImportFrozenModule(char *name) except -1 # Load a frozen module named name. Return 1 for success, 0 if the # module is not found, and -1 with an exception set if the # initialization failed. To access the imported module on a # successful load, use PyImport_ImportModule(). (Note the misnomer # -- this function would reload the module if it was already # imported.) int PyImport_ExtendInittab(_inittab *newtab) except -1 # Add a collection of modules to the table of built-in # modules. The newtab array must end with a sentinel entry which # contains NULL for the name field; failure to provide the # sentinel value can result in a memory fault. Returns 0 on # success or -1 if insufficient memory could be allocated to # extend the internal table. In the event of failure, no modules # are added to the internal table. This should be called before # Py_Initialize(). 
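# --- Illustrative usage sketch (not part of the original declarations) ---
# Importing a module through the wrapper declared above.  Because
# PyImport_ImportModule() is declared as returning `object` (a new
# reference), Cython handles the reference counting automatically.  The
# stdlib "math" module is used only as an example target.
from cpython.module cimport PyImport_ImportModule

def get_pi():
    mod = PyImport_ImportModule(b"math")   # propagates ImportError on failure
    return mod.pi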
##################################################################### # 7.5.5 Module Objects ##################################################################### # PyTypeObject PyModule_Type # # This instance of PyTypeObject represents the Python module # type. This is exposed to Python programs as types.ModuleType. bint PyModule_Check(object p) # Return true if p is a module object, or a subtype of a module # object. bint PyModule_CheckExact(object p) # Return true if p is a module object, but not a subtype of PyModule_Type. object PyModule_New(char *name) # Return value: New reference. # Return a new module object with the __name__ attribute set to # name. Only the module's __doc__ and __name__ attributes are # filled in; the caller is responsible for providing a __file__ # attribute. PyObject* PyModule_GetDict(object module) except NULL # Return value: Borrowed reference. # Return the dictionary object that implements module's namespace; # this object is the same as the __dict__ attribute of the module # object. This function never fails. It is recommended extensions # use other PyModule_*() and PyObject_*() functions rather than # directly manipulate a module's __dict__. char* PyModule_GetName(object module) except NULL # Return module's __name__ value. If the module does not provide # one, or if it is not a string, SystemError is raised and NULL is # returned. char* PyModule_GetFilename(object module) except NULL # Return the name of the file from which module was loaded using # module's __file__ attribute. If this is not defined, or if it is # not a string, raise SystemError and return NULL. int PyModule_AddObject(object module, char *name, object value) except -1 # Add an object to module as name. This is a convenience function # which can be used from the module's initialization # function. This steals a reference to value. Return -1 on error, # 0 on success. int PyModule_AddIntConstant(object module, char *name, long value) except -1 # Add an integer constant to module as name. This convenience # function can be used from the module's initialization # function. Return -1 on error, 0 on success. int PyModule_AddStringConstant(object module, char *name, char *value) except -1 # Add a string constant to module as name. This convenience # function can be used from the module's initialization # function. The string value must be null-terminated. Return -1 on # error, 0 on success. Cython-0.26.1/Cython/Includes/cpython/__init__.pxd0000664000175000017500000002007412542002467022607 0ustar stefanstefan00000000000000##################################################################### # # These are the Cython pxd files for (most of) the Python/C API. # # REFERENCE COUNTING: # # JUST TO SCARE YOU: # If you are going to use any of the Python/C API in your Cython # program, you might be responsible for doing reference counting. # Read http://docs.python.org/api/refcounts.html which is so # important I've copied it below. # # For all the declaration below, whenver the Py_ function returns # a *new reference* to a PyObject*, the return type is "object". # When the function returns a borrowed reference, the return # type is PyObject*. When Cython sees "object" as a return type # it doesn't increment the reference count. When it sees PyObject* # in order to use the result you must explicitly cast to , # and when you do that Cython increments the reference count wether # you want it to or not, forcing you to an explicit DECREF (or leak memory). # To avoid this we make the above convention. 
Note, you can # always locally override this convention by putting something like # # cdef extern from "Python.h": # PyObject* PyNumber_Add(PyObject *o1, PyObject *o2) # # in your .pyx file or into a cimported .pxd file. You just have to # use the one from the right (pxd-)namespace then. # # Cython automatically takes care of reference counting for anything # of type object. # ## More precisely, I think the correct convention for ## using the Python/C API from Cython is as follows. ## ## (1) Declare all input arguments as type "object". This way no explicit ## casting is needed, and moreover Cython doesn't generate ## any funny reference counting. ## (2) Declare output as object if a new reference is returned. ## (3) Declare output as PyObject* if a borrowed reference is returned. ## ## This way when you call objects, no cast is needed, and if the api ## calls returns a new reference (which is about 95% of them), then ## you can just assign to a variable of type object. With borrowed ## references if you do an explicit typecast to , Cython generates an ## INCREF and DECREF so you have to be careful. However, you got a ## borrowed reference in this case, so there's got to be another reference ## to your object, so you're OK, as long as you relealize this ## and use the result of an explicit cast to as a borrowed ## reference (and you can call Py_INCREF if you want to turn it ## into another reference for some reason). # # "The reference count is important because today's computers have # a finite (and often severely limited) memory size; it counts how # many different places there are that have a reference to an # object. Such a place could be another object, or a global (or # static) C variable, or a local variable in some C function. When # an object's reference count becomes zero, the object is # deallocated. If it contains references to other objects, their # reference count is decremented. Those other objects may be # deallocated in turn, if this decrement makes their reference # count become zero, and so on. (There's an obvious problem with # objects that reference each other here; for now, the solution is # ``don't do that.'') # # Reference counts are always manipulated explicitly. The normal # way is to use the macro Py_INCREF() to increment an object's # reference count by one, and Py_DECREF() to decrement it by # one. The Py_DECREF() macro is considerably more complex than the # incref one, since it must check whether the reference count # becomes zero and then cause the object's deallocator to be # called. The deallocator is a function pointer contained in the # object's type structure. The type-specific deallocator takes # care of decrementing the reference counts for other objects # contained in the object if this is a compound object type, such # as a list, as well as performing any additional finalization # that's needed. There's no chance that the reference count can # overflow; at least as many bits are used to hold the reference # count as there are distinct memory locations in virtual memory # (assuming sizeof(long) >= sizeof(char*)). Thus, the reference # count increment is a simple operation. # # It is not necessary to increment an object's reference count for # every local variable that contains a pointer to an object. In # theory, the object's reference count goes up by one when the # variable is made to point to it and it goes down by one when the # variable goes out of scope. 
However, these two cancel each other # out, so at the end the reference count hasn't changed. The only # real reason to use the reference count is to prevent the object # from being deallocated as long as our variable is pointing to # it. If we know that there is at least one other reference to the # object that lives at least as long as our variable, there is no # need to increment the reference count temporarily. An important # situation where this arises is in objects that are passed as # arguments to C functions in an extension module that are called # from Python; the call mechanism guarantees to hold a reference # to every argument for the duration of the call. # # However, a common pitfall is to extract an object from a list # and hold on to it for a while without incrementing its reference # count. Some other operation might conceivably remove the object # from the list, decrementing its reference count and possibly # deallocating it. The real danger is that innocent-looking # operations may invoke arbitrary Python code which could do this; # there is a code path which allows control to flow back to the # user from a Py_DECREF(), so almost any operation is potentially # dangerous. # # A safe approach is to always use the generic operations # (functions whose name begins with "PyObject_", "PyNumber_", # "PySequence_" or "PyMapping_"). These operations always # increment the reference count of the object they return. This # leaves the caller with the responsibility to call Py_DECREF() # when they are done with the result; this soon becomes second # nature. # # Now you should read http://docs.python.org/api/refcountDetails.html # just to be sure you understand what is going on. # ################################################################# ################################################################# # BIG FAT DEPRECATION WARNING ################################################################# # Do NOT cimport any names directly from the cpython package, # despite the star-imports below. They will be removed at # some point. # Instead, use the correct sub-module to draw your cimports from. # # A direct cimport from the package will make your code depend on # all of the existing declarations. This may have side-effects # and reduces the portability of your code.
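# For example (an illustrative sketch, not additional declarations in
# this package): instead of "from cpython cimport *", cimport exactly
# the names you use from the sub-modules listed below:
#
#   from cpython.ref cimport Py_INCREF, Py_DECREF
#   from cpython.list cimport PyList_New
#   from cpython.dict cimport PyDict_SetItem
#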
################################################################# # START OF DEPRECATED SECTION ################################################################# from cpython.version cimport * from cpython.ref cimport * from cpython.exc cimport * from cpython.module cimport * from cpython.mem cimport * from cpython.tuple cimport * from cpython.list cimport * from cpython.object cimport * from cpython.sequence cimport * from cpython.mapping cimport * from cpython.iterator cimport * from cpython.type cimport * from cpython.number cimport * from cpython.int cimport * from cpython.bool cimport * from cpython.long cimport * from cpython.float cimport * from cpython.complex cimport * from cpython.string cimport * from cpython.unicode cimport * from cpython.dict cimport * from cpython.instance cimport * from cpython.function cimport * from cpython.method cimport * from cpython.weakref cimport * from cpython.getargs cimport * from cpython.pythread cimport * from cpython.pystate cimport * # Python <= 2.x from cpython.cobject cimport * from cpython.oldbuffer cimport * # Python >= 2.4 from cpython.set cimport * # Python >= 2.6 from cpython.buffer cimport * from cpython.bytes cimport * # Python >= 3.0 from cpython.pycapsule cimport * ################################################################# # END OF DEPRECATED SECTION ################################################################# Cython-0.26.1/Cython/Includes/cpython/longintrepr.pxd0000664000175000017500000000074013023021033023372 0ustar stefanstefan00000000000000# Internals of the "long" type (Python 2) or "int" type (Python 3). # This is not part of Python's published API. cdef extern from "longintrepr.h": # Add explicit cast to avoid compiler warnings cdef _PyLong_New "(PyObject*)_PyLong_New"(Py_ssize_t s) ctypedef unsigned int digit ctypedef int sdigit # Python >= 2.7 only ctypedef struct PyLongObject: digit* ob_digit cdef long PyLong_SHIFT cdef digit PyLong_BASE cdef digit PyLong_MASK Cython-0.26.1/Cython/Includes/cpython/pythread.pxd0000664000175000017500000000216712542002467022673 0ustar stefanstefan00000000000000 cdef extern from "pythread.h": ctypedef void *PyThread_type_lock ctypedef void *PyThread_type_sema void PyThread_init_thread() long PyThread_start_new_thread(void (*)(void *), void *) void PyThread_exit_thread() long PyThread_get_thread_ident() PyThread_type_lock PyThread_allocate_lock() void PyThread_free_lock(PyThread_type_lock) int PyThread_acquire_lock(PyThread_type_lock, int mode) nogil void PyThread_release_lock(PyThread_type_lock) nogil enum: # 'mode' in PyThread_acquire_lock() WAIT_LOCK # 1 NOWAIT_LOCK # 0 ctypedef enum PyLockStatus: # return values of PyThread_acquire_lock() in CPython 3.2+ PY_LOCK_FAILURE = 0 PY_LOCK_ACQUIRED = 1 PY_LOCK_INTR size_t PyThread_get_stacksize() int PyThread_set_stacksize(size_t) # Thread Local Storage (TLS) API int PyThread_create_key() void PyThread_delete_key(int) int PyThread_set_key_value(int, void *) void * PyThread_get_key_value(int) void PyThread_delete_key_value(int key) # Cleanup after a fork void PyThread_ReInitTLS() Cython-0.26.1/Cython/Includes/cpython/datetime.pxd0000664000175000017500000001517313143605603022647 0ustar stefanstefan00000000000000from cpython.object cimport PyObject cdef extern from "Python.h": ctypedef struct PyTypeObject: pass cdef extern from "datetime.h": ctypedef extern class datetime.date[object PyDateTime_Date]: pass ctypedef extern class datetime.time[object PyDateTime_Time]: pass ctypedef extern class datetime.datetime[object PyDateTime_DateTime]: 
pass ctypedef extern class datetime.timedelta[object PyDateTime_Delta]: pass ctypedef extern class datetime.tzinfo[object PyDateTime_TZInfo]: pass ctypedef struct PyDateTime_Date: pass ctypedef struct PyDateTime_Time: char hastzinfo PyObject *tzinfo ctypedef struct PyDateTime_DateTime: char hastzinfo PyObject *tzinfo ctypedef struct PyDateTime_Delta: int days int seconds int microseconds # Define structure for C API. ctypedef struct PyDateTime_CAPI: # type objects PyTypeObject *DateType PyTypeObject *DateTimeType PyTypeObject *TimeType PyTypeObject *DeltaType PyTypeObject *TZInfoType # constructors object (*Date_FromDate)(int, int, int, PyTypeObject*) object (*DateTime_FromDateAndTime)(int, int, int, int, int, int, int, object, PyTypeObject*) object (*Time_FromTime)(int, int, int, int, object, PyTypeObject*) object (*Delta_FromDelta)(int, int, int, int, PyTypeObject*) # constructors for the DB API object (*DateTime_FromTimestamp)(object, object, object) object (*Date_FromTimestamp)(object, object) # Check type of the object. bint PyDate_Check(object op) bint PyDate_CheckExact(object op) bint PyDateTime_Check(object op) bint PyDateTime_CheckExact(object op) bint PyTime_Check(object op) bint PyTime_CheckExact(object op) bint PyDelta_Check(object op) bint PyDelta_CheckExact(object op) bint PyTZInfo_Check(object op) bint PyTZInfo_CheckExact(object op) # Getters for date and datetime (C macros). int PyDateTime_GET_YEAR(object o) int PyDateTime_GET_MONTH(object o) int PyDateTime_GET_DAY(object o) # Getters for datetime (C macros). int PyDateTime_DATE_GET_HOUR(object o) int PyDateTime_DATE_GET_MINUTE(object o) int PyDateTime_DATE_GET_SECOND(object o) int PyDateTime_DATE_GET_MICROSECOND(object o) # Getters for time (C macros). int PyDateTime_TIME_GET_HOUR(object o) int PyDateTime_TIME_GET_MINUTE(object o) int PyDateTime_TIME_GET_SECOND(object o) int PyDateTime_TIME_GET_MICROSECOND(object o) # Getters for timedelta (C macros). #int PyDateTime_DELTA_GET_DAYS(object o) #int PyDateTime_DELTA_GET_SECONDS(object o) #int PyDateTime_DELTA_GET_MICROSECONDS(object o) # PyDateTime CAPI object. PyDateTime_CAPI *PyDateTimeAPI void PyDateTime_IMPORT() # Datetime C API initialization function. # You have to call it before any usage of DateTime CAPI functions. cdef inline void import_datetime(): PyDateTime_IMPORT # Create date object using DateTime CAPI factory function. # Note, there are no range checks for any of the arguments. cdef inline object date_new(int year, int month, int day): return PyDateTimeAPI.Date_FromDate(year, month, day, PyDateTimeAPI.DateType) # Create time object using DateTime CAPI factory function # Note, there are no range checks for any of the arguments. cdef inline object time_new(int hour, int minute, int second, int microsecond, object tz): return PyDateTimeAPI.Time_FromTime(hour, minute, second, microsecond, tz, PyDateTimeAPI.TimeType) # Create datetime object using DateTime CAPI factory function. # Note, there are no range checks for any of the arguments. cdef inline object datetime_new(int year, int month, int day, int hour, int minute, int second, int microsecond, object tz): return PyDateTimeAPI.DateTime_FromDateAndTime(year, month, day, hour, minute, second, microsecond, tz, PyDateTimeAPI.DateTimeType) # Create timedelta object using DateTime CAPI factory function. # Note, there are no range checks for any of the arguments. 
cdef inline object timedelta_new(int days, int seconds, int useconds): return PyDateTimeAPI.Delta_FromDelta(days, seconds, useconds, 1, PyDateTimeAPI.DeltaType) # More recognizable getters for date/time/datetime/timedelta. # There are no setters because datetime.h hasn't them. # This is because of immutable nature of these objects by design. # If you would change time/date/datetime/timedelta object you need to recreate. # Get tzinfo of time cdef inline object time_tzinfo(object o): if (o).hastzinfo: return (o).tzinfo else: return None # Get tzinfo of datetime cdef inline object datetime_tzinfo(object o): if (o).hastzinfo: return (o).tzinfo else: return None # Get year of date cdef inline int date_year(object o): return PyDateTime_GET_YEAR(o) # Get month of date cdef inline int date_month(object o): return PyDateTime_GET_MONTH(o) # Get day of date cdef inline int date_day(object o): return PyDateTime_GET_DAY(o) # Get year of datetime cdef inline int datetime_year(object o): return PyDateTime_GET_YEAR(o) # Get month of datetime cdef inline int datetime_month(object o): return PyDateTime_GET_MONTH(o) # Get day of datetime cdef inline int datetime_day(object o): return PyDateTime_GET_DAY(o) # Get hour of time cdef inline int time_hour(object o): return PyDateTime_TIME_GET_HOUR(o) # Get minute of time cdef inline int time_minute(object o): return PyDateTime_TIME_GET_MINUTE(o) # Get second of time cdef inline int time_second(object o): return PyDateTime_TIME_GET_SECOND(o) # Get microsecond of time cdef inline int time_microsecond(object o): return PyDateTime_TIME_GET_MICROSECOND(o) # Get hour of datetime cdef inline int datetime_hour(object o): return PyDateTime_DATE_GET_HOUR(o) # Get minute of datetime cdef inline int datetime_minute(object o): return PyDateTime_DATE_GET_MINUTE(o) # Get second of datetime cdef inline int datetime_second(object o): return PyDateTime_DATE_GET_SECOND(o) # Get microsecond of datetime cdef inline int datetime_microsecond(object o): return PyDateTime_DATE_GET_MICROSECOND(o) # Get days of timedelta cdef inline int timedelta_days(object o): return (o).days # Get seconds of timedelta cdef inline int timedelta_seconds(object o): return (o).seconds # Get microseconds of timedelta cdef inline int timedelta_microseconds(object o): return (o).microseconds Cython-0.26.1/Cython/Includes/cpython/array.pxd0000664000175000017500000001362113143605603022165 0ustar stefanstefan00000000000000""" array.pxd Cython interface to Python's array.array module. * 1D contiguous data view * tools for fast array creation, maximum C-speed and handiness * suitable as allround light weight auto-array within Cython code too Usage: >>> cimport array Usage through Cython buffer interface (Py2.3+): >>> def f(arg1, unsigned i, double dx) ... array.array[double] a = arg1 ... a[i] += dx Fast C-level new_array(_zeros), resize_array, copy_array, Py_SIZE(obj), zero_array cdef array.array[double] k = array.copy(d) cdef array.array[double] n = array.array(d, Py_SIZE(d) * 2 ) cdef array.array[double] m = array.zeros_like(FLOAT_TEMPLATE) array.resize(f, 200000) Zero overhead with naked data pointer views by union: _f, _d, _i, _c, _u, ... => Original C array speed + Python dynamic memory management cdef array.array a = inarray if a._d[2] += 0.66 # use as double array without extra casting float *subview = vector._f + 10 # starting from 10th element unsigned char *subview_buffer = vector._B + 4 Suitable as lightweight arrays intra Cython without speed penalty. 
Replacement for C stack/malloc arrays; no trouble with refcounting, mem.leaks; seamless Python compatibility, buffer() optional last changes: 2009-05-15 rk : 2009-12-06 bp : 2012-05-02 andreasvc : (see revision control) """ from libc.string cimport strcat, strncat, \ memset, memchr, memcmp, memcpy, memmove from cpython.object cimport Py_SIZE from cpython.ref cimport PyTypeObject, Py_TYPE from cpython.exc cimport PyErr_BadArgument from cpython.mem cimport PyObject_Malloc, PyObject_Free cdef extern from *: # Hard-coded utility code hack. ctypedef class array.array [object arrayobject] ctypedef object GETF(array a, Py_ssize_t ix) ctypedef object SETF(array a, Py_ssize_t ix, object o) ctypedef struct arraydescr: # [object arraydescr]: int typecode int itemsize GETF getitem # PyObject * (*getitem)(struct arrayobject *, Py_ssize_t); SETF setitem # int (*setitem)(struct arrayobject *, Py_ssize_t, PyObject *); ctypedef union __data_union: # views of ob_item: float* as_floats # direct float pointer access to buffer double* as_doubles # double ... int* as_ints unsigned int *as_uints unsigned char *as_uchars signed char *as_schars char *as_chars unsigned long *as_ulongs long *as_longs unsigned long long *as_ulonglongs long long *as_longlongs short *as_shorts unsigned short *as_ushorts Py_UNICODE *as_pyunicodes void *as_voidptr ctypedef class array.array [object arrayobject]: cdef __cythonbufferdefaults__ = {'ndim' : 1, 'mode':'c'} cdef: Py_ssize_t ob_size arraydescr* ob_descr # struct arraydescr *ob_descr; __data_union data def __getbuffer__(self, Py_buffer* info, int flags): # This implementation of getbuffer is geared towards Cython # requirements, and does not yet fullfill the PEP. # In particular strided access is always provided regardless # of flags item_count = Py_SIZE(self) info.suboffsets = NULL info.buf = self.data.as_chars info.readonly = 0 info.ndim = 1 info.itemsize = self.ob_descr.itemsize # e.g. sizeof(float) info.len = info.itemsize * item_count info.shape = PyObject_Malloc(sizeof(Py_ssize_t) + 2) if not info.shape: raise MemoryError() info.shape[0] = item_count # constant regardless of resizing info.strides = &info.itemsize info.format = (info.shape + 1) info.format[0] = self.ob_descr.typecode info.format[1] = 0 info.obj = self def __releasebuffer__(self, Py_buffer* info): PyObject_Free(info.shape) array newarrayobject(PyTypeObject* type, Py_ssize_t size, arraydescr *descr) # fast resize/realloc # not suitable for small increments; reallocation 'to the point' int resize(array self, Py_ssize_t n) except -1 # efficient for small increments (not in Py2.3-) int resize_smart(array self, Py_ssize_t n) except -1 cdef inline array clone(array template, Py_ssize_t length, bint zero): """ fast creation of a new array, given a template array. type will be same as template. if zero is true, new array will be initialized with zeroes.""" op = newarrayobject(Py_TYPE(template), length, template.ob_descr) if zero and op is not None: memset(op.data.as_chars, 0, length * op.ob_descr.itemsize) return op cdef inline array copy(array self): """ make a copy of an array. """ op = newarrayobject(Py_TYPE(self), Py_SIZE(self), self.ob_descr) memcpy(op.data.as_chars, self.data.as_chars, Py_SIZE(op) * op.ob_descr.itemsize) return op cdef inline int extend_buffer(array self, char* stuff, Py_ssize_t n) except -1: """ efficent appending of new stuff of same type (e.g. of same array type) n: number of elements (not number of bytes!) 
""" cdef Py_ssize_t itemsize = self.ob_descr.itemsize cdef Py_ssize_t origsize = Py_SIZE(self) resize_smart(self, origsize + n) memcpy(self.data.as_chars + origsize * itemsize, stuff, n * itemsize) return 0 cdef inline int extend(array self, array other) except -1: """ extend array with data from another array; types must match. """ if self.ob_descr.typecode != other.ob_descr.typecode: PyErr_BadArgument() return extend_buffer(self, other.data.as_chars, Py_SIZE(other)) cdef inline void zero(array self): """ set all elements of array to zero. """ memset(self.data.as_chars, 0, Py_SIZE(self) * self.ob_descr.itemsize) Cython-0.26.1/Cython/Includes/cpython/type.pxd0000664000175000017500000000344712542002467022036 0ustar stefanstefan00000000000000 cdef extern from "Python.h": # The C structure of the objects used to describe built-in types. ############################################################################ # 7.1.1 Type Objects ############################################################################ ctypedef class __builtin__.type [object PyTypeObject]: pass # PyObject* PyType_Type # This is the type object for type objects; it is the same object # as type and types.TypeType in the Python layer. bint PyType_Check(object o) # Return true if the object o is a type object, including # instances of types derived from the standard type object. Return # false in all other cases. bint PyType_CheckExact(object o) # Return true if the object o is a type object, but not a subtype # of the standard type object. Return false in all other # cases. bint PyType_HasFeature(object o, int feature) # Return true if the type object o sets the feature feature. Type # features are denoted by single bit flags. bint PyType_IS_GC(object o) # Return true if the type object includes support for the cycle # detector; this tests the type flag Py_TPFLAGS_HAVE_GC. bint PyType_IsSubtype(type a, type b) # Return true if a is a subtype of b. object PyType_GenericAlloc(object type, Py_ssize_t nitems) # Return value: New reference. object PyType_GenericNew(type type, object args, object kwds) # Return value: New reference. bint PyType_Ready(type type) except -1 # Finalize a type object. This should be called on all type # objects to finish their initialization. This function is # responsible for adding inherited slots from a type's base # class. Return 0 on success, or return -1 and sets an exception # on error. Cython-0.26.1/Cython/Includes/cpython/method.pxd0000664000175000017500000000422412542002467022327 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # 7.5.4 Method Objects ############################################################################ # There are some useful functions that are useful for working with method objects. # PyTypeObject PyMethod_Type # This instance of PyTypeObject represents the Python method type. This is exposed to Python programs as types.MethodType. bint PyMethod_Check(object o) # Return true if o is a method object (has type # PyMethod_Type). The parameter must not be NULL. object PyMethod_New(object func, object self, object cls) # Return value: New reference. # Return a new method object, with func being any callable object; # this is the function that will be called when the method is # called. 
If this method should be bound to an instance, self # should be the instance and class should be the class of self, # otherwise self should be NULL and class should be the class # which provides the unbound method.. PyObject* PyMethod_Class(object meth) except NULL # Return value: Borrowed reference. # Return the class object from which the method meth was created; # if this was created from an instance, it will be the class of # the instance. PyObject* PyMethod_GET_CLASS(object meth) # Return value: Borrowed reference. # Macro version of PyMethod_Class() which avoids error checking. PyObject* PyMethod_Function(object meth) except NULL # Return value: Borrowed reference. # Return the function object associated with the method meth. PyObject* PyMethod_GET_FUNCTION(object meth) # Return value: Borrowed reference. # Macro version of PyMethod_Function() which avoids error checking. PyObject* PyMethod_Self(object meth) except? NULL # Return value: Borrowed reference. # Return the instance associated with the method meth if it is bound, otherwise return NULL. PyObject* PyMethod_GET_SELF(object meth) # Return value: Borrowed reference. # Macro version of PyMethod_Self() which avoids error checking. Cython-0.26.1/Cython/Includes/cpython/mem.pxd0000664000175000017500000001220613023021033021605 0ustar stefanstefan00000000000000cdef extern from "Python.h": ##################################################################### # 9.2 Memory Interface ##################################################################### # You are definitely *supposed* to use these: "In most situations, # however, it is recommended to allocate memory from the Python # heap specifically because the latter is under control of the # Python memory manager. For example, this is required when the # interpreter is extended with new object types written in # C. Another reason for using the Python heap is the desire to # inform the Python memory manager about the memory needs of the # extension module. Even when the requested memory is used # exclusively for internal, highly-specific purposes, delegating # all memory requests to the Python memory manager causes the # interpreter to have a more accurate image of its memory # footprint as a whole. Consequently, under certain circumstances, # the Python memory manager may or may not trigger appropriate # actions, like garbage collection, memory compaction or other # preventive procedures. Note that by using the C library # allocator as shown in the previous example, the allocated memory # for the I/O buffer escapes completely the Python memory # manager." # The following function sets, modeled after the ANSI C standard, # but specifying behavior when requesting zero bytes, are # available for allocating and releasing memory from the Python # heap: void* PyMem_Malloc(size_t n) # Allocates n bytes and returns a pointer of type void* to the # allocated memory, or NULL if the request fails. Requesting zero # bytes returns a distinct non-NULL pointer if possible, as if # PyMem_Malloc(1) had been called instead. The memory will not # have been initialized in any way. void* PyMem_Realloc(void *p, size_t n) # Resizes the memory block pointed to by p to n bytes. The # contents will be unchanged to the minimum of the old and the new # sizes. If p is NULL, the call is equivalent to PyMem_Malloc(n); # else if n is equal to zero, the memory block is resized but is # not freed, and the returned pointer is non-NULL. 
Unless p is # NULL, it must have been returned by a previous call to # PyMem_Malloc() or PyMem_Realloc(). void PyMem_Free(void *p) # Frees the memory block pointed to by p, which must have been # returned by a previous call to PyMem_Malloc() or # PyMem_Realloc(). Otherwise, or if PyMem_Free(p) has been called # before, undefined behavior occurs. If p is NULL, no operation is # performed. # The following type-oriented macros are provided for # convenience. Note that TYPE refers to any C type. # TYPE* PyMem_New(TYPE, size_t n) # Same as PyMem_Malloc(), but allocates (n * sizeof(TYPE)) bytes # of memory. Returns a pointer cast to TYPE*. The memory will not # have been initialized in any way. # TYPE* PyMem_Resize(void *p, TYPE, size_t n) # Same as PyMem_Realloc(), but the memory block is resized to (n * # sizeof(TYPE)) bytes. Returns a pointer cast to TYPE*. void PyMem_Del(void *p) # Same as PyMem_Free(). # In addition, the following macro sets are provided for calling # the Python memory allocator directly, without involving the C # API functions listed above. However, note that their use does # not preserve binary compatibility across Python versions and is # therefore deprecated in extension modules. # PyMem_MALLOC(), PyMem_REALLOC(), PyMem_FREE(). # PyMem_NEW(), PyMem_RESIZE(), PyMem_DEL(). ##################################################################### # Raw object memory interface ##################################################################### # Functions to call the same malloc/realloc/free as used by Python's # object allocator. If WITH_PYMALLOC is enabled, these may differ from # the platform malloc/realloc/free. The Python object allocator is # designed for fast, cache-conscious allocation of many "small" objects, # and with low hidden memory overhead. # # PyObject_Malloc(0) returns a unique non-NULL pointer if possible. # # PyObject_Realloc(NULL, n) acts like PyObject_Malloc(n). # PyObject_Realloc(p != NULL, 0) does not return NULL, or free the memory # at p. # # Returned pointers must be checked for NULL explicitly; no action is # performed on failure other than to return NULL (no warning it printed, no # exception is set, etc). # # For allocating objects, use PyObject_{New, NewVar} instead whenever # possible. The PyObject_{Malloc, Realloc, Free} family is exposed # so that you can exploit Python's small-block allocator for non-object # uses. If you must use these routines to allocate object memory, make sure # the object gets initialized via PyObject_{Init, InitVar} after obtaining # the raw memory. void* PyObject_Malloc(size_t size) void* PyObject_Calloc(size_t nelem, size_t elsize) void* PyObject_Realloc(void *ptr, size_t new_size) void PyObject_Free(void *ptr) Cython-0.26.1/Cython/Includes/cpython/getargs.pxd0000664000175000017500000000140712542002467022503 0ustar stefanstefan00000000000000 cdef extern from "Python.h": ##################################################################### # 5.5 Parsing arguments and building values ##################################################################### ctypedef struct va_list int PyArg_ParseTuple(object args, char *format, ...) except 0 int PyArg_VaParse(object args, char *format, va_list vargs) except 0 int PyArg_ParseTupleAndKeywords(object args, object kw, char *format, char *keywords[], ...) except 0 int PyArg_VaParseTupleAndKeywords(object args, object kw, char *format, char *keywords[], va_list vargs) except 0 int PyArg_Parse(object args, char *format, ...) 
except 0 int PyArg_UnpackTuple(object args, char *name, Py_ssize_t min, Py_ssize_t max, ...) except 0 Cython-0.26.1/Cython/Includes/cpython/int.pxd0000664000175000017500000001004312542002467021635 0ustar stefanstefan00000000000000cdef extern from "Python.h": ctypedef unsigned long long PY_LONG_LONG ############################################################################ # Integer Objects ############################################################################ # PyTypeObject PyInt_Type # This instance of PyTypeObject represents the Python plain # integer type. This is the same object as int and types.IntType. bint PyInt_Check(object o) # Return true if o is of type PyInt_Type or a subtype of # PyInt_Type. bint PyInt_CheckExact(object o) # Return true if o is of type PyInt_Type, but not a subtype of # PyInt_Type. object PyInt_FromString(char *str, char **pend, int base) # Return value: New reference. # Return a new PyIntObject or PyLongObject based on the string # value in str, which is interpreted according to the radix in # base. If pend is non-NULL, *pend will point to the first # character in str which follows the representation of the # number. If base is 0, the radix will be determined based on the # leading characters of str: if str starts with '0x' or '0X', # radix 16 will be used; if str starts with '0', radix 8 will be # used; otherwise radix 10 will be used. If base is not 0, it must # be between 2 and 36, inclusive. Leading spaces are ignored. If # there are no digits, ValueError will be raised. If the string # represents a number too large to be contained within the # machine's long int type and overflow warnings are being # suppressed, a PyLongObject will be returned. If overflow # warnings are not being suppressed, NULL will be returned in this # case. object PyInt_FromLong(long ival) # Return value: New reference. # Create a new integer object with a value of ival. # The current implementation keeps an array of integer objects for # all integers between -5 and 256, when you create an int in that # range you actually just get back a reference to the existing # object. So it should be possible to change the value of 1. I # suspect the behaviour of Python in this case is undefined. :-) object PyInt_FromSsize_t(Py_ssize_t ival) # Return value: New reference. # Create a new integer object with a value of ival. If the value # is larger than LONG_MAX or smaller than LONG_MIN, a long integer # object is returned. object PyInt_FromSize_t(size_t ival) # Return value: New reference. # Create a new integer object with a value of ival. If the value # exceeds LONG_MAX, a long integer object is returned. long PyInt_AsLong(object io) except? -1 # Will first attempt to cast the object to a PyIntObject, if it is # not already one, and then return its value. If there is an # error, -1 is returned, and the caller should check # PyErr_Occurred() to find out whether there was an error, or # whether the value just happened to be -1. long PyInt_AS_LONG(object io) # Return the value of the object io. No error checking is performed. unsigned long PyInt_AsUnsignedLongMask(object io) except? -1 # Will first attempt to cast the object to a PyIntObject or # PyLongObject, if it is not already one, and then return its # value as unsigned long. This function does not check for # overflow. PY_LONG_LONG PyInt_AsUnsignedLongLongMask(object io) except? 
-1 # Will first attempt to cast the object to a PyIntObject or # PyLongObject, if it is not already one, and then return its # value as unsigned long long, without checking for overflow. Py_ssize_t PyInt_AsSsize_t(object io) except? -1 # Will first attempt to cast the object to a PyIntObject or # PyLongObject, if it is not already one, and then return its # value as Py_ssize_t. long PyInt_GetMax() # Return the system's idea of the largest integer it can handle # (LONG_MAX, as defined in the system header files). int PyInt_ClearFreeList() # Clear the integer free list. Return the number of items that could not be freed. # New in version 2.6. Cython-0.26.1/Cython/Includes/cpython/set.pxd0000664000175000017500000001206312542002467021642 0ustar stefanstefan00000000000000cdef extern from "Python.h": ############################################################################ # 7.5.14 Set Objects ############################################################################ # This section details the public API for set and frozenset # objects. Any functionality not listed below is best accessed # using the either the abstract object protocol (including # PyObject_CallMethod(), PyObject_RichCompareBool(), # PyObject_Hash(), PyObject_Repr(), PyObject_IsTrue(), # PyObject_Print(), and PyObject_GetIter()) or the abstract number # protocol (including PyNumber_Add(), PyNumber_Subtract(), # PyNumber_Or(), PyNumber_Xor(), PyNumber_InPlaceAdd(), # PyNumber_InPlaceSubtract(), PyNumber_InPlaceOr(), and # PyNumber_InPlaceXor()). # PySetObject # This subtype of PyObject is used to hold the internal data for # both set and frozenset objects. It is like a PyDictObject in # that it is a fixed size for small sets (much like tuple storage) # and will point to a separate, variable sized block of memory for # medium and large sized sets (much like list storage). None of # the fields of this structure should be considered public and are # subject to change. All access should be done through the # documented API rather than by manipulating the values in the # structure. # PyTypeObject PySet_Type # This is an instance of PyTypeObject representing the Python set type. # PyTypeObject PyFrozenSet_Type # This is an instance of PyTypeObject representing the Python frozenset type. # The following type check macros work on pointers to any Python # object. Likewise, the constructor functions work with any # iterable Python object. bint PyAnySet_Check(object p) # Return true if p is a set object, a frozenset object, or an # instance of a subtype. bint PyAnySet_CheckExact(object p) # Return true if p is a set object or a frozenset object but not # an instance of a subtype. bint PyFrozenSet_CheckExact(object p) # Return true if p is a frozenset object but not an instance of a subtype. object PySet_New(object iterable) # Return value: New reference. # Return a new set containing objects returned by the # iterable. The iterable may be NULL to create a new empty # set. Return the new set on success or NULL on failure. Raise # TypeError if iterable is not actually iterable. The constructor # is also useful for copying a set (c=set(s)). object PyFrozenSet_New(object iterable) # Return value: New reference. # Return a new frozenset containing objects returned by the # iterable. The iterable may be NULL to create a new empty # frozenset. Return the new set on success or NULL on # failure. Raise TypeError if iterable is not actually iterable. 
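    # Illustrative usage from Cython code (a hedged sketch, not part of
    # the declarations in this file); once cimported, the constructors
    # and checks behave like ordinary Python calls:
    #
    #   from cpython.set cimport PySet_New, PySet_Add, PySet_Contains
    #
    #   def tag_seen(seq, marker):
    #       s = PySet_New(seq)                # like set(seq)
    #       PySet_Add(s, marker)              # like s.add(marker)
    #       return PySet_Contains(s, marker)  # 1 if marker is in s
    #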
# The following functions and macros are available for instances # of set or frozenset or instances of their subtypes. Py_ssize_t PySet_Size(object anyset) except -1 # Return the length of a set or frozenset object. Equivalent to # "len(anyset)". Raises a PyExc_SystemError if anyset is not a # set, frozenset, or an instance of a subtype. Py_ssize_t PySet_GET_SIZE(object anyset) # Macro form of PySet_Size() without error checking. bint PySet_Contains(object anyset, object key) except -1 # Return 1 if found, 0 if not found, and -1 if an error is # encountered. Unlike the Python __contains__() method, this # function does not automatically convert unhashable sets into # temporary frozensets. Raise a TypeError if the key is # unhashable. Raise PyExc_SystemError if anyset is not a set, # frozenset, or an instance of a subtype. # The following functions are available for instances of set or # its subtypes but not for instances of frozenset or its subtypes. int PySet_Add(object set, object key) except -1 # Add key to a set instance. Does not apply to frozenset # instances. Return 0 on success or -1 on failure. Raise a # TypeError if the key is unhashable. Raise a MemoryError if there # is no room to grow. Raise a SystemError if set is an not an # instance of set or its subtype. bint PySet_Discard(object set, object key) except -1 # Return 1 if found and removed, 0 if not found (no action taken), # and -1 if an error is encountered. Does not raise KeyError for # missing keys. Raise a TypeError if the key is unhashable. Unlike # the Python discard() method, this function does not # automatically convert unhashable sets into temporary # frozensets. Raise PyExc_SystemError if set is an not an instance # of set or its subtype. object PySet_Pop(object set) # Return value: New reference. # Return a new reference to an arbitrary object in the set, and # removes the object from the set. Return NULL on failure. Raise # KeyError if the set is empty. Raise a SystemError if set is an # not an instance of set or its subtype. int PySet_Clear(object set) # Empty an existing set of all elements. Cython-0.26.1/Cython/Includes/cpython/number.pxd0000664000175000017500000002606312542002467022344 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ##################################################################### # 6.2 Number Protocol ##################################################################### bint PyNumber_Check(object o) # Returns 1 if the object o provides numeric protocols, and false # otherwise. This function always succeeds. object PyNumber_Add(object o1, object o2) # Return value: New reference. # Returns the result of adding o1 and o2, or NULL on failure. This # is the equivalent of the Python expression "o1 + o2". object PyNumber_Subtract(object o1, object o2) # Return value: New reference. # Returns the result of subtracting o2 from o1, or NULL on # failure. This is the equivalent of the Python expression "o1 - # o2". object PyNumber_Multiply(object o1, object o2) # Return value: New reference. # Returns the result of multiplying o1 and o2, or NULL on # failure. This is the equivalent of the Python expression "o1 * # o2". object PyNumber_Divide(object o1, object o2) # Return value: New reference. # Returns the result of dividing o1 by o2, or NULL on # failure. This is the equivalent of the Python expression "o1 / # o2". object PyNumber_FloorDivide(object o1, object o2) # Return value: New reference. 
# Return the floor of o1 divided by o2, or NULL on failure. This # is equivalent to the ``classic'' division of integers. object PyNumber_TrueDivide(object o1, object o2) # Return value: New reference. # Return a reasonable approximation for the mathematical value of # o1 divided by o2, or NULL on failure. The return value is # ``approximate'' because binary floating point numbers are # approximate; it is not possible to represent all real numbers in # base two. This function can return a floating point value when # passed two integers. object PyNumber_Remainder(object o1, object o2) # Return value: New reference. # Returns the remainder of dividing o1 by o2, or NULL on # failure. This is the equivalent of the Python expression "o1 % # o2". object PyNumber_Divmod(object o1, object o2) # Return value: New reference. # See the built-in function divmod(). Returns NULL on # failure. This is the equivalent of the Python expression # "divmod(o1, o2)". object PyNumber_Power(object o1, object o2, object o3) # Return value: New reference. # See the built-in function pow(). Returns NULL on failure. This # is the equivalent of the Python expression "pow(o1, o2, o3)", # where o3 is optional. If o3 is to be ignored, pass Py_None in # its place (passing NULL for o3 would cause an illegal memory # access). object PyNumber_Negative(object o) # Return value: New reference. # Returns the negation of o on success, or NULL on failure. This # is the equivalent of the Python expression "-o". object PyNumber_Positive(object o) # Return value: New reference. # Returns o on success, or NULL on failure. This is the equivalent # of the Python expression "+o". object PyNumber_Absolute(object o) # Return value: New reference. # Returns the absolute value of o, or NULL on failure. This is the # equivalent of the Python expression "abs(o)". object PyNumber_Invert(object o) # Return value: New reference. # Returns the bitwise negation of o on success, or NULL on # failure. This is the equivalent of the Python expression "~o". object PyNumber_Lshift(object o1, object o2) # Return value: New reference. # Returns the result of left shifting o1 by o2 on success, or NULL # on failure. This is the equivalent of the Python expression "o1 # << o2". object PyNumber_Rshift(object o1, object o2) # Return value: New reference. # Returns the result of right shifting o1 by o2 on success, or # NULL on failure. This is the equivalent of the Python expression # "o1 >> o2". object PyNumber_And(object o1, object o2) # Return value: New reference. # Returns the ``bitwise and'' of o1 and o2 on success and NULL on # failure. This is the equivalent of the Python expression "o1 & # o2". object PyNumber_Xor(object o1, object o2) # Return value: New reference. # Returns the ``bitwise exclusive or'' of o1 by o2 on success, or # NULL on failure. This is the equivalent of the Python expression # "o1 ^ o2". object PyNumber_Or(object o1, object o2) # Return value: New reference. # Returns the ``bitwise or'' of o1 and o2 on success, or NULL on failure. This is the equivalent of the Python expression "o1 | o2". object PyNumber_InPlaceAdd(object o1, object o2) # Return value: New reference. # Returns the result of adding o1 and o2, or NULL on failure. The # operation is done in-place when o1 supports it. This is the # equivalent of the Python statement "o1 += o2". object PyNumber_InPlaceSubtract(object o1, object o2) # Return value: New reference. # Returns the result of subtracting o2 from o1, or NULL on # failure. 
The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 -= o2". object PyNumber_InPlaceMultiply(object o1, object o2) # Return value: New reference. # Returns the result of multiplying o1 and o2, or NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 *= o2". object PyNumber_InPlaceDivide(object o1, object o2) # Return value: New reference. # Returns the result of dividing o1 by o2, or NULL on failure. The # operation is done in-place when o1 supports it. This is the # equivalent of the Python statement "o1 /= o2". object PyNumber_InPlaceFloorDivide(object o1, object o2) # Return value: New reference. # Returns the mathematical floor of dividing o1 by o2, or NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 //= # o2". object PyNumber_InPlaceTrueDivide(object o1, object o2) # Return value: New reference. # Return a reasonable approximation for the mathematical value of # o1 divided by o2, or NULL on failure. The return value is # ``approximate'' because binary floating point numbers are # approximate; it is not possible to represent all real numbers in # base two. This function can return a floating point value when # passed two integers. The operation is done in-place when o1 # supports it. object PyNumber_InPlaceRemainder(object o1, object o2) # Return value: New reference. # Returns the remainder of dividing o1 by o2, or NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 %= o2". object PyNumber_InPlacePower(object o1, object o2, object o3) # Return value: New reference. # See the built-in function pow(). Returns NULL on failure. The # operation is done in-place when o1 supports it. This is the # equivalent of the Python statement "o1 **= o2" when o3 is # Py_None, or an in-place variant of "pow(o1, o2, o3)" # otherwise. If o3 is to be ignored, pass Py_None in its place # (passing NULL for o3 would cause an illegal memory access). object PyNumber_InPlaceLshift(object o1, object o2) # Return value: New reference. # Returns the result of left shifting o1 by o2 on success, or NULL # on failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 <<= o2". object PyNumber_InPlaceRshift(object o1, object o2) # Return value: New reference. # Returns the result of right shifting o1 by o2 on success, or # NULL on failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 >>= o2". object PyNumber_InPlaceAnd(object o1, object o2) # Return value: New reference. # Returns the ``bitwise and'' of o1 and o2 on success and NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 &= o2". object PyNumber_InPlaceXor(object o1, object o2) # Return value: New reference. # Returns the ``bitwise exclusive or'' of o1 by o2 on success, or # NULL on failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 ^= o2". object PyNumber_InPlaceOr(object o1, object o2) # Return value: New reference. # Returns the ``bitwise or'' of o1 and o2 on success, or NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 |= o2". 
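    # Illustrative usage from Cython code (a hedged sketch, not one of
    # the declarations in this file); the protocol functions mirror the
    # corresponding Python operators:
    #
    #   from cpython.number cimport PyNumber_Add, PyNumber_InPlaceAdd
    #
    #   def total(values, start):
    #       acc = start
    #       for v in values:
    #           acc = PyNumber_InPlaceAdd(acc, v)  # like "acc += v"
    #       return PyNumber_Add(acc, 0)            # like "acc + 0"
    #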
int PyNumber_Coerce(PyObject **p1, PyObject **p2) except -1 # This function takes the addresses of two variables of type # PyObject*. If the objects pointed to by *p1 and *p2 have the # same type, increment their reference count and return 0 # (success). If the objects can be converted to a common numeric # type, replace *p1 and *p2 by their converted value (with 'new' # reference counts), and return 0. If no conversion is possible, # or if some other error occurs, return -1 (failure) and don't # increment the reference counts. The call PyNumber_Coerce(&o1, # &o2) is equivalent to the Python statement "o1, o2 = coerce(o1, # o2)". object PyNumber_Int(object o) # Return value: New reference. # Returns the o converted to an integer object on success, or NULL # on failure. If the argument is outside the integer range a long # object will be returned instead. This is the equivalent of the # Python expression "int(o)". object PyNumber_Long(object o) # Return value: New reference. # Returns the o converted to a long integer object on success, or # NULL on failure. This is the equivalent of the Python expression # "long(o)". object PyNumber_Float(object o) # Return value: New reference. # Returns the o converted to a float object on success, or NULL on # failure. This is the equivalent of the Python expression # "float(o)". object PyNumber_Index(object o) # Returns the o converted to a Python int or long on success or # NULL with a TypeError exception raised on failure. Py_ssize_t PyNumber_AsSsize_t(object o, object exc) except? -1 # Returns o converted to a Py_ssize_t value if o can be # interpreted as an integer. If o can be converted to a Python int # or long but the attempt to convert to a Py_ssize_t value would # raise an OverflowError, then the exc argument is the type of # exception that will be raised (usually IndexError or # OverflowError). If exc is NULL, then the exception is cleared # and the value is clipped to PY_SSIZE_T_MIN for a negative # integer or PY_SSIZE_T_MAX for a positive integer. bint PyIndex_Check(object) # Returns True if o is an index integer (has the nb_index slot of # the tp_as_number structure filled in). Cython-0.26.1/Cython/Includes/cpython/object.pxd0000664000175000017500000004340413143605603022317 0ustar stefanstefan00000000000000from libc.stdio cimport FILE cimport cpython.type cdef extern from "Python.h": ctypedef struct PyObject # forward declaration ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs) ctypedef object (*unaryfunc)(object) ctypedef object (*binaryfunc)(object, object) ctypedef object (*ternaryfunc)(object, object, object) ctypedef int (*inquiry)(object) ctypedef Py_ssize_t (*lenfunc)(object) ctypedef object (*ssizeargfunc)(object, Py_ssize_t) ctypedef object (*ssizessizeargfunc)(object, Py_ssize_t, Py_ssize_t) ctypedef int (*ssizeobjargproc)(object, Py_ssize_t, object) ctypedef int (*ssizessizeobjargproc)(object, Py_ssize_t, Py_ssize_t, object) ctypedef int (*objobjargproc)(object, object, object) ctypedef int (*objobjproc)(object, object) ctypedef Py_hash_t (*hashfunc)(object) ctypedef object (*reprfunc)(object) ctypedef int (*cmpfunc)(object, object) ctypedef object (*richcmpfunc)(object, object, int) # The following functions use 'PyObject*' as first argument instead of 'object' to prevent # accidental reference counting when calling them during a garbage collection run. 
ctypedef void (*destructor)(PyObject*) ctypedef int (*visitproc)(PyObject*, void *) ctypedef int (*traverseproc)(PyObject*, visitproc, void*) ctypedef object (*descrgetfunc)(object, object, object) ctypedef int (*descrsetfunc)(object, object, object) except -1 ctypedef struct PyTypeObject: const char* tp_name const char* tp_doc Py_ssize_t tp_basicsize Py_ssize_t tp_itemsize Py_ssize_t tp_dictoffset unsigned long tp_flags newfunc tp_new destructor tp_dealloc traverseproc tp_traverse inquiry tp_clear ternaryfunc tp_call hashfunc tp_hash reprfunc tp_str reprfunc tp_repr cmpfunc tp_compare richcmpfunc tp_richcompare PyTypeObject* tp_base PyObject* tp_dict descrgetfunc tp_descr_get descrsetfunc tp_descr_set ctypedef struct PyObject: Py_ssize_t ob_refcnt PyTypeObject *ob_type cdef PyTypeObject *Py_TYPE(object) void* PyObject_Malloc(size_t) void* PyObject_Realloc(void *, size_t) void PyObject_Free(void *) ##################################################################### # 6.1 Object Protocol ##################################################################### int PyObject_Print(object o, FILE *fp, int flags) except -1 # Print an object o, on file fp. Returns -1 on error. The flags # argument is used to enable certain printing options. The only # option currently supported is Py_PRINT_RAW; if given, the str() # of the object is written instead of the repr(). bint PyObject_HasAttrString(object o, char *attr_name) # Returns 1 if o has the attribute attr_name, and 0 # otherwise. This is equivalent to the Python expression # "hasattr(o, attr_name)". This function always succeeds. object PyObject_GetAttrString(object o, char *attr_name) # Return value: New reference. Retrieve an attribute named # attr_name from object o. Returns the attribute value on success, # or NULL on failure. This is the equivalent of the Python # expression "o.attr_name". bint PyObject_HasAttr(object o, object attr_name) # Returns 1 if o has the attribute attr_name, and 0 # otherwise. This is equivalent to the Python expression # "hasattr(o, attr_name)". This function always succeeds. object PyObject_GetAttr(object o, object attr_name) # Return value: New reference. Retrieve an attribute named # attr_name from object o. Returns the attribute value on success, # or NULL on failure. This is the equivalent of the Python # expression "o.attr_name". object PyObject_GenericGetAttr(object o, object attr_name) int PyObject_SetAttrString(object o, char *attr_name, object v) except -1 # Set the value of the attribute named attr_name, for object o, to # the value v. Returns -1 on failure. This is the equivalent of # the Python statement "o.attr_name = v". int PyObject_SetAttr(object o, object attr_name, object v) except -1 # Set the value of the attribute named attr_name, for object o, to # the value v. Returns -1 on failure. This is the equivalent of # the Python statement "o.attr_name = v". int PyObject_GenericSetAttr(object o, object attr_name, object v) except -1 int PyObject_DelAttrString(object o, char *attr_name) except -1 # Delete attribute named attr_name, for object o. Returns -1 on # failure. This is the equivalent of the Python statement: "del # o.attr_name". int PyObject_DelAttr(object o, object attr_name) except -1 # Delete attribute named attr_name, for object o. Returns -1 on # failure. This is the equivalent of the Python statement "del # o.attr_name". int Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT, Py_GE object PyObject_RichCompare(object o1, object o2, int opid) # Return value: New reference. 
# Compare the values of o1 and o2 using the operation specified by # opid, which must be one of Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT, or # Py_GE, corresponding to <, <=, ==, !=, >, or >= # respectively. This is the equivalent of the Python expression # "o1 op o2", where op is the operator corresponding to # opid. Returns the value of the comparison on success, or NULL on # failure. bint PyObject_RichCompareBool(object o1, object o2, int opid) except -1 # Compare the values of o1 and o2 using the operation specified by # opid, which must be one of Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT, or # Py_GE, corresponding to <, <=, ==, !=, >, or >= # respectively. Returns -1 on error, 0 if the result is false, 1 # otherwise. This is the equivalent of the Python expression "o1 # op o2", where op is the operator corresponding to opid. int PyObject_Cmp(object o1, object o2, int *result) except -1 # Compare the values of o1 and o2 using a routine provided by o1, # if one exists, otherwise with a routine provided by o2. The # result of the comparison is returned in result. Returns -1 on # failure. This is the equivalent of the Python statement "result # = cmp(o1, o2)". int PyObject_Compare(object o1, object o2) except * # Compare the values of o1 and o2 using a routine provided by o1, # if one exists, otherwise with a routine provided by o2. Returns # the result of the comparison on success. On error, the value # returned is undefined; use PyErr_Occurred() to detect an # error. This is equivalent to the Python expression "cmp(o1, # o2)". object PyObject_Repr(object o) # Return value: New reference. # Compute a string representation of object o. Returns the string # representation on success, NULL on failure. This is the # equivalent of the Python expression "repr(o)". Called by the # repr() built-in function and by reverse quotes. object PyObject_Str(object o) # Return value: New reference. # Compute a string representation of object o. Returns the string # representation on success, NULL on failure. This is the # equivalent of the Python expression "str(o)". Called by the # str() built-in function and by the print statement. object PyObject_Unicode(object o) # Return value: New reference. # Compute a Unicode string representation of object o. Returns the # Unicode string representation on success, NULL on failure. This # is the equivalent of the Python expression "unicode(o)". Called # by the unicode() built-in function. bint PyObject_IsInstance(object inst, object cls) except -1 # Returns 1 if inst is an instance of the class cls or a subclass # of cls, or 0 if not. On error, returns -1 and sets an # exception. If cls is a type object rather than a class object, # PyObject_IsInstance() returns 1 if inst is of type cls. If cls # is a tuple, the check will be done against every entry in # cls. The result will be 1 when at least one of the checks # returns 1, otherwise it will be 0. If inst is not a class # instance and cls is neither a type object, nor a class object, # nor a tuple, inst must have a __class__ attribute -- the class # relationship of the value of that attribute with cls will be # used to determine the result of this function. # Subclass determination is done in a fairly straightforward way, # but includes a wrinkle that implementors of extensions to the # class system may want to be aware of. If A and B are class # objects, B is a subclass of A if it inherits from A either # directly or indirectly. 
If either is not a class object, a more # general mechanism is used to determine the class relationship of # the two objects. When testing if B is a subclass of A, if A is # B, PyObject_IsSubclass() returns true. If A and B are different # objects, B's __bases__ attribute is searched in a depth-first # fashion for A -- the presence of the __bases__ attribute is # considered sufficient for this determination. bint PyObject_IsSubclass(object derived, object cls) except -1 # Returns 1 if the class derived is identical to or derived from # the class cls, otherwise returns 0. In case of an error, returns # -1. If cls is a tuple, the check will be done against every # entry in cls. The result will be 1 when at least one of the # checks returns 1, otherwise it will be 0. If either derived or # cls is not an actual class object (or tuple), this function uses # the generic algorithm described above. New in version # 2.1. Changed in version 2.3: Older versions of Python did not # support a tuple as the second argument. bint PyCallable_Check(object o) # Determine if the object o is callable. Return 1 if the object is # callable and 0 otherwise. This function always succeeds. object PyObject_Call(object callable_object, object args, object kw) # Return value: New reference. # Call a callable Python object callable_object, with arguments # given by the tuple args, and named arguments given by the # dictionary kw. If no named arguments are needed, kw may be # NULL. args must not be NULL, use an empty tuple if no arguments # are needed. Returns the result of the call on success, or NULL # on failure. This is the equivalent of the Python expression # "apply(callable_object, args, kw)" or "callable_object(*args, # **kw)". object PyObject_CallObject(object callable_object, object args) # Return value: New reference. # Call a callable Python object callable_object, with arguments # given by the tuple args. If no arguments are needed, then args # may be NULL. Returns the result of the call on success, or NULL # on failure. This is the equivalent of the Python expression # "apply(callable_object, args)" or "callable_object(*args)". object PyObject_CallFunction(object callable, char *format, ...) # Return value: New reference. # Call a callable Python object callable, with a variable number # of C arguments. The C arguments are described using a # Py_BuildValue() style format string. The format may be NULL, # indicating that no arguments are provided. Returns the result of # the call on success, or NULL on failure. This is the equivalent # of the Python expression "apply(callable, args)" or # "callable(*args)". Note that if you only pass object args, # PyObject_CallFunctionObjArgs is a faster alternative. object PyObject_CallMethod(object o, char *method, char *format, ...) # Return value: New reference. # Call the method named method of object o with a variable number # of C arguments. The C arguments are described by a # Py_BuildValue() format string that should produce a tuple. The # format may be NULL, indicating that no arguments are # provided. Returns the result of the call on success, or NULL on # failure. This is the equivalent of the Python expression # "o.method(args)". Note that if you only pass object args, # PyObject_CallMethodObjArgs is a faster alternative. #object PyObject_CallFunctionObjArgs(object callable, ..., NULL) object PyObject_CallFunctionObjArgs(object callable, ...) # Return value: New reference. 
# Call a callable Python object callable, with a variable number # of PyObject* arguments. The arguments are provided as a variable # number of parameters followed by NULL. Returns the result of the # call on success, or NULL on failure. #PyObject* PyObject_CallMethodObjArgs(object o, object name, ..., NULL) object PyObject_CallMethodObjArgs(object o, object name, ...) # Return value: New reference. # Calls a method of the object o, where the name of the method is # given as a Python string object in name. It is called with a # variable number of PyObject* arguments. The arguments are # provided as a variable number of parameters followed by # NULL. Returns the result of the call on success, or NULL on # failure. long PyObject_Hash(object o) except? -1 # Compute and return the hash value of an object o. On failure, # return -1. This is the equivalent of the Python expression # "hash(o)". bint PyObject_IsTrue(object o) except -1 # Returns 1 if the object o is considered to be true, and 0 # otherwise. This is equivalent to the Python expression "not not # o". On failure, return -1. bint PyObject_Not(object o) except -1 # Returns 0 if the object o is considered to be true, and 1 # otherwise. This is equivalent to the Python expression "not # o". On failure, return -1. object PyObject_Type(object o) # Return value: New reference. # When o is non-NULL, returns a type object corresponding to the # object type of object o. On failure, raises SystemError and # returns NULL. This is equivalent to the Python expression # type(o). This function increments the reference count of the # return value. There's really no reason to use this function # instead of the common expression o->ob_type, which returns a # pointer of type PyTypeObject*, except when the incremented # reference count is needed. bint PyObject_TypeCheck(object o, PyTypeObject *type) # Return true if the object o is of type type or a subtype of # type. Both parameters must be non-NULL. Py_ssize_t PyObject_Length(object o) except -1 Py_ssize_t PyObject_Size(object o) except -1 # Return the length of object o. If the object o provides either # the sequence and mapping protocols, the sequence length is # returned. On error, -1 is returned. This is the equivalent to # the Python expression "len(o)". object PyObject_GetItem(object o, object key) # Return value: New reference. # Return element of o corresponding to the object key or NULL on # failure. This is the equivalent of the Python expression # "o[key]". int PyObject_SetItem(object o, object key, object v) except -1 # Map the object key to the value v. Returns -1 on failure. This # is the equivalent of the Python statement "o[key] = v". int PyObject_DelItem(object o, object key) except -1 # Delete the mapping for key from o. Returns -1 on failure. This # is the equivalent of the Python statement "del o[key]". int PyObject_AsFileDescriptor(object o) except -1 # Derives a file-descriptor from a Python object. If the object is # an integer or long integer, its value is returned. If not, the # object's fileno() method is called if it exists; the method must # return an integer or long integer, which is returned as the file # descriptor value. Returns -1 on failure. object PyObject_Dir(object o) # Return value: New reference. # This is equivalent to the Python expression "dir(o)", returning # a (possibly empty) list of strings appropriate for the object # argument, or NULL if there was an error. 
If the argument is # NULL, this is like the Python "dir()", returning the names of # the current locals; in this case, if no execution frame is # active then NULL is returned but PyErr_Occurred() will return # false. object PyObject_GetIter(object o) # Return value: New reference. # This is equivalent to the Python expression "iter(o)". It # returns a new iterator for the object argument, or the object # itself if the object is already an iterator. Raises TypeError # and returns NULL if the object cannot be iterated. Py_ssize_t Py_SIZE(object o) object PyObject_Format(object obj, object format_spec) # Takes an arbitrary object and returns the result of calling # obj.__format__(format_spec). # Added in Py2.6 # Type flags (tp_flags of PyTypeObject) long Py_TPFLAGS_HAVE_GETCHARBUFFER long Py_TPFLAGS_HAVE_SEQUENCE_IN long Py_TPFLAGS_HAVE_INPLACEOPS long Py_TPFLAGS_CHECKTYPES long Py_TPFLAGS_HAVE_RICHCOMPARE long Py_TPFLAGS_HAVE_WEAKREFS long Py_TPFLAGS_HAVE_ITER long Py_TPFLAGS_HAVE_CLASS long Py_TPFLAGS_HEAPTYPE long Py_TPFLAGS_BASETYPE long Py_TPFLAGS_READY long Py_TPFLAGS_READYING long Py_TPFLAGS_HAVE_GC long Py_TPFLAGS_HAVE_STACKLESS_EXTENSION long Py_TPFLAGS_HAVE_INDEX long Py_TPFLAGS_HAVE_VERSION_TAG long Py_TPFLAGS_VALID_VERSION_TAG long Py_TPFLAGS_IS_ABSTRACT long Py_TPFLAGS_HAVE_NEWBUFFER long Py_TPFLAGS_INT_SUBCLASS long Py_TPFLAGS_LONG_SUBCLASS long Py_TPFLAGS_LIST_SUBCLASS long Py_TPFLAGS_TUPLE_SUBCLASS long Py_TPFLAGS_STRING_SUBCLASS long Py_TPFLAGS_UNICODE_SUBCLASS long Py_TPFLAGS_DICT_SUBCLASS long Py_TPFLAGS_BASE_EXC_SUBCLASS long Py_TPFLAGS_TYPE_SUBCLASS long Py_TPFLAGS_DEFAULT_EXTERNAL long Py_TPFLAGS_DEFAULT_CORE long Py_TPFLAGS_DEFAULT Cython-0.26.1/Cython/Includes/cpython/bool.pxd0000664000175000017500000000251712542002467022005 0ustar stefanstefan00000000000000 cdef extern from "Python.h": ############################################################################ # 7.2.2 Boolean Objects ############################################################################ ctypedef class __builtin__.bool [object PyBoolObject]: pass # Booleans in Python are implemented as a subclass of # integers. There are only two booleans, Py_False and Py_True. As # such, the normal creation and deletion functions don't apply to # booleans. The following macros are available, however. bint PyBool_Check(object o) # Return true if o is of type PyBool_Type. #PyObject* Py_False # The Python False object. This object has no methods. It needs to # be treated just like any other object with respect to reference # counts. #PyObject* Py_True # The Python True object. This object has no methods. It needs to # be treated just like any other object with respect to reference # counts. # Py_RETURN_FALSE # Return Py_False from a function, properly incrementing its reference count. # Py_RETURN_TRUE # Return Py_True from a function, properly incrementing its reference count. object PyBool_FromLong(long v) # Return value: New reference. # Return a new reference to Py_True or Py_False depending on the truth value of v. Cython-0.26.1/Cython/Includes/cpython/pystate.pxd0000664000175000017500000000664312542002467022547 0ustar stefanstefan00000000000000# Thread and interpreter state structures and their interfaces from .object cimport PyObject cdef extern from "Python.h": # We make these opaque types. If the user wants specific attributes, # they can be declared manually. 
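    # As an illustrative aside (not part of the original header): from Cython
    # code, the PyGILState API declared below is usually exercised through a
    # ``with gil:`` block, which expands to the Ensure/Release pair.  A
    # minimal sketch, assuming a hypothetical callback name:
    #
    #     cdef void report_progress(int percent) nogil:
    #         with gil:                  # PyGILState_Ensure() under the hood
    #             print("progress:", percent)
    #         # PyGILState_Release() runs when the block exits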
ctypedef struct PyInterpreterState: pass ctypedef struct PyThreadState: pass ctypedef struct PyFrameObject: pass # This is not actually a struct, but make sure it can never be coerced to # an int or used in arithmetic expressions ctypedef struct PyGILState_STATE # The type of the trace function registered using PyEval_SetProfile() and # PyEval_SetTrace(). # Py_tracefunc return -1 when raising an exception, or 0 for success. ctypedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *) # The following values are used for 'what' for tracefunc functions enum: PyTrace_CALL PyTrace_EXCEPTION PyTrace_LINE PyTrace_RETURN PyTrace_C_CALL PyTrace_C_EXCEPTION PyTrace_C_RETURN PyInterpreterState * PyInterpreterState_New() void PyInterpreterState_Clear(PyInterpreterState *) void PyInterpreterState_Delete(PyInterpreterState *) PyThreadState * PyThreadState_New(PyInterpreterState *) void PyThreadState_Clear(PyThreadState *) void PyThreadState_Delete(PyThreadState *) PyThreadState * PyThreadState_Get() PyThreadState * PyThreadState_Swap(PyThreadState *) PyObject * PyThreadState_GetDict() int PyThreadState_SetAsyncExc(long, PyObject *) # Ensure that the current thread is ready to call the Python # C API, regardless of the current state of Python, or of its # thread lock. This may be called as many times as desired # by a thread so long as each call is matched with a call to # PyGILState_Release(). In general, other thread-state APIs may # be used between _Ensure() and _Release() calls, so long as the # thread-state is restored to its previous state before the Release(). # For example, normal use of the Py_BEGIN_ALLOW_THREADS/ # Py_END_ALLOW_THREADS macros are acceptable. # The return value is an opaque "handle" to the thread state when # PyGILState_Ensure() was called, and must be passed to # PyGILState_Release() to ensure Python is left in the same state. Even # though recursive calls are allowed, these handles can *not* be shared - # each unique call to PyGILState_Ensure must save the handle for its # call to PyGILState_Release. # When the function returns, the current thread will hold the GIL. # Failure is a fatal error. PyGILState_STATE PyGILState_Ensure() # Release any resources previously acquired. After this call, Python's # state will be the same as it was prior to the corresponding # PyGILState_Ensure() call (but generally this state will be unknown to # the caller, hence the use of the GILState API.) # Every call to PyGILState_Ensure must be matched by a call to # PyGILState_Release on the same thread. void PyGILState_Release(PyGILState_STATE) # Routines for advanced debuggers, requested by David Beazley. # Don't use unless you know what you are doing! PyInterpreterState * PyInterpreterState_Head() PyInterpreterState * PyInterpreterState_Next(PyInterpreterState *) PyThreadState * PyInterpreterState_ThreadHead(PyInterpreterState *) PyThreadState * PyThreadState_Next(PyThreadState *) Cython-0.26.1/Cython/Includes/cpython/instance.pxd0000664000175000017500000000173112542002467022653 0ustar stefanstefan00000000000000cdef extern from "Python.h": ############################################################################ # 7.5.2 Instance Objects ############################################################################ # PyTypeObject PyInstance_Type # # Type object for class instances. int PyInstance_Check(object obj) # Return true if obj is an instance. object PyInstance_New(object cls, object arg, object kw) # Return value: New reference. # Create a new instance of a specific class. 
The parameters arg # and kw are used as the positional and keyword parameters to the # object's constructor. object PyInstance_NewRaw(object cls, object dict) # Return value: New reference. # Create a new instance of a specific class without calling its # constructor. class is the class of new object. The dict # parameter will be used as the object's __dict__; if NULL, a new # dictionary will be created for the instance. Cython-0.26.1/Cython/Includes/cpython/ref.pxd0000664000175000017500000000477512542002467021636 0ustar stefanstefan00000000000000from .object cimport PyObject, PyTypeObject, Py_TYPE # legacy imports for re-export cdef extern from "Python.h": ##################################################################### # 3. Reference Counts ##################################################################### # The macros in this section are used for managing reference counts of Python objects. void Py_INCREF(object o) # Increment the reference count for object o. The object must not # be NULL; if you aren't sure that it isn't NULL, use # Py_XINCREF(). void Py_XINCREF(PyObject* o) # Increment the reference count for object o. The object may be NULL, in which case the macro has no effect. void Py_DECREF(object o) # Decrement the reference count for object o. The object must not # be NULL; if you aren't sure that it isn't NULL, use # Py_XDECREF(). If the reference count reaches zero, the object's # type's deallocation function (which must not be NULL) is # invoked. # Warning: The deallocation function can cause arbitrary Python # code to be invoked (e.g. when a class instance with a __del__() # method is deallocated). While exceptions in such code are not # propagated, the executed code has free access to all Python # global variables. This means that any object that is reachable # from a global variable should be in a consistent state before # Py_DECREF() is invoked. For example, code to delete an object # from a list should copy a reference to the deleted object in a # temporary variable, update the list data structure, and then # call Py_DECREF() for the temporary variable. void Py_XDECREF(PyObject* o) # Decrement the reference count for object o. The object may be # NULL, in which case the macro has no effect; otherwise the # effect is the same as for Py_DECREF(), and the same warning # applies. void Py_CLEAR(PyObject* o) # Decrement the reference count for object o. The object may be # NULL, in which case the macro has no effect; otherwise the # effect is the same as for Py_DECREF(), except that the argument # is also set to NULL. The warning for Py_DECREF() does not apply # with respect to the object passed because the macro carefully # uses a temporary variable and sets the argument to NULL before # decrementing its reference count. # It is a good idea to use this macro whenever decrementing the # value of a variable that might be traversed during garbage # collection. Cython-0.26.1/Cython/Includes/cpython/exc.pxd0000664000175000017500000003163113023021033021611 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ##################################################################### # 3. Exception Handling ##################################################################### # The functions described in this chapter will let you handle and # raise Python exceptions. It is important to understand some of # the basics of Python exception handling. 
It works somewhat like # the Unix errno variable: there is a global indicator (per # thread) of the last error that occurred. Most functions don't # clear this on success, but will set it to indicate the cause of # the error on failure. Most functions also return an error # indicator, usually NULL if they are supposed to return a # pointer, or -1 if they return an integer (exception: the # PyArg_*() functions return 1 for success and 0 for failure). # When a function must fail because some function it called # failed, it generally doesn't set the error indicator; the # function it called already set it. It is responsible for either # handling the error and clearing the exception or returning after # cleaning up any resources it holds (such as object references or # memory allocations); it should not continue normally if it is # not prepared to handle the error. If returning due to an error, # it is important to indicate to the caller that an error has been # set. If the error is not handled or carefully propagated, # additional calls into the Python/C API may not behave as # intended and may fail in mysterious ways. # The error indicator consists of three Python objects # corresponding to the Python variables sys.exc_type, # sys.exc_value and sys.exc_traceback. API functions exist to # interact with the error indicator in various ways. There is a # separate error indicator for each thread. void PyErr_Print() # Print a standard traceback to sys.stderr and clear the error # indicator. Call this function only when the error indicator is # set. (Otherwise it will cause a fatal error!) PyObject* PyErr_Occurred() # Return value: Borrowed reference. # Test whether the error indicator is set. If set, return the # exception type (the first argument to the last call to one of # the PyErr_Set*() functions or to PyErr_Restore()). If not set, # return NULL. You do not own a reference to the return value, so # you do not need to Py_DECREF() it. Note: Do not compare the # return value to a specific exception; use # PyErr_ExceptionMatches() instead, shown below. (The comparison # could easily fail since the exception may be an instance instead # of a class, in the case of a class exception, or it may be a # subclass of the expected exception.) bint PyErr_ExceptionMatches(object exc) # Equivalent to "PyErr_GivenExceptionMatches(PyErr_Occurred(), # exc)". This should only be called when an exception is actually # set; a memory access violation will occur if no exception has # been raised. bint PyErr_GivenExceptionMatches(object given, object exc) # Return true if the given exception matches the exception in # exc. If exc is a class object, this also returns true when given # is an instance of a subclass. If exc is a tuple, all exceptions # in the tuple (and recursively in subtuples) are searched for a # match. If given is NULL, a memory access violation will occur. void PyErr_NormalizeException(PyObject** exc, PyObject** val, PyObject** tb) # Under certain circumstances, the values returned by # PyErr_Fetch() below can be ``unnormalized'', meaning that *exc # is a class object but *val is not an instance of the same # class. This function can be used to instantiate the class in # that case. If the values are already normalized, nothing # happens. The delayed normalization is implemented to improve # performance. void PyErr_Clear() # Clear the error indicator. If the error indicator is not set, there is no effect. 
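    # A small usage sketch for the error-indicator functions above
    # (illustrative only, not part of the original header; the function
    # name is hypothetical):
    #
    #     from cpython.exc cimport PyErr_Occurred, PyErr_ExceptionMatches, PyErr_Clear
    #
    #     cdef int swallow_keyboard_interrupt() except -1:
    #         # Only inspect the indicator if an exception is actually set.
    #         if PyErr_Occurred() != NULL:
    #             if PyErr_ExceptionMatches(KeyboardInterrupt):
    #                 PyErr_Clear()      # discard the pending exception
    #             else:
    #                 return -1          # let anything else propagate
    #         return 0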
void PyErr_Fetch(PyObject** ptype, PyObject** pvalue, PyObject** ptraceback) # Retrieve the error indicator into three variables whose # addresses are passed. If the error indicator is not set, set all # three variables to NULL. If it is set, it will be cleared and # you own a reference to each object retrieved. The value and # traceback object may be NULL even when the type object is # not. Note: This function is normally only used by code that # needs to handle exceptions or by code that needs to save and # restore the error indicator temporarily. void PyErr_Restore(PyObject* type, PyObject* value, PyObject* traceback) # Set the error indicator from the three objects. If the error # indicator is already set, it is cleared first. If the objects # are NULL, the error indicator is cleared. Do not pass a NULL # type and non-NULL value or traceback. The exception type should # be a class. Do not pass an invalid exception type or # value. (Violating these rules will cause subtle problems later.) # This call takes away a reference to each object: you must own a # reference to each object before the call and after the call you # no longer own these references. (If you don't understand this, # don't use this function. I warned you.) Note: This function is # normally only used by code that needs to save and restore the # error indicator temporarily; use PyErr_Fetch() to save the # current exception state. void PyErr_SetString(object type, char *message) # This is the most common way to set the error indicator. The # first argument specifies the exception type; it is normally one # of the standard exceptions, e.g. PyExc_RuntimeError. You need # not increment its reference count. The second argument is an # error message; it is converted to a string object. void PyErr_SetObject(object type, object value) # This function is similar to PyErr_SetString() but lets you # specify an arbitrary Python object for the ``value'' of the # exception. PyObject* PyErr_Format(object exception, char *format, ...) except NULL # Return value: Always NULL. # This function sets the error indicator and returns # NULL. exception should be a Python exception (class, not an # instance). format should be a string, containing format codes, # similar to printf(). The width.precision before a format code is # parsed, but the width part is ignored. void PyErr_SetNone(object type) # This is a shorthand for "PyErr_SetObject(type, Py_None)". int PyErr_BadArgument() except 0 # This is a shorthand for "PyErr_SetString(PyExc_TypeError, # message)", where message indicates that a built-in operation was # invoked with an illegal argument. It is mostly for internal use. PyObject* PyErr_NoMemory() except NULL # Return value: Always NULL. # This is a shorthand for "PyErr_SetNone(PyExc_MemoryError)"; it # returns NULL so an object allocation function can write "return # PyErr_NoMemory();" when it runs out of memory. PyObject* PyErr_SetFromErrno(object type) except NULL # Return value: Always NULL. # This is a convenience function to raise an exception when a C # library function has returned an error and set the C variable # errno. It constructs a tuple object whose first item is the # integer errno value and whose second item is the corresponding # error message (gotten from strerror()), and then calls # "PyErr_SetObject(type, object)". On Unix, when the errno value # is EINTR, indicating an interrupted system call, this calls # PyErr_CheckSignals(), and if that set the error indicator, # leaves it set to that. 
The function always returns NULL, so a # wrapper function around a system call can write "return # PyErr_SetFromErrno(type);" when the system call returns an # error. PyObject* PyErr_SetFromErrnoWithFilename(object type, char *filename) except NULL # Return value: Always NULL. Similar to PyErr_SetFromErrno(), # with the additional behavior that if filename is not NULL, it is # passed to the constructor of type as a third parameter. In the # case of exceptions such as IOError and OSError, this is used to # define the filename attribute of the exception instance. PyObject* PyErr_SetFromWindowsErr(int ierr) except NULL # Return value: Always NULL. This is a convenience function to # raise WindowsError. If called with ierr of 0, the error code # returned by a call to GetLastError() is used instead. It calls # the Win32 function FormatMessage() to retrieve the Windows # description of error code given by ierr or GetLastError(), then # it constructs a tuple object whose first item is the ierr value # and whose second item is the corresponding error message (gotten # from FormatMessage()), and then calls # "PyErr_SetObject(PyExc_WindowsError, object)". This function # always returns NULL. Availability: Windows. PyObject* PyErr_SetExcFromWindowsErr(object type, int ierr) except NULL # Return value: Always NULL. Similar to # PyErr_SetFromWindowsErr(), with an additional parameter # specifying the exception type to be raised. Availability: # Windows. New in version 2.3. PyObject* PyErr_SetFromWindowsErrWithFilename(int ierr, char *filename) except NULL # Return value: Always NULL. Similar to # PyErr_SetFromWindowsErr(), with the additional behavior that if # filename is not NULL, it is passed to the constructor of # WindowsError as a third parameter. Availability: Windows. PyObject* PyErr_SetExcFromWindowsErrWithFilename(object type, int ierr, char *filename) except NULL # Return value: Always NULL. # Similar to PyErr_SetFromWindowsErrWithFilename(), with an # additional parameter specifying the exception type to be # raised. Availability: Windows. void PyErr_BadInternalCall() # This is a shorthand for "PyErr_SetString(PyExc_TypeError, # message)", where message indicates that an internal operation # (e.g. a Python/C API function) was invoked with an illegal # argument. It is mostly for internal use. int PyErr_WarnEx(object category, char *message, int stacklevel) except -1 # Issue a warning message. The category argument is a warning # category (see below) or NULL; the message argument is a message # string. stacklevel is a positive number giving a number of stack # frames; the warning will be issued from the currently executing # line of code in that stack frame. A stacklevel of 1 is the # function calling PyErr_WarnEx(), 2 is the function above that, # and so forth. int PyErr_WarnExplicit(object category, char *message, char *filename, int lineno, char *module, object registry) except -1 # Issue a warning message with explicit control over all warning # attributes. This is a straightforward wrapper around the Python # function warnings.warn_explicit(), see there for more # information. The module and registry arguments may be set to # NULL to get the default effect described there. int PyErr_CheckSignals() except -1 # This function interacts with Python's signal handling. It checks # whether a signal has been sent to the processes and if so, # invokes the corresponding signal handler. If the signal module # is supported, this can invoke a signal handler written in # Python. 
In all cases, the default effect for SIGINT is to raise # the KeyboardInterrupt exception. If an exception is raised the # error indicator is set and the function returns 1; otherwise the # function returns 0. The error indicator may or may not be # cleared if it was previously set. void PyErr_SetInterrupt() nogil # This function simulates the effect of a SIGINT signal arriving # -- the next time PyErr_CheckSignals() is called, # KeyboardInterrupt will be raised. It may be called without # holding the interpreter lock. object PyErr_NewException(char *name, object base, object dict) # Return value: New reference. # This utility function creates and returns a new exception # object. The name argument must be the name of the new exception, # a C string of the form module.class. The base and dict arguments # are normally NULL. This creates a class object derived from # Exception (accessible in C as PyExc_Exception). void PyErr_WriteUnraisable(object obj) # This utility function prints a warning message to sys.stderr # when an exception has been set but it is impossible for the # interpreter to actually raise the exception. It is used, for # example, when an exception occurs in an __del__() method. # # The function is called with a single argument obj that # identifies the context in which the unraisable exception # occurred. The repr of obj will be printed in the warning # message. Cython-0.26.1/Cython/Includes/cpython/float.pxd0000664000175000017500000000262012542002467022152 0ustar stefanstefan00000000000000cdef extern from "Python.h": ############################################################################ # 7.2.3 ############################################################################ # PyFloatObject # # This subtype of PyObject represents a Python floating point object. # PyTypeObject PyFloat_Type # # This instance of PyTypeObject represents the Python floating # point type. This is the same object as float and # types.FloatType. bint PyFloat_Check(object p) # Return true if its argument is a PyFloatObject or a subtype of # PyFloatObject. bint PyFloat_CheckExact(object p) # Return true if its argument is a PyFloatObject, but not a # subtype of PyFloatObject. object PyFloat_FromString(object str, char **pend) # Return value: New reference. # Create a PyFloatObject object based on the string value in str, # or NULL on failure. The pend argument is ignored. It remains # only for backward compatibility. object PyFloat_FromDouble(double v) # Return value: New reference. # Create a PyFloatObject object from v, or NULL on failure. double PyFloat_AsDouble(object pyfloat) except? -1 # Return a C double representation of the contents of pyfloat. double PyFloat_AS_DOUBLE(object pyfloat) # Return a C double representation of the contents of pyfloat, but # without error checking. Cython-0.26.1/Cython/Includes/cpython/mapping.pxd0000664000175000017500000000520512542002467022502 0ustar stefanstefan00000000000000cdef extern from "Python.h": ############################################################################ # 6.4 Mapping Protocol ############################################################################ bint PyMapping_Check(object o) # Return 1 if the object provides mapping protocol, and 0 # otherwise. This function always succeeds. Py_ssize_t PyMapping_Length(object o) except -1 # Returns the number of keys in object o on success, and -1 on # failure. For objects that do not provide mapping protocol, this # is equivalent to the Python expression "len(o)". 
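    # A brief usage sketch for the two functions above (illustrative only,
    # not part of the original header; the function name is hypothetical):
    #
    #     from cpython.mapping cimport PyMapping_Check, PyMapping_Length
    #
    #     def describe_mapping(obj):
    #         if not PyMapping_Check(obj):
    #             return "not a mapping"
    #         return "mapping with %d key(s)" % PyMapping_Length(obj)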
int PyMapping_DelItemString(object o, char *key) except -1 # Remove the mapping for object key from the object o. Return -1 # on failure. This is equivalent to the Python statement "del # o[key]". int PyMapping_DelItem(object o, object key) except -1 # Remove the mapping for object key from the object o. Return -1 # on failure. This is equivalent to the Python statement "del # o[key]". bint PyMapping_HasKeyString(object o, char *key) # On success, return 1 if the mapping object has the key key and 0 # otherwise. This is equivalent to the Python expression # "o.has_key(key)". This function always succeeds. bint PyMapping_HasKey(object o, object key) # Return 1 if the mapping object has the key key and 0 # otherwise. This is equivalent to the Python expression # "o.has_key(key)". This function always succeeds. object PyMapping_Keys(object o) # Return value: New reference. # On success, return a list of the keys in object o. On failure, # return NULL. This is equivalent to the Python expression # "o.keys()". object PyMapping_Values(object o) # Return value: New reference. # On success, return a list of the values in object o. On failure, # return NULL. This is equivalent to the Python expression # "o.values()". object PyMapping_Items(object o) # Return value: New reference. # On success, return a list of the items in object o, where each # item is a tuple containing a key-value pair. On failure, return # NULL. This is equivalent to the Python expression "o.items()". object PyMapping_GetItemString(object o, char *key) # Return value: New reference. # Return element of o corresponding to the object key or NULL on # failure. This is the equivalent of the Python expression # "o[key]". int PyMapping_SetItemString(object o, char *key, object v) except -1 # Map the object key to the value v in object o. Returns -1 on # failure. This is the equivalent of the Python statement "o[key] # = v". Cython-0.26.1/Cython/Includes/cpython/sequence.pxd0000664000175000017500000001357012542002467022663 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # 6.3 Sequence Protocol ############################################################################ bint PySequence_Check(object o) # Return 1 if the object provides sequence protocol, and 0 # otherwise. This function always succeeds. Py_ssize_t PySequence_Size(object o) except -1 # Returns the number of objects in sequence o on success, and -1 # on failure. For objects that do not provide sequence protocol, # this is equivalent to the Python expression "len(o)". Py_ssize_t PySequence_Length(object o) except -1 # Alternate name for PySequence_Size(). object PySequence_Concat(object o1, object o2) # Return value: New reference. # Return the concatenation of o1 and o2 on success, and NULL on # failure. This is the equivalent of the Python expression "o1 + # o2". object PySequence_Repeat(object o, Py_ssize_t count) # Return value: New reference. # Return the result of repeating sequence object o count times, or # NULL on failure. This is the equivalent of the Python expression # "o * count". object PySequence_InPlaceConcat(object o1, object o2) # Return value: New reference. # Return the concatenation of o1 and o2 on success, and NULL on # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python expression "o1 += o2". object PySequence_InPlaceRepeat(object o, Py_ssize_t count) # Return value: New reference. 
# Return the result of repeating sequence object o count times, or # NULL on failure. The operation is done in-place when o supports # it. This is the equivalent of the Python expression "o *= # count". object PySequence_GetItem(object o, Py_ssize_t i) # Return value: New reference. # Return the ith element of o, or NULL on failure. This is the # equivalent of the Python expression "o[i]". object PySequence_GetSlice(object o, Py_ssize_t i1, Py_ssize_t i2) # Return value: New reference. # Return the slice of sequence object o between i1 and i2, or NULL # on failure. This is the equivalent of the Python expression # "o[i1:i2]". int PySequence_SetItem(object o, Py_ssize_t i, object v) except -1 # Assign object v to the ith element of o. Returns -1 on # failure. This is the equivalent of the Python statement "o[i] = # v". This function does not steal a reference to v. int PySequence_DelItem(object o, Py_ssize_t i) except -1 # Delete the ith element of object o. Returns -1 on failure. This # is the equivalent of the Python statement "del o[i]". int PySequence_SetSlice(object o, Py_ssize_t i1, Py_ssize_t i2, object v) except -1 # Assign the sequence object v to the slice in sequence object o # from i1 to i2. This is the equivalent of the Python statement # "o[i1:i2] = v". int PySequence_DelSlice(object o, Py_ssize_t i1, Py_ssize_t i2) except -1 # Delete the slice in sequence object o from i1 to i2. Returns -1 # on failure. This is the equivalent of the Python statement "del # o[i1:i2]". int PySequence_Count(object o, object value) except -1 # Return the number of occurrences of value in o, that is, return # the number of keys for which o[key] == value. On failure, return # -1. This is equivalent to the Python expression # "o.count(value)". int PySequence_Contains(object o, object value) except -1 # Determine if o contains value. If an item in o is equal to # value, return 1, otherwise return 0. On error, return -1. This # is equivalent to the Python expression "value in o". Py_ssize_t PySequence_Index(object o, object value) except -1 # Return the first index i for which o[i] == value. On error, # return -1. This is equivalent to the Python expression # "o.index(value)". object PySequence_List(object o) # Return value: New reference. # Return a list object with the same contents as the arbitrary # sequence o. The returned list is guaranteed to be new. object PySequence_Tuple(object o) # Return value: New reference. # Return a tuple object with the same contents as the arbitrary # sequence o or NULL on failure. If o is a tuple, a new reference # will be returned, otherwise a tuple will be constructed with the # appropriate contents. This is equivalent to the Python # expression "tuple(o)". object PySequence_Fast(object o, char *m) # Return value: New reference. # Returns the sequence o as a tuple, unless it is already a tuple # or list, in which case o is returned. Use # PySequence_Fast_GET_ITEM() to access the members of the # result. Returns NULL on failure. If the object is not a # sequence, raises TypeError with m as the message text. PyObject* PySequence_Fast_GET_ITEM(object o, Py_ssize_t i) # Return value: Borrowed reference. # Return the ith element of o, assuming that o was returned by # PySequence_Fast(), o is not NULL, and that i is within bounds. PyObject** PySequence_Fast_ITEMS(object o) # Return the underlying array of PyObject pointers. Assumes that o # was returned by PySequence_Fast() and o is not NULL. object PySequence_ITEM(object o, Py_ssize_t i) # Return value: New reference. 
# Return the ith element of o or NULL on failure. Macro form of # PySequence_GetItem() but without checking that # PySequence_Check(o) is true and without adjustment for negative # indices. Py_ssize_t PySequence_Fast_GET_SIZE(object o) # Returns the length of o, assuming that o was returned by # PySequence_Fast() and that o is not NULL. The size can also be # gotten by calling PySequence_Size() on o, but # PySequence_Fast_GET_SIZE() is faster because it can assume o is # a list or tuple. Cython-0.26.1/Cython/Includes/cpython/tuple.pxd0000664000175000017500000000620613143605603022201 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # Tuples ############################################################################ bint PyTuple_Check(object p) # Return true if p is a tuple object or an instance of a subtype # of the tuple type. bint PyTuple_CheckExact(object p) # Return true if p is a tuple object, but not an instance of a subtype of the tuple type. tuple PyTuple_New(Py_ssize_t len) # Return value: New reference. # Return a new tuple object of size len, or NULL on failure. tuple PyTuple_Pack(Py_ssize_t n, ...) # Return value: New reference. # Return a new tuple object of size n, or NULL on failure. The # tuple values are initialized to the subsequent n C arguments # pointing to Python objects. "PyTuple_Pack(2, a, b)" is # equivalent to "Py_BuildValue("(OO)", a, b)". Py_ssize_t PyTuple_Size(object p) except -1 # Take a pointer to a tuple object, and return the size of that tuple. Py_ssize_t PyTuple_GET_SIZE(object p) # Return the size of the tuple p, which must be non-NULL and point # to a tuple; no error checking is performed. PyObject* PyTuple_GetItem(object p, Py_ssize_t pos) except NULL # Return value: Borrowed reference. # Return the object at position pos in the tuple pointed to by # p. If pos is out of bounds, return NULL and sets an IndexError # exception. PyObject* PyTuple_GET_ITEM(object p, Py_ssize_t pos) # Return value: Borrowed reference. # Like PyTuple_GetItem(), but does no checking of its arguments. tuple PyTuple_GetSlice(object p, Py_ssize_t low, Py_ssize_t high) # Return value: New reference. # Take a slice of the tuple pointed to by p from low to high and return it as a new tuple. int PyTuple_SetItem(object p, Py_ssize_t pos, object o) except -1 # Insert a reference to object o at position pos of the tuple # pointed to by p. Return 0 on success. Note: This function # ``steals'' a reference to o. void PyTuple_SET_ITEM(object p, Py_ssize_t pos, object o) # Like PyTuple_SetItem(), but does no error checking, and should # only be used to fill in brand new tuples. Note: This function # ``steals'' a reference to o. int _PyTuple_Resize(PyObject **p, Py_ssize_t newsize) except -1 # Can be used to resize a tuple. newsize will be the new length of # the tuple. Because tuples are supposed to be immutable, this # should only be used if there is only one reference to the # object. Do not use this if the tuple may already be known to # some other part of the code. The tuple will always grow or # shrink at the end. Think of this as destroying the old tuple and # creating a new one, only more efficiently. Returns 0 on # success. Client code should never assume that the resulting # value of *p will be the same as before calling this function. If # the object referenced by *p is replaced, the original *p is # destroyed. 
On failure, returns -1 and sets *p to NULL, and # raises MemoryError or SystemError. Cython-0.26.1/Cython/Includes/cpython/complex.pxd0000664000175000017500000000336112542002467022517 0ustar stefanstefan00000000000000 cdef extern from "Python.h": ctypedef struct Py_complex: double imag double real ############################################################################ # 7.2.5.2 Complex Numbers as Python Objects ############################################################################ # PyComplexObject # This subtype of PyObject represents a Python complex number object. ctypedef class __builtin__.complex [object PyComplexObject]: cdef Py_complex cval # not making these available to keep them read-only: #cdef double imag "cval.imag" #cdef double real "cval.real" # PyTypeObject PyComplex_Type # This instance of PyTypeObject represents the Python complex # number type. It is the same object as complex and # types.ComplexType. bint PyComplex_Check(object p) # Return true if its argument is a PyComplexObject or a subtype of # PyComplexObject. bint PyComplex_CheckExact(object p) # Return true if its argument is a PyComplexObject, but not a subtype of PyComplexObject. object PyComplex_FromCComplex(Py_complex v) # Return value: New reference. # Create a new Python complex number object from a C Py_complex value. object PyComplex_FromDoubles(double real, double imag) # Return value: New reference. # Return a new PyComplexObject object from real and imag. double PyComplex_RealAsDouble(object op) except? -1 # Return the real part of op as a C double. double PyComplex_ImagAsDouble(object op) except? -1 # Return the imaginary part of op as a C double. Py_complex PyComplex_AsCComplex(object op) # Return the Py_complex value of the complex number op. # # Returns (-1+0i) in case of an error Cython-0.26.1/Cython/Includes/cpython/unicode.pxd0000664000175000017500000006236113023021033022464 0ustar stefanstefan00000000000000cdef extern from *: # Return true if the object o is a Unicode object or an instance # of a Unicode subtype. Changed in version 2.2: Allowed subtypes # to be accepted. bint PyUnicode_Check(object o) # Return true if the object o is a Unicode object, but not an # instance of a subtype. New in version 2.2. bint PyUnicode_CheckExact(object o) # Return the size of the object. o has to be a PyUnicodeObject # (not checked). Py_ssize_t PyUnicode_GET_SIZE(object o) # Return the size of the object's internal buffer in bytes. o has # to be a PyUnicodeObject (not checked). Py_ssize_t PyUnicode_GET_DATA_SIZE(object o) # Return a pointer to the internal Py_UNICODE buffer of the # object. o has to be a PyUnicodeObject (not checked). Py_UNICODE* PyUnicode_AS_UNICODE(object o) # Return a pointer to the internal buffer of the object. o has to # be a PyUnicodeObject (not checked). char* PyUnicode_AS_DATA(object o) # Return 1 or 0 depending on whether ch is a whitespace character. bint Py_UNICODE_ISSPACE(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a lowercase character. bint Py_UNICODE_ISLOWER(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is an uppercase character. bint Py_UNICODE_ISUPPER(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a titlecase character. bint Py_UNICODE_ISTITLE(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a linebreak character. bint Py_UNICODE_ISLINEBREAK(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a decimal character. bint Py_UNICODE_ISDECIMAL(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a digit character. 
bint Py_UNICODE_ISDIGIT(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is a numeric character. bint Py_UNICODE_ISNUMERIC(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is an alphabetic character. bint Py_UNICODE_ISALPHA(Py_UCS4 ch) # Return 1 or 0 depending on whether ch is an alphanumeric character. bint Py_UNICODE_ISALNUM(Py_UCS4 ch) # Return the character ch converted to lower case. # Used to return a Py_UNICODE value before Py3.3. Py_UCS4 Py_UNICODE_TOLOWER(Py_UCS4 ch) # Return the character ch converted to upper case. # Used to return a Py_UNICODE value before Py3.3. Py_UCS4 Py_UNICODE_TOUPPER(Py_UCS4 ch) # Return the character ch converted to title case. # Used to return a Py_UNICODE value before Py3.3. Py_UCS4 Py_UNICODE_TOTITLE(Py_UCS4 ch) # Return the character ch converted to a decimal positive # integer. Return -1 if this is not possible. This macro does not # raise exceptions. int Py_UNICODE_TODECIMAL(Py_UCS4 ch) # Return the character ch converted to a single digit # integer. Return -1 if this is not possible. This macro does not # raise exceptions. int Py_UNICODE_TODIGIT(Py_UCS4 ch) # Return the character ch converted to a double. Return -1.0 if # this is not possible. This macro does not raise exceptions. double Py_UNICODE_TONUMERIC(Py_UCS4 ch) # To create Unicode objects and access their basic sequence # properties, use these APIs: # Create a Unicode Object from the Py_UNICODE buffer u of the # given size. u may be NULL which causes the contents to be # undefined. It is the user's responsibility to fill in the needed # data. The buffer is copied into the new object. If the buffer is # not NULL, the return value might be a shared object. Therefore, # modification of the resulting Unicode object is only allowed # when u is NULL. unicode PyUnicode_FromUnicode(Py_UNICODE *u, Py_ssize_t size) # Create a Unicode Object from the given Unicode code point ordinal. # # The ordinal must be in range(0x10000) on narrow Python builds # (UCS2), and range(0x110000) on wide builds (UCS4). A ValueError # is raised in case it is not. unicode PyUnicode_FromOrdinal(int ordinal) # Return a read-only pointer to the Unicode object's internal # Py_UNICODE buffer, NULL if unicode is not a Unicode object. Py_UNICODE* PyUnicode_AsUnicode(object o) except NULL # Return the length of the Unicode object. Py_ssize_t PyUnicode_GetSize(object o) except -1 # Coerce an encoded object obj to an Unicode object and return a # reference with incremented refcount. # String and other char buffer compatible objects are decoded # according to the given encoding and using the error handling # defined by errors. Both can be NULL to have the interface use # the default values (see the next section for details). # All other objects, including Unicode objects, cause a TypeError # to be set. object PyUnicode_FromEncodedObject(object o, char *encoding, char *errors) # Shortcut for PyUnicode_FromEncodedObject(obj, NULL, "strict") # which is used throughout the interpreter whenever coercion to # Unicode is needed. object PyUnicode_FromObject(object obj) # If the platform supports wchar_t and provides a header file # wchar.h, Python can interface directly to this type using the # following functions. Support is optimized if Python's own # Py_UNICODE type is identical to the system's wchar_t. #ctypedef int wchar_t # Create a Unicode object from the wchar_t buffer w of the given # size. Return NULL on failure. 
#PyObject* PyUnicode_FromWideChar(wchar_t *w, Py_ssize_t size) #Py_ssize_t PyUnicode_AsWideChar(object o, wchar_t *w, Py_ssize_t size) # Unicode Methods # Concat two strings giving a new Unicode string. # Return value: New reference. unicode PyUnicode_Concat(object left, object right) # Split a string giving a list of Unicode strings. If sep is NULL, # splitting will be done at all whitespace substrings. Otherwise, # splits occur at the given separator. At most maxsplit splits will # be done. If negative, no limit is set. Separators are not included # in the resulting list. # Return value: New reference. list PyUnicode_Split(object s, object sep, Py_ssize_t maxsplit) # Split a Unicode string at line breaks, returning a list of Unicode # strings. CRLF is considered to be one line break. If keepend is 0, # the Line break characters are not included in the resulting strings. # Return value: New reference. list PyUnicode_Splitlines(object s, bint keepend) # Translate a string by applying a character mapping table to it and # return the resulting Unicode object. # # The mapping table must map Unicode ordinal integers to Unicode ordinal # integers or None (causing deletion of the character). # # Mapping tables need only provide the __getitem__() interface; # dictionaries and sequences work well. Unmapped character ordinals (ones # which cause a LookupError) are left untouched and are copied as-is. # # errors has the usual meaning for codecs. It may be NULL which indicates # to use the default error handling. # Return value: New reference. unicode PyUnicode_Translate(object str, object table, const char *errors) # Join a sequence of strings using the given separator and return the # resulting Unicode string. # Return value: New reference. unicode PyUnicode_Join(object separator, object seq) # Return 1 if substr matches str[start:end] at the given tail end # (direction == -1 means to do a prefix match, direction == 1 a # suffix match), 0 otherwise. # Return -1 if an error occurred. Py_ssize_t PyUnicode_Tailmatch(object str, object substr, Py_ssize_t start, Py_ssize_t end, int direction) except -1 # Return the first position of substr in str[start:end] using the given # direction (direction == 1 means to do a forward search, direction == -1 # a backward search). The return value is the index of the first match; # a value of -1 indicates that no match was found, and -2 indicates that an # error occurred and an exception has been set. Py_ssize_t PyUnicode_Find(object str, object substr, Py_ssize_t start, Py_ssize_t end, int direction) except -2 # Return the first position of the character ch in str[start:end] using # the given direction (direction == 1 means to do a forward search, # direction == -1 a backward search). The return value is the index of # the first match; a value of -1 indicates that no match was found, and # -2 indicates that an error occurred and an exception has been set. # New in version 3.3. Py_ssize_t PyUnicode_FindChar(object str, Py_UCS4 ch, Py_ssize_t start, Py_ssize_t end, int direction) except -2 # Return the number of non-overlapping occurrences of substr in # str[start:end]. Return -1 if an error occurred. Py_ssize_t PyUnicode_Count(object str, object substr, Py_ssize_t start, Py_ssize_t end) except -1 # Replace at most maxcount occurrences of substr in str with replstr and # return the resulting Unicode object. maxcount == -1 means replace all # occurrences. # Return value: New reference. 
unicode PyUnicode_Replace(object str, object substr, object replstr, Py_ssize_t maxcount) # Compare two strings and return -1, 0, 1 for less than, # equal, and greater than, respectively. int PyUnicode_Compare(object left, object right) except? -1 # Compare a unicode object, uni, with string and return -1, 0, 1 for less than, # equal, and greater than, respectively. It is best to pass only ASCII-encoded # strings, but the function interprets the input string as ISO-8859-1 if it # contains non-ASCII characters. int PyUnicode_CompareWithASCIIString(object uni, char *string) except? -1 # Rich compare two unicode strings and return one of the following: # # NULL in case an exception was raised # Py_True or Py_False for successful comparisons # Py_NotImplemented in case the type combination is unknown # # Note that Py_EQ and Py_NE comparisons can cause a UnicodeWarning in case # the conversion of the arguments to Unicode fails with a UnicodeDecodeError. # # Possible values for op are Py_GT, Py_GE, Py_EQ, Py_NE, Py_LT, and Py_LE. object PyUnicode_RichCompare(object left, object right, int op) # Return a new string object from format and args; this is analogous to # format % args. # Return value: New reference. unicode PyUnicode_Format(object format, object args) # Check whether element is contained in container and return true or false # accordingly. # # element has to coerce to a one element Unicode string. -1 is returned # if there was an error. int PyUnicode_Contains(object container, object element) except -1 # Intern the argument *string in place. The argument must be the address # of a pointer variable pointing to a Python unicode string object. If # there is an existing interned string that is the same as *string, it sets # *string to it (decrementing the reference count of the old string object # and incrementing the reference count of the interned string object), # otherwise it leaves *string alone and interns it (incrementing its reference # count). (Clarification: even though there is a lot of talk about reference # counts, think of this function as reference-count-neutral; you own the object # after the call if and only if you owned it before the call.) #void PyUnicode_InternInPlace(PyObject **string) # A combination of PyUnicode_FromString() and PyUnicode_InternInPlace(), # returning either a new unicode string object that has been interned, or # a new ("owned") reference to an earlier interned string object with the # same value. unicode PyUnicode_InternFromString(const char *v) # Codecs # Create a Unicode object by decoding size bytes of the encoded # string s. encoding and errors have the same meaning as the # parameters of the same name in the unicode() builtin # function. The codec to be used is looked up using the Python # codec registry. Return NULL if an exception was raised by the # codec. object PyUnicode_Decode(char *s, Py_ssize_t size, char *encoding, char *errors) # Encode the Py_UNICODE buffer of the given size and return a # Python string object. encoding and errors have the same meaning # as the parameters of the same name in the Unicode encode() # method. The codec to be used is looked up using the Python codec # registry. Return NULL if an exception was raised by the codec. object PyUnicode_Encode(Py_UNICODE *s, Py_ssize_t size, char *encoding, char *errors) # Encode a Unicode object and return the result as Python string # object. encoding and errors have the same meaning as the # parameters of the same name in the Unicode encode() method. 
The # codec to be used is looked up using the Python codec # registry. Return NULL if an exception was raised by the codec. object PyUnicode_AsEncodedString(object unicode, char *encoding, char *errors) # These are the UTF-8 codec APIs: # Create a Unicode object by decoding size bytes of the UTF-8 # encoded string s. Return NULL if an exception was raised by the # codec. unicode PyUnicode_DecodeUTF8(char *s, Py_ssize_t size, char *errors) # If consumed is NULL, behave like PyUnicode_DecodeUTF8(). If # consumed is not NULL, trailing incomplete UTF-8 byte sequences # will not be treated as an error. Those bytes will not be decoded # and the number of bytes that have been decoded will be stored in # consumed. New in version 2.4. unicode PyUnicode_DecodeUTF8Stateful(char *s, Py_ssize_t size, char *errors, Py_ssize_t *consumed) # Encode the Py_UNICODE buffer of the given size using UTF-8 and # return a Python string object. Return NULL if an exception was # raised by the codec. bytes PyUnicode_EncodeUTF8(Py_UNICODE *s, Py_ssize_t size, char *errors) # Encode a Unicode objects using UTF-8 and return the result as Python string object. Error handling is ``strict''. Return NULL if an exception was raised by the codec. bytes PyUnicode_AsUTF8String(object unicode) # These are the UTF-16 codec APIs: # Decode length bytes from a UTF-16 encoded buffer string and # return the corresponding Unicode object. errors (if non-NULL) # defines the error handling. It defaults to ``strict''. # # If byteorder is non-NULL, the decoder starts decoding using the # given byte order: # # *byteorder == -1: little endian # *byteorder == 0: native order # *byteorder == 1: big endian # # and then switches if the first two bytes of the input data are a # byte order mark (BOM) and the specified byte order is native # order. This BOM is not copied into the resulting Unicode # string. After completion, *byteorder is set to the current byte # order at the. # # If byteorder is NULL, the codec starts in native order mode. unicode PyUnicode_DecodeUTF16(char *s, Py_ssize_t size, char *errors, int *byteorder) # If consumed is NULL, behave like PyUnicode_DecodeUTF16(). If # consumed is not NULL, PyUnicode_DecodeUTF16Stateful() will not # treat trailing incomplete UTF-16 byte sequences (such as an odd # number of bytes or a split surrogate pair) as an error. Those # bytes will not be decoded and the number of bytes that have been # decoded will be stored in consumed. New in version 2.4. unicode PyUnicode_DecodeUTF16Stateful(char *s, Py_ssize_t size, char *errors, int *byteorder, Py_ssize_t *consumed) # Return a Python string object holding the UTF-16 encoded value # of the Unicode data in s. If byteorder is not 0, output is # written according to the following byte order: # # byteorder == -1: little endian # byteorder == 0: native byte order (writes a BOM mark) # byteorder == 1: big endian # # If byteorder is 0, the output string will always start with the # Unicode BOM mark (U+FEFF). In the other two modes, no BOM mark # is prepended. # # If Py_UNICODE_WIDE is defined, a single Py_UNICODE value may get # represented as a surrogate pair. If it is not defined, each # Py_UNICODE values is interpreted as an UCS-2 character. bytes PyUnicode_EncodeUTF16(Py_UNICODE *s, Py_ssize_t size, char *errors, int byteorder) # Return a Python string using the UTF-16 encoding in native byte # order. The string always starts with a BOM mark. Error handling # is ``strict''. Return NULL if an exception was raised by the # codec. 
bytes PyUnicode_AsUTF16String(object unicode) # These are the ``Unicode Escape'' codec APIs: # Create a Unicode object by decoding size bytes of the # Unicode-Escape encoded string s. Return NULL if an exception was # raised by the codec. object PyUnicode_DecodeUnicodeEscape(char *s, Py_ssize_t size, char *errors) # Encode the Py_UNICODE buffer of the given size using # Unicode-Escape and return a Python string object. Return NULL if # an exception was raised by the codec. object PyUnicode_EncodeUnicodeEscape(Py_UNICODE *s, Py_ssize_t size) # Encode a Unicode objects using Unicode-Escape and return the # result as Python string object. Error handling is # ``strict''. Return NULL if an exception was raised by the codec. object PyUnicode_AsUnicodeEscapeString(object unicode) # These are the ``Raw Unicode Escape'' codec APIs: # Create a Unicode object by decoding size bytes of the # Raw-Unicode-Escape encoded string s. Return NULL if an exception # was raised by the codec. object PyUnicode_DecodeRawUnicodeEscape(char *s, Py_ssize_t size, char *errors) # Encode the Py_UNICODE buffer of the given size using # Raw-Unicode-Escape and return a Python string object. Return # NULL if an exception was raised by the codec. object PyUnicode_EncodeRawUnicodeEscape(Py_UNICODE *s, Py_ssize_t size, char *errors) # Encode a Unicode objects using Raw-Unicode-Escape and return the # result as Python string object. Error handling is # ``strict''. Return NULL if an exception was raised by the codec. object PyUnicode_AsRawUnicodeEscapeString(object unicode) # These are the Latin-1 codec APIs: Latin-1 corresponds to the first 256 Unicode ordinals and only these are accepted by the codecs during encoding. # Create a Unicode object by decoding size bytes of the Latin-1 # encoded string s. Return NULL if an exception was raised by the # codec. unicode PyUnicode_DecodeLatin1(char *s, Py_ssize_t size, char *errors) # Encode the Py_UNICODE buffer of the given size using Latin-1 and # return a Python bytes object. Return NULL if an exception was # raised by the codec. bytes PyUnicode_EncodeLatin1(Py_UNICODE *s, Py_ssize_t size, char *errors) # Encode a Unicode objects using Latin-1 and return the result as # Python bytes object. Error handling is ``strict''. Return NULL # if an exception was raised by the codec. bytes PyUnicode_AsLatin1String(object unicode) # These are the ASCII codec APIs. Only 7-bit ASCII data is # accepted. All other codes generate errors. # Create a Unicode object by decoding size bytes of the ASCII # encoded string s. Return NULL if an exception was raised by the # codec. unicode PyUnicode_DecodeASCII(char *s, Py_ssize_t size, char *errors) # Encode the Py_UNICODE buffer of the given size using ASCII and # return a Python bytes object. Return NULL if an exception was # raised by the codec. bytes PyUnicode_EncodeASCII(Py_UNICODE *s, Py_ssize_t size, char *errors) # Encode a Unicode objects using ASCII and return the result as # Python bytes object. Error handling is ``strict''. Return NULL # if an exception was raised by the codec. bytes PyUnicode_AsASCIIString(object o) # These are the mapping codec APIs: # # This codec is special in that it can be used to implement many # different codecs (and this is in fact what was done to obtain most # of the standard codecs included in the encodings package). The codec # uses mapping to encode and decode characters. 
# # Decoding mappings must map single string characters to single # Unicode characters, integers (which are then interpreted as Unicode # ordinals) or None (meaning "undefined mapping" and causing an # error). # # Encoding mappings must map single Unicode characters to single # string characters, integers (which are then interpreted as Latin-1 # ordinals) or None (meaning "undefined mapping" and causing an # error). # # The mapping objects provided must only support the __getitem__ # mapping interface. # # If a character lookup fails with a LookupError, the character is # copied as-is meaning that its ordinal value will be interpreted as # Unicode or Latin-1 ordinal resp. Because of this, mappings only need # to contain those mappings which map characters to different code # points. # Create a Unicode object by decoding size bytes of the encoded # string s using the given mapping object. Return NULL if an # exception was raised by the codec. If mapping is NULL latin-1 # decoding will be done. Else it can be a dictionary mapping byte # or a unicode string, which is treated as a lookup table. Byte # values greater that the length of the string and U+FFFE # "characters" are treated as "undefined mapping". Changed in # version 2.4: Allowed unicode string as mapping argument. object PyUnicode_DecodeCharmap(char *s, Py_ssize_t size, object mapping, char *errors) # Encode the Py_UNICODE buffer of the given size using the given # mapping object and return a Python string object. Return NULL if # an exception was raised by the codec. # # Deprecated since version 3.3, will be removed in version 4.0. object PyUnicode_EncodeCharmap(Py_UNICODE *s, Py_ssize_t size, object mapping, char *errors) # Encode a Unicode objects using the given mapping object and # return the result as Python string object. Error handling is # ``strict''. Return NULL if an exception was raised by the codec. object PyUnicode_AsCharmapString(object o, object mapping) # The following codec API is special in that maps Unicode to Unicode. # Translate a Py_UNICODE buffer of the given length by applying a # character mapping table to it and return the resulting Unicode # object. Return NULL when an exception was raised by the codec. # # The mapping table must map Unicode ordinal integers to Unicode # ordinal integers or None (causing deletion of the character). # # Mapping tables need only provide the __getitem__() interface; # dictionaries and sequences work well. Unmapped character # ordinals (ones which cause a LookupError) are left untouched and # are copied as-is. # # Deprecated since version 3.3, will be removed in version 4.0. object PyUnicode_TranslateCharmap(Py_UNICODE *s, Py_ssize_t size, object table, char *errors) # These are the MBCS codec APIs. They are currently only available on # Windows and use the Win32 MBCS converters to implement the # conversions. Note that MBCS (or DBCS) is a class of encodings, not # just one. The target encoding is defined by the user settings on the # machine running the codec. # Create a Unicode object by decoding size bytes of the MBCS # encoded string s. Return NULL if an exception was raised by the # codec. unicode PyUnicode_DecodeMBCS(char *s, Py_ssize_t size, char *errors) # If consumed is NULL, behave like PyUnicode_DecodeMBCS(). If # consumed is not NULL, PyUnicode_DecodeMBCSStateful() will not # decode trailing lead byte and the number of bytes that have been # decoded will be stored in consumed. New in version 2.5. 
# NOTE: Python 2.x uses 'int' values for 'size' and 'consumed' (changed in 3.0) unicode PyUnicode_DecodeMBCSStateful(char *s, Py_ssize_t size, char *errors, Py_ssize_t *consumed) # Encode the Py_UNICODE buffer of the given size using MBCS and # return a Python string object. Return NULL if an exception was # raised by the codec. bytes PyUnicode_EncodeMBCS(Py_UNICODE *s, Py_ssize_t size, char *errors) # Encode a Unicode objects using MBCS and return the result as # Python string object. Error handling is ``strict''. Return NULL # if an exception was raised by the codec. bytes PyUnicode_AsMBCSString(object o) # Encode the Unicode object using the specified code page and return # a Python bytes object. Return NULL if an exception was raised by the # codec. Use CP_ACP code page to get the MBCS encoder. # # New in version 3.3. bytes PyUnicode_EncodeCodePage(int code_page, object unicode, const char *errors) # Py_UCS4 helpers (new in CPython 3.3) # These utility functions work on strings of Py_UCS4 characters and # otherwise behave like the C standard library functions with the same name. size_t Py_UCS4_strlen(const Py_UCS4 *u) Py_UCS4* Py_UCS4_strcpy(Py_UCS4 *s1, const Py_UCS4 *s2) Py_UCS4* Py_UCS4_strncpy(Py_UCS4 *s1, const Py_UCS4 *s2, size_t n) Py_UCS4* Py_UCS4_strcat(Py_UCS4 *s1, const Py_UCS4 *s2) int Py_UCS4_strcmp(const Py_UCS4 *s1, const Py_UCS4 *s2) int Py_UCS4_strncmp(const Py_UCS4 *s1, const Py_UCS4 *s2, size_t n) Py_UCS4* Py_UCS4_strchr(const Py_UCS4 *s, Py_UCS4 c) Py_UCS4* Py_UCS4_strrchr(const Py_UCS4 *s, Py_UCS4 c) Cython-0.26.1/Cython/Includes/cpython/buffer.pxd0000664000175000017500000001133713023021033022304 0ustar stefanstefan00000000000000# Please see the Python header files (object.h/abstract.h) for docs cdef extern from "Python.h": cdef enum: PyBUF_SIMPLE, PyBUF_WRITABLE, PyBUF_WRITEABLE, # backwards compatibility PyBUF_FORMAT, PyBUF_ND, PyBUF_STRIDES, PyBUF_C_CONTIGUOUS, PyBUF_F_CONTIGUOUS, PyBUF_ANY_CONTIGUOUS, PyBUF_INDIRECT, PyBUF_CONTIG, PyBUF_CONTIG_RO, PyBUF_STRIDED, PyBUF_STRIDED_RO, PyBUF_RECORDS, PyBUF_RECORDS_RO, PyBUF_FULL, PyBUF_FULL_RO, PyBUF_READ, PyBUF_WRITE, PyBUF_SHADOW bint PyObject_CheckBuffer(object obj) # Return 1 if obj supports the buffer interface otherwise 0. int PyObject_GetBuffer(object obj, Py_buffer *view, int flags) except -1 # Export obj into a Py_buffer, view. These arguments must never be # NULL. The flags argument is a bit field indicating what kind of # buffer the caller is prepared to deal with and therefore what # kind of buffer the exporter is allowed to return. The buffer # interface allows for complicated memory sharing possibilities, # but some caller may not be able to handle all the complexity but # may want to see if the exporter will let them take a simpler # view to its memory. # Some exporters may not be able to share memory in every possible # way and may need to raise errors to signal to some consumers # that something is just not possible. These errors should be a # BufferError unless there is another error that is actually # causing the problem. The exporter can use flags information to # simplify how much of the Py_buffer structure is filled in with # non-default values and/or raise an error if the object can’t # support a simpler view of its memory. # 0 is returned on success and -1 on error. void PyBuffer_Release(Py_buffer *view) # Release the buffer view. This should be called when the buffer # is no longer being used as it may free memory from it. void* PyBuffer_GetPointer(Py_buffer *view, Py_ssize_t *indices) # ?? 
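    # --- Illustrative usage sketch (not part of the CPython docs above) ---
    # A minimal example of acquiring and releasing a buffer from a .pyx
    # module; the helper name `sum_bytes` and its behaviour are assumptions
    # made up for this sketch, not part of any API.
    #
    #   from cpython.buffer cimport PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE
    #
    #   def sum_bytes(obj):
    #       """Sum the byte values of any object exporting a simple buffer."""
    #       cdef Py_buffer view
    #       cdef Py_ssize_t i
    #       cdef unsigned long long total = 0
    #       PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE)   # raises on failure
    #       try:
    #           for i in range(view.len):
    #               total += (<unsigned char*>view.buf)[i]
    #       finally:
    #           PyBuffer_Release(&view)                    # always release the view
    #       return total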
Py_ssize_t PyBuffer_SizeFromFormat(char *) # actually const char # Return the implied ~Py_buffer.itemsize from the struct-style # ~Py_buffer.format int PyBuffer_ToContiguous(void *buf, Py_buffer *view, Py_ssize_t len, char fort) # ?? int PyBuffer_FromContiguous(Py_buffer *view, void *buf, Py_ssize_t len, char fort) # ?? int PyObject_CopyToObject(object obj, void *buf, Py_ssize_t len, char fortran) except -1 # Copy len bytes of data pointed to by the contiguous chunk of # memory pointed to by buf into the buffer exported by obj. The # buffer must of course be writable. Return 0 on success and # return -1 and raise an error on failure. If the object does not # have a writable buffer, then an error is raised. If fortran is # 'F', then if the object is multi-dimensional, then the data will # be copied into the array in Fortran-style (first dimension # varies the fastest). If fortran is 'C', then the data will be # copied into the array in C-style (last dimension varies the # fastest). If fortran is 'A', then it does not matter and the # copy will be made in whatever way is more efficient. int PyObject_CopyData(object dest, object src) except -1 # Copy the data from the src buffer to the buffer of destination bint PyBuffer_IsContiguous(Py_buffer *view, char fort) # Return 1 if the memory defined by the view is C-style (fortran # is 'C') or Fortran-style (fortran is 'F') contiguous or either # one (fortran is 'A'). Return 0 otherwise. void PyBuffer_FillContiguousStrides(int ndims, Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t itemsize, char fort) # Fill the strides array with byte-strides of a contiguous # (Fortran-style if fort is 'F' or C-style otherwise) array of the # given shape with the given number of bytes per element. int PyBuffer_FillInfo(Py_buffer *view, object exporter, void *buf, Py_ssize_t len, int readonly, int flags) except -1 # Fill in a buffer-info structure, view, correctly for an exporter # that can only share a contiguous chunk of memory of "unsigned # bytes" of the given length. Return 0 on success and -1 (with # raising an error) on error. # DEPRECATED HERE: do not cimport from here, cimport from cpython.object instead object PyObject_Format(object obj, object format_spec) # Takes an arbitrary object and returns the result of calling # obj.__format__(format_spec). Cython-0.26.1/Cython/Includes/cpython/long.pxd0000664000175000017500000001561413143605603022012 0ustar stefanstefan00000000000000 cdef extern from "Python.h": ctypedef long long PY_LONG_LONG ctypedef unsigned long long uPY_LONG_LONG "unsigned PY_LONG_LONG" ############################################################################ # 7.2.3 Long Integer Objects ############################################################################ # PyLongObject # # This subtype of PyObject represents a Python long integer object. # PyTypeObject PyLong_Type # # This instance of PyTypeObject represents the Python long integer # type. This is the same object as long and types.LongType. bint PyLong_Check(object p) # Return true if its argument is a PyLongObject or a subtype of PyLongObject. bint PyLong_CheckExact(object p) # Return true if its argument is a PyLongObject, but not a subtype of PyLongObject. object PyLong_FromLong(long v) # Return value: New reference. # Return a new PyLongObject object from v, or NULL on failure. object PyLong_FromUnsignedLong(unsigned long v) # Return value: New reference. # Return a new PyLongObject object from a C unsigned long, or NULL on failure.
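    # --- Illustrative usage sketch (not part of the CPython docs above) ---
    # Converting between C integers and Python integers from a .pyx module;
    # the function name `roundtrip_long` is hypothetical.
    #
    #   from cpython.long cimport PyLong_FromLong, PyLong_AsLong
    #
    #   def roundtrip_long(long value):
    #       obj = PyLong_FromLong(value)           # C long -> Python integer
    #       cdef long back = PyLong_AsLong(obj)    # raises OverflowError if it cannot fit
    #       return back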
object PyLong_FromSsize_t(Py_ssize_t v) # Return value: New reference. # Return a new PyLongObject object from a C Py_ssize_t, or NULL on failure.) object PyLong_FromSize_t(size_t v) # Return value: New reference. # Return a new PyLongObject object from a C size_t, or NULL on failure. object PyLong_FromLongLong(PY_LONG_LONG v) # Return value: New reference. # Return a new PyLongObject object from a C long long, or NULL on failure. object PyLong_FromUnsignedLongLong(uPY_LONG_LONG v) # Return value: New reference. # Return a new PyLongObject object from a C unsigned long long, or NULL on failure. object PyLong_FromDouble(double v) # Return value: New reference. # Return a new PyLongObject object from the integer part of v, or NULL on failure. object PyLong_FromString(char *str, char **pend, int base) # Return value: New reference. # Return a new PyLongObject based on the string value in str, # which is interpreted according to the radix in base. If pend is # non-NULL, *pend will point to the first character in str which # follows the representation of the number. If base is 0, the # radix will be determined based on the leading characters of str: # if str starts with '0x' or '0X', radix 16 will be used; if str # starts with '0', radix 8 will be used; otherwise radix 10 will # be used. If base is not 0, it must be between 2 and 36, # inclusive. Leading spaces are ignored. If there are no digits, # ValueError will be raised. object PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) # Return value: New reference. # Convert a sequence of Unicode digits to a Python long integer # value. The first parameter, u, points to the first character of # the Unicode string, length gives the number of characters, and # base is the radix for the conversion. The radix must be in the # range [2, 36]; if it is out of range, ValueError will be # raised. # object PyLong_FromUnicodeObject(object u, int base) # Convert a sequence of Unicode digits in the string u to a Python integer # value. The Unicode string is first encoded to a byte string using # PyUnicode_EncodeDecimal() and then converted using PyLong_FromString(). # New in version 3.3. object PyLong_FromVoidPtr(void *p) # Return value: New reference. # Create a Python integer or long integer from the pointer p. The # pointer value can be retrieved from the resulting value using # PyLong_AsVoidPtr(). If the integer is larger than LONG_MAX, a # positive long integer is returned. long PyLong_AsLong(object pylong) except? -1 # Return a C long representation of the contents of pylong. If # pylong is greater than LONG_MAX, an OverflowError is raised. # long PyLong_AsLongAndOverflow(object pylong, int *overflow) except? -1 # Return a C long representation of the contents of pylong. If pylong is # greater than LONG_MAX or less than LONG_MIN, set *overflow to 1 or -1, # respectively, and return -1; otherwise, set *overflow to 0. If any other # exception occurs (for example a TypeError or MemoryError), then -1 will # be returned and *overflow will be 0. # New in version 2.7. # PY_LONG_LONG PyLong_AsLongLongAndOverflow(object pylong, int *overflow) except? -1 # Return a C long long representation of the contents of pylong. If pylong # is greater than PY_LLONG_MAX or less than PY_LLONG_MIN, set *overflow to # 1 or -1, respectively, and return -1; otherwise, set *overflow to 0. If # any other exception occurs (for example a TypeError or MemoryError), then # -1 will be returned and *overflow will be 0. # New in version 2.7. 
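    # --- Illustrative usage sketch (not part of the CPython docs above) ---
    # Parsing a C byte string into a Python integer with PyLong_FromString;
    # the function name `parse_int` is hypothetical.
    #
    #   from cpython.long cimport PyLong_FromString
    #
    #   def parse_int(bytes text, int base=0):
    #       # base 0 lets a leading '0x'/'0' prefix select the radix, as
    #       # described above; otherwise base must be between 2 and 36.
    #       cdef char* end = NULL
    #       return PyLong_FromString(text, &end, base)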
Py_ssize_t PyLong_AsSsize_t(object pylong) except? -1 # Return a C Py_ssize_t representation of the contents of pylong. If pylong # is greater than PY_SSIZE_T_MAX, an OverflowError is raised and -1 will be # returned. unsigned long PyLong_AsUnsignedLong(object pylong) except? -1 # Return a C unsigned long representation of the contents of # pylong. If pylong is greater than ULONG_MAX, an OverflowError is # raised. PY_LONG_LONG PyLong_AsLongLong(object pylong) except? -1 # Return a C long long from a Python long integer. If pylong # cannot be represented as a long long, an OverflowError will be # raised. uPY_LONG_LONG PyLong_AsUnsignedLongLong(object pylong) except? -1 #unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(object pylong) # Return a C unsigned long long from a Python long integer. If # pylong cannot be represented as an unsigned long long, an # OverflowError will be raised if the value is positive, or a # TypeError will be raised if the value is negative. unsigned long PyLong_AsUnsignedLongMask(object io) except? -1 # Return a C unsigned long from a Python long integer, without # checking for overflow. uPY_LONG_LONG PyLong_AsUnsignedLongLongMask(object io) except? -1 #unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(object io) # Return a C unsigned long long from a Python long integer, # without checking for overflow. double PyLong_AsDouble(object pylong) except? -1.0 # Return a C double representation of the contents of pylong. If # pylong cannot be approximately represented as a double, an # OverflowError exception is raised and -1.0 will be returned. void* PyLong_AsVoidPtr(object pylong) except? NULL # Convert a Python integer or long integer pylong to a C void # pointer. If pylong cannot be converted, an OverflowError will be # raised. This is only assured to produce a usable void pointer # for values created with PyLong_FromVoidPtr(). For values outside # 0..LONG_MAX, both signed and unsigned integers are acccepted. Cython-0.26.1/Cython/Includes/cpython/string.pxd0000664000175000017500000002333012542002467022354 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ctypedef struct va_list ############################################################################ # 7.3.1 String Objects ############################################################################ # These functions raise TypeError when expecting a string # parameter and are called with a non-string parameter. # PyStringObject # This subtype of PyObject represents a Python string object. # PyTypeObject PyString_Type # This instance of PyTypeObject represents the Python string type; # it is the same object as str and types.StringType in the Python # layer. bint PyString_Check(object o) # Return true if the object o is a string object or an instance of # a subtype of the string type. bint PyString_CheckExact(object o) # Return true if the object o is a string object, but not an instance of a subtype of the string type. object PyString_FromString(char *v) # Return value: New reference. # Return a new string object with the value v on success, and NULL # on failure. The parameter v must not be NULL; it will not be # checked. object PyString_FromStringAndSize(char *v, Py_ssize_t len) # Return value: New reference. # Return a new string object with the value v and length len on # success, and NULL on failure. If v is NULL, the contents of the # string are uninitialized. object PyString_FromFormat(char *format, ...) # Return value: New reference. 
# Take a C printf()-style format string and a variable number of # arguments, calculate the size of the resulting Python string and # return a string with the values formatted into it. The variable # arguments must be C types and must correspond exactly to the # format characters in the format string. The following format # characters are allowed: # Format Characters Type Comment # %% n/a The literal % character. # %c int A single character, represented as an C int. # %d int Exactly equivalent to printf("%d"). # %u unsigned int Exactly equivalent to printf("%u"). # %ld long Exactly equivalent to printf("%ld"). # %lu unsigned long Exactly equivalent to printf("%lu"). # %zd Py_ssize_t Exactly equivalent to printf("%zd"). # %zu size_t Exactly equivalent to printf("%zu"). # %i int Exactly equivalent to printf("%i"). # %x int Exactly equivalent to printf("%x"). # %s char* A null-terminated C character array. # %p void* The hex representation of a C pointer. # Mostly equivalent to printf("%p") except that it is guaranteed to # start with the literal 0x regardless of what the platform's printf # yields. # An unrecognized format character causes all the rest of the # format string to be copied as-is to the result string, and any # extra arguments discarded. object PyString_FromFormatV(char *format, va_list vargs) # Return value: New reference. # Identical to PyString_FromFormat() except that it takes exactly two arguments. Py_ssize_t PyString_Size(object string) except -1 # Return the length of the string in string object string. Py_ssize_t PyString_GET_SIZE(object string) # Macro form of PyString_Size() but without error checking. char* PyString_AsString(object string) except NULL # Return a NUL-terminated representation of the contents of # string. The pointer refers to the internal buffer of string, not # a copy. The data must not be modified in any way, unless the # string was just created using PyString_FromStringAndSize(NULL, # size). It must not be deallocated. If string is a Unicode # object, this function computes the default encoding of string # and operates on that. If string is not a string object at all, # PyString_AsString() returns NULL and raises TypeError. char* PyString_AS_STRING(object string) # Macro form of PyString_AsString() but without error # checking. Only string objects are supported; no Unicode objects # should be passed. int PyString_AsStringAndSize(object obj, char **buffer, Py_ssize_t *length) except -1 # Return a NULL-terminated representation of the contents of the # object obj through the output variables buffer and length. # # The function accepts both string and Unicode objects as # input. For Unicode objects it returns the default encoded # version of the object. If length is NULL, the resulting buffer # may not contain NUL characters; if it does, the function returns # -1 and a TypeError is raised. # The buffer refers to an internal string buffer of obj, not a # copy. The data must not be modified in any way, unless the # string was just created using PyString_FromStringAndSize(NULL, # size). It must not be deallocated. If string is a Unicode # object, this function computes the default encoding of string # and operates on that. If string is not a string object at all, # PyString_AsStringAndSize() returns -1 and raises TypeError. void PyString_Concat(PyObject **string, object newpart) # Create a new string object in *string containing the contents of # newpart appended to string; the caller will own the new # reference. 
The reference to the old value of string will be # stolen. If the new string cannot be created, the old reference # to string will still be discarded and the value of *string will # be set to NULL; the appropriate exception will be set. void PyString_ConcatAndDel(PyObject **string, object newpart) # Create a new string object in *string containing the contents of # newpart appended to string. This version decrements the # reference count of newpart. int _PyString_Resize(PyObject **string, Py_ssize_t newsize) except -1 # A way to resize a string object even though it is # ``immutable''. Only use this to build up a brand new string # object; don't use this if the string may already be known in # other parts of the code. It is an error to call this function if # the refcount on the input string object is not one. Pass the # address of an existing string object as an lvalue (it may be # written into), and the new size desired. On success, *string # holds the resized string object and 0 is returned; the address # in *string may differ from its input value. If the reallocation # fails, the original string object at *string is deallocated, # *string is set to NULL, a memory exception is set, and -1 is # returned. object PyString_Format(object format, object args) # Return value: New reference. Return a new string object from # format and args. Analogous to format % args. The args argument # must be a tuple. void PyString_InternInPlace(PyObject **string) # Intern the argument *string in place. The argument must be the # address of a pointer variable pointing to a Python string # object. If there is an existing interned string that is the same # as *string, it sets *string to it (decrementing the reference # count of the old string object and incrementing the reference # count of the interned string object), otherwise it leaves # *string alone and interns it (incrementing its reference # count). (Clarification: even though there is a lot of talk about # reference counts, think of this function as # reference-count-neutral; you own the object after the call if # and only if you owned it before the call.) object PyString_InternFromString(char *v) # Return value: New reference. # A combination of PyString_FromString() and # PyString_InternInPlace(), returning either a new string object # that has been interned, or a new (``owned'') reference to an # earlier interned string object with the same value. object PyString_Decode(char *s, Py_ssize_t size, char *encoding, char *errors) # Return value: New reference. # Create an object by decoding size bytes of the encoded buffer s # using the codec registered for encoding. encoding and errors # have the same meaning as the parameters of the same name in the # unicode() built-in function. The codec to be used is looked up # using the Python codec registry. Return NULL if an exception was # raised by the codec. object PyString_AsDecodedObject(object str, char *encoding, char *errors) # Return value: New reference. # Decode a string object by passing it to the codec registered for # encoding and return the result as Python object. encoding and # errors have the same meaning as the parameters of the same name # in the string encode() method. The codec to be used is looked up # using the Python codec registry. Return NULL if an exception was # raised by the codec. object PyString_Encode(char *s, Py_ssize_t size, char *encoding, char *errors) # Return value: New reference. 
# Encode the char buffer of the given size by passing it to the # codec registered for encoding and return a Python # object. encoding and errors have the same meaning as the # parameters of the same name in the string encode() method. The # codec to be used is looked up using the Python codec # registry. Return NULL if an exception was raised by the codec. object PyString_AsEncodedObject(object str, char *encoding, char *errors) # Return value: New reference. # Encode a string object using the codec registered for encoding # and return the result as Python object. encoding and errors have # the same meaning as the parameters of the same name in the # string encode() method. The codec to be used is looked up using # the Python codec registry. Return NULL if an exception was # raised by the codec. Cython-0.26.1/Cython/Includes/cpython/iterator.pxd0000664000175000017500000000244712542002467022705 0ustar stefanstefan00000000000000cdef extern from "Python.h": ############################################################################ # 6.5 Iterator Protocol ############################################################################ bint PyIter_Check(object o) # Return true if the object o supports the iterator protocol. object PyIter_Next(object o) # Return value: New reference. # Return the next value from the iteration o. If the object is an # iterator, this retrieves the next value from the iteration, and # returns NULL with no exception set if there are no remaining # items. If the object is not an iterator, TypeError is raised, or # if there is an error in retrieving the item, returns NULL and # passes along the exception. # To write a loop which iterates over an iterator, the C code should look something like this: # PyObject *iterator = PyObject_GetIter(obj); # PyObject *item; # if (iterator == NULL) { # /* propagate error */ # } # while (item = PyIter_Next(iterator)) { # /* do something with item */ # ... # /* release reference when done */ # Py_DECREF(item); # } # Py_DECREF(iterator); # if (PyErr_Occurred()) { # /* propagate error */ # } # else { # /* continue doing useful work */ # } Cython-0.26.1/Cython/Includes/cpython/weakref.pxd0000664000175000017500000000366412542002467022502 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": bint PyWeakref_Check(object ob) # Return true if ob is either a reference or proxy object. bint PyWeakref_CheckRef(object ob) # Return true if ob is a reference object. bint PyWeakref_CheckProxy(ob) # Return true if *ob* is a proxy object. object PyWeakref_NewRef(object ob, object callback) # Return a weak reference object for the object ob. This will # always return a new reference, but is not guaranteed to create a # new object; an existing reference object may be returned. The # second parameter, callback, can be a callable object that # receives notification when ob is garbage collected; it should # accept a single parameter, which will be the weak reference # object itself. callback may also be None or NULL. If ob is not # a weakly-referencable object, or if callback is not callable, # None, or NULL, this will return NULL and raise TypeError. object PyWeakref_NewProxy(object ob, object callback) # Return a weak reference proxy object for the object ob. This # will always return a new reference, but is not guaranteed to # create a new object; an existing proxy object may be returned. 
# The second parameter, callback, can be a callable object that # receives notification when ob is garbage collected; it should # accept a single parameter, which will be the weak reference # object itself. callback may also be None or NULL. If ob is not # a weakly-referencable object, or if callback is not callable, # None, or NULL, this will return NULL and raise TypeError. PyObject* PyWeakref_GetObject(object ref) # Return the referenced object from a weak reference, ref. If the # referent is no longer live, returns None. PyObject* PyWeakref_GET_OBJECT(object ref) # Similar to PyWeakref_GetObject, but implemented as a macro that # does no error checking. Cython-0.26.1/Cython/Includes/cpython/oldbuffer.pxd0000664000175000017500000000554412542002467023025 0ustar stefanstefan00000000000000# Legacy Python 2 buffer interface. # # These functions are no longer available in Python 3, use the new # buffer interface instead. cdef extern from "Python.h": cdef enum _: Py_END_OF_BUFFER # This constant may be passed as the size parameter to # PyBuffer_FromObject() or PyBuffer_FromReadWriteObject(). It # indicates that the new PyBufferObject should refer to base object # from the specified offset to the end of its exported # buffer. Using this enables the caller to avoid querying the base # object for its length. bint PyBuffer_Check(object p) # Return true if the argument has type PyBuffer_Type. object PyBuffer_FromObject(object base, Py_ssize_t offset, Py_ssize_t size) # Return value: New reference. # # Return a new read-only buffer object. This raises TypeError if # base doesn't support the read-only buffer protocol or doesn't # provide exactly one buffer segment, or it raises ValueError if # offset is less than zero. The buffer will hold a reference to the # base object, and the buffer's contents will refer to the base # object's buffer interface, starting as position offset and # extending for size bytes. If size is Py_END_OF_BUFFER, then the # new buffer's contents extend to the length of the base object's # exported buffer data. object PyBuffer_FromReadWriteObject(object base, Py_ssize_t offset, Py_ssize_t size) # Return value: New reference. # # Return a new writable buffer object. Parameters and exceptions # are similar to those for PyBuffer_FromObject(). If the base # object does not export the writeable buffer protocol, then # TypeError is raised. object PyBuffer_FromMemory(void *ptr, Py_ssize_t size) # Return value: New reference. # # Return a new read-only buffer object that reads from a specified # location in memory, with a specified size. The caller is # responsible for ensuring that the memory buffer, passed in as # ptr, is not deallocated while the returned buffer object # exists. Raises ValueError if size is less than zero. Note that # Py_END_OF_BUFFER may not be passed for the size parameter; # ValueError will be raised in that case. object PyBuffer_FromReadWriteMemory(void *ptr, Py_ssize_t size) # Return value: New reference. # # Similar to PyBuffer_FromMemory(), but the returned buffer is # writable. object PyBuffer_New(Py_ssize_t size) # Return value: New reference. # # Return a new writable buffer object that maintains its own memory # buffer of size bytes. ValueError is returned if size is not zero # or positive. Note that the memory buffer (as returned by # PyObject_AsWriteBuffer()) is not specifically aligned. 
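    # --- Illustrative usage sketch (not part of the CPython docs above) ---
    # Exposing a static C array as a read-only buffer object.  Python 2
    # only, since this legacy API was removed in Python 3; the names
    # `_table` and `expose_table` are made up for this sketch.
    #
    #   from cpython.oldbuffer cimport PyBuffer_FromMemory
    #
    #   cdef char _table[4]
    #   for _i in range(4):
    #       _table[_i] = _i
    #
    #   def expose_table():
    #       # The caller of PyBuffer_FromMemory must keep the underlying
    #       # memory alive; a module-level static array does so trivially.
    #       return PyBuffer_FromMemory(_table, 4)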
Cython-0.26.1/Cython/Includes/cpython/slice.pxd0000664000175000017500000000410112542002467022140 0ustar stefanstefan00000000000000cdef extern from "Python.h": # PyTypeObject PySlice_Type # # The type object for slice objects. This is the same as slice and types.SliceType bint PySlice_Check(object ob) # # Return true if ob is a slice object; ob must not be NULL. slice PySlice_New(object start, object stop, object step) # # Return a new slice object with the given values. The start, stop, and step # parameters are used as the values of the slice object attributes of the same # names. Any of the values may be NULL, in which case the None will be used # for the corresponding attribute. Return NULL if the new object could not be # allocated. int PySlice_GetIndices(object slice, Py_ssize_t length, Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step) except? -1 # # Retrieve the start, stop and step indices from the slice object slice, # assuming a sequence of length length. Treats indices greater than length # as errors. # # Returns 0 on success and -1 on error with no exception set (unless one # of the indices was not None and failed to be converted to an integer, # in which case -1 is returned with an exception set). # # You probably do not want to use this function. # # Changed in version 3.2: The parameter type for the slice parameter was # PySliceObject* before. int PySlice_GetIndicesEx(object slice, Py_ssize_t length, Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step, Py_ssize_t *slicelength) except -1 # # Usable replacement for PySlice_GetIndices(). Retrieve the start, stop, and step # indices from the slice object slice assuming a sequence of length length, and # store the length of the slice in slicelength. Out of bounds indices are clipped # in a manner consistent with the handling of normal slices. # # Returns 0 on success and -1 on error with exception set. # # Changed in version 3.2: The parameter type for the slice parameter was # PySliceObject* before. Cython-0.26.1/Cython/Includes/cpython/function.pxd0000664000175000017500000000515712542002467022702 0ustar stefanstefan00000000000000from .object cimport PyObject cdef extern from "Python.h": ############################################################################ # 7.5.3 Function Objects ############################################################################ # There are a few functions specific to Python functions. # PyFunctionObject # # The C structure used for functions. # PyTypeObject PyFunction_Type # # This is an instance of PyTypeObject and represents the Python # function type. It is exposed to Python programmers as # types.FunctionType. bint PyFunction_Check(object o) # Return true if o is a function object (has type # PyFunction_Type). The parameter must not be NULL. object PyFunction_New(object code, object globals) # Return value: New reference. # Return a new function object associated with the code object # code. globals must be a dictionary with the global variables # accessible to the function. # The function's docstring, name and __module__ are retrieved from # the code object, the argument defaults and closure are set to # NULL. PyObject* PyFunction_GetCode(object op) except? NULL # Return value: Borrowed reference. # Return the code object associated with the function object op. PyObject* PyFunction_GetGlobals(object op) except? NULL # Return value: Borrowed reference. # Return the globals dictionary associated with the function object op. PyObject* PyFunction_GetModule(object op) except? 
NULL # Return value: Borrowed reference. # Return the __module__ attribute of the function object op. This # is normally a string containing the module name, but can be set # to any other object by Python code. PyObject* PyFunction_GetDefaults(object op) except? NULL # Return value: Borrowed reference. # Return the argument default values of the function object # op. This can be a tuple of arguments or NULL. int PyFunction_SetDefaults(object op, object defaults) except -1 # Set the argument default values for the function object # op. defaults must be Py_None or a tuple. # Raises SystemError and returns -1 on failure. PyObject* PyFunction_GetClosure(object op) except? NULL # Return value: Borrowed reference. # Return the closure associated with the function object op. This # can be NULL or a tuple of cell objects. int PyFunction_SetClosure(object op, object closure) except -1 # Set the closure associated with the function object op. closure # must be Py_None or a tuple of cell objects. # Raises SystemError and returns -1 on failure. Cython-0.26.1/Cython/Includes/openmp.pxd0000664000175000017500000000326113023021033020642 0ustar stefanstefan00000000000000cdef extern from "": ctypedef struct omp_lock_t: pass ctypedef struct omp_nest_lock_t: pass ctypedef enum omp_sched_t: omp_sched_static = 1, omp_sched_dynamic = 2, omp_sched_guided = 3, omp_sched_auto = 4 extern void omp_set_num_threads(int) nogil extern int omp_get_num_threads() nogil extern int omp_get_max_threads() nogil extern int omp_get_thread_num() nogil extern int omp_get_num_procs() nogil extern int omp_in_parallel() nogil extern void omp_set_dynamic(int) nogil extern int omp_get_dynamic() nogil extern void omp_set_nested(int) nogil extern int omp_get_nested() nogil extern void omp_init_lock(omp_lock_t *) nogil extern void omp_destroy_lock(omp_lock_t *) nogil extern void omp_set_lock(omp_lock_t *) nogil extern void omp_unset_lock(omp_lock_t *) nogil extern int omp_test_lock(omp_lock_t *) nogil extern void omp_init_nest_lock(omp_nest_lock_t *) nogil extern void omp_destroy_nest_lock(omp_nest_lock_t *) nogil extern void omp_set_nest_lock(omp_nest_lock_t *) nogil extern void omp_unset_nest_lock(omp_nest_lock_t *) nogil extern int omp_test_nest_lock(omp_nest_lock_t *) nogil extern double omp_get_wtime() nogil extern double omp_get_wtick() nogil void omp_set_schedule(omp_sched_t, int) nogil void omp_get_schedule(omp_sched_t *, int *) nogil int omp_get_thread_limit() nogil void omp_set_max_active_levels(int) nogil int omp_get_max_active_levels() nogil int omp_get_level() nogil int omp_get_ancestor_thread_num(int) nogil int omp_get_team_size(int) nogil int omp_get_active_level() nogil Cython-0.26.1/Cython/Includes/numpy/0000775000175000017500000000000013151203436020010 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/numpy/__init__.pxd0000664000175000017500000010640613143605603022276 0ustar stefanstefan00000000000000# NumPy static imports for Cython # # If any of the PyArray_* functions are called, import_array must be # called first. # # This also defines backwards-compatibility buffer acquisition # code for use in Python 2.x (or Python <= 2.5 when NumPy starts # implementing PEP-3118 directly). # # Because of laziness, the format string of the buffer is statically # allocated. Increase the size if this is not enough, or submit a # patch to do this properly. 
# # Author: Dag Sverre Seljebotn # DEF _buffer_format_string_len = 255 cimport cpython.buffer as pybuf from cpython.ref cimport Py_INCREF, Py_XDECREF from cpython.object cimport PyObject from cpython.type cimport type cimport libc.stdlib as stdlib cimport libc.stdio as stdio cdef extern from "Python.h": ctypedef int Py_intptr_t cdef extern from "numpy/arrayobject.h": ctypedef Py_intptr_t npy_intp ctypedef size_t npy_uintp cdef enum NPY_TYPES: NPY_BOOL NPY_BYTE NPY_UBYTE NPY_SHORT NPY_USHORT NPY_INT NPY_UINT NPY_LONG NPY_ULONG NPY_LONGLONG NPY_ULONGLONG NPY_FLOAT NPY_DOUBLE NPY_LONGDOUBLE NPY_CFLOAT NPY_CDOUBLE NPY_CLONGDOUBLE NPY_OBJECT NPY_STRING NPY_UNICODE NPY_VOID NPY_NTYPES NPY_NOTYPE NPY_INT8 NPY_INT16 NPY_INT32 NPY_INT64 NPY_INT128 NPY_INT256 NPY_UINT8 NPY_UINT16 NPY_UINT32 NPY_UINT64 NPY_UINT128 NPY_UINT256 NPY_FLOAT16 NPY_FLOAT32 NPY_FLOAT64 NPY_FLOAT80 NPY_FLOAT96 NPY_FLOAT128 NPY_FLOAT256 NPY_COMPLEX32 NPY_COMPLEX64 NPY_COMPLEX128 NPY_COMPLEX160 NPY_COMPLEX192 NPY_COMPLEX256 NPY_COMPLEX512 NPY_INTP ctypedef enum NPY_ORDER: NPY_ANYORDER NPY_CORDER NPY_FORTRANORDER ctypedef enum NPY_CLIPMODE: NPY_CLIP NPY_WRAP NPY_RAISE ctypedef enum NPY_SCALARKIND: NPY_NOSCALAR, NPY_BOOL_SCALAR, NPY_INTPOS_SCALAR, NPY_INTNEG_SCALAR, NPY_FLOAT_SCALAR, NPY_COMPLEX_SCALAR, NPY_OBJECT_SCALAR ctypedef enum NPY_SORTKIND: NPY_QUICKSORT NPY_HEAPSORT NPY_MERGESORT ctypedef enum NPY_SEARCHSIDE: NPY_SEARCHLEFT NPY_SEARCHRIGHT enum: NPY_C_CONTIGUOUS NPY_F_CONTIGUOUS NPY_CONTIGUOUS NPY_FORTRAN NPY_OWNDATA NPY_FORCECAST NPY_ENSURECOPY NPY_ENSUREARRAY NPY_ELEMENTSTRIDES NPY_ALIGNED NPY_NOTSWAPPED NPY_WRITEABLE NPY_UPDATEIFCOPY NPY_ARR_HAS_DESCR NPY_BEHAVED NPY_BEHAVED_NS NPY_CARRAY NPY_CARRAY_RO NPY_FARRAY NPY_FARRAY_RO NPY_DEFAULT NPY_IN_ARRAY NPY_OUT_ARRAY NPY_INOUT_ARRAY NPY_IN_FARRAY NPY_OUT_FARRAY NPY_INOUT_FARRAY NPY_UPDATE_ALL cdef enum: NPY_MAXDIMS npy_intp NPY_MAX_ELSIZE ctypedef void (*PyArray_VectorUnaryFunc)(void *, void *, npy_intp, void *, void *) ctypedef class numpy.dtype [object PyArray_Descr]: # Use PyDataType_* macros when possible, however there are no macros # for accessing some of the fields, so some are defined. cdef char kind cdef char type cdef char byteorder cdef char flags cdef int type_num cdef int itemsize "elsize" cdef int alignment cdef dict fields cdef tuple names ctypedef extern class numpy.flatiter [object PyArrayIterObject]: # Use through macros pass ctypedef extern class numpy.broadcast [object PyArrayMultiIterObject]: # Use through macros pass ctypedef struct PyArrayObject: # For use in situations where ndarray can't replace PyArrayObject*, # like PyArrayObject**. pass ctypedef class numpy.ndarray [object PyArrayObject]: cdef __cythonbufferdefaults__ = {"mode": "strided"} cdef: # Only taking a few of the most commonly used and stable fields. # One should use PyArray_* macros instead to access the C fields. char *data int ndim "nd" npy_intp *shape "dimensions" npy_intp *strides dtype descr PyObject* base # Note: This syntax (function definition in pxd files) is an # experimental exception made for __getbuffer__ and __releasebuffer__ # -- the details of this may change. def __getbuffer__(ndarray self, Py_buffer* info, int flags): # This implementation of getbuffer is geared towards Cython # requirements, and does not yet fullfill the PEP. 
# In particular strided access is always provided regardless # of flags if info == NULL: return cdef int copy_shape, i, ndim cdef int endian_detector = 1 cdef bint little_endian = ((&endian_detector)[0] != 0) ndim = PyArray_NDIM(self) if sizeof(npy_intp) != sizeof(Py_ssize_t): copy_shape = 1 else: copy_shape = 0 if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): raise ValueError(u"ndarray is not C contiguous") if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): raise ValueError(u"ndarray is not Fortran contiguous") info.buf = PyArray_DATA(self) info.ndim = ndim if copy_shape: # Allocate new buffer for strides and shape info. # This is allocated as one block, strides first. info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) info.shape = info.strides + ndim for i in range(ndim): info.strides[i] = PyArray_STRIDES(self)[i] info.shape[i] = PyArray_DIMS(self)[i] else: info.strides = PyArray_STRIDES(self) info.shape = PyArray_DIMS(self) info.suboffsets = NULL info.itemsize = PyArray_ITEMSIZE(self) info.readonly = not PyArray_ISWRITEABLE(self) cdef int t cdef char* f = NULL cdef dtype descr = self.descr cdef int offset cdef bint hasfields = PyDataType_HASFIELDS(descr) if not hasfields and not copy_shape: # do not call releasebuffer info.obj = None else: # need to call releasebuffer info.obj = self if not hasfields: t = descr.type_num if ((descr.byteorder == c'>' and little_endian) or (descr.byteorder == c'<' and not little_endian)): raise ValueError(u"Non-native byte order not supported") if t == NPY_BYTE: f = "b" elif t == NPY_UBYTE: f = "B" elif t == NPY_SHORT: f = "h" elif t == NPY_USHORT: f = "H" elif t == NPY_INT: f = "i" elif t == NPY_UINT: f = "I" elif t == NPY_LONG: f = "l" elif t == NPY_ULONG: f = "L" elif t == NPY_LONGLONG: f = "q" elif t == NPY_ULONGLONG: f = "Q" elif t == NPY_FLOAT: f = "f" elif t == NPY_DOUBLE: f = "d" elif t == NPY_LONGDOUBLE: f = "g" elif t == NPY_CFLOAT: f = "Zf" elif t == NPY_CDOUBLE: f = "Zd" elif t == NPY_CLONGDOUBLE: f = "Zg" elif t == NPY_OBJECT: f = "O" else: raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) info.format = f return else: info.format = stdlib.malloc(_buffer_format_string_len) info.format[0] = c'^' # Native data types, manual alignment offset = 0 f = _util_dtypestring(descr, info.format + 1, info.format + _buffer_format_string_len, &offset) f[0] = c'\0' # Terminate format string def __releasebuffer__(ndarray self, Py_buffer* info): if PyArray_HASFIELDS(self): stdlib.free(info.format) if sizeof(npy_intp) != sizeof(Py_ssize_t): stdlib.free(info.strides) # info.shape was stored after info.strides in the same block ctypedef unsigned char npy_bool ctypedef signed char npy_byte ctypedef signed short npy_short ctypedef signed int npy_int ctypedef signed long npy_long ctypedef signed long long npy_longlong ctypedef unsigned char npy_ubyte ctypedef unsigned short npy_ushort ctypedef unsigned int npy_uint ctypedef unsigned long npy_ulong ctypedef unsigned long long npy_ulonglong ctypedef float npy_float ctypedef double npy_double ctypedef long double npy_longdouble ctypedef signed char npy_int8 ctypedef signed short npy_int16 ctypedef signed int npy_int32 ctypedef signed long long npy_int64 ctypedef signed long long npy_int96 ctypedef signed long long npy_int128 ctypedef unsigned char npy_uint8 ctypedef unsigned short npy_uint16 ctypedef unsigned int npy_uint32 ctypedef unsigned long long npy_uint64 
ctypedef unsigned long long npy_uint96 ctypedef unsigned long long npy_uint128 ctypedef float npy_float32 ctypedef double npy_float64 ctypedef long double npy_float80 ctypedef long double npy_float96 ctypedef long double npy_float128 ctypedef struct npy_cfloat: double real double imag ctypedef struct npy_cdouble: double real double imag ctypedef struct npy_clongdouble: long double real long double imag ctypedef struct npy_complex64: float real float imag ctypedef struct npy_complex128: double real double imag ctypedef struct npy_complex160: long double real long double imag ctypedef struct npy_complex192: long double real long double imag ctypedef struct npy_complex256: long double real long double imag ctypedef struct PyArray_Dims: npy_intp *ptr int len int _import_array() except -1 # # Macros from ndarrayobject.h # bint PyArray_CHKFLAGS(ndarray m, int flags) bint PyArray_ISCONTIGUOUS(ndarray m) bint PyArray_ISWRITEABLE(ndarray m) bint PyArray_ISALIGNED(ndarray m) int PyArray_NDIM(ndarray) bint PyArray_ISONESEGMENT(ndarray) bint PyArray_ISFORTRAN(ndarray) int PyArray_FORTRANIF(ndarray) void* PyArray_DATA(ndarray) char* PyArray_BYTES(ndarray) npy_intp* PyArray_DIMS(ndarray) npy_intp* PyArray_STRIDES(ndarray) npy_intp PyArray_DIM(ndarray, size_t) npy_intp PyArray_STRIDE(ndarray, size_t) # object PyArray_BASE(ndarray) wrong refcount semantics # dtype PyArray_DESCR(ndarray) wrong refcount semantics int PyArray_FLAGS(ndarray) npy_intp PyArray_ITEMSIZE(ndarray) int PyArray_TYPE(ndarray arr) object PyArray_GETITEM(ndarray arr, void *itemptr) int PyArray_SETITEM(ndarray arr, void *itemptr, object obj) bint PyTypeNum_ISBOOL(int) bint PyTypeNum_ISUNSIGNED(int) bint PyTypeNum_ISSIGNED(int) bint PyTypeNum_ISINTEGER(int) bint PyTypeNum_ISFLOAT(int) bint PyTypeNum_ISNUMBER(int) bint PyTypeNum_ISSTRING(int) bint PyTypeNum_ISCOMPLEX(int) bint PyTypeNum_ISPYTHON(int) bint PyTypeNum_ISFLEXIBLE(int) bint PyTypeNum_ISUSERDEF(int) bint PyTypeNum_ISEXTENDED(int) bint PyTypeNum_ISOBJECT(int) bint PyDataType_ISBOOL(dtype) bint PyDataType_ISUNSIGNED(dtype) bint PyDataType_ISSIGNED(dtype) bint PyDataType_ISINTEGER(dtype) bint PyDataType_ISFLOAT(dtype) bint PyDataType_ISNUMBER(dtype) bint PyDataType_ISSTRING(dtype) bint PyDataType_ISCOMPLEX(dtype) bint PyDataType_ISPYTHON(dtype) bint PyDataType_ISFLEXIBLE(dtype) bint PyDataType_ISUSERDEF(dtype) bint PyDataType_ISEXTENDED(dtype) bint PyDataType_ISOBJECT(dtype) bint PyDataType_HASFIELDS(dtype) bint PyArray_ISBOOL(ndarray) bint PyArray_ISUNSIGNED(ndarray) bint PyArray_ISSIGNED(ndarray) bint PyArray_ISINTEGER(ndarray) bint PyArray_ISFLOAT(ndarray) bint PyArray_ISNUMBER(ndarray) bint PyArray_ISSTRING(ndarray) bint PyArray_ISCOMPLEX(ndarray) bint PyArray_ISPYTHON(ndarray) bint PyArray_ISFLEXIBLE(ndarray) bint PyArray_ISUSERDEF(ndarray) bint PyArray_ISEXTENDED(ndarray) bint PyArray_ISOBJECT(ndarray) bint PyArray_HASFIELDS(ndarray) bint PyArray_ISVARIABLE(ndarray) bint PyArray_SAFEALIGNEDCOPY(ndarray) bint PyArray_ISNBO(char) # works on ndarray.byteorder bint PyArray_IsNativeByteOrder(char) # works on ndarray.byteorder bint PyArray_ISNOTSWAPPED(ndarray) bint PyArray_ISBYTESWAPPED(ndarray) bint PyArray_FLAGSWAP(ndarray, int) bint PyArray_ISCARRAY(ndarray) bint PyArray_ISCARRAY_RO(ndarray) bint PyArray_ISFARRAY(ndarray) bint PyArray_ISFARRAY_RO(ndarray) bint PyArray_ISBEHAVED(ndarray) bint PyArray_ISBEHAVED_RO(ndarray) bint PyDataType_ISNOTSWAPPED(dtype) bint PyDataType_ISBYTESWAPPED(dtype) bint PyArray_DescrCheck(object) bint PyArray_Check(object) bint 
PyArray_CheckExact(object) # Cannot be supported due to out arg: # bint PyArray_HasArrayInterfaceType(object, dtype, object, object&) # bint PyArray_HasArrayInterface(op, out) bint PyArray_IsZeroDim(object) # Cannot be supported due to ## ## in macro: # bint PyArray_IsScalar(object, verbatim work) bint PyArray_CheckScalar(object) bint PyArray_IsPythonNumber(object) bint PyArray_IsPythonScalar(object) bint PyArray_IsAnyScalar(object) bint PyArray_CheckAnyScalar(object) ndarray PyArray_GETCONTIGUOUS(ndarray) bint PyArray_SAMESHAPE(ndarray, ndarray) npy_intp PyArray_SIZE(ndarray) npy_intp PyArray_NBYTES(ndarray) object PyArray_FROM_O(object) object PyArray_FROM_OF(object m, int flags) object PyArray_FROM_OT(object m, int type) object PyArray_FROM_OTF(object m, int type, int flags) object PyArray_FROMANY(object m, int type, int min, int max, int flags) object PyArray_ZEROS(int nd, npy_intp* dims, int type, int fortran) object PyArray_EMPTY(int nd, npy_intp* dims, int type, int fortran) void PyArray_FILLWBYTE(object, int val) npy_intp PyArray_REFCOUNT(object) object PyArray_ContiguousFromAny(op, int, int min_depth, int max_depth) unsigned char PyArray_EquivArrTypes(ndarray a1, ndarray a2) bint PyArray_EquivByteorders(int b1, int b2) object PyArray_SimpleNew(int nd, npy_intp* dims, int typenum) object PyArray_SimpleNewFromData(int nd, npy_intp* dims, int typenum, void* data) #object PyArray_SimpleNewFromDescr(int nd, npy_intp* dims, dtype descr) object PyArray_ToScalar(void* data, ndarray arr) void* PyArray_GETPTR1(ndarray m, npy_intp i) void* PyArray_GETPTR2(ndarray m, npy_intp i, npy_intp j) void* PyArray_GETPTR3(ndarray m, npy_intp i, npy_intp j, npy_intp k) void* PyArray_GETPTR4(ndarray m, npy_intp i, npy_intp j, npy_intp k, npy_intp l) void PyArray_XDECREF_ERR(ndarray) # Cannot be supported due to out arg # void PyArray_DESCR_REPLACE(descr) object PyArray_Copy(ndarray) object PyArray_FromObject(object op, int type, int min_depth, int max_depth) object PyArray_ContiguousFromObject(object op, int type, int min_depth, int max_depth) object PyArray_CopyFromObject(object op, int type, int min_depth, int max_depth) object PyArray_Cast(ndarray mp, int type_num) object PyArray_Take(ndarray ap, object items, int axis) object PyArray_Put(ndarray ap, object items, object values) void PyArray_ITER_RESET(flatiter it) nogil void PyArray_ITER_NEXT(flatiter it) nogil void PyArray_ITER_GOTO(flatiter it, npy_intp* destination) nogil void PyArray_ITER_GOTO1D(flatiter it, npy_intp ind) nogil void* PyArray_ITER_DATA(flatiter it) nogil bint PyArray_ITER_NOTDONE(flatiter it) nogil void PyArray_MultiIter_RESET(broadcast multi) nogil void PyArray_MultiIter_NEXT(broadcast multi) nogil void PyArray_MultiIter_GOTO(broadcast multi, npy_intp dest) nogil void PyArray_MultiIter_GOTO1D(broadcast multi, npy_intp ind) nogil void* PyArray_MultiIter_DATA(broadcast multi, npy_intp i) nogil void PyArray_MultiIter_NEXTi(broadcast multi, npy_intp i) nogil bint PyArray_MultiIter_NOTDONE(broadcast multi) nogil # Functions from __multiarray_api.h # Functions taking dtype and returning object/ndarray are disabled # for now as they steal dtype references. I'm conservative and disable # more than is probably needed until it can be checked further. 
int PyArray_SetNumericOps (object) object PyArray_GetNumericOps () int PyArray_INCREF (ndarray) int PyArray_XDECREF (ndarray) void PyArray_SetStringFunction (object, int) dtype PyArray_DescrFromType (int) object PyArray_TypeObjectFromType (int) char * PyArray_Zero (ndarray) char * PyArray_One (ndarray) #object PyArray_CastToType (ndarray, dtype, int) int PyArray_CastTo (ndarray, ndarray) int PyArray_CastAnyTo (ndarray, ndarray) int PyArray_CanCastSafely (int, int) npy_bool PyArray_CanCastTo (dtype, dtype) int PyArray_ObjectType (object, int) dtype PyArray_DescrFromObject (object, dtype) #ndarray* PyArray_ConvertToCommonType (object, int *) dtype PyArray_DescrFromScalar (object) dtype PyArray_DescrFromTypeObject (object) npy_intp PyArray_Size (object) #object PyArray_Scalar (void *, dtype, object) #object PyArray_FromScalar (object, dtype) void PyArray_ScalarAsCtype (object, void *) #int PyArray_CastScalarToCtype (object, void *, dtype) #int PyArray_CastScalarDirect (object, dtype, void *, int) object PyArray_ScalarFromObject (object) #PyArray_VectorUnaryFunc * PyArray_GetCastFunc (dtype, int) object PyArray_FromDims (int, int *, int) #object PyArray_FromDimsAndDataAndDescr (int, int *, dtype, char *) #object PyArray_FromAny (object, dtype, int, int, int, object) object PyArray_EnsureArray (object) object PyArray_EnsureAnyArray (object) #object PyArray_FromFile (stdio.FILE *, dtype, npy_intp, char *) #object PyArray_FromString (char *, npy_intp, dtype, npy_intp, char *) #object PyArray_FromBuffer (object, dtype, npy_intp, npy_intp) #object PyArray_FromIter (object, dtype, npy_intp) object PyArray_Return (ndarray) #object PyArray_GetField (ndarray, dtype, int) #int PyArray_SetField (ndarray, dtype, int, object) object PyArray_Byteswap (ndarray, npy_bool) object PyArray_Resize (ndarray, PyArray_Dims *, int, NPY_ORDER) int PyArray_MoveInto (ndarray, ndarray) int PyArray_CopyInto (ndarray, ndarray) int PyArray_CopyAnyInto (ndarray, ndarray) int PyArray_CopyObject (ndarray, object) object PyArray_NewCopy (ndarray, NPY_ORDER) object PyArray_ToList (ndarray) object PyArray_ToString (ndarray, NPY_ORDER) int PyArray_ToFile (ndarray, stdio.FILE *, char *, char *) int PyArray_Dump (object, object, int) object PyArray_Dumps (object, int) int PyArray_ValidType (int) void PyArray_UpdateFlags (ndarray, int) object PyArray_New (type, int, npy_intp *, int, npy_intp *, void *, int, int, object) #object PyArray_NewFromDescr (type, dtype, int, npy_intp *, npy_intp *, void *, int, object) #dtype PyArray_DescrNew (dtype) dtype PyArray_DescrNewFromType (int) double PyArray_GetPriority (object, double) object PyArray_IterNew (object) object PyArray_MultiIterNew (int, ...) 
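    # --- Illustrative usage sketch (not part of the NumPy headers above) ---
    # Creating a fresh 1-D float64 array through the C API from a .pyx
    # module; the function name `make_zeros` is hypothetical.
    #
    #   cimport numpy as cnp
    #
    #   cnp._import_array()          # required before any PyArray_* call
    #
    #   def make_zeros(int n):
    #       cdef cnp.npy_intp dims[1]
    #       dims[0] = n
    #       # 1-D, float64, C-ordered array filled with zeros
    #       return cnp.PyArray_ZEROS(1, dims, cnp.NPY_DOUBLE, 0)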
int PyArray_PyIntAsInt (object) npy_intp PyArray_PyIntAsIntp (object) int PyArray_Broadcast (broadcast) void PyArray_FillObjectArray (ndarray, object) int PyArray_FillWithScalar (ndarray, object) npy_bool PyArray_CheckStrides (int, int, npy_intp, npy_intp, npy_intp *, npy_intp *) dtype PyArray_DescrNewByteorder (dtype, char) object PyArray_IterAllButAxis (object, int *) #object PyArray_CheckFromAny (object, dtype, int, int, int, object) #object PyArray_FromArray (ndarray, dtype, int) object PyArray_FromInterface (object) object PyArray_FromStructInterface (object) #object PyArray_FromArrayAttr (object, dtype, object) #NPY_SCALARKIND PyArray_ScalarKind (int, ndarray*) int PyArray_CanCoerceScalar (int, int, NPY_SCALARKIND) object PyArray_NewFlagsObject (object) npy_bool PyArray_CanCastScalar (type, type) #int PyArray_CompareUCS4 (npy_ucs4 *, npy_ucs4 *, register size_t) int PyArray_RemoveSmallest (broadcast) int PyArray_ElementStrides (object) void PyArray_Item_INCREF (char *, dtype) void PyArray_Item_XDECREF (char *, dtype) object PyArray_FieldNames (object) object PyArray_Transpose (ndarray, PyArray_Dims *) object PyArray_TakeFrom (ndarray, object, int, ndarray, NPY_CLIPMODE) object PyArray_PutTo (ndarray, object, object, NPY_CLIPMODE) object PyArray_PutMask (ndarray, object, object) object PyArray_Repeat (ndarray, object, int) object PyArray_Choose (ndarray, object, ndarray, NPY_CLIPMODE) int PyArray_Sort (ndarray, int, NPY_SORTKIND) object PyArray_ArgSort (ndarray, int, NPY_SORTKIND) object PyArray_SearchSorted (ndarray, object, NPY_SEARCHSIDE) object PyArray_ArgMax (ndarray, int, ndarray) object PyArray_ArgMin (ndarray, int, ndarray) object PyArray_Reshape (ndarray, object) object PyArray_Newshape (ndarray, PyArray_Dims *, NPY_ORDER) object PyArray_Squeeze (ndarray) #object PyArray_View (ndarray, dtype, type) object PyArray_SwapAxes (ndarray, int, int) object PyArray_Max (ndarray, int, ndarray) object PyArray_Min (ndarray, int, ndarray) object PyArray_Ptp (ndarray, int, ndarray) object PyArray_Mean (ndarray, int, int, ndarray) object PyArray_Trace (ndarray, int, int, int, int, ndarray) object PyArray_Diagonal (ndarray, int, int, int) object PyArray_Clip (ndarray, object, object, ndarray) object PyArray_Conjugate (ndarray, ndarray) object PyArray_Nonzero (ndarray) object PyArray_Std (ndarray, int, int, ndarray, int) object PyArray_Sum (ndarray, int, int, ndarray) object PyArray_CumSum (ndarray, int, int, ndarray) object PyArray_Prod (ndarray, int, int, ndarray) object PyArray_CumProd (ndarray, int, int, ndarray) object PyArray_All (ndarray, int, ndarray) object PyArray_Any (ndarray, int, ndarray) object PyArray_Compress (ndarray, object, int, ndarray) object PyArray_Flatten (ndarray, NPY_ORDER) object PyArray_Ravel (ndarray, NPY_ORDER) npy_intp PyArray_MultiplyList (npy_intp *, int) int PyArray_MultiplyIntList (int *, int) void * PyArray_GetPtr (ndarray, npy_intp*) int PyArray_CompareLists (npy_intp *, npy_intp *, int) #int PyArray_AsCArray (object*, void *, npy_intp *, int, dtype) #int PyArray_As1D (object*, char **, int *, int) #int PyArray_As2D (object*, char ***, int *, int *, int) int PyArray_Free (object, void *) #int PyArray_Converter (object, object*) int PyArray_IntpFromSequence (object, npy_intp *, int) object PyArray_Concatenate (object, int) object PyArray_InnerProduct (object, object) object PyArray_MatrixProduct (object, object) object PyArray_CopyAndTranspose (object) object PyArray_Correlate (object, object, int) int PyArray_TypestrConvert (int, int) #int 
PyArray_DescrConverter (object, dtype*) #int PyArray_DescrConverter2 (object, dtype*) int PyArray_IntpConverter (object, PyArray_Dims *) #int PyArray_BufferConverter (object, chunk) int PyArray_AxisConverter (object, int *) int PyArray_BoolConverter (object, npy_bool *) int PyArray_ByteorderConverter (object, char *) int PyArray_OrderConverter (object, NPY_ORDER *) unsigned char PyArray_EquivTypes (dtype, dtype) #object PyArray_Zeros (int, npy_intp *, dtype, int) #object PyArray_Empty (int, npy_intp *, dtype, int) object PyArray_Where (object, object, object) object PyArray_Arange (double, double, double, int) #object PyArray_ArangeObj (object, object, object, dtype) int PyArray_SortkindConverter (object, NPY_SORTKIND *) object PyArray_LexSort (object, int) object PyArray_Round (ndarray, int, ndarray) unsigned char PyArray_EquivTypenums (int, int) int PyArray_RegisterDataType (dtype) int PyArray_RegisterCastFunc (dtype, int, PyArray_VectorUnaryFunc *) int PyArray_RegisterCanCast (dtype, int, NPY_SCALARKIND) #void PyArray_InitArrFuncs (PyArray_ArrFuncs *) object PyArray_IntTupleFromIntp (int, npy_intp *) int PyArray_TypeNumFromName (char *) int PyArray_ClipmodeConverter (object, NPY_CLIPMODE *) #int PyArray_OutputConverter (object, ndarray*) object PyArray_BroadcastToShape (object, npy_intp *, int) void _PyArray_SigintHandler (int) void* _PyArray_GetSigintBuf () #int PyArray_DescrAlignConverter (object, dtype*) #int PyArray_DescrAlignConverter2 (object, dtype*) int PyArray_SearchsideConverter (object, void *) object PyArray_CheckAxis (ndarray, int *, int) npy_intp PyArray_OverflowMultiplyList (npy_intp *, int) int PyArray_CompareString (char *, char *, size_t) # Typedefs that matches the runtime dtype objects in # the numpy module. # The ones that are commented out needs an IFDEF function # in Cython to enable them only on the right systems. 
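# Illustrative sketch (example names only): the fixed-width typedefs below are
# what user code typically refers to in buffer declarations, e.g.
#
#     cimport numpy as cnp
#     def sum64(cnp.ndarray[cnp.int64_t, ndim=1] a):
#         cdef Py_ssize_t i
#         cdef cnp.int64_t total = 0
#         for i in range(a.shape[0]):
#             total += a[i]
#         return total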
ctypedef npy_int8 int8_t ctypedef npy_int16 int16_t ctypedef npy_int32 int32_t ctypedef npy_int64 int64_t #ctypedef npy_int96 int96_t #ctypedef npy_int128 int128_t ctypedef npy_uint8 uint8_t ctypedef npy_uint16 uint16_t ctypedef npy_uint32 uint32_t ctypedef npy_uint64 uint64_t #ctypedef npy_uint96 uint96_t #ctypedef npy_uint128 uint128_t ctypedef npy_float32 float32_t ctypedef npy_float64 float64_t #ctypedef npy_float80 float80_t #ctypedef npy_float128 float128_t ctypedef float complex complex64_t ctypedef double complex complex128_t # The int types are mapped a bit surprising -- # numpy.int corresponds to 'l' and numpy.long to 'q' ctypedef npy_long int_t ctypedef npy_longlong long_t ctypedef npy_longlong longlong_t ctypedef npy_ulong uint_t ctypedef npy_ulonglong ulong_t ctypedef npy_ulonglong ulonglong_t ctypedef npy_intp intp_t ctypedef npy_uintp uintp_t ctypedef npy_double float_t ctypedef npy_double double_t ctypedef npy_longdouble longdouble_t ctypedef npy_cfloat cfloat_t ctypedef npy_cdouble cdouble_t ctypedef npy_clongdouble clongdouble_t ctypedef npy_cdouble complex_t cdef inline object PyArray_MultiIterNew1(a): return PyArray_MultiIterNew(1, a) cdef inline object PyArray_MultiIterNew2(a, b): return PyArray_MultiIterNew(2, a, b) cdef inline object PyArray_MultiIterNew3(a, b, c): return PyArray_MultiIterNew(3, a, b, c) cdef inline object PyArray_MultiIterNew4(a, b, c, d): return PyArray_MultiIterNew(4, a, b, c, d) cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): return PyArray_MultiIterNew(5, a, b, c, d, e) cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # Recursive utility function used in __getbuffer__ to get format # string. The new location in the format string is returned. cdef dtype child cdef int endian_detector = 1 cdef bint little_endian = ((&endian_detector)[0] != 0) cdef tuple fields for childname in descr.names: fields = descr.fields[childname] child, new_offset = fields if (end - f) - (new_offset - offset[0]) < 15: raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") if ((child.byteorder == c'>' and little_endian) or (child.byteorder == c'<' and not little_endian)): raise ValueError(u"Non-native byte order not supported") # One could encode it in the format string and have Cython # complain instead, BUT: < and > in format strings also imply # standardized sizes for datatypes, and we rely on native in # order to avoid reencoding data types based on their size. # # A proper PEP 3118 exporter for other clients than Cython # must deal properly with this! 
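        # Rough illustration (assumed example, not from the original source):
        # for an aligned struct dtype such as
        #     np.dtype([('a', np.int8), ('b', np.float64)], align=True)
        # the loop below pads the gap between the two fields with 'x' bytes,
        # so the emitted format is approximately "bxxxxxxxd".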
# Output padding bytes while offset[0] < new_offset: f[0] = 120 # "x"; pad byte f += 1 offset[0] += 1 offset[0] += child.itemsize if not PyDataType_HASFIELDS(child): t = child.type_num if end - f < 5: raise RuntimeError(u"Format string allocated too short.") # Until ticket #99 is fixed, use integers to avoid warnings if t == NPY_BYTE: f[0] = 98 #"b" elif t == NPY_UBYTE: f[0] = 66 #"B" elif t == NPY_SHORT: f[0] = 104 #"h" elif t == NPY_USHORT: f[0] = 72 #"H" elif t == NPY_INT: f[0] = 105 #"i" elif t == NPY_UINT: f[0] = 73 #"I" elif t == NPY_LONG: f[0] = 108 #"l" elif t == NPY_ULONG: f[0] = 76 #"L" elif t == NPY_LONGLONG: f[0] = 113 #"q" elif t == NPY_ULONGLONG: f[0] = 81 #"Q" elif t == NPY_FLOAT: f[0] = 102 #"f" elif t == NPY_DOUBLE: f[0] = 100 #"d" elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg elif t == NPY_OBJECT: f[0] = 79 #"O" else: raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) f += 1 else: # Cython ignores struct boundary information ("T{...}"), # so don't output it f = _util_dtypestring(child, f, end, offset) return f # # ufunc API # cdef extern from "numpy/ufuncobject.h": ctypedef void (*PyUFuncGenericFunction) (char **, npy_intp *, npy_intp *, void *) ctypedef extern class numpy.ufunc [object PyUFuncObject]: cdef: int nin, nout, nargs int identity PyUFuncGenericFunction *functions void **data int ntypes int check_return char *name char *types char *doc void *ptr PyObject *obj PyObject *userloops cdef enum: PyUFunc_Zero PyUFunc_One PyUFunc_None UFUNC_ERR_IGNORE UFUNC_ERR_WARN UFUNC_ERR_RAISE UFUNC_ERR_CALL UFUNC_ERR_PRINT UFUNC_ERR_LOG UFUNC_MASK_DIVIDEBYZERO UFUNC_MASK_OVERFLOW UFUNC_MASK_UNDERFLOW UFUNC_MASK_INVALID UFUNC_SHIFT_DIVIDEBYZERO UFUNC_SHIFT_OVERFLOW UFUNC_SHIFT_UNDERFLOW UFUNC_SHIFT_INVALID UFUNC_FPE_DIVIDEBYZERO UFUNC_FPE_OVERFLOW UFUNC_FPE_UNDERFLOW UFUNC_FPE_INVALID UFUNC_ERR_DEFAULT UFUNC_ERR_DEFAULT2 object PyUFunc_FromFuncAndData(PyUFuncGenericFunction *, void **, char *, int, int, int, int, char *, char *, int) int PyUFunc_RegisterLoopForType(ufunc, int, PyUFuncGenericFunction, int *, void *) int PyUFunc_GenericFunction \ (ufunc, PyObject *, PyObject *, PyArrayObject **) void PyUFunc_f_f_As_d_d \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_d_d \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_f_f \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_g_g \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_F_F_As_D_D \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_F_F \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_D_D \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_G_G \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_O_O \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_ff_f_As_dd_d \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_ff_f \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_dd_d \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_gg_g \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_FF_F_As_DD_D \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_DD_D \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_FF_F \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_GG_G \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_OO_O \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_O_O_method \ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_OO_O_method 
\ (char **, npy_intp *, npy_intp *, void *) void PyUFunc_On_Om \ (char **, npy_intp *, npy_intp *, void *) int PyUFunc_GetPyValues \ (char *, int *, int *, PyObject **) int PyUFunc_checkfperr \ (int, PyObject *, int *) void PyUFunc_clearfperr() int PyUFunc_getfperr() int PyUFunc_handlefperr \ (int, PyObject *, int, int *) int PyUFunc_ReplaceLoopBySignature \ (ufunc, PyUFuncGenericFunction, int *, PyUFuncGenericFunction *) object PyUFunc_FromFuncAndDataAndSignature \ (PyUFuncGenericFunction *, void **, char *, int, int, int, int, char *, char *, int, char *) int _import_umath() except -1 cdef inline void set_array_base(ndarray arr, object base): cdef PyObject* baseptr if base is None: baseptr = NULL else: Py_INCREF(base) # important to do this before decref below! baseptr = base Py_XDECREF(arr.base) arr.base = baseptr cdef inline object get_array_base(ndarray arr): if arr.base is NULL: return None else: return arr.base # Versions of the import_* functions which are more suitable for # Cython code. cdef inline int import_array() except -1: try: _import_array() except Exception: raise ImportError("numpy.core.multiarray failed to import") cdef inline int import_umath() except -1: try: _import_umath() except Exception: raise ImportError("numpy.core.umath failed to import") cdef inline int import_ufunc() except -1: try: _import_umath() except Exception: raise ImportError("numpy.core.umath failed to import") Cython-0.26.1/Cython/Includes/numpy/math.pxd0000664000175000017500000001325713023021033021453 0ustar stefanstefan00000000000000# NumPy math library # # This exports the functionality of the NumPy core math library, aka npymath, # which provides implementations of C99 math functions and macros for system # with a C89 library (such as MSVC). npymath is available with NumPy >=1.3, # although some functions will require later versions. The spacing function is # not in C99, but comes from Fortran. # # On the Cython side, the npymath functions are available without the "npy_" # prefix that they have in C, to make this is a drop-in replacement for # libc.math. The same is true for the constants, where possible. # # See the NumPy documentation for linking instructions. # # Complex number support and NumPy 2.0 half-precision functions are currently # not exported. # # Author: Lars Buitinck cdef extern from "numpy/npy_math.h" nogil: # Floating-point classification long double NAN "NPY_NAN" long double INFINITY "NPY_INFINITY" long double PZERO "NPY_PZERO" # positive zero long double NZERO "NPY_NZERO" # negative zero # These four are actually macros and work on any floating-point type. 
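# Illustrative usage sketch (example code, not part of this header): the
# declarations below can be cimported and used like their libc.math
# counterparts, e.g.
#
#     from numpy.math cimport isfinite, NAN
#
#     def safe_ratio(double a, double b):
#         cdef double r = a / b if b != 0 else NAN
#         return r if isfinite(r) else 0.0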
int isinf "npy_isinf"(long double) # -1 / 0 / 1 bint isfinite "npy_isfinite"(long double) bint isnan "npy_isnan"(long double) bint signbit "npy_signbit"(long double) # Math constants long double E "NPY_E" long double LOG2E "NPY_LOG2E" # ln(e) / ln(2) long double LOG10E "NPY_LOG10E" # ln(e) / ln(10) long double LOGE2 "NPY_LOGE2" # ln(2) long double LOGE10 "NPY_LOGE10" # ln(10) long double PI "NPY_PI" long double PI_2 "NPY_PI_2" # pi / 2 long double PI_4 "NPY_PI_4" # pi / 4 long double NPY_1_PI # 1 / pi; NPY_ because of ident syntax long double NPY_2_PI # 2 / pi long double EULER "NPY_EULER" # Euler constant (gamma, 0.57721) # Low-level floating point manipulation (NumPy >=1.4) float copysignf "npy_copysignf"(float, float) float nextafterf "npy_nextafterf"(float x, float y) float spacingf "npy_spacingf"(float x) double copysign "npy_copysign"(double, double) double nextafter "npy_nextafter"(double x, double y) double spacing "npy_spacing"(double x) long double copysignl "npy_copysignl"(long double, long double) long double nextafterl "npy_nextafterl"(long double x, long double y) long double spacingl "npy_spacingl"(long double x) # Float C99 functions float sinf "npy_sinf"(float x) float cosf "npy_cosf"(float x) float tanf "npy_tanf"(float x) float sinhf "npy_sinhf"(float x) float coshf "npy_coshf"(float x) float tanhf "npy_tanhf"(float x) float fabsf "npy_fabsf"(float x) float floorf "npy_floorf"(float x) float ceilf "npy_ceilf"(float x) float rintf "npy_rintf"(float x) float sqrtf "npy_sqrtf"(float x) float log10f "npy_log10f"(float x) float logf "npy_logf"(float x) float expf "npy_expf"(float x) float expm1f "npy_expm1f"(float x) float asinf "npy_asinf"(float x) float acosf "npy_acosf"(float x) float atanf "npy_atanf"(float x) float asinhf "npy_asinhf"(float x) float acoshf "npy_acoshf"(float x) float atanhf "npy_atanhf"(float x) float log1pf "npy_log1pf"(float x) float exp2f "npy_exp2f"(float x) float log2f "npy_log2f"(float x) float atan2f "npy_atan2f"(float x, float y) float hypotf "npy_hypotf"(float x, float y) float powf "npy_powf"(float x, float y) float fmodf "npy_fmodf"(float x, float y) float modff "npy_modff"(float x, float* y) # Long double C99 functions long double sinl "npy_sinl"(long double x) long double cosl "npy_cosl"(long double x) long double tanl "npy_tanl"(long double x) long double sinhl "npy_sinhl"(long double x) long double coshl "npy_coshl"(long double x) long double tanhl "npy_tanhl"(long double x) long double fabsl "npy_fabsl"(long double x) long double floorl "npy_floorl"(long double x) long double ceill "npy_ceill"(long double x) long double rintl "npy_rintl"(long double x) long double sqrtl "npy_sqrtl"(long double x) long double log10l "npy_log10l"(long double x) long double logl "npy_logl"(long double x) long double expl "npy_expl"(long double x) long double expm1l "npy_expm1l"(long double x) long double asinl "npy_asinl"(long double x) long double acosl "npy_acosl"(long double x) long double atanl "npy_atanl"(long double x) long double asinhl "npy_asinhl"(long double x) long double acoshl "npy_acoshl"(long double x) long double atanhl "npy_atanhl"(long double x) long double log1pl "npy_log1pl"(long double x) long double exp2l "npy_exp2l"(long double x) long double log2l "npy_log2l"(long double x) long double atan2l "npy_atan2l"(long double x, long double y) long double hypotl "npy_hypotl"(long double x, long double y) long double powl "npy_powl"(long double x, long double y) long double fmodl "npy_fmodl"(long double x, long double y) long double modfl 
"npy_modfl"(long double x, long double* y) # NumPy extensions float deg2radf "npy_deg2radf"(float x) float rad2degf "npy_rad2degf"(float x) float logaddexpf "npy_logaddexpf"(float x, float y) float logaddexp2f "npy_logaddexp2f"(float x, float y) double deg2rad "npy_deg2rad"(double x) double rad2deg "npy_rad2deg"(double x) double logaddexp "npy_logaddexp"(double x, double y) double logaddexp2 "npy_logaddexp2"(double x, double y) long double deg2radl "npy_deg2radl"(long double x) long double rad2degl "npy_rad2degl"(long double x) long double logaddexpl "npy_logaddexpl"(long double x, long double y) long double logaddexp2l "npy_logaddexp2l"(long double x, long double y) Cython-0.26.1/Cython/Includes/libcpp/0000775000175000017500000000000013151203436020111 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Includes/libcpp/unordered_map.pxd0000664000175000017500000000503213023021033023437 0ustar stefanstefan00000000000000from .utility cimport pair cdef extern from "" namespace "std" nogil: cdef cppclass unordered_map[T, U]: ctypedef T key_type ctypedef U mapped_type ctypedef pair[const T, U] value_type cppclass iterator: pair[T, U]& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: pair[T, U]& operator*() iterator operator++() iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass unordered_map() except + unordered_map(unordered_map&) except + #unordered_map(key_compare&) U& operator[](T&) #unordered_map& operator=(unordered_map&) bint operator==(unordered_map&, unordered_map&) bint operator!=(unordered_map&, unordered_map&) bint operator<(unordered_map&, unordered_map&) bint operator>(unordered_map&, unordered_map&) bint operator<=(unordered_map&, unordered_map&) bint operator>=(unordered_map&, unordered_map&) U& at(T&) iterator begin() const_iterator const_begin "begin"() void clear() size_t count(T&) bint empty() iterator end() const_iterator const_end "end"() pair[iterator, iterator] equal_range(T&) #pair[const_iterator, const_iterator] equal_range(key_type&) void erase(iterator) void erase(iterator, iterator) size_t erase(T&) iterator find(T&) const_iterator const_find "find"(T&) pair[iterator, bint] insert(pair[T, U]) # XXX pair[T,U]& iterator insert(iterator, pair[T, U]) # XXX pair[T,U]& #void insert(input_iterator, input_iterator) #key_compare key_comp() iterator lower_bound(T&) const_iterator const_lower_bound "lower_bound"(T&) size_t max_size() reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() reverse_iterator rend() const_reverse_iterator const_rend "rend"() size_t size() void swap(unordered_map&) iterator upper_bound(T&) const_iterator const_upper_bound "upper_bound"(T&) #value_compare value_comp() void max_load_factor(float) float max_load_factor() Cython-0.26.1/Cython/Includes/libcpp/stack.pxd0000664000175000017500000000044413023021033021722 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass stack[T]: ctypedef T value_type stack() except + stack(stack&) except + #stack(Container&) bint empty() void pop() void push(T&) size_t size() T& top() Cython-0.26.1/Cython/Includes/libcpp/utility.pxd0000664000175000017500000000074513023021033022324 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass pair[T, U]: ctypedef T first_type ctypedef U second_type T first U second pair() 
except + pair(pair&) except + pair(T&, U&) except + bint operator==(pair&, pair&) bint operator!=(pair&, pair&) bint operator<(pair&, pair&) bint operator>(pair&, pair&) bint operator<=(pair&, pair&) bint operator>=(pair&, pair&) Cython-0.26.1/Cython/Includes/libcpp/map.pxd0000664000175000017500000000517413023021033021377 0ustar stefanstefan00000000000000from .utility cimport pair cdef extern from "" namespace "std" nogil: cdef cppclass map[T, U, COMPARE=*, ALLOCATOR=*]: ctypedef T key_type ctypedef U mapped_type ctypedef pair[const T, U] value_type ctypedef COMPARE key_compare ctypedef ALLOCATOR allocator_type cppclass iterator: pair[T, U]& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass const_iterator: pair[const T, U]& operator*() const_iterator operator++() const_iterator operator--() bint operator==(const_iterator) bint operator!=(const_iterator) cppclass reverse_iterator: pair[T, U]& operator*() iterator operator++() iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_reverse_iterator(reverse_iterator): pass map() except + map(map&) except + #map(key_compare&) U& operator[](T&) #map& operator=(map&) bint operator==(map&, map&) bint operator!=(map&, map&) bint operator<(map&, map&) bint operator>(map&, map&) bint operator<=(map&, map&) bint operator>=(map&, map&) U& at(const T&) except + iterator begin() const_iterator const_begin "begin" () void clear() size_t count(const T&) bint empty() iterator end() const_iterator const_end "end" () pair[iterator, iterator] equal_range(const T&) #pair[const_iterator, const_iterator] equal_range(key_type&) void erase(iterator) void erase(iterator, iterator) size_t erase(const T&) iterator find(const T&) const_iterator const_find "find" (const T&) pair[iterator, bint] insert(pair[T, U]) except + # XXX pair[T,U]& iterator insert(iterator, pair[T, U]) except + # XXX pair[T,U]& #void insert(input_iterator, input_iterator) #key_compare key_comp() iterator lower_bound(const T&) const_iterator const_lower_bound "lower_bound"(const T&) size_t max_size() reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() reverse_iterator rend() const_reverse_iterator const_rend "rend"() size_t size() void swap(map&) iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) #value_compare value_comp() Cython-0.26.1/Cython/Includes/libcpp/algorithm.pxd0000664000175000017500000000335213143605603022622 0ustar stefanstefan00000000000000from libcpp cimport bool cdef extern from "" namespace "std" nogil: # Sorting and searching bool binary_search[Iter, T](Iter first, Iter last, const T& value) bool binary_search[Iter, T, Compare](Iter first, Iter last, const T& value, Compare comp) Iter lower_bound[Iter, T](Iter first, Iter last, const T& value) Iter lower_bound[Iter, T, Compare](Iter first, Iter last, const T& value, Compare comp) Iter upper_bound[Iter, T](Iter first, Iter last, const T& value) Iter upper_bound[Iter, T, Compare](Iter first, Iter last, const T& value, Compare comp) void partial_sort[Iter](Iter first, Iter middle, Iter last) void partial_sort[Iter, Compare](Iter first, Iter middle, Iter last, Compare comp) void sort[Iter](Iter first, Iter last) void sort[Iter, Compare](Iter first, Iter last, Compare comp) # Removing duplicates Iter unique[Iter](Iter first, Iter last) Iter unique[Iter, BinaryPredicate](Iter first, Iter last, BinaryPredicate p) # Binary heaps (priority queues) void 
make_heap[Iter](Iter first, Iter last) void make_heap[Iter, Compare](Iter first, Iter last, Compare comp) void pop_heap[Iter](Iter first, Iter last) void pop_heap[Iter, Compare](Iter first, Iter last, Compare comp) void push_heap[Iter](Iter first, Iter last) void push_heap[Iter, Compare](Iter first, Iter last, Compare comp) void sort_heap[Iter](Iter first, Iter last) void sort_heap[Iter, Compare](Iter first, Iter last, Compare comp) # Copy OutputIter copy[InputIter,OutputIter](InputIter,InputIter,OutputIter) Cython-0.26.1/Cython/Includes/libcpp/list.pxd0000664000175000017500000000450013023021033021565 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass list[T,ALLOCATOR=*]: ctypedef T value_type ctypedef ALLOCATOR allocator_type cppclass iterator: iterator() iterator(iterator &) T& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: reverse_iterator() reverse_iterator(iterator &) T& operator*() reverse_iterator operator++() reverse_iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass list() except + list(list&) except + list(size_t, T&) except + #list operator=(list&) bint operator==(list&, list&) bint operator!=(list&, list&) bint operator<(list&, list&) bint operator>(list&, list&) bint operator<=(list&, list&) bint operator>=(list&, list&) void assign(size_t, T&) T& back() iterator begin() const_iterator const_begin "begin"() void clear() bint empty() iterator end() const_iterator const_end "end"() iterator erase(iterator) iterator erase(iterator, iterator) T& front() iterator insert(iterator, T&) void insert(iterator, size_t, T&) size_t max_size() void merge(list&) #void merge(list&, BinPred) void pop_back() void pop_front() void push_back(T&) void push_front(T&) reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() void remove(T&) #void remove_if(UnPred) reverse_iterator rend() const_reverse_iterator const_rend "rend"() void resize(size_t, T&) void reverse() size_t size() void sort() #void sort(BinPred) void splice(iterator, list&) void splice(iterator, list&, iterator) void splice(iterator, list&, iterator, iterator) void swap(list&) void unique() #void unique(BinPred) Cython-0.26.1/Cython/Includes/libcpp/unordered_set.pxd0000664000175000017500000000445413023021033023464 0ustar stefanstefan00000000000000from .utility cimport pair cdef extern from "" namespace "std" nogil: cdef cppclass unordered_set[T,HASH=*,PRED=*,ALLOCATOR=*]: ctypedef T value_type cppclass iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass unordered_set() except + unordered_set(unordered_set&) except + #unordered_set(key_compare&) #unordered_set& operator=(unordered_set&) bint operator==(unordered_set&, unordered_set&) bint operator!=(unordered_set&, unordered_set&) bint operator<(unordered_set&, unordered_set&) bint operator>(unordered_set&, unordered_set&) bint operator<=(unordered_set&, unordered_set&) bint operator>=(unordered_set&, unordered_set&) iterator begin() const_iterator const_begin "begin"() void clear() size_t 
count(T&) bint empty() iterator end() const_iterator const_end "end"() pair[iterator, iterator] equal_range(T&) #pair[const_iterator, const_iterator] equal_range(T&) void erase(iterator) void erase(iterator, iterator) size_t erase(T&) iterator find(T&) const_iterator const_find "find"(T&) pair[iterator, bint] insert(T&) iterator insert(iterator, T&) #void insert(input_iterator, input_iterator) #key_compare key_comp() iterator lower_bound(T&) const_iterator const_lower_bound "lower_bound"(T&) size_t max_size() reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() reverse_iterator rend() const_reverse_iterator const_rend "rend"() size_t size() void swap(unordered_set&) iterator upper_bound(T&) const_iterator const_upper_bound "upper_bound"(T&) #value_compare value_comp() Cython-0.26.1/Cython/Includes/libcpp/cast.pxd0000664000175000017500000000076512542002467021574 0ustar stefanstefan00000000000000# Defines the standard C++ cast operators. # # Due to type restrictions, these are only defined for pointer parameters, # however that is the only case where they are significantly more interesting # than the standard C cast operator which can be written "(expression)" in # Cython. cdef extern from * nogil: cdef T dynamic_cast[T](void *) except + # nullptr may also indicate failure cdef T static_cast[T](void *) cdef T reinterpret_cast[T](void *) cdef T const_cast[T](void *) Cython-0.26.1/Cython/Includes/libcpp/__init__.pxd0000664000175000017500000000013612574327400022373 0ustar stefanstefan00000000000000cdef extern from *: ctypedef bint bool ctypedef void* nullptr_t nullptr_t nullptr Cython-0.26.1/Cython/Includes/libcpp/functional.pxd0000664000175000017500000000057513143605603023002 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass function[T]: function() except + function(T*) except + function(function&) except + function(void*) except + function operator=(T*) function operator=(function&) function operator=(void*) function operator=[U](U) bint operator bool() Cython-0.26.1/Cython/Includes/libcpp/vector.pxd0000664000175000017500000000623013143605603022134 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass vector[T,ALLOCATOR=*]: ctypedef T value_type ctypedef ALLOCATOR allocator_type # these should really be allocator_type.size_type and # allocator_type.difference_type to be true to the C++ definition # but cython doesn't support defered access on template arguments ctypedef size_t size_type ctypedef ptrdiff_t difference_type cppclass iterator: T& operator*() iterator operator++() iterator operator--() iterator operator+(size_type) iterator operator-(size_type) difference_type operator-(iterator) bint operator==(iterator) bint operator!=(iterator) bint operator<(iterator) bint operator>(iterator) bint operator<=(iterator) bint operator>=(iterator) cppclass reverse_iterator: T& operator*() iterator operator++() iterator operator--() iterator operator+(size_type) iterator operator-(size_type) bint operator==(reverse_iterator) bint operator!=(reverse_iterator) bint operator<(reverse_iterator) bint operator>(reverse_iterator) bint operator<=(reverse_iterator) bint operator>=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass vector() except + vector(vector&) except + vector(size_type) except + vector(size_type, T&) except + #vector[input_iterator](input_iterator, input_iterator) T& operator[](size_type) #vector& operator=(vector&) bint 
operator==(vector&, vector&) bint operator!=(vector&, vector&) bint operator<(vector&, vector&) bint operator>(vector&, vector&) bint operator<=(vector&, vector&) bint operator>=(vector&, vector&) void assign(size_type, const T&) void assign[input_iterator](input_iterator, input_iterator) except + T& at(size_type) except + T& back() iterator begin() const_iterator const_begin "begin"() size_type capacity() void clear() bint empty() iterator end() const_iterator const_end "end"() iterator erase(iterator) iterator erase(iterator, iterator) T& front() iterator insert(iterator, const T&) except + iterator insert(iterator, size_type, const T&) except + iterator insert[Iter](iterator, Iter, Iter) except + size_type max_size() void pop_back() void push_back(T&) except + reverse_iterator rbegin() const_reverse_iterator const_rbegin "crbegin"() reverse_iterator rend() const_reverse_iterator const_rend "crend"() void reserve(size_type) void resize(size_type) except + void resize(size_type, T&) except + size_type size() void swap(vector&) # C++11 methods T* data() void shrink_to_fit() Cython-0.26.1/Cython/Includes/libcpp/set.pxd0000664000175000017500000000416613023021033021415 0ustar stefanstefan00000000000000from .utility cimport pair cdef extern from "" namespace "std" nogil: cdef cppclass set[T]: ctypedef T value_type cppclass iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass set() except + set(set&) except + #set(key_compare&) #set& operator=(set&) bint operator==(set&, set&) bint operator!=(set&, set&) bint operator<(set&, set&) bint operator>(set&, set&) bint operator<=(set&, set&) bint operator>=(set&, set&) iterator begin() const_iterator const_begin "begin"() void clear() size_t count(const T&) bint empty() iterator end() const_iterator const_end "end"() pair[iterator, iterator] equal_range(const T&) #pair[const_iterator, const_iterator] equal_range(T&) void erase(iterator) void erase(iterator, iterator) size_t erase(T&) iterator find(T&) const_iterator const_find "find"(T&) pair[iterator, bint] insert(const T&) except + iterator insert(iterator, const T&) except + #void insert(input_iterator, input_iterator) #key_compare key_comp() iterator lower_bound(T&) const_iterator const_lower_bound "lower_bound"(T&) size_t max_size() reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() reverse_iterator rend() const_reverse_iterator const_rend "rend"() size_t size() void swap(set&) iterator upper_bound(const T&) const_iterator const_upper_bound "upper_bound"(const T&) #value_compare value_comp() Cython-0.26.1/Cython/Includes/libcpp/memory.pxd0000664000175000017500000000627313143605603022151 0ustar stefanstefan00000000000000from libcpp cimport bool, nullptr_t, nullptr cdef extern from "" namespace "std" nogil: cdef cppclass default_delete[T]: default_delete() cdef cppclass allocator[T]: allocator() allocator(const allocator &) #allocator(const allocator[U] &) #unique_ptr unit tests fail w/this T * address(T &) const T * address(const T &) const T * allocate( size_t n ) # Not to standard. should be a second default argument void deallocate(T * , size_t) size_t max_size() const void construct( T *, const T &) #C++98. 
The C++11 version is variadic AND perfect-forwarding void destroy(T *) #C++98 void destroy[U](U *) #unique_ptr unit tests fail w/this cdef cppclass unique_ptr[T,DELETER=*]: unique_ptr() unique_ptr(nullptr_t) unique_ptr(T*) unique_ptr(unique_ptr[T]&) # Modifiers T* release() void reset() void reset(nullptr_t) void reset(T*) void swap(unique_ptr&) # Observers T* get() T& operator*() #T* operator->() # Not Supported bool operator bool() bool operator!() bool operator==(const unique_ptr&) bool operator!=(const unique_ptr&) bool operator<(const unique_ptr&) bool operator>(const unique_ptr&) bool operator<=(const unique_ptr&) bool operator>=(const unique_ptr&) bool operator==(nullptr_t) bool operator!=(nullptr_t) # Forward Declaration not working ("Compiler crash in AnalyseDeclarationsTransform") #cdef cppclass weak_ptr[T] cdef cppclass shared_ptr[T]: shared_ptr() shared_ptr(nullptr_t) shared_ptr(T*) shared_ptr(shared_ptr[T]&) shared_ptr(shared_ptr[T]&, T*) shared_ptr(unique_ptr[T]&) #shared_ptr(weak_ptr[T]&) # Not Supported # Modifiers void reset() void reset(T*) void swap(shared_ptr&) # Observers T* get() T& operator*() #T* operator->() # Not Supported long use_count() bool unique() bool operator bool() bool operator!() #bool owner_before[Y](const weak_ptr[Y]&) # Not Supported bool owner_before[Y](const shared_ptr[Y]&) bool operator==(const shared_ptr&) bool operator!=(const shared_ptr&) bool operator<(const shared_ptr&) bool operator>(const shared_ptr&) bool operator<=(const shared_ptr&) bool operator>=(const shared_ptr&) bool operator==(nullptr_t) bool operator!=(nullptr_t) cdef cppclass weak_ptr[T]: weak_ptr() weak_ptr(weak_ptr[T]&) weak_ptr(shared_ptr[T]&) # Modifiers void reset() void swap(weak_ptr&) # Observers long use_count() bool expired() shared_ptr[T] lock() bool owner_before[Y](const weak_ptr[Y]&) bool owner_before[Y](const shared_ptr[Y]&) # Smart pointer non-member operations shared_ptr[T] make_shared[T](...) except + # Temporaries used for exception handling break generated code unique_ptr[T] make_unique[T](...) 
# except + Cython-0.26.1/Cython/Includes/libcpp/pair.pxd0000664000175000017500000000003312542002467021561 0ustar stefanstefan00000000000000from .utility cimport pair Cython-0.26.1/Cython/Includes/libcpp/deque.pxd0000664000175000017500000000401013023021033021711 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass deque[T,ALLOCATOR=*]: cppclass iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: T& operator*() iterator operator++() iterator operator--() bint operator==(reverse_iterator) bint operator!=(reverse_iterator) cppclass const_iterator(iterator): pass #cppclass const_reverse_iterator(reverse_iterator): # pass deque() except + deque(deque&) except + deque(size_t) except + deque(size_t, T&) except + #deque[input_iterator](input_iterator, input_iterator) T& operator[](size_t) #deque& operator=(deque&) bint operator==(deque&, deque&) bint operator!=(deque&, deque&) bint operator<(deque&, deque&) bint operator>(deque&, deque&) bint operator<=(deque&, deque&) bint operator>=(deque&, deque&) void assign(size_t, T&) void assign(input_iterator, input_iterator) T& at(size_t) T& back() iterator begin() const_iterator const_begin "begin"() void clear() bint empty() iterator end() const_iterator const_end "end"() iterator erase(iterator) iterator erase(iterator, iterator) T& front() iterator insert(iterator, T&) void insert(iterator, size_t, T&) void insert(iterator, input_iterator, input_iterator) size_t max_size() void pop_back() void pop_front() void push_back(T&) void push_front(T&) reverse_iterator rbegin() #const_reverse_iterator rbegin() reverse_iterator rend() #const_reverse_iterator rend() void resize(size_t) void resize(size_t, T&) size_t size() void swap(deque&) Cython-0.26.1/Cython/Includes/libcpp/limits.pxd0000664000175000017500000000317513143605603022140 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: enum float_round_style: round_indeterminate = -1 round_toward_zero = 0 round_to_nearest = 1 round_toward_infinity = 2 round_toward_neg_infinity = 3 enum float_denorm_style: denorm_indeterminate = -1 denorm_absent = 0 denorm_present = 1 #The static methods can be called as, e.g. numeric_limits[int].round_error(), etc. #The const data members should be declared as static. Cython currently doesn't allow that #and/or I can't figure it out, so you must instantiate an object to access, e.g. 
#cdef numeric_limits[double] lm #print lm.round_style cdef cppclass numeric_limits[T]: const bint is_specialized @staticmethod T min() @staticmethod T max() const int digits const int digits10 const bint is_signed const bint is_integer const bint is_exact const int radix @staticmethod T epsilon() @staticmethod T round_error() const int min_exponent const int min_exponent10 const int max_exponent const int max_exponent10 const bint has_infinity const bint has_quiet_NaN const bint has_signaling_NaN const float_denorm_style has_denorm const bint has_denorm_loss @staticmethod T infinity() @staticmethod T quiet_NaN() @staticmethod T signaling_NaN() @staticmethod T denorm_min() const bint is_iec559 const bint is_bounded const bint is_modulo const bint traps const bint tinyness_before const float_round_style round_style Cython-0.26.1/Cython/Includes/libcpp/queue.pxd0000664000175000017500000000103312542002467021753 0ustar stefanstefan00000000000000cdef extern from "" namespace "std" nogil: cdef cppclass queue[T]: queue() except + queue(queue&) except + #queue(Container&) T& back() bint empty() T& front() void pop() void push(T&) size_t size() cdef cppclass priority_queue[T]: priority_queue() except + priority_queue(priority_queue&) except + #priority_queue(Container&) bint empty() void pop() void push(T&) size_t size() T& top() Cython-0.26.1/Cython/Includes/libcpp/complex.pxd0000664000175000017500000000570412542002467022307 0ustar stefanstefan00000000000000# Note: add integer versions of the functions? cdef extern from "" namespace "std" nogil: cdef cppclass complex[T]: complex() except + complex(T, T) except + complex(complex[T]&) except + # How to make the converting constructor, i.e. convert complex[double] # to complex[float]? complex[T] operator+(complex[T]&) complex[T] operator-(complex[T]&) complex[T] operator+(complex[T]&, complex[T]&) complex[T] operator+(complex[T]&, T&) complex[T] operator+(T&, complex[T]&) complex[T] operator-(complex[T]&, complex[T]&) complex[T] operator-(complex[T]&, T&) complex[T] operator-(T&, complex[T]&) complex[T] operator*(complex[T]&, complex[T]&) complex[T] operator*(complex[T]&, T&) complex[T] operator*(T&, complex[T]&) complex[T] operator/(complex[T]&, complex[T]&) complex[T] operator/(complex[T]&, T&) complex[T] operator/(T&, complex[T]&) bint operator==(complex[T]&, complex[T]&) bint operator==(complex[T]&, T&) bint operator==(T&, complex[T]&) bint operator!=(complex[T]&, complex[T]&) bint operator!=(complex[T]&, T&) bint operator!=(T&, complex[T]&) # Access real part T real() void real(T) # Access imaginary part T imag() void imag(T) # Return real part T real[T](complex[T]&) long double real(long double) double real(double) float real(float) # Return imaginary part T imag[T](complex[T]&) long double imag(long double) double imag(double) float imag(float) T abs[T](complex[T]&) T arg[T](complex[T]&) long double arg(long double) double arg(double) float arg(float) T norm[T](complex[T]) long double norm(long double) double norm(double) float norm(float) complex[T] conj[T](complex[T]&) complex[long double] conj(long double) complex[double] conj(double) complex[float] conj(float) complex[T] proj[T](complex[T]) complex[long double] proj(long double) complex[double] proj(double) complex[float] proj(float) complex[T] polar[T](T&, T&) complex[T] ploar[T](T&) complex[T] exp[T](complex[T]&) complex[T] log[T](complex[T]&) complex[T] log10[T](complex[T]&) complex[T] pow[T](complex[T]&, complex[T]&) complex[T] pow[T](complex[T]&, T&) complex[T] pow[T](T&, complex[T]&) 
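    # Illustrative usage sketch (example code, not part of this header; the
    # module must be compiled in C++ mode):
    #
    #     from libcpp.complex cimport complex as cpp_complex, abs as cabs
    #
    #     cdef cpp_complex[double] z = cpp_complex[double](3.0, 4.0)
    #     print(cabs(z))   # 5.0
    #
    # The alias names are arbitrary; the other declarations in this file can
    # be cimported the same way.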
# There are some promotion versions too complex[T] sqrt[T](complex[T]&) complex[T] sin[T](complex[T]&) complex[T] cos[T](complex[T]&) complex[T] tan[T](complex[T]&) complex[T] asin[T](complex[T]&) complex[T] acos[T](complex[T]&) complex[T] atan[T](complex[T]&) complex[T] sinh[T](complex[T]&) complex[T] cosh[T](complex[T]&) complex[T] tanh[T](complex[T]&) complex[T] asinh[T](complex[T]&) complex[T] acosh[T](complex[T]&) complex[T] atanh[T](complex[T]&) Cython-0.26.1/Cython/Includes/libcpp/string.pxd0000664000175000017500000001113713023021033022124 0ustar stefanstefan00000000000000 # deprecated cimport for backwards compatibility: from libc.string cimport const_char cdef extern from "" namespace "std" nogil: size_t npos = -1 cdef cppclass string: string() except + string(char *) except + string(char *, size_t) except + string(string&) except + # as a string formed by a repetition of character c, n times. string(size_t, char) except + cppclass iterator: iterator() char& operator*() iterator(iterator &) iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) cppclass reverse_iterator: char& operator*() iterator operator++() iterator operator--() iterator operator+(size_t) iterator operator-(size_t) bint operator==(reverse_iterator) bint operator!=(reverse_iterator) bint operator<(reverse_iterator) bint operator>(reverse_iterator) bint operator<=(reverse_iterator) bint operator>=(reverse_iterator) cppclass const_iterator(iterator): pass cppclass const_reverse_iterator(reverse_iterator): pass iterator begin() const_iterator const_begin "begin"() iterator end() const_iterator const_end "end"() reverse_iterator rbegin() const_reverse_iterator const_rbegin "rbegin"() reverse_iterator rend() const_reverse_iterator const_rend "rend"() const char* c_str() const char* data() size_t size() size_t max_size() size_t length() void resize(size_t) void resize(size_t, char c) size_t capacity() void reserve(size_t) void clear() bint empty() char& at(size_t) char& operator[](size_t) int compare(string&) string& append(string&) string& append(string&, size_t, size_t) string& append(char *) string& append(char *, size_t) string& append(size_t, char) void push_back(char c) string& assign (string&) string& assign (string&, size_t, size_t) string& assign (char *, size_t) string& assign (char *) string& assign (size_t n, char c) string& insert(size_t, string&) string& insert(size_t, string&, size_t, size_t) string& insert(size_t, char* s, size_t) string& insert(size_t, char* s) string& insert(size_t, size_t, char c) size_t copy(char *, size_t, size_t) size_t find(string&) size_t find(string&, size_t) size_t find(char*, size_t pos, size_t) size_t find(char*, size_t pos) size_t find(char, size_t pos) size_t rfind(string&, size_t) size_t rfind(char* s, size_t, size_t) size_t rfind(char*, size_t pos) size_t rfind(char c, size_t) size_t rfind(char c) size_t find_first_of(string&, size_t) size_t find_first_of(char* s, size_t, size_t) size_t find_first_of(char*, size_t pos) size_t find_first_of(char c, size_t) size_t find_first_of(char c) size_t find_first_not_of(string&, size_t) size_t find_first_not_of(char* s, size_t, size_t) size_t find_first_not_of(char*, size_t pos) size_t find_first_not_of(char c, size_t) size_t find_first_not_of(char c) size_t find_last_of(string&, size_t) size_t find_last_of(char* s, size_t, size_t) size_t find_last_of(char*, size_t pos) size_t find_last_of(char c, size_t) size_t find_last_of(char c) size_t find_last_not_of(string&, size_t) size_t 
find_last_not_of(char* s, size_t, size_t) size_t find_last_not_of(char*, size_t pos) string substr(size_t, size_t) string substr() string substr(size_t) size_t find_last_not_of(char c, size_t) size_t find_last_not_of(char c) #string& operator= (string&) #string& operator= (char*) #string& operator= (char) string operator+ (string& rhs) string operator+ (char* rhs) bint operator==(string&) bint operator==(char*) bint operator!= (string& rhs ) bint operator!= (char* ) bint operator< (string&) bint operator< (char*) bint operator> (string&) bint operator> (char*) bint operator<= (string&) bint operator<= (char*) bint operator>= (string&) bint operator>= (char*) Cython-0.26.1/Cython/Includes/libcpp/iterator.pxd0000664000175000017500000000263013143605603022463 0ustar stefanstefan00000000000000#Basic reference: http://www.cplusplus.com/reference/iterator/ #Most of these classes are in fact empty structs cdef extern from "" namespace "std" nogil: cdef cppclass iterator[Category,T,Distance,Pointer,Reference]: pass cdef cppclass output_iterator_tag: pass cdef cppclass input_iterator_tag: pass cdef cppclass forward_iterator_tag(input_iterator_tag): pass cdef cppclass bidirectional_iterator_tag(forward_iterator_tag): pass cdef cppclass random_access_iterator_tag(bidirectional_iterator_tag): pass cdef cppclass back_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]): pass cdef cppclass front_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]): pass cdef cppclass insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]): pass back_insert_iterator[CONTAINER] back_inserter[CONTAINER](CONTAINER &) front_insert_iterator[CONTAINER] front_inserter[CONTAINER](CONTAINER &) ##Note: this is the C++98 version of inserter. ##The C++11 versions's prototype relies on typedef members of classes, which Cython doesn't currently support: ##template ##insert_iterator inserter (Container& x, typename Container::iterator it) insert_iterator[CONTAINER] inserter[CONTAINER,ITERATOR](CONTAINER &, ITERATOR) Cython-0.26.1/Cython/Includes/libcpp/typeindex.pxd0000664000175000017500000000101413023021033022620 0ustar stefanstefan00000000000000from libcpp cimport bool from .typeinfo cimport type_info # This class is C++11-only cdef extern from "" namespace "std" nogil: cdef cppclass type_index: type_index(const type_info &) const char* name() size_t hash_code() bool operator==(const type_index &) bool operator!=(const type_index &) bool operator<(const type_index &) bool operator<=(const type_index &) bool operator>(const type_index &) bool operator>=(const type_index &) Cython-0.26.1/Cython/Includes/libcpp/typeinfo.pxd0000664000175000017500000000046013023021033022450 0ustar stefanstefan00000000000000from libcpp cimport bool cdef extern from "" namespace "std" nogil: cdef cppclass type_info: const char* name() int before(const type_info&) bool operator==(const type_info&) bool operator!=(const type_info&) # C++11-only size_t hash_code() Cython-0.26.1/Cython/Runtime/0000775000175000017500000000000013151203436016515 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Runtime/__init__.py0000664000175000017500000000001512542002467020626 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Runtime/refnanny.pyx0000664000175000017500000001416413023021033021072 0ustar stefanstefan00000000000000# cython: language_level=3 from cpython.ref cimport PyObject, Py_INCREF, Py_DECREF, Py_XDECREF, Py_XINCREF from cpython.exc cimport PyErr_Fetch, PyErr_Restore from cpython.pystate cimport 
PyThreadState_Get cimport cython loglevel = 0 reflog = [] cdef log(level, action, obj, lineno): if loglevel >= level: reflog.append((lineno, action, id(obj))) LOG_NONE, LOG_ALL = range(2) @cython.final cdef class Context(object): cdef readonly object name, filename cdef readonly dict refs cdef readonly list errors cdef readonly Py_ssize_t start def __cinit__(self, name, line=0, filename=None): self.name = name self.start = line self.filename = filename self.refs = {} # id -> (count, [lineno]) self.errors = [] cdef regref(self, obj, lineno, bint is_null): log(LOG_ALL, u'regref', u"" if is_null else obj, lineno) if is_null: self.errors.append(f"NULL argument on line {lineno}") return id_ = id(obj) count, linenumbers = self.refs.get(id_, (0, [])) self.refs[id_] = (count + 1, linenumbers) linenumbers.append(lineno) cdef bint delref(self, obj, lineno, bint is_null) except -1: # returns whether it is ok to do the decref operation log(LOG_ALL, u'delref', u"" if is_null else obj, lineno) if is_null: self.errors.append(f"NULL argument on line {lineno}") return False id_ = id(obj) count, linenumbers = self.refs.get(id_, (0, [])) if count == 0: self.errors.append(f"Too many decrefs on line {lineno}, reference acquired on lines {linenumbers!r}") return False elif count == 1: del self.refs[id_] return True else: self.refs[id_] = (count - 1, linenumbers) return True cdef end(self): if self.refs: msg = u"References leaked:" for count, linenos in self.refs.itervalues(): msg += f"\n ({count}) acquired on lines: {u', '.join([f'{x}' for x in linenos])}" self.errors.append(msg) if self.errors: return u"\n".join([u'REFNANNY: '+error for error in self.errors]) else: return None cdef void report_unraisable(object e=None): try: if e is None: import sys e = sys.exc_info()[1] print(f"refnanny raised an exception: {e}") except: pass # We absolutely cannot exit with an exception # All Python operations must happen after any existing # exception has been fetched, in case we are called from # exception-handling code. cdef PyObject* SetupContext(char* funcname, int lineno, char* filename) except NULL: if Context is None: # Context may be None during finalize phase. # In that case, we don't want to be doing anything fancy # like caching and resetting exceptions. 
return NULL cdef (PyObject*) type = NULL, value = NULL, tb = NULL, result = NULL PyThreadState_Get() PyErr_Fetch(&type, &value, &tb) try: ctx = Context(funcname, lineno, filename) Py_INCREF(ctx) result = ctx except Exception, e: report_unraisable(e) PyErr_Restore(type, value, tb) return result cdef void GOTREF(PyObject* ctx, PyObject* p_obj, int lineno): if ctx == NULL: return cdef (PyObject*) type = NULL, value = NULL, tb = NULL PyErr_Fetch(&type, &value, &tb) try: try: if p_obj is NULL: (ctx).regref(None, lineno, True) else: (ctx).regref(p_obj, lineno, False) except: report_unraisable() except: # __Pyx_GetException may itself raise errors pass PyErr_Restore(type, value, tb) cdef int GIVEREF_and_report(PyObject* ctx, PyObject* p_obj, int lineno): if ctx == NULL: return 1 cdef (PyObject*) type = NULL, value = NULL, tb = NULL cdef bint decref_ok = False PyErr_Fetch(&type, &value, &tb) try: try: if p_obj is NULL: decref_ok = (ctx).delref(None, lineno, True) else: decref_ok = (ctx).delref(p_obj, lineno, False) except: report_unraisable() except: # __Pyx_GetException may itself raise errors pass PyErr_Restore(type, value, tb) return decref_ok cdef void GIVEREF(PyObject* ctx, PyObject* p_obj, int lineno): GIVEREF_and_report(ctx, p_obj, lineno) cdef void INCREF(PyObject* ctx, PyObject* obj, int lineno): Py_XINCREF(obj) PyThreadState_Get() GOTREF(ctx, obj, lineno) cdef void DECREF(PyObject* ctx, PyObject* obj, int lineno): if GIVEREF_and_report(ctx, obj, lineno): Py_XDECREF(obj) PyThreadState_Get() cdef void FinishContext(PyObject** ctx): if ctx == NULL or ctx[0] == NULL: return cdef (PyObject*) type = NULL, value = NULL, tb = NULL cdef object errors = None cdef Context context PyThreadState_Get() PyErr_Fetch(&type, &value, &tb) try: try: context = ctx[0] errors = context.end() if errors: print(f"{context.filename.decode('latin1')}: {context.name.decode('latin1')}()") print(errors) context = None except: report_unraisable() except: # __Pyx_GetException may itself raise errors pass Py_XDECREF(ctx[0]) ctx[0] = NULL PyErr_Restore(type, value, tb) ctypedef struct RefNannyAPIStruct: void (*INCREF)(PyObject*, PyObject*, int) void (*DECREF)(PyObject*, PyObject*, int) void (*GOTREF)(PyObject*, PyObject*, int) void (*GIVEREF)(PyObject*, PyObject*, int) PyObject* (*SetupContext)(char*, int, char*) except NULL void (*FinishContext)(PyObject**) cdef RefNannyAPIStruct api api.INCREF = INCREF api.DECREF = DECREF api.GOTREF = GOTREF api.GIVEREF = GIVEREF api.SetupContext = SetupContext api.FinishContext = FinishContext cdef extern from "Python.h": object PyLong_FromVoidPtr(void*) RefNannyAPI = PyLong_FromVoidPtr(&api) Cython-0.26.1/Cython/Utility/0000775000175000017500000000000013151203436016535 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Utility/Embed.c0000664000175000017500000001357212574327400017733 0ustar stefanstefan00000000000000//////////////////// MainFunction //////////////////// #ifdef __FreeBSD__ #include #endif #if PY_MAJOR_VERSION < 3 int %(main_method)s(int argc, char** argv) { #elif defined(WIN32) || defined(MS_WINDOWS) int %(wmain_method)s(int argc, wchar_t **argv) { #else static int __Pyx_main(int argc, wchar_t **argv) { #endif /* 754 requires that FP exceptions run in "no stop" mode by default, * and until C vendors implement C99's ways to control FP exceptions, * Python requires non-stop mode. Alas, some platforms enable FP * exceptions by default. Here we disable them. 
*/ #ifdef __FreeBSD__ fp_except_t m; m = fpgetmask(); fpsetmask(m & ~FP_X_OFL); #endif if (argc && argv) Py_SetProgramName(argv[0]); Py_Initialize(); if (argc && argv) PySys_SetArgv(argc, argv); { /* init module '%(module_name)s' as '__main__' */ PyObject* m = NULL; %(module_is_main)s = 1; #if PY_MAJOR_VERSION < 3 init%(module_name)s(); #else m = PyInit_%(module_name)s(); #endif if (PyErr_Occurred()) { PyErr_Print(); /* This exits with the right code if SystemExit. */ #if PY_MAJOR_VERSION < 3 if (Py_FlushLine()) PyErr_Clear(); #endif return 1; } Py_XDECREF(m); } Py_Finalize(); return 0; } #if PY_MAJOR_VERSION >= 3 && !defined(WIN32) && !defined(MS_WINDOWS) #include static wchar_t* __Pyx_char2wchar(char* arg) { wchar_t *res; #ifdef HAVE_BROKEN_MBSTOWCS /* Some platforms have a broken implementation of * mbstowcs which does not count the characters that * would result from conversion. Use an upper bound. */ size_t argsize = strlen(arg); #else size_t argsize = mbstowcs(NULL, arg, 0); #endif size_t count; unsigned char *in; wchar_t *out; #ifdef HAVE_MBRTOWC mbstate_t mbs; #endif if (argsize != (size_t)-1) { res = (wchar_t *)malloc((argsize+1)*sizeof(wchar_t)); if (!res) goto oom; count = mbstowcs(res, arg, argsize+1); if (count != (size_t)-1) { wchar_t *tmp; /* Only use the result if it contains no surrogate characters. */ for (tmp = res; *tmp != 0 && (*tmp < 0xd800 || *tmp > 0xdfff); tmp++) ; if (*tmp == 0) return res; } free(res); } /* Conversion failed. Fall back to escaping with surrogateescape. */ #ifdef HAVE_MBRTOWC /* Try conversion with mbrtwoc (C99), and escape non-decodable bytes. */ /* Overallocate; as multi-byte characters are in the argument, the actual output could use less memory. */ argsize = strlen(arg) + 1; res = (wchar_t *)malloc(argsize*sizeof(wchar_t)); if (!res) goto oom; in = (unsigned char*)arg; out = res; memset(&mbs, 0, sizeof mbs); while (argsize) { size_t converted = mbrtowc(out, (char*)in, argsize, &mbs); if (converted == 0) /* Reached end of string; null char stored. */ break; if (converted == (size_t)-2) { /* Incomplete character. This should never happen, since we provide everything that we have - unless there is a bug in the C library, or I misunderstood how mbrtowc works. */ fprintf(stderr, "unexpected mbrtowc result -2\\n"); free(res); return NULL; } if (converted == (size_t)-1) { /* Conversion error. Escape as UTF-8b, and start over in the initial shift state. */ *out++ = 0xdc00 + *in++; argsize--; memset(&mbs, 0, sizeof mbs); continue; } if (*out >= 0xd800 && *out <= 0xdfff) { /* Surrogate character. Escape the original byte sequence with surrogateescape. */ argsize -= converted; while (converted--) *out++ = 0xdc00 + *in++; continue; } /* successfully converted some bytes */ in += converted; argsize -= converted; out++; } #else /* Cannot use C locale for escaping; manually escape as if charset is ASCII (i.e. escape all bytes > 128. This will still roundtrip correctly in the locale's charset, which must be an ASCII superset. */ res = (wchar_t *)malloc((strlen(arg)+1)*sizeof(wchar_t)); if (!res) goto oom; in = (unsigned char*)arg; out = res; while(*in) if(*in < 128) *out++ = *in++; else *out++ = 0xdc00 + *in++; *out = 0; #endif return res; oom: fprintf(stderr, "out of memory\\n"); return NULL; } int %(main_method)s(int argc, char **argv) { if (!argc) { return __Pyx_main(0, NULL); } else { int i, res; wchar_t **argv_copy = (wchar_t **)malloc(sizeof(wchar_t*)*argc); /* We need a second copy, as Python might modify the first one. 
*/ wchar_t **argv_copy2 = (wchar_t **)malloc(sizeof(wchar_t*)*argc); char *oldloc = strdup(setlocale(LC_ALL, NULL)); if (!argv_copy || !argv_copy2 || !oldloc) { fprintf(stderr, "out of memory\\n"); free(argv_copy); free(argv_copy2); free(oldloc); return 1; } res = 0; setlocale(LC_ALL, ""); for (i = 0; i < argc; i++) { argv_copy2[i] = argv_copy[i] = __Pyx_char2wchar(argv[i]); if (!argv_copy[i]) res = 1; /* failure, but continue to simplify cleanup */ } setlocale(LC_ALL, oldloc); free(oldloc); if (res == 0) res = __Pyx_main(argc, argv_copy); for (i = 0; i < argc; i++) { free(argv_copy2[i]); } free(argv_copy); free(argv_copy2); return res; } } #endif Cython-0.26.1/Cython/Utility/ModuleSetupCode.c0000664000175000017500000010571513150045407021754 0ustar stefanstefan00000000000000/////////////// CModulePreamble /////////////// #include /* For offsetof */ #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif // For use in DL_IMPORT/DL_EXPORT macros. #define __PYX_COMMA , #ifndef HAVE_LONG_LONG // CPython has required PY_LONG_LONG support for years, even if HAVE_LONG_LONG is not defined for us #if PY_VERSION_HEX >= 0x03030000 || (PY_MAJOR_VERSION == 2 && PY_VERSION_HEX >= 0x02070000) #define HAVE_LONG_LONG #endif #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #ifdef PYPY_VERSION #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #elif defined(PYSTON_VERSION) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define 
CYTHON_FAST_PYCALL 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 // looks like calling _PyType_Lookup() isn't safe in Py<=2.6/3.1 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #elif !defined(CYTHON_USE_PYLONG_INTERNALS) #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS #include "longintrepr.h" /* These short defines can easily conflict with other code */ #undef SHIFT #undef BASE #undef MASK #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) #define Py_OptimizeFlag 0 #endif #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyType_Type #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #if PY_VERSION_HEX < 0x030700A0 || !defined(METH_FASTCALL) // new in CPython 3.6, but changed in 3.7 - see https://bugs.python.org/issue29464 #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, Py_ssize_t nargs); // new in CPython 3.7, used to be old signature of _PyCFunctionFast() in 3.6 typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_FAST_PYCCALL #define __Pyx_PyFastCFunction_Check(func) \ 
((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) #else #define __Pyx_PyFastCFunction_Check(func) 0 #endif /* new Py3.3 unicode type (PEP 393) */ #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ 0 : _PyUnicode_Ready((PyObject *)(op))) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) /* (void)(k) => avoid unused variable warning due to macro: */ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_PYSTON // special C-API functions only in Pyston #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif // backport of PyAsyncMethods from Py3.5 to older Py3.x versions // (mis-)using the "tp_reserved" type slot which is re-activated as "tp_as_async" in Py3.5 #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif // restrict #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif // unused attribute #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } # else # define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_NCP_UNUSED # if CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int32 uint32_t; #endif #endif #else #include #endif #ifndef CYTHON_FALLTHROUGH #ifdef __cplusplus #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #elif __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) || (defined(__GNUC__) && defined(__attribute__)) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #endif /////////////// CInitCode /////////////// // inline attribute #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #elif defined(__GNUC__) #define CYTHON_INLINE __inline__ #elif defined(_MSC_VER) #define CYTHON_INLINE __inline #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_INLINE inline #else #define CYTHON_INLINE #endif #endif /////////////// CppInitCode /////////////// #ifndef __cplusplus #error "Cython files generated with the C++ option must be compiled with a C++ compiler." 
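/* Presumably this "CppInitCode" block is emitted in place of the plain-C "CInitCode" block
 * above whenever Cython is asked to generate C++; the #error above then rejects the
 * generated file outright if it is handed to a C compiler instead of a C++ one. */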
#endif // inline attribute #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #else #define CYTHON_INLINE inline #endif #endif // Work around clang bug http://stackoverflow.com/questions/21847816/c-invoke-nested-template-class-destructor template void __Pyx_call_destructor(T& x) { x.~T(); } // Used for temporary variables of "reference" type. template class __Pyx_FakeReference { public: __Pyx_FakeReference() : ptr(NULL) { } // __Pyx_FakeReference(T& ref) : ptr(&ref) { } // Const version needed as Cython doesn't know about const overloads (e.g. for stl containers). __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } T *operator->() { return ptr; } T *operator&() { return ptr; } operator T&() { return *ptr; } // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed). template bool operator ==(U other) { return *ptr == other; } template bool operator !=(U other) { return *ptr != other; } private: T *ptr; }; /////////////// MathInitCode /////////////// #if defined(WIN32) || defined(MS_WINDOWS) #define _USE_MATH_DEFINES #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { // Initialize NaN. The sign is irrelevant, an exponent with all bits 1 and // a nonzero mantissa means NaN. If the first bit in the mantissa is 1, it is // a quiet NaN. float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif /////////////// UtilityFunctionPredeclarations.proto /////////////// typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ /////////////// ForceInitThreads.proto /////////////// #ifndef __PYX_FORCE_INIT_THREADS #define __PYX_FORCE_INIT_THREADS 0 #endif /////////////// InitThreads.init /////////////// #ifdef WITH_THREAD PyEval_InitThreads(); #endif /////////////// CodeObjectCache.proto /////////////// typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); /////////////// CodeObjectCache /////////////// // Note that errors are simply ignored in the code below. // This is just a cache, if a lookup or insertion fails - so what? 
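/*
 * A minimal, standalone sketch of the technique used by the functions below:
 * entries are kept sorted by an integer key, lookups bisect the array, and
 * inserts shift the tail to make room.  All "toy_*" names are illustrative
 * only (not part of the generated code), and the block is disabled with
 * "#if 0" so it never takes part in a real build.
 */
#if 0
#include <stdio.h>

typedef struct { int key; int value; } toy_entry;
static toy_entry toy_cache[64];
static int toy_count = 0;

/* Return the position of `key`, or the position where it should be inserted. */
static int toy_bisect(int key) {
    int start = 0, end = toy_count;
    while (start < end) {
        int mid = start + (end - start) / 2;
        if (toy_cache[mid].key < key) start = mid + 1;
        else end = mid;
    }
    return start;
}

static void toy_insert(int key, int value) {
    int pos = toy_bisect(key), i;
    if (pos < toy_count && toy_cache[pos].key == key) {
        toy_cache[pos].value = value;      /* overwrite an existing entry */
        return;
    }
    if (toy_count == 64) return;           /* cache full: silently ignore, as below */
    for (i = toy_count; i > pos; i--)      /* shift the tail to make room */
        toy_cache[i] = toy_cache[i - 1];
    toy_cache[pos].key = key;
    toy_cache[pos].value = value;
    toy_count++;
}

int main(void) {
    toy_insert(30, 3); toy_insert(10, 1); toy_insert(20, 2);
    printf("%d\n", toy_cache[toy_bisect(20)].value);   /* prints 2 */
    return 0;
}
#endif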
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } /////////////// CodeObjectCache.cleanup /////////////// if (__pyx_code_cache.entries) { __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; int i, count = __pyx_code_cache.count; __pyx_code_cache.count = 0; __pyx_code_cache.max_count = 0; __pyx_code_cache.entries = NULL; for (i=0; iSetupContext((name), __LINE__, __FILE__); \ PyGILState_Release(__pyx_gilstate_save); \ } else { \ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil) \ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) #endif #define __Pyx_RefNannyFinishContext() \ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) #define 
__Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif /* CYTHON_REFNANNY */ #define __Pyx_XDECREF_SET(r, v) do { \ PyObject *tmp = (PyObject *) r; \ r = v; __Pyx_XDECREF(tmp); \ } while (0) #define __Pyx_DECREF_SET(r, v) do { \ PyObject *tmp = (PyObject *) r; \ r = v; __Pyx_DECREF(tmp); \ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /////////////// Refnanny /////////////// #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule((char *)modname); if (!m) goto end; p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* CYTHON_REFNANNY */ /////////////// RegisterModuleCleanup.proto /////////////// //@substitute: naming static void ${cleanup_cname}(PyObject *self); /*proto*/ static int __Pyx_RegisterCleanup(void); /*proto*/ /////////////// RegisterModuleCleanup /////////////// //@substitute: naming //@requires: ImportExport.c::ModuleImport #if PY_MAJOR_VERSION < 3 static PyObject* ${cleanup_cname}_atexit(PyObject *module, CYTHON_UNUSED PyObject *unused) { ${cleanup_cname}(module); Py_INCREF(Py_None); return Py_None; } static int __Pyx_RegisterCleanup(void) { // Don't use Py_AtExit because that has a 32-call limit and is called // after python finalization. // Also, we try to prepend the cleanup function to "atexit._exithandlers" // in Py2 because CPython runs them last-to-first. Being run last allows // user exit code to run before us that may depend on the globals // and cached objects that we are about to clean up. 
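// The code below registers the cleanup in one of two ways: if the private
// "atexit._exithandlers" list is available, a (func, args, kwargs) triple is
// inserted at index 0 so the cleanup runs after all user handlers; otherwise
// it falls back to calling the public atexit.register(cleanup) API.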
static PyMethodDef cleanup_def = { "__cleanup", (PyCFunction)${cleanup_cname}_atexit, METH_NOARGS, 0}; PyObject *cleanup_func = 0; PyObject *atexit = 0; PyObject *reg = 0; PyObject *args = 0; PyObject *res = 0; int ret = -1; cleanup_func = PyCFunction_New(&cleanup_def, 0); if (!cleanup_func) goto bad; atexit = __Pyx_ImportModule("atexit"); if (!atexit) goto bad; reg = PyObject_GetAttrString(atexit, "_exithandlers"); if (reg && PyList_Check(reg)) { PyObject *a, *kw; a = PyTuple_New(0); kw = PyDict_New(); if (!a || !kw) { Py_XDECREF(a); Py_XDECREF(kw); goto bad; } args = PyTuple_Pack(3, cleanup_func, a, kw); Py_DECREF(a); Py_DECREF(kw); if (!args) goto bad; ret = PyList_Insert(reg, 0, args); } else { if (!reg) PyErr_Clear(); Py_XDECREF(reg); reg = PyObject_GetAttrString(atexit, "register"); if (!reg) goto bad; args = PyTuple_Pack(1, cleanup_func); if (!args) goto bad; res = PyObject_CallObject(reg, args); if (!res) goto bad; ret = 0; } bad: Py_XDECREF(cleanup_func); Py_XDECREF(atexit); Py_XDECREF(reg); Py_XDECREF(args); Py_XDECREF(res); return ret; } #else // fake call purely to work around "unused function" warning for __Pyx_ImportModule() static int __Pyx_RegisterCleanup(void) { if ((0)) __Pyx_ImportModule(NULL); return 0; } #endif /////////////// FastGil.init /////////////// #ifdef WITH_THREAD __Pyx_FastGilFuncInit(); #endif /////////////// NoFastGil.proto /////////////// #define __Pyx_PyGILState_Ensure PyGILState_Ensure #define __Pyx_PyGILState_Release PyGILState_Release #define __Pyx_FastGIL_Remember() #define __Pyx_FastGIL_Forget() #define __Pyx_FastGilFuncInit() /////////////// FastGil.proto /////////////// struct __Pyx_FastGilVtab { PyGILState_STATE (*Fast_PyGILState_Ensure)(void); void (*Fast_PyGILState_Release)(PyGILState_STATE oldstate); void (*FastGIL_Remember)(void); void (*FastGIL_Forget)(void); }; static void __Pyx_FastGIL_Noop(void) {} static struct __Pyx_FastGilVtab __Pyx_FastGilFuncs = { PyGILState_Ensure, PyGILState_Release, __Pyx_FastGIL_Noop, __Pyx_FastGIL_Noop }; static void __Pyx_FastGilFuncInit(void); #define __Pyx_PyGILState_Ensure __Pyx_FastGilFuncs.Fast_PyGILState_Ensure #define __Pyx_PyGILState_Release __Pyx_FastGilFuncs.Fast_PyGILState_Release #define __Pyx_FastGIL_Remember __Pyx_FastGilFuncs.FastGIL_Remember #define __Pyx_FastGIL_Forget __Pyx_FastGilFuncs.FastGIL_Forget #ifdef WITH_THREAD #ifndef CYTHON_THREAD_LOCAL #if __STDC_VERSION__ >= 201112 #define CYTHON_THREAD_LOCAL _Thread_local #elif defined(__GNUC__) #define CYTHON_THREAD_LOCAL __thread #elif defined(_MSC_VER) #define CYTHON_THREAD_LOCAL __declspec(thread) #endif #endif #endif /////////////// FastGil /////////////// //@requires: CommonStructures.c::FetchCommonPointer // The implementations of PyGILState_Ensure/Release calls PyThread_get_key_value // several times which is turns out to be quite slow (slower in fact than // acquiring the GIL itself). Simply storing it in a thread local for the // common case is much faster. // To make optimal use of this thread local, we attempt to share it between // modules. #define __Pyx_FastGIL_ABI_module "_cython_" CYTHON_ABI #define __Pyx_FastGIL_PyCapsuleName "FastGilFuncs" #define __Pyx_FastGIL_PyCapsule \ __Pyx_FastGIL_ABI_module "." 
__Pyx_FastGIL_PyCapsuleName #if PY_VERSION_HEX >= 0x03050000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_Get() #elif PY_VERSION_HEX < 0x02070000 #undef CYTHON_THREAD_LOCAL #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #ifdef CYTHON_THREAD_LOCAL #include "pythread.h" #include "pystate.h" static CYTHON_THREAD_LOCAL PyThreadState *__Pyx_FastGil_tcur = NULL; static CYTHON_THREAD_LOCAL int __Pyx_FastGil_tcur_depth = 0; static int __Pyx_FastGil_autoTLSkey = -1; static CYTHON_INLINE void __Pyx_FastGIL_Remember0(void) { ++__Pyx_FastGil_tcur_depth; } static CYTHON_INLINE void __Pyx_FastGIL_Forget0(void) { if (--__Pyx_FastGil_tcur_depth == 0) { __Pyx_FastGil_tcur = NULL; } } static CYTHON_INLINE PyThreadState *__Pyx_FastGil_get_tcur(void) { PyThreadState *tcur = __Pyx_FastGil_tcur; if (tcur == NULL) { tcur = __Pyx_FastGil_tcur = (PyThreadState*)PyThread_get_key_value(__Pyx_FastGil_autoTLSkey); } return tcur; } static PyGILState_STATE __Pyx_FastGil_PyGILState_Ensure(void) { int current; __Pyx_FastGIL_Remember0(); PyThreadState *tcur = __Pyx_FastGil_get_tcur(); if (tcur == NULL) { // Uninitialized, need to initialize now. return PyGILState_Ensure(); } current = tcur == __Pyx_PyThreadState_Current; if (current == 0) { PyEval_RestoreThread(tcur); } ++tcur->gilstate_counter; return current ? PyGILState_LOCKED : PyGILState_UNLOCKED; } static void __Pyx_FastGil_PyGILState_Release(PyGILState_STATE oldstate) { PyThreadState *tcur = __Pyx_FastGil_get_tcur(); __Pyx_FastGIL_Forget0(); if (tcur->gilstate_counter == 1) { // This is the last lock, do all the cleanup as well. PyGILState_Release(oldstate); } else { --tcur->gilstate_counter; if (oldstate == PyGILState_UNLOCKED) { PyEval_SaveThread(); } } } static void __Pyx_FastGilFuncInit0(void) { /* Try to detect autoTLSkey. */ int key; void* this_thread_state = (void*) PyGILState_GetThisThreadState(); for (key = 0; key < 100; key++) { if (PyThread_get_key_value(key) == this_thread_state) { __Pyx_FastGil_autoTLSkey = key; break; } } if (__Pyx_FastGil_autoTLSkey != -1) { PyObject* capsule = NULL; PyObject* abi_module = NULL; __Pyx_PyGILState_Ensure = __Pyx_FastGil_PyGILState_Ensure; __Pyx_PyGILState_Release = __Pyx_FastGil_PyGILState_Release; __Pyx_FastGIL_Remember = __Pyx_FastGIL_Remember0; __Pyx_FastGIL_Forget = __Pyx_FastGIL_Forget0; capsule = PyCapsule_New(&__Pyx_FastGilFuncs, __Pyx_FastGIL_PyCapsule, NULL); abi_module = PyImport_AddModule(__Pyx_FastGIL_ABI_module); if (capsule && abi_module) { PyObject_SetAttrString(abi_module, __Pyx_FastGIL_PyCapsuleName, capsule); } Py_XDECREF(capsule); } } #else static void __Pyx_FastGilFuncInit0(void) { CYTHON_UNUSED void* force_use = (void*)&__Pyx_FetchCommonPointer; } #endif static void __Pyx_FastGilFuncInit(void) { #if PY_VERSION_HEX >= 0x02070000 struct __Pyx_FastGilVtab* shared = (struct __Pyx_FastGilVtab*)PyCapsule_Import(__Pyx_FastGIL_PyCapsule, 1); #else struct __Pyx_FastGilVtab* shared = NULL; #endif if (shared) { __Pyx_FastGilFuncs = *shared; } else { PyErr_Clear(); __Pyx_FastGilFuncInit0(); } } Cython-0.26.1/Cython/Utility/Optimize.c0000664000175000017500000007735713143605603020527 0ustar stefanstefan00000000000000/* * Optional optimisations of built-in functions and methods. * * Required replacements of builtins are in Builtins.c. * * General object operations and protocols are in ObjectHandling.c. 
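 * Note: the "/////////////// name ///////////////" markers delimit utility-code sections that
 * Cython copies into generated modules on demand, and the {{...}} markers used further below
 * are template placeholders expanded by Cython's bundled Tempita engine before compilation.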
*/ /////////////// append.proto /////////////// static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x); /*proto*/ /////////////// append /////////////// //@requires: ListAppend //@requires: ObjectHandling.c::PyObjectCallMethod1 static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x) { if (likely(PyList_CheckExact(L))) { if (unlikely(__Pyx_PyList_Append(L, x) < 0)) return -1; } else { PyObject* retval = __Pyx_PyObject_CallMethod1(L, PYIDENT("append"), x); if (unlikely(!retval)) return -1; Py_DECREF(retval); } return 0; } /////////////// ListAppend.proto /////////////// #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { PyListObject* L = (PyListObject*) list; Py_ssize_t len = Py_SIZE(list); if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); Py_SIZE(list) = len+1; return 0; } return PyList_Append(list, x); } #else #define __Pyx_PyList_Append(L,x) PyList_Append(L,x) #endif /////////////// ListCompAppend.proto /////////////// #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { PyListObject* L = (PyListObject*) list; Py_ssize_t len = Py_SIZE(list); if (likely(L->allocated > len)) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); Py_SIZE(list) = len+1; return 0; } return PyList_Append(list, x); } #else #define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) #endif //////////////////// ListExtend.proto //////////////////// static CYTHON_INLINE int __Pyx_PyList_Extend(PyObject* L, PyObject* v) { #if CYTHON_COMPILING_IN_CPYTHON PyObject* none = _PyList_Extend((PyListObject*)L, v); if (unlikely(!none)) return -1; Py_DECREF(none); return 0; #else return PyList_SetSlice(L, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, v); #endif } /////////////// pop.proto /////////////// static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L); /*proto*/ #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L); /*proto*/ #define __Pyx_PyObject_Pop(L) (likely(PyList_CheckExact(L)) ? \ __Pyx_PyList_Pop(L) : __Pyx__PyObject_Pop(L)) #else #define __Pyx_PyList_Pop(L) __Pyx__PyObject_Pop(L) #define __Pyx_PyObject_Pop(L) __Pyx__PyObject_Pop(L) #endif /////////////// pop /////////////// //@requires: ObjectHandling.c::PyObjectCallMethod0 static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L) { if (Py_TYPE(L) == &PySet_Type) { return PySet_Pop(L); } return __Pyx_PyObject_CallMethod0(L, PYIDENT("pop")); } #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L) { /* Check that both the size is positive and no reallocation shrinking needs to be done. 
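 * (The guard below keeps the length above allocated/2, the point at which CPython's
 * list_resize() would start shrinking the buffer, so the pop can simply decrement
 * ob_size and return the last item without touching the allocation.)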
*/ if (likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) { Py_SIZE(L) -= 1; return PyList_GET_ITEM(L, PyList_GET_SIZE(L)); } return CALL_UNBOUND_METHOD(PyList_Type, "pop", L); } #endif /////////////// pop_index.proto /////////////// static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix); /*proto*/ static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix); /*proto*/ #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix); /*proto*/ #define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ (likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ? \ __Pyx__PyList_PopIndex(L, py_ix, ix) : ( \ (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ __Pyx__PyObject_PopIndex(L, py_ix))) #define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ __Pyx_fits_Py_ssize_t(ix, type, is_signed) ? \ __Pyx__PyList_PopIndex(L, py_ix, ix) : ( \ (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ __Pyx__PyObject_PopIndex(L, py_ix))) #else #define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) \ __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) #define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ __Pyx__PyObject_PopIndex(L, py_ix)) #endif /////////////// pop_index /////////////// //@requires: ObjectHandling.c::PyObjectCallMethod1 static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix) { PyObject *r; if (unlikely(!py_ix)) return NULL; r = __Pyx__PyObject_PopIndex(L, py_ix); Py_DECREF(py_ix); return r; } static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix) { return __Pyx_PyObject_CallMethod1(L, PYIDENT("pop"), py_ix); } #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix) { Py_ssize_t size = PyList_GET_SIZE(L); if (likely(size > (((PyListObject*)L)->allocated >> 1))) { Py_ssize_t cix = ix; if (cix < 0) { cix += size; } if (likely(0 <= cix && cix < size)) { PyObject* v = PyList_GET_ITEM(L, cix); Py_SIZE(L) -= 1; size -= 1; memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*)); return v; } } if (py_ix == Py_None) { return __Pyx__PyObject_PopNewIndex(L, PyInt_FromSsize_t(ix)); } else { return __Pyx__PyObject_PopIndex(L, py_ix); } } #endif /////////////// dict_getitem_default.proto /////////////// static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); /*proto*/ /////////////// dict_getitem_default /////////////// static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { PyObject* value; #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY value = PyDict_GetItemWithError(d, key); if (unlikely(!value)) { if (unlikely(PyErr_Occurred())) return NULL; value = default_value; } Py_INCREF(value); #else if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { /* these presumably have safe hash functions */ value = PyDict_GetItem(d, key); if (unlikely(!value)) { value = default_value; } Py_INCREF(value); } else { if (default_value == Py_None) default_value = NULL; value = PyObject_CallMethodObjArgs( d, PYIDENT("get"), key, default_value, NULL); 
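/* Note: for keys of other types, the generic d.get(key, default) call above is used
 * instead of PyDict_GetItem(), which would silently swallow errors raised by a
 * user-defined __hash__ or __eq__. */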
} #endif return value; } /////////////// dict_setdefault.proto /////////////// static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value, int is_safe_type); /*proto*/ /////////////// dict_setdefault /////////////// //@requires: ObjectHandling.c::PyObjectCallMethod2 static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value, CYTHON_UNUSED int is_safe_type) { PyObject* value; #if PY_VERSION_HEX >= 0x030400A0 // we keep the method call at the end to avoid "unused" C compiler warnings if ((1)) { value = PyDict_SetDefault(d, key, default_value); if (unlikely(!value)) return NULL; Py_INCREF(value); #else if (is_safe_type == 1 || (is_safe_type == -1 && /* the following builtins presumably have repeatably safe and fast hash functions */ #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY (PyUnicode_CheckExact(key) || PyString_CheckExact(key) || PyLong_CheckExact(key)))) { value = PyDict_GetItemWithError(d, key); if (unlikely(!value)) { if (unlikely(PyErr_Occurred())) return NULL; if (unlikely(PyDict_SetItem(d, key, default_value) == -1)) return NULL; value = default_value; } Py_INCREF(value); #else (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key) || PyLong_CheckExact(key)))) { value = PyDict_GetItem(d, key); if (unlikely(!value)) { if (unlikely(PyDict_SetItem(d, key, default_value) == -1)) return NULL; value = default_value; } Py_INCREF(value); #endif #endif } else { value = __Pyx_PyObject_CallMethod2(d, PYIDENT("setdefault"), key, default_value); } return value; } /////////////// py_dict_clear.proto /////////////// #define __Pyx_PyDict_Clear(d) (PyDict_Clear(d), 0) /////////////// dict_iter.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, Py_ssize_t* p_orig_length, int* p_is_dict); static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); /////////////// dict_iter /////////////// //@requires: ObjectHandling.c::UnpackTuple2 //@requires: ObjectHandling.c::IterFinish //@requires: ObjectHandling.c::PyObjectCallMethod0 static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, Py_ssize_t* p_orig_length, int* p_source_is_dict) { is_dict = is_dict || likely(PyDict_CheckExact(iterable)); *p_source_is_dict = is_dict; if (is_dict) { #if !CYTHON_COMPILING_IN_PYPY *p_orig_length = PyDict_Size(iterable); Py_INCREF(iterable); return iterable; #elif PY_MAJOR_VERSION >= 3 // On PyPy3, we need to translate manually a few method names. // This logic is not needed on CPython thanks to the fast case above. 
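// The mapping below strips the "iter" prefix (name + 4), turning "iteritems",
// "iterkeys" and "itervalues" into "items", "keys" and "values", and caches the
// resulting unicode object in a function-local static so it is only created once.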
static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; const char *name = PyUnicode_AsUTF8(method_name); PyObject **pp = NULL; if (strcmp(name, "iteritems") == 0) pp = &py_items; else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; else if (strcmp(name, "itervalues") == 0) pp = &py_values; if (pp) { if (!*pp) { *pp = PyUnicode_FromString(name + 4); if (!*pp) return NULL; } method_name = *pp; } #endif } *p_orig_length = 0; if (method_name) { PyObject* iter; iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); if (!iterable) return NULL; #if !CYTHON_COMPILING_IN_PYPY if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) return iterable; #endif iter = PyObject_GetIter(iterable); Py_DECREF(iterable); return iter; } return PyObject_GetIter(iterable); } static CYTHON_INLINE int __Pyx_dict_iter_next( PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { PyObject* next_item; #if !CYTHON_COMPILING_IN_PYPY if (source_is_dict) { PyObject *key, *value; if (unlikely(orig_length != PyDict_Size(iter_obj))) { PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); return -1; } if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { return 0; } if (pitem) { PyObject* tuple = PyTuple_New(2); if (unlikely(!tuple)) { return -1; } Py_INCREF(key); Py_INCREF(value); PyTuple_SET_ITEM(tuple, 0, key); PyTuple_SET_ITEM(tuple, 1, value); *pitem = tuple; } else { if (pkey) { Py_INCREF(key); *pkey = key; } if (pvalue) { Py_INCREF(value); *pvalue = value; } } return 1; } else if (PyTuple_CheckExact(iter_obj)) { Py_ssize_t pos = *ppos; if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; *ppos = pos + 1; next_item = PyTuple_GET_ITEM(iter_obj, pos); Py_INCREF(next_item); } else if (PyList_CheckExact(iter_obj)) { Py_ssize_t pos = *ppos; if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; *ppos = pos + 1; next_item = PyList_GET_ITEM(iter_obj, pos); Py_INCREF(next_item); } else #endif { next_item = PyIter_Next(iter_obj); if (unlikely(!next_item)) { return __Pyx_IterFinish(); } } if (pitem) { *pitem = next_item; } else if (pkey && pvalue) { if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) return -1; } else if (pkey) { *pkey = next_item; } else { *pvalue = next_item; } return 1; } /////////////// unicode_iter.proto /////////////// static CYTHON_INLINE int __Pyx_init_unicode_iteration( PyObject* ustring, Py_ssize_t *length, void** data, int *kind); /* proto */ /////////////// unicode_iter /////////////// static CYTHON_INLINE int __Pyx_init_unicode_iteration( PyObject* ustring, Py_ssize_t *length, void** data, int *kind) { #if CYTHON_PEP393_ENABLED if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return -1; *kind = PyUnicode_KIND(ustring); *length = PyUnicode_GET_LENGTH(ustring); *data = PyUnicode_DATA(ustring); #else *kind = 0; *length = PyUnicode_GET_SIZE(ustring); *data = (void*)PyUnicode_AS_UNICODE(ustring); #endif return 0; } /////////////// pyobject_as_double.proto /////////////// static double __Pyx__PyObject_AsDouble(PyObject* obj); /* proto */ #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyObject_AsDouble(obj) \ (likely(PyFloat_CheckExact(obj)) ? PyFloat_AS_DOUBLE(obj) : \ likely(PyInt_CheckExact(obj)) ? \ PyFloat_AsDouble(obj) : __Pyx__PyObject_AsDouble(obj)) #else #define __Pyx_PyObject_AsDouble(obj) \ ((likely(PyFloat_CheckExact(obj))) ? 
\ PyFloat_AS_DOUBLE(obj) : __Pyx__PyObject_AsDouble(obj)) #endif /////////////// pyobject_as_double /////////////// static double __Pyx__PyObject_AsDouble(PyObject* obj) { PyObject* float_value; #if !CYTHON_USE_TYPE_SLOTS float_value = PyNumber_Float(obj); if (0) goto bad; #else PyNumberMethods *nb = Py_TYPE(obj)->tp_as_number; if (likely(nb) && likely(nb->nb_float)) { float_value = nb->nb_float(obj); if (likely(float_value) && unlikely(!PyFloat_Check(float_value))) { PyErr_Format(PyExc_TypeError, "__float__ returned non-float (type %.200s)", Py_TYPE(float_value)->tp_name); Py_DECREF(float_value); goto bad; } } else if (PyUnicode_CheckExact(obj) || PyBytes_CheckExact(obj)) { #if PY_MAJOR_VERSION >= 3 float_value = PyFloat_FromString(obj); #else float_value = PyFloat_FromString(obj, 0); #endif } else { PyObject* args = PyTuple_New(1); if (unlikely(!args)) goto bad; PyTuple_SET_ITEM(args, 0, obj); float_value = PyObject_Call((PyObject*)&PyFloat_Type, args, 0); PyTuple_SET_ITEM(args, 0, 0); Py_DECREF(args); } #endif if (likely(float_value)) { double value = PyFloat_AS_DOUBLE(float_value); Py_DECREF(float_value); return value; } bad: return (double)-1; } /////////////// PyNumberPow2.proto /////////////// #define __Pyx_PyNumber_InPlacePowerOf2(a, b, c) __Pyx__PyNumber_PowerOf2(a, b, c, 1) #define __Pyx_PyNumber_PowerOf2(a, b, c) __Pyx__PyNumber_PowerOf2(a, b, c, 0) static PyObject* __Pyx__PyNumber_PowerOf2(PyObject *two, PyObject *exp, PyObject *none, int inplace); /*proto*/ /////////////// PyNumberPow2 /////////////// static PyObject* __Pyx__PyNumber_PowerOf2(PyObject *two, PyObject *exp, PyObject *none, int inplace) { // in CPython, 1<ob_digit[0]; } else if (size == 0) { return PyInt_FromLong(1L); } else if (unlikely(size < 0)) { goto fallback; } else { shiftby = PyLong_AsSsize_t(exp); } #else shiftby = PyLong_AsSsize_t(exp); #endif } else { goto fallback; } if (likely(shiftby >= 0)) { if ((size_t)shiftby <= sizeof(long) * 8 - 2) { long value = 1L << shiftby; return PyInt_FromLong(value); #ifdef HAVE_LONG_LONG } else if ((size_t)shiftby <= sizeof(unsigned PY_LONG_LONG) * 8 - 1) { unsigned PY_LONG_LONG value = ((unsigned PY_LONG_LONG)1) << shiftby; return PyLong_FromUnsignedLongLong(value); #endif } else { PyObject *one = PyInt_FromLong(1L); if (unlikely(!one)) return NULL; return PyNumber_Lshift(one, exp); } } else if (shiftby == -1 && PyErr_Occurred()) { PyErr_Clear(); } fallback: #endif return (inplace ? PyNumber_InPlacePower : PyNumber_Power)(two, exp, none); } /////////////// PyIntBinop.proto /////////////// #if !CYTHON_COMPILING_IN_PYPY static PyObject* __Pyx_PyInt_{{op}}{{order}}(PyObject *op1, PyObject *op2, long intval, int inplace); /*proto*/ #else #define __Pyx_PyInt_{{op}}{{order}}(op1, op2, intval, inplace) \ {{if op in ('Eq', 'Ne')}}PyObject_RichCompare(op1, op2, Py_{{op.upper()}}) {{else}}(inplace ? 
PyNumber_InPlace{{op}}(op1, op2) : PyNumber_{{op}}(op1, op2)) {{endif}} #endif /////////////// PyIntBinop /////////////// #if !CYTHON_COMPILING_IN_PYPY {{py: from Cython.Utility import pylong_join }} {{py: pyval, ival = ('op2', 'b') if order == 'CObj' else ('op1', 'a') }} {{py: slot_name = {'TrueDivide': 'true_divide', 'FloorDivide': 'floor_divide'}.get(op, op.lower()) }} {{py: c_op = { 'Add': '+', 'Subtract': '-', 'Remainder': '%', 'TrueDivide': '/', 'FloorDivide': '/', 'Or': '|', 'Xor': '^', 'And': '&', 'Rshift': '>>', 'Lshift': '<<', 'Eq': '==', 'Ne': '!=', }[op] }} static PyObject* __Pyx_PyInt_{{op}}{{order}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED int inplace) { {{if op in ('Eq', 'Ne')}} if (op1 == op2) { Py_RETURN_{{'TRUE' if op == 'Eq' else 'FALSE'}}; } {{endif}} #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact({{pyval}}))) { const long {{'a' if order == 'CObj' else 'b'}} = intval; {{if c_op in '+-%' or op == 'FloorDivide'}} long x; {{endif}} long {{ival}} = PyInt_AS_LONG({{pyval}}); {{if op in ('Eq', 'Ne')}} if (a {{c_op}} b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } {{elif c_op in '+-'}} // adapted from intobject.c in Py2.7: // casts in the line below avoid undefined behaviour on overflow x = (long)((unsigned long)a {{c_op}} b); if (likely((x^a) >= 0 || (x^{{ '~' if op == 'Subtract' else '' }}b) >= 0)) return PyInt_FromLong(x); return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); {{elif c_op == '%'}} // see ExprNodes.py :: mod_int_utility_code x = a % b; x += ((x != 0) & ((x ^ b) < 0)) * b; return PyInt_FromLong(x); {{elif op == 'TrueDivide'}} if (8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) { return PyFloat_FromDouble((double)a / (double)b); } // let Python do the rounding return PyInt_Type.tp_as_number->nb_{{slot_name}}(op1, op2); {{elif op == 'FloorDivide'}} // INT_MIN / -1 is the only case that overflows if (unlikely(b == -1 && ((unsigned long)a) == 0-(unsigned long)a)) return PyInt_Type.tp_as_number->nb_{{slot_name}}(op1, op2); else { long q, r; // see ExprNodes.py :: div_int_utility_code q = a / b; r = a - q*b; q -= ((r != 0) & ((r ^ b) < 0)); x = q; } return PyInt_FromLong(x); {{elif op == 'Lshift'}} if (likely(b < (long) (sizeof(long)*8) && a == (a << b) >> b) || !a) { return PyInt_FromLong(a {{c_op}} b); } {{else}} // other operations are safe, no overflow return PyInt_FromLong(a {{c_op}} b); {{endif}} } #endif #if CYTHON_USE_PYLONG_INTERNALS if (likely(PyLong_CheckExact({{pyval}}))) { const long {{'a' if order == 'CObj' else 'b'}} = intval; long {{ival}}{{if op not in ('Eq', 'Ne')}}, x{{endif}}; {{if op not in ('Eq', 'Ne', 'TrueDivide')}} #ifdef HAVE_LONG_LONG const PY_LONG_LONG ll{{'a' if order == 'CObj' else 'b'}} = intval; PY_LONG_LONG ll{{ival}}, llx; #endif {{endif}} const digit* digits = ((PyLongObject*){{pyval}})->ob_digit; const Py_ssize_t size = Py_SIZE({{pyval}}); // handle most common case first to avoid indirect branch and optimise branch prediction if (likely(__Pyx_sst_abs(size) <= 1)) { {{ival}} = likely(size) ? 
digits[0] : 0; if (size == -1) {{ival}} = -{{ival}}; } else { switch (size) { {{for _size in range(2, 5)}} {{for _case in (-_size, _size)}} case {{_case}}: if (8 * sizeof(long) - 1 > {{_size}} * PyLong_SHIFT{{if op == 'TrueDivide'}} && {{_size-1}} * PyLong_SHIFT < 53{{endif}}) { {{ival}} = {{'-' if _case < 0 else ''}}(long) {{pylong_join(_size, 'digits')}}; break; {{if op not in ('Eq', 'Ne', 'TrueDivide')}} #ifdef HAVE_LONG_LONG } else if (8 * sizeof(PY_LONG_LONG) - 1 > {{_size}} * PyLong_SHIFT) { ll{{ival}} = {{'-' if _case < 0 else ''}}(PY_LONG_LONG) {{pylong_join(_size, 'digits', 'unsigned PY_LONG_LONG')}}; goto long_long; #endif {{endif}} } // if size doesn't fit into a long or PY_LONG_LONG anymore, fall through to default {{endfor}} {{endfor}} {{if op in ('Eq', 'Ne')}} #if PyLong_SHIFT < 30 && PyLong_SHIFT != 15 // unusual setup - your fault default: return PyLong_Type.tp_richcompare({{'op1, op2' if order == 'ObjC' else 'op2, op1'}}, Py_{{op.upper()}}); #else // too large for the long values we allow => definitely not equal default: Py_RETURN_{{'FALSE' if op == 'Eq' else 'TRUE'}}; #endif {{else}} default: return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); {{endif}} } } {{if op in ('Eq', 'Ne')}} if (a {{c_op}} b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } {{else}} {{if c_op == '%'}} // see ExprNodes.py :: mod_int_utility_code x = a % b; x += ((x != 0) & ((x ^ b) < 0)) * b; {{elif op == 'TrueDivide'}} if ((8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) || __Pyx_sst_abs(size) <= 52 / PyLong_SHIFT) { return PyFloat_FromDouble((double)a / (double)b); } return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); {{elif op == 'FloorDivide'}} { long q, r; // see ExprNodes.py :: div_int_utility_code q = a / b; r = a - q*b; q -= ((r != 0) & ((r ^ b) < 0)); x = q; } {{else}} x = a {{c_op}} b; {{if op == 'Lshift'}} #ifdef HAVE_LONG_LONG if (unlikely(!(b < (long) (sizeof(long)*8) && a == x >> b)) && a) { ll{{ival}} = {{ival}}; goto long_long; } #else if (likely(b < (long) (sizeof(long)*8) && a == x >> b) || !a) /* execute return statement below */ #endif {{endif}} {{endif}} return PyLong_FromLong(x); {{if op != 'TrueDivide'}} #ifdef HAVE_LONG_LONG long_long: {{if c_op == '%'}} // see ExprNodes.py :: mod_int_utility_code llx = lla % llb; llx += ((llx != 0) & ((llx ^ llb) < 0)) * llb; {{elif op == 'FloorDivide'}} { PY_LONG_LONG q, r; // see ExprNodes.py :: div_int_utility_code q = lla / llb; r = lla - q*llb; q -= ((r != 0) & ((r ^ llb) < 0)); llx = q; } {{else}} llx = lla {{c_op}} llb; {{if op == 'Lshift'}} if (likely(lla == llx >> llb)) /* then execute 'return' below */ {{endif}} {{endif}} return PyLong_FromLongLong(llx); #endif {{endif}}{{# if op != 'TrueDivide' #}} {{endif}}{{# if op in ('Eq', 'Ne') #}} } #endif {{if c_op in '+-' or op in ('TrueDivide', 'Eq', 'Ne')}} if (PyFloat_CheckExact({{pyval}})) { const long {{'a' if order == 'CObj' else 'b'}} = intval; double {{ival}} = PyFloat_AS_DOUBLE({{pyval}}); {{if op in ('Eq', 'Ne')}} if ((double)a {{c_op}} (double)b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } {{else}} double result; // copied from floatobject.c in Py3.5: PyFPE_START_PROTECT("{{op.lower() if not op.endswith('Divide') else 'divide'}}", return NULL) result = ((double)a) {{c_op}} (double)b; PyFPE_END_PROTECT(result) return PyFloat_FromDouble(result); {{endif}} } {{endif}} {{if op in ('Eq', 'Ne')}} return PyObject_RichCompare(op1, op2, Py_{{op.upper()}}); {{else}} return (inplace ? 
PyNumber_InPlace{{op}} : PyNumber_{{op}})(op1, op2); {{endif}} } #endif /////////////// PyFloatBinop.proto /////////////// #if !CYTHON_COMPILING_IN_PYPY static PyObject* __Pyx_PyFloat_{{op}}{{order}}(PyObject *op1, PyObject *op2, double floatval, int inplace); /*proto*/ #else #define __Pyx_PyFloat_{{op}}{{order}}(op1, op2, floatval, inplace) \ {{if op in ('Eq', 'Ne')}}PyObject_RichCompare(op1, op2, Py_{{op.upper()}}) {{elif op == 'Divide'}}((inplace ? __Pyx_PyNumber_InPlaceDivide(op1, op2) : __Pyx_PyNumber_Divide(op1, op2))) {{else}}(inplace ? PyNumber_InPlace{{op}}(op1, op2) : PyNumber_{{op}}(op1, op2)) {{endif}} #endif /////////////// PyFloatBinop /////////////// #if !CYTHON_COMPILING_IN_PYPY {{py: from Cython.Utility import pylong_join }} {{py: pyval, fval = ('op2', 'b') if order == 'CObj' else ('op1', 'a') }} {{py: c_op = { 'Add': '+', 'Subtract': '-', 'TrueDivide': '/', 'Divide': '/', 'Remainder': '%', 'Eq': '==', 'Ne': '!=', }[op] }} static PyObject* __Pyx_PyFloat_{{op}}{{order}}(PyObject *op1, PyObject *op2, double floatval, CYTHON_UNUSED int inplace) { const double {{'a' if order == 'CObj' else 'b'}} = floatval; double {{fval}}{{if op not in ('Eq', 'Ne')}}, result{{endif}}; {{if op in ('Eq', 'Ne')}} if (op1 == op2) { Py_RETURN_{{'TRUE' if op == 'Eq' else 'FALSE'}}; } {{endif}} if (likely(PyFloat_CheckExact({{pyval}}))) { {{fval}} = PyFloat_AS_DOUBLE({{pyval}}); } else #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact({{pyval}}))) { {{fval}} = (double) PyInt_AS_LONG({{pyval}}); } else #endif if (likely(PyLong_CheckExact({{pyval}}))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*){{pyval}})->ob_digit; const Py_ssize_t size = Py_SIZE({{pyval}}); switch (size) { case 0: {{fval}} = 0.0; break; case -1: {{fval}} = -(double) digits[0]; break; case 1: {{fval}} = (double) digits[0]; break; {{for _size in (2, 3, 4)}} case -{{_size}}: case {{_size}}: if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || ({{_size-1}} * PyLong_SHIFT < 53))) { {{fval}} = (double) {{pylong_join(_size, 'digits')}}; // let CPython do its own float rounding from 2**53 on (max. consecutive integer in double float) if ((8 * sizeof(unsigned long) < 53) || ({{_size}} * PyLong_SHIFT < 53) || ({{fval}} < (double) ((PY_LONG_LONG)1 << 53))) { if (size == {{-_size}}) {{fval}} = -{{fval}}; break; } } // Fall through if size doesn't fit safely into a double anymore. // It may not be obvious that this is a safe fall-through given the "fval < 2**53" // check above. However, the number of digits that CPython uses for a given PyLong // value is minimal, and together with the "(size-1) * SHIFT < 53" check above, // this should make it safe. {{endfor}} default: #else { #endif {{if op in ('Eq', 'Ne')}} return PyFloat_Type.tp_richcompare({{'op1, op2' if order == 'CObj' else 'op2, op1'}}, Py_{{op.upper()}}); {{else}} {{fval}} = PyLong_AsDouble({{pyval}}); if (unlikely({{fval}} == -1.0 && PyErr_Occurred())) return NULL; {{endif}} } } else { {{if op in ('Eq', 'Ne')}} return PyObject_RichCompare(op1, op2, Py_{{op.upper()}}); {{elif op == 'Divide'}} return (inplace ? __Pyx_PyNumber_InPlaceDivide(op1, op2) : __Pyx_PyNumber_Divide(op1, op2)); {{else}} return (inplace ? 
PyNumber_InPlace{{op}} : PyNumber_{{op}})(op1, op2); {{endif}} } {{if op in ('Eq', 'Ne')}} if (a {{c_op}} b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } {{else}} // copied from floatobject.c in Py3.5: PyFPE_START_PROTECT("{{op.lower() if not op.endswith('Divide') else 'divide'}}", return NULL) {{if c_op == '%'}} result = fmod(a, b); if (result) result += ((result < 0) ^ (b < 0)) * b; else result = copysign(0.0, b); {{else}} result = a {{c_op}} b; {{endif}} PyFPE_END_PROTECT(result) return PyFloat_FromDouble(result); {{endif}} } #endif Cython-0.26.1/Cython/Utility/Builtins.c0000664000175000017500000003706313143605603020506 0ustar stefanstefan00000000000000/* * Special implementations of built-in functions and methods. * * Optional optimisations for builtins are in Optimize.c. * * General object operations and protocols are in ObjectHandling.c. */ //////////////////// Globals.proto //////////////////// static PyObject* __Pyx_Globals(void); /*proto*/ //////////////////// Globals //////////////////// //@substitute: naming //@requires: ObjectHandling.c::GetAttr // This is a stub implementation until we have something more complete. // Currently, we only handle the most common case of a read-only dict // of Python names. Supporting cdef names in the module and write // access requires a rewrite as a dedicated class. static PyObject* __Pyx_Globals(void) { Py_ssize_t i; PyObject *names; PyObject *globals = $moddict_cname; Py_INCREF(globals); names = PyObject_Dir($module_cname); if (!names) goto bad; for (i = PyList_GET_SIZE(names)-1; i >= 0; i--) { #if CYTHON_COMPILING_IN_PYPY PyObject* name = PySequence_ITEM(names, i); if (!name) goto bad; #else PyObject* name = PyList_GET_ITEM(names, i); #endif if (!PyDict_Contains(globals, name)) { PyObject* value = __Pyx_GetAttr($module_cname, name); if (!value) { #if CYTHON_COMPILING_IN_PYPY Py_DECREF(name); #endif goto bad; } if (PyDict_SetItem(globals, name, value) < 0) { #if CYTHON_COMPILING_IN_PYPY Py_DECREF(name); #endif Py_DECREF(value); goto bad; } } #if CYTHON_COMPILING_IN_PYPY Py_DECREF(name); #endif } Py_DECREF(names); return globals; bad: Py_XDECREF(names); Py_XDECREF(globals); return NULL; } //////////////////// PyExecGlobals.proto //////////////////// static PyObject* __Pyx_PyExecGlobals(PyObject*); //////////////////// PyExecGlobals //////////////////// //@requires: Globals //@requires: PyExec static PyObject* __Pyx_PyExecGlobals(PyObject* code) { PyObject* result; PyObject* globals = __Pyx_Globals(); if (unlikely(!globals)) return NULL; result = __Pyx_PyExec2(code, globals); Py_DECREF(globals); return result; } //////////////////// PyExec.proto //////////////////// static PyObject* __Pyx_PyExec3(PyObject*, PyObject*, PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyExec2(PyObject*, PyObject*); //////////////////// PyExec //////////////////// //@substitute: naming static CYTHON_INLINE PyObject* __Pyx_PyExec2(PyObject* o, PyObject* globals) { return __Pyx_PyExec3(o, globals, NULL); } static PyObject* __Pyx_PyExec3(PyObject* o, PyObject* globals, PyObject* locals) { PyObject* result; PyObject* s = 0; char *code = 0; if (!globals || globals == Py_None) { globals = $moddict_cname; } else if (!PyDict_Check(globals)) { PyErr_Format(PyExc_TypeError, "exec() arg 2 must be a dict, not %.200s", Py_TYPE(globals)->tp_name); goto bad; } if (!locals || locals == Py_None) { locals = globals; } if (PyDict_GetItem(globals, PYIDENT("__builtins__")) == NULL) { if (PyDict_SetItem(globals, PYIDENT("__builtins__"), PyEval_GetBuiltins()) < 0) goto bad; } if 
(PyCode_Check(o)) { if (__Pyx_PyCode_HasFreeVars((PyCodeObject *)o)) { PyErr_SetString(PyExc_TypeError, "code object passed to exec() may not contain free variables"); goto bad; } #if CYTHON_COMPILING_IN_PYPY || PY_VERSION_HEX < 0x030200B1 result = PyEval_EvalCode((PyCodeObject *)o, globals, locals); #else result = PyEval_EvalCode(o, globals, locals); #endif } else { PyCompilerFlags cf; cf.cf_flags = 0; if (PyUnicode_Check(o)) { cf.cf_flags = PyCF_SOURCE_IS_UTF8; s = PyUnicode_AsUTF8String(o); if (!s) goto bad; o = s; #if PY_MAJOR_VERSION >= 3 } else if (!PyBytes_Check(o)) { #else } else if (!PyString_Check(o)) { #endif PyErr_Format(PyExc_TypeError, "exec: arg 1 must be string, bytes or code object, got %.200s", Py_TYPE(o)->tp_name); goto bad; } #if PY_MAJOR_VERSION >= 3 code = PyBytes_AS_STRING(o); #else code = PyString_AS_STRING(o); #endif if (PyEval_MergeCompilerFlags(&cf)) { result = PyRun_StringFlags(code, Py_file_input, globals, locals, &cf); } else { result = PyRun_String(code, Py_file_input, globals, locals); } Py_XDECREF(s); } return result; bad: Py_XDECREF(s); return 0; } //////////////////// GetAttr3.proto //////////////////// static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/ //////////////////// GetAttr3 //////////////////// //@requires: ObjectHandling.c::GetAttr static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { PyObject *r = __Pyx_GetAttr(o, n); if (unlikely(!r)) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); r = d; Py_INCREF(d); } return r; bad: return NULL; } //////////////////// HasAttr.proto //////////////////// static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/ //////////////////// HasAttr //////////////////// //@requires: ObjectHandling.c::GetAttr static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { PyObject *r; if (unlikely(!__Pyx_PyBaseString_Check(n))) { PyErr_SetString(PyExc_TypeError, "hasattr(): attribute name must be string"); return -1; } r = __Pyx_GetAttr(o, n); if (unlikely(!r)) { PyErr_Clear(); return 0; } else { Py_DECREF(r); return 1; } } //////////////////// Intern.proto //////////////////// static PyObject* __Pyx_Intern(PyObject* s); /* proto */ //////////////////// Intern //////////////////// static PyObject* __Pyx_Intern(PyObject* s) { if (!(likely(PyString_CheckExact(s)))) { PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(s)->tp_name); return 0; } Py_INCREF(s); #if PY_MAJOR_VERSION >= 3 PyUnicode_InternInPlace(&s); #else PyString_InternInPlace(&s); #endif return s; } //////////////////// abs_int.proto //////////////////// static CYTHON_INLINE unsigned int __Pyx_abs_int(int x) { if (unlikely(x == -INT_MAX-1)) return ((unsigned int)INT_MAX) + 1U; return (unsigned int) abs(x); } //////////////////// abs_long.proto //////////////////// static CYTHON_INLINE unsigned long __Pyx_abs_long(long x) { if (unlikely(x == -LONG_MAX-1)) return ((unsigned long)LONG_MAX) + 1U; return (unsigned long) labs(x); } //////////////////// abs_longlong.proto //////////////////// static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_abs_longlong(PY_LONG_LONG x) { if (unlikely(x == -PY_LLONG_MAX-1)) return ((unsigned PY_LONG_LONG)PY_LLONG_MAX) + 1U; #if defined (__cplusplus) && __cplusplus >= 201103L return (unsigned PY_LONG_LONG) std::abs(x); #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L return (unsigned PY_LONG_LONG) llabs(x); #elif defined (_MSC_VER) && defined (_M_X64) // abs() is defined for 
long, but 64-bits type on MSVC is long long. // Use MS-specific _abs64 instead. return (unsigned PY_LONG_LONG) _abs64(x); #elif defined (__GNUC__) // gcc or clang on 64 bit windows. return (unsigned PY_LONG_LONG) __builtin_llabs(x); #else if (sizeof(PY_LONG_LONG) <= sizeof(Py_ssize_t)) return __Pyx_sst_abs(x); return (x<0) ? (unsigned PY_LONG_LONG)-x : (unsigned PY_LONG_LONG)x; #endif } //////////////////// pow2.proto //////////////////// #define __Pyx_PyNumber_Power2(a, b) PyNumber_Power(a, b, Py_None) //////////////////// object_ord.proto //////////////////// //@requires: TypeConversion.c::UnicodeAsUCS4 #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyObject_Ord(c) \ (likely(PyUnicode_Check(c)) ? (long)__Pyx_PyUnicode_AsPy_UCS4(c) : __Pyx__PyObject_Ord(c)) #else #define __Pyx_PyObject_Ord(c) __Pyx__PyObject_Ord(c) #endif static long __Pyx__PyObject_Ord(PyObject* c); /*proto*/ //////////////////// object_ord //////////////////// static long __Pyx__PyObject_Ord(PyObject* c) { Py_ssize_t size; if (PyBytes_Check(c)) { size = PyBytes_GET_SIZE(c); if (likely(size == 1)) { return (unsigned char) PyBytes_AS_STRING(c)[0]; } #if PY_MAJOR_VERSION < 3 } else if (PyUnicode_Check(c)) { return (long)__Pyx_PyUnicode_AsPy_UCS4(c); #endif #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) } else if (PyByteArray_Check(c)) { size = PyByteArray_GET_SIZE(c); if (likely(size == 1)) { return (unsigned char) PyByteArray_AS_STRING(c)[0]; } #endif } else { // FIXME: support character buffers - but CPython doesn't support them either PyErr_Format(PyExc_TypeError, "ord() expected string of length 1, but %.200s found", c->ob_type->tp_name); return (long)(Py_UCS4)-1; } PyErr_Format(PyExc_TypeError, "ord() expected a character, but string of length %zd found", size); return (long)(Py_UCS4)-1; } //////////////////// py_dict_keys.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d); /*proto*/ //////////////////// py_dict_keys //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); else return PyDict_Keys(d); } //////////////////// py_dict_values.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d); /*proto*/ //////////////////// py_dict_values //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); else return PyDict_Values(d); } //////////////////// py_dict_items.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d); /*proto*/ //////////////////// py_dict_items //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); else return PyDict_Items(d); } //////////////////// py_dict_iterkeys.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterKeys(PyObject* d); /*proto*/ //////////////////// py_dict_iterkeys //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterKeys(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "iterkeys", d); } //////////////////// py_dict_itervalues.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterValues(PyObject* d); /*proto*/ //////////////////// py_dict_itervalues 
//////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterValues(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "itervalues", d); } //////////////////// py_dict_iteritems.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterItems(PyObject* d); /*proto*/ //////////////////// py_dict_iteritems //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_IterItems(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "iteritems", d); } //////////////////// py_dict_viewkeys.proto //////////////////// #if PY_VERSION_HEX < 0x02070000 #error This module uses dict views, which require Python 2.7 or later #endif static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewKeys(PyObject* d); /*proto*/ //////////////////// py_dict_viewkeys //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewKeys(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "viewkeys", d); } //////////////////// py_dict_viewvalues.proto //////////////////// #if PY_VERSION_HEX < 0x02070000 #error This module uses dict views, which require Python 2.7 or later #endif static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewValues(PyObject* d); /*proto*/ //////////////////// py_dict_viewvalues //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewValues(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "viewvalues", d); } //////////////////// py_dict_viewitems.proto //////////////////// #if PY_VERSION_HEX < 0x02070000 #error This module uses dict views, which require Python 2.7 or later #endif static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewItems(PyObject* d); /*proto*/ //////////////////// py_dict_viewitems //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewItems(PyObject* d) { if (PY_MAJOR_VERSION >= 3) return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); else return CALL_UNBOUND_METHOD(PyDict_Type, "viewitems", d); } //////////////////// pyfrozenset_new.proto //////////////////// //@substitute: naming static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) { if (it) { PyObject* result; #if CYTHON_COMPILING_IN_PYPY // PyPy currently lacks PyFrozenSet_CheckExact() and PyFrozenSet_New() PyObject* args; args = PyTuple_Pack(1, it); if (unlikely(!args)) return NULL; result = PyObject_Call((PyObject*)&PyFrozenSet_Type, args, NULL); Py_DECREF(args); return result; #else if (PyFrozenSet_CheckExact(it)) { Py_INCREF(it); return it; } result = PyFrozenSet_New(it); if (unlikely(!result)) return NULL; if (likely(PySet_GET_SIZE(result))) return result; // empty frozenset is a singleton // seems wasteful, but CPython does the same Py_DECREF(result); #endif } #if CYTHON_USE_TYPE_SLOTS return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, $empty_tuple, NULL); #else return PyObject_Call((PyObject*)&PyFrozenSet_Type, $empty_tuple, NULL); #endif } //////////////////// PySet_Update.proto //////////////////// static CYTHON_INLINE int __Pyx_PySet_Update(PyObject* set, PyObject* it); /*proto*/ //////////////////// PySet_Update //////////////////// static CYTHON_INLINE int __Pyx_PySet_Update(PyObject* set, PyObject* it) { PyObject *retval; #if CYTHON_USE_TYPE_SLOTS && !CYTHON_COMPILING_IN_PYPY if (PyAnySet_Check(it)) { if 
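/* Illustrative note (added for exposition, not part of the original source):
 * fast path for set.update() when the argument is itself a set/frozenset.
 * CPython's in-place "|=" slot then updates `set` and returns it, so the
 * generic "update" method call further below can be skipped.  Rough Python
 * equivalent (names hypothetical):
 *
 *     if isinstance(it, (set, frozenset)):
 *         s |= it          # in-place union, handled by the type slot
 *     else:
 *         s.update(it)     # generic fallback for arbitrary iterables
 */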
(PySet_GET_SIZE(it) == 0) return 0; // fast and safe case: CPython will update our result set and return it retval = PySet_Type.tp_as_number->nb_inplace_or(set, it); if (likely(retval == set)) { Py_DECREF(retval); return 0; } if (unlikely(!retval)) return -1; // unusual result, fall through to set.update() call below Py_DECREF(retval); } #endif retval = CALL_UNBOUND_METHOD(PySet_Type, "update", set, it); if (unlikely(!retval)) return -1; Py_DECREF(retval); return 0; } Cython-0.26.1/Cython/Utility/Capsule.c0000664000175000017500000000077112542002467020306 0ustar stefanstefan00000000000000//////////////// Capsule.proto //////////////// /* Todo: wrap the rest of the functionality in similar functions */ static CYTHON_INLINE PyObject *__pyx_capsule_create(void *p, const char *sig); //////////////// Capsule //////////////// static CYTHON_INLINE PyObject * __pyx_capsule_create(void *p, CYTHON_UNUSED const char *sig) { PyObject *cobj; #if PY_VERSION_HEX >= 0x02070000 cobj = PyCapsule_New(p, sig, NULL); #else cobj = PyCObject_FromVoidPtr(p, NULL); #endif return cobj; } Cython-0.26.1/Cython/Utility/CppSupport.cpp0000664000175000017500000000426613143605603021373 0ustar stefanstefan00000000000000/////////////// CppExceptionConversion.proto /////////////// #ifndef __Pyx_CppExn2PyErr #include #include #include #include static void __Pyx_CppExn2PyErr() { // Catch a handful of different errors here and turn them into the // equivalent Python errors. try { if (PyErr_Occurred()) ; // let the latest Python exn pass through and ignore the current one else throw; } catch (const std::bad_alloc& exn) { PyErr_SetString(PyExc_MemoryError, exn.what()); } catch (const std::bad_cast& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::bad_typeid& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::domain_error& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::invalid_argument& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::ios_base::failure& exn) { // Unfortunately, in standard C++ we have no way of distinguishing EOF // from other errors here; be careful with the exception mask PyErr_SetString(PyExc_IOError, exn.what()); } catch (const std::out_of_range& exn) { // Change out_of_range to IndexError PyErr_SetString(PyExc_IndexError, exn.what()); } catch (const std::overflow_error& exn) { PyErr_SetString(PyExc_OverflowError, exn.what()); } catch (const std::range_error& exn) { PyErr_SetString(PyExc_ArithmeticError, exn.what()); } catch (const std::underflow_error& exn) { PyErr_SetString(PyExc_ArithmeticError, exn.what()); } catch (const std::exception& exn) { PyErr_SetString(PyExc_RuntimeError, exn.what()); } catch (...) 
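/* Illustrative note (added for exposition, not part of the original source):
 * the final catch(...) below maps any otherwise-unhandled C++ exception to a
 * Python RuntimeError.  This translator is what runs for declarations using
 * "except +" in Cython (header and function names hypothetical):
 *
 *     cdef extern from "lib.h":
 *         int do_work() except +    # C++ exceptions become Python exceptions
 *
 * The enclosing #ifndef __Pyx_CppExn2PyErr guard lets a project supply its
 * own translation function and override this default.
 */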
{ PyErr_SetString(PyExc_RuntimeError, "Unknown exception"); } } #endif /////////////// PythranConversion.proto /////////////// template auto to_python_from_expr(T &&value) -> decltype(to_python( typename pythonic::returnable::type>::type>::type{std::forward(value)})) { using returnable_type = typename pythonic::returnable::type>::type>::type; return to_python(returnable_type{std::forward(value)}); } Cython-0.26.1/Cython/Utility/TestCyUtilityLoader.pyx0000664000175000017500000000023012542002467023224 0ustar stefanstefan00000000000000########## TestCyUtilityLoader ########## #@requires: OtherUtility test {{cy_loader}} impl ########## OtherUtility ########## req {{cy_loader}} impl Cython-0.26.1/Cython/Utility/Buffer.c0000664000175000017500000007001313150055750020116 0ustar stefanstefan00000000000000/////////////// BufferStructDeclare.proto /////////////// /* structs for buffer access */ typedef struct { Py_ssize_t shape, strides, suboffsets; } __Pyx_Buf_DimInfo; typedef struct { size_t refcount; Py_buffer pybuffer; } __Pyx_Buffer; typedef struct { __Pyx_Buffer *rcbuffer; char *data; __Pyx_Buf_DimInfo diminfo[{{max_dims}}]; } __Pyx_LocalBuf_ND; /////////////// BufferIndexError.proto /////////////// static void __Pyx_RaiseBufferIndexError(int axis); /*proto*/ /////////////// BufferIndexError /////////////// static void __Pyx_RaiseBufferIndexError(int axis) { PyErr_Format(PyExc_IndexError, "Out of bounds on buffer access (axis %d)", axis); } /////////////// BufferIndexErrorNogil.proto /////////////// //@requires: BufferIndexError static void __Pyx_RaiseBufferIndexErrorNogil(int axis); /*proto*/ /////////////// BufferIndexErrorNogil /////////////// static void __Pyx_RaiseBufferIndexErrorNogil(int axis) { #ifdef WITH_THREAD PyGILState_STATE gilstate = PyGILState_Ensure(); #endif __Pyx_RaiseBufferIndexError(axis); #ifdef WITH_THREAD PyGILState_Release(gilstate); #endif } /////////////// BufferFallbackError.proto /////////////// static void __Pyx_RaiseBufferFallbackError(void); /*proto*/ /////////////// BufferFallbackError /////////////// static void __Pyx_RaiseBufferFallbackError(void) { PyErr_SetString(PyExc_ValueError, "Buffer acquisition failed on assignment; and then reacquiring the old buffer failed too!"); } /////////////// BufferFormatStructs.proto /////////////// #define IS_UNSIGNED(type) (((type) -1) > 0) /* Run-time type information about structs used with buffers */ struct __Pyx_StructField_; #define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) typedef struct { const char* name; /* for error messages only */ struct __Pyx_StructField_* fields; size_t size; /* sizeof(type) */ size_t arraysize[8]; /* length of array in each dimension */ int ndim; char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ char is_unsigned; int flags; } __Pyx_TypeInfo; typedef struct __Pyx_StructField_ { __Pyx_TypeInfo* type; const char* name; size_t offset; } __Pyx_StructField; typedef struct { __Pyx_StructField* field; size_t parent_offset; } __Pyx_BufFmt_StackElem; typedef struct { __Pyx_StructField root; __Pyx_BufFmt_StackElem* head; size_t fmt_offset; size_t new_count, enc_count; size_t struct_alignment; int is_complex; char enc_type; char new_packmode; char enc_packmode; char is_valid_array; } __Pyx_BufFmt_Context; /////////////// GetAndReleaseBuffer.proto /////////////// #if PY_MAJOR_VERSION < 3 static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); static void __Pyx_ReleaseBuffer(Py_buffer *view); #else #define __Pyx_GetBuffer 
PyObject_GetBuffer #define __Pyx_ReleaseBuffer PyBuffer_Release #endif /////////////// GetAndReleaseBuffer /////////////// #if PY_MAJOR_VERSION < 3 static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); {{for type_ptr, getbuffer, releasebuffer in types}} {{if getbuffer}} if (PyObject_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags); {{endif}} {{endfor}} PyErr_Format(PyExc_TypeError, "'%.200s' does not have the buffer interface", Py_TYPE(obj)->tp_name); return -1; } static void __Pyx_ReleaseBuffer(Py_buffer *view) { PyObject *obj = view->obj; if (!obj) return; if (PyObject_CheckBuffer(obj)) { PyBuffer_Release(view); return; } if ((0)) {} {{for type_ptr, getbuffer, releasebuffer in types}} {{if releasebuffer}} else if (PyObject_TypeCheck(obj, {{type_ptr}})) {{releasebuffer}}(obj, view); {{endif}} {{endfor}} view->obj = NULL; Py_DECREF(obj); } #endif /* PY_MAJOR_VERSION < 3 */ /////////////// BufferFormatCheck.proto /////////////// {{# Buffer format string checking Buffer type checking. Utility code for checking that acquired buffers match our assumptions. We only need to check ndim and the format string; the access mode/flags is checked by the exporter. See: http://docs.python.org/3/library/struct.html http://legacy.python.org/dev/peps/pep-3118/#additions-to-the-struct-string-syntax The alignment code is copied from _struct.c in Python. }} static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts); static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, __Pyx_BufFmt_StackElem* stack, __Pyx_TypeInfo* type); /* PROTO */ /////////////// BufferFormatCheck /////////////// static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, __Pyx_BufFmt_StackElem* stack, __Pyx_TypeInfo* type) { stack[0].field = &ctx->root; stack[0].parent_offset = 0; ctx->root.type = type; ctx->root.name = "buffer dtype"; ctx->root.offset = 0; ctx->head = stack; ctx->head->field = &ctx->root; ctx->fmt_offset = 0; ctx->head->parent_offset = 0; ctx->new_packmode = '@'; ctx->enc_packmode = '@'; ctx->new_count = 1; ctx->enc_count = 0; ctx->enc_type = 0; ctx->is_complex = 0; ctx->is_valid_array = 0; ctx->struct_alignment = 0; while (type->typegroup == 'S') { ++ctx->head; ctx->head->field = type->fields; ctx->head->parent_offset = 0; type = type->fields->type; } } static int __Pyx_BufFmt_ParseNumber(const char** ts) { int count; const char* t = *ts; if (*t < '0' || *t > '9') { return -1; } else { count = *t++ - '0'; while (*t >= '0' && *t < '9') { count *= 10; count += *t++ - '0'; } } *ts = t; return count; } static int __Pyx_BufFmt_ExpectNumber(const char **ts) { int number = __Pyx_BufFmt_ParseNumber(ts); if (number == -1) /* First char was not a digit */ PyErr_Format(PyExc_ValueError,\ "Does not understand character buffer dtype format string ('%c')", **ts); return number; } static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { PyErr_Format(PyExc_ValueError, "Unexpected format string character: '%c'", ch); } static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { switch (ch) { case 'c': return "'char'"; case 'b': return "'signed char'"; case 'B': return "'unsigned char'"; case 'h': return "'short'"; case 'H': return "'unsigned short'"; 
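/* Illustrative note (added for exposition, not part of the original source):
 * these single characters are the struct-module / PEP 3118 format codes
 * ('i' = int, 'f' = float, a 'Z' prefix marks complex, 'T{...}' a nested
 * struct).  This helper only turns them into readable names for the
 * "Buffer dtype mismatch" error messages raised further below. */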
case 'i': return "'int'"; case 'I': return "'unsigned int'"; case 'l': return "'long'"; case 'L': return "'unsigned long'"; case 'q': return "'long long'"; case 'Q': return "'unsigned long long'"; case 'f': return (is_complex ? "'complex float'" : "'float'"); case 'd': return (is_complex ? "'complex double'" : "'double'"); case 'g': return (is_complex ? "'complex long double'" : "'long double'"); case 'T': return "a struct"; case 'O': return "Python object"; case 'P': return "a pointer"; case 's': case 'p': return "a string"; case 0: return "end"; default: return "unparseable format string"; } } static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { switch (ch) { case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; case 'h': case 'H': return 2; case 'i': case 'I': case 'l': case 'L': return 4; case 'q': case 'Q': return 8; case 'f': return (is_complex ? 8 : 4); case 'd': return (is_complex ? 16 : 8); case 'g': { PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); return 0; } case 'O': case 'P': return sizeof(void*); default: __Pyx_BufFmt_RaiseUnexpectedChar(ch); return 0; } } static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { switch (ch) { case 'c': case 'b': case 'B': case 's': case 'p': return 1; case 'h': case 'H': return sizeof(short); case 'i': case 'I': return sizeof(int); case 'l': case 'L': return sizeof(long); #ifdef HAVE_LONG_LONG case 'q': case 'Q': return sizeof(PY_LONG_LONG); #endif case 'f': return sizeof(float) * (is_complex ? 2 : 1); case 'd': return sizeof(double) * (is_complex ? 2 : 1); case 'g': return sizeof(long double) * (is_complex ? 2 : 1); case 'O': case 'P': return sizeof(void*); default: { __Pyx_BufFmt_RaiseUnexpectedChar(ch); return 0; } } } typedef struct { char c; short x; } __Pyx_st_short; typedef struct { char c; int x; } __Pyx_st_int; typedef struct { char c; long x; } __Pyx_st_long; typedef struct { char c; float x; } __Pyx_st_float; typedef struct { char c; double x; } __Pyx_st_double; typedef struct { char c; long double x; } __Pyx_st_longdouble; typedef struct { char c; void *x; } __Pyx_st_void_p; #ifdef HAVE_LONG_LONG typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; #endif static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { switch (ch) { case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); #ifdef HAVE_LONG_LONG case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); #endif case 'f': return sizeof(__Pyx_st_float) - sizeof(float); case 'd': return sizeof(__Pyx_st_double) - sizeof(double); case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); default: __Pyx_BufFmt_RaiseUnexpectedChar(ch); return 0; } } /* These are for computing the padding at the end of the struct to align on the first member of the struct. This will probably the same as above, but we don't have any guarantees. 
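   (Illustrative aside, added for exposition: sizeof(struct { T x; char c; })
   minus sizeof(T) yields the alignment the compiler enforces for T, because
   the trailing char forces the struct to be padded up to the next multiple
   of T's alignment; for __Pyx_pad_double this is typically 8 on common
   64-bit ABIs.)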
*/ typedef struct { short x; char c; } __Pyx_pad_short; typedef struct { int x; char c; } __Pyx_pad_int; typedef struct { long x; char c; } __Pyx_pad_long; typedef struct { float x; char c; } __Pyx_pad_float; typedef struct { double x; char c; } __Pyx_pad_double; typedef struct { long double x; char c; } __Pyx_pad_longdouble; typedef struct { void *x; char c; } __Pyx_pad_void_p; #ifdef HAVE_LONG_LONG typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; #endif static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { switch (ch) { case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); #ifdef HAVE_LONG_LONG case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); #endif case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); default: __Pyx_BufFmt_RaiseUnexpectedChar(ch); return 0; } } static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { switch (ch) { case 'c': return 'H'; case 'b': case 'h': case 'i': case 'l': case 'q': case 's': case 'p': return 'I'; case 'B': case 'H': case 'I': case 'L': case 'Q': return 'U'; case 'f': case 'd': case 'g': return (is_complex ? 'C' : 'R'); case 'O': return 'O'; case 'P': return 'P'; default: { __Pyx_BufFmt_RaiseUnexpectedChar(ch); return 0; } } } static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { if (ctx->head == NULL || ctx->head->field == &ctx->root) { const char* expected; const char* quote; if (ctx->head == NULL) { expected = "end"; quote = ""; } else { expected = ctx->head->field->type->name; quote = "'"; } PyErr_Format(PyExc_ValueError, "Buffer dtype mismatch, expected %s%s%s but got %s", quote, expected, quote, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); } else { __Pyx_StructField* field = ctx->head->field; __Pyx_StructField* parent = (ctx->head - 1)->field; PyErr_Format(PyExc_ValueError, "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), parent->type->name, field->name); } } static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { char group; size_t size, offset, arraysize = 1; /* printf("processing... 
%s\n", ctx->head->field->type->name); */ if (ctx->enc_type == 0) return 0; /* Validate array size */ if (ctx->head->field->type->arraysize[0]) { int i, ndim = 0; /* handle strings ('s' and 'p') */ if (ctx->enc_type == 's' || ctx->enc_type == 'p') { ctx->is_valid_array = ctx->head->field->type->ndim == 1; ndim = 1; if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { PyErr_Format(PyExc_ValueError, "Expected a dimension of size %zu, got %zu", ctx->head->field->type->arraysize[0], ctx->enc_count); return -1; } } if (!ctx->is_valid_array) { PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", ctx->head->field->type->ndim, ndim); return -1; } for (i = 0; i < ctx->head->field->type->ndim; i++) { arraysize *= ctx->head->field->type->arraysize[i]; } ctx->is_valid_array = 0; ctx->enc_count = 1; } group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); do { __Pyx_StructField* field = ctx->head->field; __Pyx_TypeInfo* type = field->type; if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); } else { size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); } if (ctx->enc_packmode == '@') { size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); size_t align_mod_offset; if (align_at == 0) return -1; align_mod_offset = ctx->fmt_offset % align_at; if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; if (ctx->struct_alignment == 0) ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, ctx->is_complex); } if (type->size != size || type->typegroup != group) { if (type->typegroup == 'C' && type->fields != NULL) { /* special case -- treat as struct rather than complex number */ size_t parent_offset = ctx->head->parent_offset + field->offset; ++ctx->head; ctx->head->field = type->fields; ctx->head->parent_offset = parent_offset; continue; } if ((type->typegroup == 'H' || group == 'H') && type->size == size) { /* special case -- chars don't care about sign */ } else { __Pyx_BufFmt_RaiseExpected(ctx); return -1; } } offset = ctx->head->parent_offset + field->offset; if (ctx->fmt_offset != offset) { PyErr_Format(PyExc_ValueError, "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); return -1; } ctx->fmt_offset += size; if (arraysize) ctx->fmt_offset += (arraysize - 1) * size; --ctx->enc_count; /* Consume from buffer string */ /* Done checking, move to next field, pushing or popping struct stack if needed */ while (1) { if (field == &ctx->root) { ctx->head = NULL; if (ctx->enc_count != 0) { __Pyx_BufFmt_RaiseExpected(ctx); return -1; } break; /* breaks both loops as ctx->enc_count == 0 */ } ctx->head->field = ++field; if (field->type == NULL) { --ctx->head; field = ctx->head->field; continue; } else if (field->type->typegroup == 'S') { size_t parent_offset = ctx->head->parent_offset + field->offset; if (field->type->fields->type == NULL) continue; /* empty struct */ field = field->type->fields; ++ctx->head; ctx->head->field = field; ctx->head->parent_offset = parent_offset; break; } else { break; } } } while (ctx->enc_count); ctx->enc_type = 0; ctx->is_complex = 0; return 0; } /* Parse an array in the format string (e.g. 
(1,2,3)) */ static CYTHON_INLINE PyObject * __pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) { const char *ts = *tsp; int i = 0, number; int ndim = ctx->head->field->type->ndim; ; ++ts; if (ctx->new_count != 1) { PyErr_SetString(PyExc_ValueError, "Cannot handle repeated arrays in format string"); return NULL; } /* Process the previous element */ if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; /* Parse all numbers in the format string */ while (*ts && *ts != ')') { // ignore space characters (not using isspace() due to C/C++ problem on MacOS-X) switch (*ts) { case ' ': case '\f': case '\r': case '\n': case '\t': case '\v': continue; default: break; /* not a 'break' in the loop */ } number = __Pyx_BufFmt_ExpectNumber(&ts); if (number == -1) return NULL; if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) return PyErr_Format(PyExc_ValueError, "Expected a dimension of size %zu, got %d", ctx->head->field->type->arraysize[i], number); if (*ts != ',' && *ts != ')') return PyErr_Format(PyExc_ValueError, "Expected a comma in format string, got '%c'", *ts); if (*ts == ',') ts++; i++; } if (i != ndim) return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", ctx->head->field->type->ndim, i); if (!*ts) { PyErr_SetString(PyExc_ValueError, "Unexpected end of format string, expected ')'"); return NULL; } ctx->is_valid_array = 1; ctx->new_count = 1; *tsp = ++ts; return Py_None; } static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { int got_Z = 0; while (1) { /* puts(ts); */ switch(*ts) { case 0: if (ctx->enc_type != 0 && ctx->head == NULL) { __Pyx_BufFmt_RaiseExpected(ctx); return NULL; } if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; if (ctx->head != NULL) { __Pyx_BufFmt_RaiseExpected(ctx); return NULL; } return ts; case ' ': case '\r': case '\n': ++ts; break; case '<': if (!__Pyx_Is_Little_Endian()) { PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); return NULL; } ctx->new_packmode = '='; ++ts; break; case '>': case '!': if (__Pyx_Is_Little_Endian()) { PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); return NULL; } ctx->new_packmode = '='; ++ts; break; case '=': case '@': case '^': ctx->new_packmode = *ts++; break; case 'T': /* substruct */ { const char* ts_after_sub; size_t i, struct_count = ctx->new_count; size_t struct_alignment = ctx->struct_alignment; ctx->new_count = 1; ++ts; if (*ts != '{') { PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); return NULL; } if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; ctx->enc_type = 0; /* Erase processed last struct element */ ctx->enc_count = 0; ctx->struct_alignment = 0; ++ts; ts_after_sub = ts; for (i = 0; i != struct_count; ++i) { ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); if (!ts_after_sub) return NULL; } ts = ts_after_sub; if (struct_alignment) ctx->struct_alignment = struct_alignment; } break; case '}': /* end of substruct; either repeat or move on */ { size_t alignment = ctx->struct_alignment; ++ts; if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; ctx->enc_type = 0; /* Erase processed last struct element */ if (alignment && ctx->fmt_offset % alignment) { /* Pad struct on size of the first member */ ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); } } return ts; case 'x': if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; ctx->fmt_offset += ctx->new_count; 
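/* Illustrative note (added for exposition, not part of the original source):
 * 'x' is the struct-module pad byte.  A dtype format such as "i4xd" makes
 * the parser advance fmt_offset past 4 unused bytes between the int and the
 * double without consuming any struct field. */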
ctx->new_count = 1; ctx->enc_count = 0; ctx->enc_type = 0; ctx->enc_packmode = ctx->new_packmode; ++ts; break; case 'Z': got_Z = 1; ++ts; if (*ts != 'f' && *ts != 'd' && *ts != 'g') { __Pyx_BufFmt_RaiseUnexpectedChar('Z'); return NULL; } /* fall through */ case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': case 'l': case 'L': case 'q': case 'Q': case 'f': case 'd': case 'g': case 'O': case 'p': if (ctx->enc_type == *ts && got_Z == ctx->is_complex && ctx->enc_packmode == ctx->new_packmode) { /* Continue pooling same type */ ctx->enc_count += ctx->new_count; ctx->new_count = 1; got_Z = 0; ++ts; break; } /* fall through */ case 's': /* 's' or new type (cannot be added to current pool) */ if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; ctx->enc_count = ctx->new_count; ctx->enc_packmode = ctx->new_packmode; ctx->enc_type = *ts; ctx->is_complex = got_Z; ++ts; ctx->new_count = 1; got_Z = 0; break; case ':': ++ts; while(*ts != ':') ++ts; ++ts; break; case '(': if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; break; default: { int number = __Pyx_BufFmt_ExpectNumber(&ts); if (number == -1) return NULL; ctx->new_count = (size_t)number; } } } } static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { buf->buf = NULL; buf->obj = NULL; buf->strides = __Pyx_zeros; buf->shape = __Pyx_zeros; buf->suboffsets = __Pyx_minusones; } static CYTHON_INLINE int __Pyx_GetBufferAndValidate( Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack) { if (obj == Py_None || obj == NULL) { __Pyx_ZeroBuffer(buf); return 0; } buf->buf = NULL; if (__Pyx_GetBuffer(obj, buf, flags) == -1) goto fail; if (buf->ndim != nd) { PyErr_Format(PyExc_ValueError, "Buffer has wrong number of dimensions (expected %d, got %d)", nd, buf->ndim); goto fail; } if (!cast) { __Pyx_BufFmt_Context ctx; __Pyx_BufFmt_Init(&ctx, stack, dtype); if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; } if ((unsigned)buf->itemsize != dtype->size) { PyErr_Format(PyExc_ValueError, "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", buf->itemsize, (buf->itemsize > 1) ? "s" : "", dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? 
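/* Illustrative note (added for exposition, not part of the original source):
 * __Pyx_GetBufferAndValidate is the entry point generated code uses to
 * acquire a typed buffer, e.g. for a signature like
 *
 *     def f(np.ndarray[double, ndim=2] a): ...
 *
 * (assuming numpy is cimported as np).  It requests the buffer, checks ndim,
 * validates the exporter's format string against the declared dtype, and
 * cross-checks itemsize against the dtype size. */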
"s" : ""); goto fail; } if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; return 0; fail:; __Pyx_ZeroBuffer(buf); return -1; } static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { if (info->buf == NULL) return; if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; __Pyx_ReleaseBuffer(info); } /////////////// TypeInfoCompare.proto /////////////// static int __pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b); /////////////// TypeInfoCompare /////////////// /* See if two dtypes are equal */ static int __pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b) { int i; if (!a || !b) return 0; if (a == b) return 1; if (a->size != b->size || a->typegroup != b->typegroup || a->is_unsigned != b->is_unsigned || a->ndim != b->ndim) { if (a->typegroup == 'H' || b->typegroup == 'H') { /* Special case for chars */ return a->size == b->size; } else { return 0; } } if (a->ndim) { /* Verify multidimensional C arrays */ for (i = 0; i < a->ndim; i++) if (a->arraysize[i] != b->arraysize[i]) return 0; } if (a->typegroup == 'S') { /* Check for packed struct */ if (a->flags != b->flags) return 0; /* compare all struct fields */ if (a->fields || b->fields) { /* Check if both have fields */ if (!(a->fields && b->fields)) return 0; /* compare */ for (i = 0; a->fields[i].type && b->fields[i].type; i++) { __Pyx_StructField *field_a = a->fields + i; __Pyx_StructField *field_b = b->fields + i; if (field_a->offset != field_b->offset || !__pyx_typeinfo_cmp(field_a->type, field_b->type)) return 0; } /* If all fields are processed, we have a match */ return !a->fields[i].type && !b->fields[i].type; } } return 1; } /////////////// TypeInfoToFormat.proto /////////////// struct __pyx_typeinfo_string { char string[3]; }; static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type); /////////////// TypeInfoToFormat /////////////// {{# See also MemoryView.pyx:BufferFormatFromTypeInfo }} static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type) { struct __pyx_typeinfo_string result = { {0} }; char *buf = (char *) result.string; size_t size = type->size; switch (type->typegroup) { case 'H': *buf = 'c'; break; case 'I': case 'U': if (size == 1) *buf = (type->is_unsigned) ? 'B' : 'b'; else if (size == 2) *buf = (type->is_unsigned) ? 'H' : 'h'; else if (size == 4) *buf = (type->is_unsigned) ? 'I' : 'i'; else if (size == 8) *buf = (type->is_unsigned) ? 'Q' : 'q'; break; case 'P': *buf = 'P'; break; case 'C': { __Pyx_TypeInfo complex_type = *type; complex_type.typegroup = 'R'; complex_type.size /= 2; *buf++ = 'Z'; *buf = __Pyx_TypeInfoToFormat(&complex_type).string[0]; break; } case 'R': if (size == 4) *buf = 'f'; else if (size == 8) *buf = 'd'; else *buf = 'g'; break; } return result; } Cython-0.26.1/Cython/Utility/Profile.c0000664000175000017500000003777213143605603020324 0ustar stefanstefan00000000000000/////////////// Profile.proto /////////////// //@substitute: naming // Note that cPython ignores PyTrace_EXCEPTION, // but maybe some other profilers don't. 
#ifndef CYTHON_PROFILE #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON #define CYTHON_PROFILE 0 #else #define CYTHON_PROFILE 1 #endif #endif #ifndef CYTHON_TRACE_NOGIL #define CYTHON_TRACE_NOGIL 0 #else #if CYTHON_TRACE_NOGIL && !defined(CYTHON_TRACE) #define CYTHON_TRACE 1 #endif #endif #ifndef CYTHON_TRACE #define CYTHON_TRACE 0 #endif #if CYTHON_TRACE #undef CYTHON_PROFILE_REUSE_FRAME #endif #ifndef CYTHON_PROFILE_REUSE_FRAME #define CYTHON_PROFILE_REUSE_FRAME 0 #endif #if CYTHON_PROFILE || CYTHON_TRACE #include "compile.h" #include "frameobject.h" #include "traceback.h" #if CYTHON_PROFILE_REUSE_FRAME #define CYTHON_FRAME_MODIFIER static #define CYTHON_FRAME_DEL(frame) #else #define CYTHON_FRAME_MODIFIER #define CYTHON_FRAME_DEL(frame) Py_CLEAR(frame) #endif #define __Pyx_TraceDeclarations \ static PyCodeObject *$frame_code_cname = NULL; \ CYTHON_FRAME_MODIFIER PyFrameObject *$frame_cname = NULL; \ int __Pyx_use_tracing = 0; #define __Pyx_TraceFrameInit(codeobj) \ if (codeobj) $frame_code_cname = (PyCodeObject*) codeobj; #ifdef WITH_THREAD #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ if (nogil) { \ if (CYTHON_TRACE_NOGIL) { \ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing) && !tstate->tracing && \ (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, funcname, srcfile, firstlineno); \ } \ PyGILState_Release(state); \ if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ } \ } else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing) && !tstate->tracing && \ (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, funcname, srcfile, firstlineno); \ if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ } \ } #else #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ { PyThreadState* tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing) && !tstate->tracing && \ (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, funcname, srcfile, firstlineno); \ if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ } \ } #endif #define __Pyx_TraceException() \ if (likely(!__Pyx_use_tracing)); else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (tstate->use_tracing && \ (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ tstate->tracing++; \ tstate->use_tracing = 0; \ PyObject *exc_info = __Pyx_GetExceptionTuple(tstate); \ if (exc_info) { \ if (CYTHON_TRACE && tstate->c_tracefunc) \ tstate->c_tracefunc( \ tstate->c_traceobj, $frame_cname, PyTrace_EXCEPTION, exc_info); \ tstate->c_profilefunc( \ tstate->c_profileobj, $frame_cname, PyTrace_EXCEPTION, exc_info); \ Py_DECREF(exc_info); \ } \ tstate->use_tracing = 1; \ tstate->tracing--; \ } \ } static void __Pyx_call_return_trace_func(PyThreadState *tstate, PyFrameObject *frame, PyObject *result) { PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); tstate->tracing++; tstate->use_tracing = 0; if (CYTHON_TRACE && tstate->c_tracefunc) tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_RETURN, result); if (tstate->c_profilefunc) tstate->c_profilefunc(tstate->c_profileobj, frame, PyTrace_RETURN, result); CYTHON_FRAME_DEL(frame); 
tstate->use_tracing = 1; tstate->tracing--; PyErr_Restore(type, value, traceback); } #ifdef WITH_THREAD #define __Pyx_TraceReturn(result, nogil) \ if (likely(!__Pyx_use_tracing)); else { \ if (nogil) { \ if (CYTHON_TRACE_NOGIL) { \ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ tstate = PyThreadState_GET(); \ if (tstate->use_tracing) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ PyGILState_Release(state); \ } \ } else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (tstate->use_tracing) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ } \ } #else #define __Pyx_TraceReturn(result, nogil) \ if (likely(!__Pyx_use_tracing)); else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (tstate->use_tracing) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ } #endif static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno); /*proto*/ static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, const char *funcname, const char *srcfile, int firstlineno); /*proto*/ #else #define __Pyx_TraceDeclarations #define __Pyx_TraceFrameInit(codeobj) // mark error label as used to avoid compiler warnings #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) if ((1)); else goto_error; #define __Pyx_TraceException() #define __Pyx_TraceReturn(result, nogil) #endif /* CYTHON_PROFILE */ #if CYTHON_TRACE // see call_trace_protected() in CPython's ceval.c static int __Pyx_call_line_trace_func(PyThreadState *tstate, PyFrameObject *frame, int lineno) { int ret; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); __Pyx_PyFrame_SetLineNumber(frame, lineno); tstate->tracing++; tstate->use_tracing = 0; ret = tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_LINE, NULL); tstate->use_tracing = 1; tstate->tracing--; if (likely(!ret)) { PyErr_Restore(type, value, traceback); } else { Py_XDECREF(type); Py_XDECREF(value); Py_XDECREF(traceback); } return ret; } #ifdef WITH_THREAD #define __Pyx_TraceLine(lineno, nogil, goto_error) \ if (likely(!__Pyx_use_tracing)); else { \ if (nogil) { \ if (CYTHON_TRACE_NOGIL) { \ int ret = 0; \ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing && tstate->c_tracefunc)) { \ ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ } \ PyGILState_Release(state); \ if (unlikely(ret)) goto_error; \ } \ } else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing && tstate->c_tracefunc)) { \ int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ if (unlikely(ret)) goto_error; \ } \ } \ } #else #define __Pyx_TraceLine(lineno, nogil, goto_error) \ if (likely(!__Pyx_use_tracing)); else { \ PyThreadState* tstate = PyThreadState_GET(); \ if (unlikely(tstate->use_tracing && tstate->c_tracefunc)) { \ int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ if (unlikely(ret)) goto_error; \ } \ } #endif #else // mark error label as used to avoid compiler warnings #define __Pyx_TraceLine(lineno, nogil, goto_error) if ((1)); else goto_error; #endif /////////////// Profile /////////////// //@substitute: naming #if CYTHON_PROFILE static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, const char *funcname, const char *srcfile, int firstlineno) { PyObject *type, *value, *traceback; int retval; 
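/* Illustrative note (added for exposition, not part of the original source):
 * Cython functions have no real Python frame, so this helper fabricates a
 * PyCodeObject/PyFrameObject pair (cached in the statics declared by
 * __Pyx_TraceDeclarations) purely so profilers and tracers have something to
 * attribute calls to.  The machinery is typically switched on with
 * directives such as
 *
 *     # cython: profile=True
 *     # cython: linetrace=True    (line tracing also needs CYTHON_TRACE=1
 *                                  defined when compiling the C file)
 */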
PyThreadState* tstate = PyThreadState_GET(); if (*frame == NULL || !CYTHON_PROFILE_REUSE_FRAME) { if (*code == NULL) { *code = __Pyx_createFrameCodeObject(funcname, srcfile, firstlineno); if (*code == NULL) return 0; } *frame = PyFrame_New( tstate, /*PyThreadState *tstate*/ *code, /*PyCodeObject *code*/ $moddict_cname, /*PyObject *globals*/ 0 /*PyObject *locals*/ ); if (*frame == NULL) return 0; if (CYTHON_TRACE && (*frame)->f_trace == NULL) { // this enables "f_lineno" lookup, at least in CPython ... Py_INCREF(Py_None); (*frame)->f_trace = Py_None; } #if PY_VERSION_HEX < 0x030400B1 } else { (*frame)->f_tstate = tstate; #endif } __Pyx_PyFrame_SetLineNumber(*frame, firstlineno); retval = 1; tstate->tracing++; tstate->use_tracing = 0; PyErr_Fetch(&type, &value, &traceback); #if CYTHON_TRACE if (tstate->c_tracefunc) retval = tstate->c_tracefunc(tstate->c_traceobj, *frame, PyTrace_CALL, NULL) == 0; if (retval && tstate->c_profilefunc) #endif retval = tstate->c_profilefunc(tstate->c_profileobj, *frame, PyTrace_CALL, NULL) == 0; tstate->use_tracing = (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc)); tstate->tracing--; if (retval) { PyErr_Restore(type, value, traceback); return tstate->use_tracing && retval; } else { Py_XDECREF(type); Py_XDECREF(value); Py_XDECREF(traceback); return -1; } } static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno) { PyObject *py_srcfile = 0; PyObject *py_funcname = 0; PyCodeObject *py_code = 0; #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); py_srcfile = PyString_FromString(srcfile); #else py_funcname = PyUnicode_FromString(funcname); py_srcfile = PyUnicode_FromString(srcfile); #endif if (!py_funcname | !py_srcfile) goto bad; py_code = PyCode_New( 0, /*int argcount,*/ #if PY_MAJOR_VERSION >= 3 0, /*int kwonlyargcount,*/ #endif 0, /*int nlocals,*/ 0, /*int stacksize,*/ 0, /*int flags,*/ $empty_bytes, /*PyObject *code,*/ $empty_tuple, /*PyObject *consts,*/ $empty_tuple, /*PyObject *names,*/ $empty_tuple, /*PyObject *varnames,*/ $empty_tuple, /*PyObject *freevars,*/ $empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ firstlineno, /*int firstlineno,*/ $empty_bytes /*PyObject *lnotab*/ ); bad: Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); return py_code; } #endif /* CYTHON_PROFILE */ Cython-0.26.1/Cython/Utility/StringTools.c0000664000175000017500000011527013150045407021177 0ustar stefanstefan00000000000000 //////////////////// IncludeStringH.proto //////////////////// #include //////////////////// IncludeCppStringH.proto //////////////////// #include //////////////////// InitStrings.proto //////////////////// static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ //////////////////// InitStrings //////////////////// static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } #else /* Python 3+ has unicode identifiers */ if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); } else if (t->encoding) { *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); } else { *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); } #endif if (!*t->p) return -1; // initialise cached hash value 
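/* Illustrative note (added for exposition, not part of the original source):
 * hashing each interned constant once here, at module-init time, stores the
 * hash inside the string object itself, so later attribute and dict lookups
 * on these names never pay for the first hash computation. */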
if (PyObject_Hash(*t->p) == -1) PyErr_Clear(); ++t; } return 0; } //////////////////// BytesContains.proto //////////////////// static CYTHON_INLINE int __Pyx_BytesContains(PyObject* bytes, char character); /*proto*/ //////////////////// BytesContains //////////////////// //@requires: IncludeStringH static CYTHON_INLINE int __Pyx_BytesContains(PyObject* bytes, char character) { const Py_ssize_t length = PyBytes_GET_SIZE(bytes); char* char_start = PyBytes_AS_STRING(bytes); return memchr(char_start, (unsigned char)character, (size_t)length) != NULL; } //////////////////// PyUCS4InUnicode.proto //////////////////// static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character); /*proto*/ static CYTHON_INLINE int __Pyx_PyUnicodeBufferContainsUCS4(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character); /*proto*/ //////////////////// PyUCS4InUnicode //////////////////// static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character) { #if CYTHON_PEP393_ENABLED const int kind = PyUnicode_KIND(unicode); if (likely(kind != PyUnicode_WCHAR_KIND)) { Py_ssize_t i; const void* udata = PyUnicode_DATA(unicode); const Py_ssize_t length = PyUnicode_GET_LENGTH(unicode); for (i=0; i < length; i++) { if (unlikely(character == PyUnicode_READ(kind, udata, i))) return 1; } return 0; } #endif return __Pyx_PyUnicodeBufferContainsUCS4( PyUnicode_AS_UNICODE(unicode), PyUnicode_GET_SIZE(unicode), character); } static CYTHON_INLINE int __Pyx_PyUnicodeBufferContainsUCS4(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { Py_UNICODE uchar; Py_UNICODE* pos; #if Py_UNICODE_SIZE == 2 if (character > 65535) { /* handle surrogate pairs for Py_UNICODE buffers in 16bit Unicode builds */ Py_UNICODE high_val, low_val; high_val = (Py_UNICODE) (0xD800 | (((character - 0x10000) >> 10) & ((1<<10)-1))); low_val = (Py_UNICODE) (0xDC00 | ( (character - 0x10000) & ((1<<10)-1))); for (pos=buffer; pos < buffer+length-1; pos++) { if (unlikely(high_val == pos[0]) & unlikely(low_val == pos[1])) return 1; } return 0; } #endif uchar = (Py_UNICODE) character; for (pos=buffer; pos < buffer+length; pos++) { if (unlikely(uchar == pos[0])) return 1; } return 0; } //////////////////// PyUnicodeContains.proto //////////////////// static CYTHON_INLINE int __Pyx_PyUnicode_ContainsTF(PyObject* substring, PyObject* text, int eq) { int result = PyUnicode_Contains(text, substring); return unlikely(result < 0) ? 
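/* Illustrative note (added for exposition, not part of the original source):
 * __Pyx_PyUnicode_ContainsTF backs "sub in text" / "sub not in text" tests:
 * eq is Py_EQ for "in" and Py_NE for "not in", which is why the non-negative
 * result is compared against (eq == Py_EQ) just below. */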
result : (result == (eq == Py_EQ)); } //////////////////// CStringEquals.proto //////////////////// static CYTHON_INLINE int __Pyx_StrEq(const char *, const char *); /*proto*/ //////////////////// CStringEquals //////////////////// static CYTHON_INLINE int __Pyx_StrEq(const char *s1, const char *s2) { while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } return *s1 == *s2; } //////////////////// StrEquals.proto //////////////////// //@requires: BytesEquals //@requires: UnicodeEquals #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals #else #define __Pyx_PyString_Equals __Pyx_PyBytes_Equals #endif //////////////////// UnicodeEquals.proto //////////////////// static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/ //////////////////// UnicodeEquals //////////////////// //@requires: BytesEquals static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { #if CYTHON_COMPILING_IN_PYPY return PyObject_RichCompareBool(s1, s2, equals); #else #if PY_MAJOR_VERSION < 3 PyObject* owned_ref = NULL; #endif int s1_is_unicode, s2_is_unicode; if (s1 == s2) { /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */ goto return_eq; } s1_is_unicode = PyUnicode_CheckExact(s1); s2_is_unicode = PyUnicode_CheckExact(s2); #if PY_MAJOR_VERSION < 3 if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { owned_ref = PyUnicode_FromObject(s2); if (unlikely(!owned_ref)) return -1; s2 = owned_ref; s2_is_unicode = 1; } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { owned_ref = PyUnicode_FromObject(s1); if (unlikely(!owned_ref)) return -1; s1 = owned_ref; s1_is_unicode = 1; } else if (((!s2_is_unicode) & (!s1_is_unicode))) { return __Pyx_PyBytes_Equals(s1, s2, equals); } #endif if (s1_is_unicode & s2_is_unicode) { Py_ssize_t length; int kind; void *data1, *data2; if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) return -1; length = __Pyx_PyUnicode_GET_LENGTH(s1); if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { goto return_ne; } #if CYTHON_USE_UNICODE_INTERNALS { Py_hash_t hash1, hash2; #if CYTHON_PEP393_ENABLED hash1 = ((PyASCIIObject*)s1)->hash; hash2 = ((PyASCIIObject*)s2)->hash; #else hash1 = ((PyUnicodeObject*)s1)->hash; hash2 = ((PyUnicodeObject*)s2)->hash; #endif if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { goto return_ne; } } #endif // len(s1) == len(s2) >= 1 (empty string is interned, and "s1 is not s2") kind = __Pyx_PyUnicode_KIND(s1); if (kind != __Pyx_PyUnicode_KIND(s2)) { goto return_ne; } data1 = __Pyx_PyUnicode_DATA(s1); data2 = __Pyx_PyUnicode_DATA(s2); if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { goto return_ne; } else if (length == 1) { goto return_eq; } else { int result = memcmp(data1, data2, (size_t)(length * kind)); #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_EQ) ? 
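/* Illustrative note (added for exposition, not part of the original source):
 * the unicode equality check above is layered cheapest-first: pointer
 * identity, then length, then the cached hashes, then the first code unit,
 * and only finally a full memcmp over the kind-sized code units. */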
(result == 0) : (result != 0); } } else if ((s1 == Py_None) & s2_is_unicode) { goto return_ne; } else if ((s2 == Py_None) & s1_is_unicode) { goto return_ne; } else { int result; PyObject* py_result = PyObject_RichCompare(s1, s2, equals); if (!py_result) return -1; result = __Pyx_PyObject_IsTrue(py_result); Py_DECREF(py_result); return result; } return_eq: #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_EQ); return_ne: #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_NE); #endif } //////////////////// BytesEquals.proto //////////////////// static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/ //////////////////// BytesEquals //////////////////// //@requires: IncludeStringH static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { #if CYTHON_COMPILING_IN_PYPY return PyObject_RichCompareBool(s1, s2, equals); #else if (s1 == s2) { /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */ return (equals == Py_EQ); } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { const char *ps1, *ps2; Py_ssize_t length = PyBytes_GET_SIZE(s1); if (length != PyBytes_GET_SIZE(s2)) return (equals == Py_NE); // len(s1) == len(s2) >= 1 (empty string is interned, and "s1 is not s2") ps1 = PyBytes_AS_STRING(s1); ps2 = PyBytes_AS_STRING(s2); if (ps1[0] != ps2[0]) { return (equals == Py_NE); } else if (length == 1) { return (equals == Py_EQ); } else { int result; #if CYTHON_USE_UNICODE_INTERNALS Py_hash_t hash1, hash2; hash1 = ((PyBytesObject*)s1)->ob_shash; hash2 = ((PyBytesObject*)s2)->ob_shash; if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { return (equals == Py_NE); } #endif result = memcmp(ps1, ps2, (size_t)length); return (equals == Py_EQ) ? (result == 0) : (result != 0); } } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { return (equals == Py_NE); } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { return (equals == Py_NE); } else { int result; PyObject* py_result = PyObject_RichCompare(s1, s2, equals); if (!py_result) return -1; result = __Pyx_PyObject_IsTrue(py_result); Py_DECREF(py_result); return result; } #endif } //////////////////// GetItemIntByteArray.proto //////////////////// #define __Pyx_GetItemInt_ByteArray(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ __Pyx_GetItemInt_ByteArray_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ (PyErr_SetString(PyExc_IndexError, "bytearray index out of range"), -1)) static CYTHON_INLINE int __Pyx_GetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, int wraparound, int boundscheck); //////////////////// GetItemIntByteArray //////////////////// static CYTHON_INLINE int __Pyx_GetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, int wraparound, int boundscheck) { Py_ssize_t length; if (wraparound | boundscheck) { length = PyByteArray_GET_SIZE(string); if (wraparound & unlikely(i < 0)) i += length; if ((!boundscheck) || likely((0 <= i) & (i < length))) { return (unsigned char) (PyByteArray_AS_STRING(string)[i]); } else { PyErr_SetString(PyExc_IndexError, "bytearray index out of range"); return -1; } } else { return (unsigned char) (PyByteArray_AS_STRING(string)[i]); } } //////////////////// SetItemIntByteArray.proto //////////////////// #define __Pyx_SetItemInt_ByteArray(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? 
\ __Pyx_SetItemInt_ByteArray_Fast(o, (Py_ssize_t)i, v, wraparound, boundscheck) : \ (PyErr_SetString(PyExc_IndexError, "bytearray index out of range"), -1)) static CYTHON_INLINE int __Pyx_SetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, unsigned char v, int wraparound, int boundscheck); //////////////////// SetItemIntByteArray //////////////////// static CYTHON_INLINE int __Pyx_SetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, unsigned char v, int wraparound, int boundscheck) { Py_ssize_t length; if (wraparound | boundscheck) { length = PyByteArray_GET_SIZE(string); if (wraparound & unlikely(i < 0)) i += length; if ((!boundscheck) || likely((0 <= i) & (i < length))) { PyByteArray_AS_STRING(string)[i] = (char) v; return 0; } else { PyErr_SetString(PyExc_IndexError, "bytearray index out of range"); return -1; } } else { PyByteArray_AS_STRING(string)[i] = (char) v; return 0; } } //////////////////// GetItemIntUnicode.proto //////////////////// #define __Pyx_GetItemInt_Unicode(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ __Pyx_GetItemInt_Unicode_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ (PyErr_SetString(PyExc_IndexError, "string index out of range"), (Py_UCS4)-1)) static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, int wraparound, int boundscheck); //////////////////// GetItemIntUnicode //////////////////// static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, int wraparound, int boundscheck) { Py_ssize_t length; if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return (Py_UCS4)-1; if (wraparound | boundscheck) { length = __Pyx_PyUnicode_GET_LENGTH(ustring); if (wraparound & unlikely(i < 0)) i += length; if ((!boundscheck) || likely((0 <= i) & (i < length))) { return __Pyx_PyUnicode_READ_CHAR(ustring, i); } else { PyErr_SetString(PyExc_IndexError, "string index out of range"); return (Py_UCS4)-1; } } else { return __Pyx_PyUnicode_READ_CHAR(ustring, i); } } /////////////// decode_c_string_utf16.proto /////////////// static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { int byteorder = 0; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { int byteorder = -1; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { int byteorder = 1; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } /////////////// decode_cpp_string.proto /////////////// //@requires: IncludeCppStringH //@requires: decode_c_bytes static CYTHON_INLINE PyObject* __Pyx_decode_cpp_string( std::string cppstring, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { return __Pyx_decode_c_bytes( cppstring.data(), cppstring.size(), start, stop, encoding, errors, decode_func); } /////////////// decode_c_string.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const char* cstring, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); /////////////// decode_c_string /////////////// //@requires: IncludeStringH //@requires: 
decode_c_string_utf16 /* duplicate code to avoid calling strlen() if start >= 0 and stop >= 0 */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const char* cstring, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { Py_ssize_t length; if (unlikely((start < 0) | (stop < 0))) { size_t slen = strlen(cstring); if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) { PyErr_SetString(PyExc_OverflowError, "c-string too long to convert to Python"); return NULL; } length = (Py_ssize_t) slen; if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; } length = stop - start; if (unlikely(length <= 0)) return PyUnicode_FromUnicode(NULL, 0); cstring += start; if (decode_func) { return decode_func(cstring, length, errors); } else { return PyUnicode_Decode(cstring, length, encoding, errors); } } /////////////// decode_c_bytes.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); /////////////// decode_c_bytes /////////////// //@requires: decode_c_string_utf16 static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { if (unlikely((start < 0) | (stop < 0))) { if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; } if (stop > length) stop = length; length = stop - start; if (unlikely(length <= 0)) return PyUnicode_FromUnicode(NULL, 0); cstring += start; if (decode_func) { return decode_func(cstring, length, errors); } else { return PyUnicode_Decode(cstring, length, encoding, errors); } } /////////////// decode_bytes.proto /////////////// //@requires: decode_c_bytes static CYTHON_INLINE PyObject* __Pyx_decode_bytes( PyObject* string, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { return __Pyx_decode_c_bytes( PyBytes_AS_STRING(string), PyBytes_GET_SIZE(string), start, stop, encoding, errors, decode_func); } /////////////// decode_bytearray.proto /////////////// //@requires: decode_c_bytes static CYTHON_INLINE PyObject* __Pyx_decode_bytearray( PyObject* string, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { return __Pyx_decode_c_bytes( PyByteArray_AS_STRING(string), PyByteArray_GET_SIZE(string), start, stop, encoding, errors, decode_func); } /////////////// PyUnicode_Substring.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( PyObject* text, Py_ssize_t start, Py_ssize_t stop); /////////////// PyUnicode_Substring /////////////// static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( PyObject* text, Py_ssize_t start, Py_ssize_t stop) { Py_ssize_t length; if (unlikely(__Pyx_PyUnicode_READY(text) == -1)) return NULL; length = __Pyx_PyUnicode_GET_LENGTH(text); if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; else if (stop > length) stop = length; length = stop - start; if (length <= 0) return PyUnicode_FromUnicode(NULL, 0); #if CYTHON_PEP393_ENABLED 
return PyUnicode_FromKindAndData(PyUnicode_KIND(text), PyUnicode_1BYTE_DATA(text) + start*PyUnicode_KIND(text), stop-start); #else return PyUnicode_FromUnicode(PyUnicode_AS_UNICODE(text)+start, stop-start); #endif } /////////////// py_unicode_istitle.proto /////////////// // Py_UNICODE_ISTITLE() doesn't match unicode.istitle() as the latter // additionally allows character that comply with Py_UNICODE_ISUPPER() #if PY_VERSION_HEX < 0x030200A2 static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UNICODE uchar) #else static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar) #endif { return Py_UNICODE_ISTITLE(uchar) || Py_UNICODE_ISUPPER(uchar); } /////////////// unicode_tailmatch.proto /////////////// static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ /////////////// unicode_tailmatch /////////////// // Python's unicode.startswith() and unicode.endswith() support a // tuple of prefixes/suffixes, whereas it's much more common to // test for a single unicode string. static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction) { if (unlikely(PyTuple_Check(substr))) { Py_ssize_t i, count = PyTuple_GET_SIZE(substr); for (i = 0; i < count; i++) { Py_ssize_t result; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substr, i), start, end, direction); #else PyObject* sub = PySequence_ITEM(substr, i); if (unlikely(!sub)) return -1; result = PyUnicode_Tailmatch(s, sub, start, end, direction); Py_DECREF(sub); #endif if (result) { return (int) result; } } return 0; } return (int) PyUnicode_Tailmatch(s, substr, start, end, direction); } /////////////// bytes_tailmatch.proto /////////////// static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ /////////////// bytes_tailmatch /////////////// static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, Py_ssize_t end, int direction) { const char* self_ptr = PyBytes_AS_STRING(self); Py_ssize_t self_len = PyBytes_GET_SIZE(self); const char* sub_ptr; Py_ssize_t sub_len; int retval; Py_buffer view; view.obj = NULL; if ( PyBytes_Check(arg) ) { sub_ptr = PyBytes_AS_STRING(arg); sub_len = PyBytes_GET_SIZE(arg); } #if PY_MAJOR_VERSION < 3 // Python 2.x allows mixing unicode and str else if ( PyUnicode_Check(arg) ) { return (int) PyUnicode_Tailmatch(self, arg, start, end, direction); } #endif else { if (unlikely(PyObject_GetBuffer(self, &view, PyBUF_SIMPLE) == -1)) return -1; sub_ptr = (const char*) view.buf; sub_len = view.len; } if (end > self_len) end = self_len; else if (end < 0) end += self_len; if (end < 0) end = 0; if (start < 0) start += self_len; if (start < 0) start = 0; if (direction > 0) { /* endswith */ if (end-sub_len > start) start = end - sub_len; } if (start + sub_len <= end) retval = !memcmp(self_ptr+start, sub_ptr, (size_t)sub_len); else retval = 0; if (view.obj) PyBuffer_Release(&view); return retval; } static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction) { if (unlikely(PyTuple_Check(substr))) { Py_ssize_t i, count = PyTuple_GET_SIZE(substr); for (i = 0; i < count; i++) { int result; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS result = 
__Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substr, i), start, end, direction); #else PyObject* sub = PySequence_ITEM(substr, i); if (unlikely(!sub)) return -1; result = __Pyx_PyBytes_SingleTailmatch(self, sub, start, end, direction); Py_DECREF(sub); #endif if (result) { return result; } } return 0; } return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction); } /////////////// str_tailmatch.proto /////////////// static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ /////////////// str_tailmatch /////////////// //@requires: bytes_tailmatch //@requires: unicode_tailmatch static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, Py_ssize_t end, int direction) { // We do not use a C compiler macro here to avoid "unused function" // warnings for the *_Tailmatch() function that is not being used in // the specific CPython version. The C compiler will generate the same // code anyway, and will usually just remove the unused function. if (PY_MAJOR_VERSION < 3) return __Pyx_PyBytes_Tailmatch(self, arg, start, end, direction); else return __Pyx_PyUnicode_Tailmatch(self, arg, start, end, direction); } /////////////// bytes_index.proto /////////////// static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds); /*proto*/ /////////////// bytes_index /////////////// static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds) { if (check_bounds) { Py_ssize_t size = PyBytes_GET_SIZE(bytes); if (unlikely(index >= size) | ((index < 0) & unlikely(index < -size))) { PyErr_SetString(PyExc_IndexError, "string index out of range"); return (char) -1; } } if (index < 0) index += PyBytes_GET_SIZE(bytes); return PyBytes_AS_STRING(bytes)[index]; } //////////////////// StringJoin.proto //////////////////// #if PY_MAJOR_VERSION < 3 #define __Pyx_PyString_Join __Pyx_PyBytes_Join #define __Pyx_PyBaseString_Join(s, v) (PyUnicode_CheckExact(s) ? PyUnicode_Join(s, v) : __Pyx_PyBytes_Join(s, v)) #else #define __Pyx_PyString_Join PyUnicode_Join #define __Pyx_PyBaseString_Join PyUnicode_Join #endif #if CYTHON_COMPILING_IN_CPYTHON #if PY_MAJOR_VERSION < 3 #define __Pyx_PyBytes_Join _PyString_Join #else #define __Pyx_PyBytes_Join _PyBytes_Join #endif #else static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values); /*proto*/ #endif //////////////////// StringJoin //////////////////// #if !CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values) { return PyObject_CallMethodObjArgs(sep, PYIDENT("join"), values, NULL); } #endif /////////////// JoinPyUnicode.proto /////////////// static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, Py_UCS4 max_char); /////////////// JoinPyUnicode /////////////// //@requires: IncludeStringH //@substitute: naming static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, CYTHON_UNUSED Py_UCS4 max_char) { #if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *result_uval; int result_ukind; Py_ssize_t i, char_pos; void *result_udata; #if CYTHON_PEP393_ENABLED // Py 3.3+ (post PEP-393) result_uval = PyUnicode_New(result_ulength, max_char); if (unlikely(!result_uval)) return NULL; result_ukind = (max_char <= 255) ? 
PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; result_udata = PyUnicode_DATA(result_uval); #else // Py 2.x/3.2 (pre PEP-393) result_uval = PyUnicode_FromUnicode(NULL, result_ulength); if (unlikely(!result_uval)) return NULL; result_ukind = sizeof(Py_UNICODE); result_udata = PyUnicode_AS_UNICODE(result_uval); #endif char_pos = 0; for (i=0; i < value_count; i++) { int ukind; Py_ssize_t ulength; void *udata; PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); if (unlikely(__Pyx_PyUnicode_READY(uval))) goto bad; ulength = __Pyx_PyUnicode_GET_LENGTH(uval); if (unlikely(!ulength)) continue; if (unlikely(char_pos + ulength < 0)) goto overflow; ukind = __Pyx_PyUnicode_KIND(uval); udata = __Pyx_PyUnicode_DATA(uval); if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { memcpy((char *)result_udata + char_pos * result_ukind, udata, ulength * result_ukind); } else { #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); #else Py_ssize_t j; for (j=0; j < ulength; j++) { Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); } #endif } char_pos += ulength; } return result_uval; overflow: PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); bad: Py_DECREF(result_uval); return NULL; #else // non-CPython fallback result_ulength++; value_count++; return PyUnicode_Join($empty_unicode, value_tuple); #endif } /////////////// BuildPyUnicode.proto /////////////// static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, int prepend_sign, char padding_char); /////////////// BuildPyUnicode /////////////// // Create a PyUnicode object from an ASCII char*, e.g. a formatted number. 
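// Illustrative usage sketch (not part of the utility code; the concrete values
// below are hypothetical and only document the parameter semantics):
// "ulength" is the total length of the result including sign and padding,
// "clength" is the number of ASCII characters in "chars", and the sign, when
// prepended, occupies one of the padding positions.
//
//     char digits[] = "42";
//     PyObject *s = __Pyx_PyUnicode_BuildFromAscii(5, digits, 2, 1, '0');
//     // expected: s == u"-0042" on success; NULL with an exception set on failure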
static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, int prepend_sign, char padding_char) { PyObject *uval; Py_ssize_t uoffset = ulength - clength; #if CYTHON_USE_UNICODE_INTERNALS Py_ssize_t i; #if CYTHON_PEP393_ENABLED // Py 3.3+ (post PEP-393) void *udata; uval = PyUnicode_New(ulength, 127); if (unlikely(!uval)) return NULL; udata = PyUnicode_DATA(uval); #else // Py 2.x/3.2 (pre PEP-393) Py_UNICODE *udata; uval = PyUnicode_FromUnicode(NULL, ulength); if (unlikely(!uval)) return NULL; udata = PyUnicode_AS_UNICODE(uval); #endif if (uoffset > 0) { i = 0; if (prepend_sign) { __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, 0, '-'); i++; } for (; i < uoffset; i++) { __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, i, padding_char); } } for (i=0; i < clength; i++) { __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, uoffset+i, chars[i]); } #else // non-CPython { uval = NULL; PyObject *sign = NULL, *padding = NULL; if (uoffset > 0) { prepend_sign = !!prepend_sign; if (uoffset > prepend_sign) { padding = PyUnicode_FromOrdinal(padding_char); if (likely(padding) && uoffset > prepend_sign + 1) { PyObject *tmp; PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); if (unlikely(!repeat)) goto done_or_error; tmp = PyNumber_Multiply(padding, repeat); Py_DECREF(repeat); Py_DECREF(padding); padding = tmp; } if (unlikely(!padding)) goto done_or_error; } if (prepend_sign) { sign = PyUnicode_FromOrdinal('-'); if (unlikely(!sign)) goto done_or_error; } } uval = PyUnicode_DecodeASCII(chars, clength, NULL); if (likely(uval) && padding) { PyObject *tmp = PyNumber_Add(padding, uval); Py_DECREF(uval); uval = tmp; } if (likely(uval) && sign) { PyObject *tmp = PyNumber_Add(sign, uval); Py_DECREF(uval); uval = tmp; } done_or_error: Py_XDECREF(padding); Py_XDECREF(sign); } #endif return uval; } //////////////////// ByteArrayAppendObject.proto //////////////////// static CYTHON_INLINE int __Pyx_PyByteArray_AppendObject(PyObject* bytearray, PyObject* value); //////////////////// ByteArrayAppendObject //////////////////// //@requires: ByteArrayAppend static CYTHON_INLINE int __Pyx_PyByteArray_AppendObject(PyObject* bytearray, PyObject* value) { Py_ssize_t ival; #if PY_MAJOR_VERSION < 3 if (unlikely(PyString_Check(value))) { if (unlikely(PyString_GET_SIZE(value) != 1)) { PyErr_SetString(PyExc_ValueError, "string must be of size 1"); return -1; } ival = (unsigned char) (PyString_AS_STRING(value)[0]); } else #endif #if CYTHON_USE_PYLONG_INTERNALS if (likely(PyLong_CheckExact(value)) && likely(Py_SIZE(value) == 1 || Py_SIZE(value) == 0)) { if (Py_SIZE(value) == 0) { ival = 0; } else { ival = ((PyLongObject*)value)->ob_digit[0]; if (unlikely(ival > 255)) goto bad_range; } } else #endif { // CPython calls PyNumber_Index() internally ival = __Pyx_PyIndex_AsSsize_t(value); if (unlikely((ival < 0) | (ival > 255))) { if (ival == -1 && PyErr_Occurred()) return -1; goto bad_range; } } return __Pyx_PyByteArray_Append(bytearray, ival); bad_range: PyErr_SetString(PyExc_ValueError, "byte must be in range(0, 256)"); return -1; } //////////////////// ByteArrayAppend.proto //////////////////// static CYTHON_INLINE int __Pyx_PyByteArray_Append(PyObject* bytearray, int value); //////////////////// ByteArrayAppend //////////////////// //@requires: ObjectHandling.c::PyObjectCallMethod1 static CYTHON_INLINE int __Pyx_PyByteArray_Append(PyObject* bytearray, int value) { PyObject *pyval, *retval; #if CYTHON_COMPILING_IN_CPYTHON if (likely((value >= 0) & (value <= 255))) { Py_ssize_t n = 
Py_SIZE(bytearray); if (likely(n != PY_SSIZE_T_MAX)) { if (unlikely(PyByteArray_Resize(bytearray, n + 1) < 0)) return -1; PyByteArray_AS_STRING(bytearray)[n] = value; return 0; } } else { PyErr_SetString(PyExc_ValueError, "byte must be in range(0, 256)"); return -1; } #endif pyval = PyInt_FromLong(value); if (unlikely(!pyval)) return -1; retval = __Pyx_PyObject_CallMethod1(bytearray, PYIDENT("append"), pyval); Py_DECREF(pyval); if (unlikely(!retval)) return -1; Py_DECREF(retval); return 0; } //////////////////// PyObjectFormat.proto //////////////////// #if CYTHON_USE_UNICODE_WRITER static PyObject* __Pyx_PyObject_Format(PyObject* s, PyObject* f); #else #define __Pyx_PyObject_Format(s, f) PyObject_Format(s, f) #endif //////////////////// PyObjectFormat //////////////////// #if CYTHON_USE_UNICODE_WRITER static PyObject* __Pyx_PyObject_Format(PyObject* obj, PyObject* format_spec) { int ret; _PyUnicodeWriter writer; if (likely(PyFloat_CheckExact(obj))) { // copied from CPython 3.5 "float__format__()" in floatobject.c #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x03040000 _PyUnicodeWriter_Init(&writer, 0); #else _PyUnicodeWriter_Init(&writer); #endif ret = _PyFloat_FormatAdvancedWriter( &writer, obj, format_spec, 0, PyUnicode_GET_LENGTH(format_spec)); } else if (likely(PyLong_CheckExact(obj))) { // copied from CPython 3.5 "long__format__()" in longobject.c #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x03040000 _PyUnicodeWriter_Init(&writer, 0); #else _PyUnicodeWriter_Init(&writer); #endif ret = _PyLong_FormatAdvancedWriter( &writer, obj, format_spec, 0, PyUnicode_GET_LENGTH(format_spec)); } else { return PyObject_Format(obj, format_spec); } if (unlikely(ret == -1)) { _PyUnicodeWriter_Dealloc(&writer); return NULL; } return _PyUnicodeWriter_Finish(&writer); } #endif //////////////////// PyObjectFormatSimple.proto //////////////////// #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyObject_FormatSimple(s, f) ( \ likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ PyObject_Format(s, f)) #elif PY_MAJOR_VERSION < 3 // str is common in Py2, but formatting must return a Unicode string #define __Pyx_PyObject_FormatSimple(s, f) ( \ likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") : \ PyObject_Format(s, f)) #elif CYTHON_USE_TYPE_SLOTS // Py3 nicely returns unicode strings from str() which makes this quite efficient for builtin types #define __Pyx_PyObject_FormatSimple(s, f) ( \ likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_str(s) : \ likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_str(s) : \ PyObject_Format(s, f)) #else #define __Pyx_PyObject_FormatSimple(s, f) ( \ likely(PyUnicode_CheckExact(s)) ? 
(Py_INCREF(s), s) : \ PyObject_Format(s, f)) #endif //////////////////// PyObjectFormatAndDecref.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f); static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f); //////////////////// PyObjectFormatAndDecref //////////////////// static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f) { if (unlikely(!s)) return NULL; if (likely(PyUnicode_CheckExact(s))) return s; #if PY_MAJOR_VERSION < 3 // str is common in Py2, but formatting must return a Unicode string if (likely(PyString_CheckExact(s))) { PyObject *result = PyUnicode_FromEncodedObject(s, NULL, "strict"); Py_DECREF(s); return result; } #endif return __Pyx_PyObject_FormatAndDecref(s, f); } static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f) { PyObject *result = PyObject_Format(s, f); Py_DECREF(s); return result; } Cython-0.26.1/Cython/Utility/ObjectHandling.c0000664000175000017500000017714013150045407021567 0ustar stefanstefan00000000000000/* * General object operations and protocol implementations, * including their specialisations for certain builtins. * * Optional optimisations for builtins are in Optimize.c. * * Required replacements of builtins are in Builtins.c. */ /////////////// RaiseNoneIterError.proto /////////////// static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); /////////////// RaiseNoneIterError /////////////// static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); } /////////////// RaiseTooManyValuesToUnpack.proto /////////////// static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); /////////////// RaiseTooManyValuesToUnpack /////////////// static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { PyErr_Format(PyExc_ValueError, "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); } /////////////// RaiseNeedMoreValuesToUnpack.proto /////////////// static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); /////////////// RaiseNeedMoreValuesToUnpack /////////////// static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { PyErr_Format(PyExc_ValueError, "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", index, (index == 1) ? 
"" : "s"); } /////////////// UnpackTupleError.proto /////////////// static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); /*proto*/ /////////////// UnpackTupleError /////////////// //@requires: RaiseNoneIterError //@requires: RaiseNeedMoreValuesToUnpack //@requires: RaiseTooManyValuesToUnpack static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { if (t == Py_None) { __Pyx_RaiseNoneNotIterableError(); } else if (PyTuple_GET_SIZE(t) < index) { __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); } else { __Pyx_RaiseTooManyValuesError(index); } } /////////////// UnpackItemEndCheck.proto /////////////// static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ /////////////// UnpackItemEndCheck /////////////// //@requires: RaiseTooManyValuesToUnpack //@requires: IterFinish static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { if (unlikely(retval)) { Py_DECREF(retval); __Pyx_RaiseTooManyValuesError(expected); return -1; } else { return __Pyx_IterFinish(); } return 0; } /////////////// UnpackTuple2.proto /////////////// static CYTHON_INLINE int __Pyx_unpack_tuple2(PyObject* tuple, PyObject** value1, PyObject** value2, int is_tuple, int has_known_size, int decref_tuple); /////////////// UnpackTuple2 /////////////// //@requires: UnpackItemEndCheck //@requires: UnpackTupleError //@requires: RaiseNeedMoreValuesToUnpack static CYTHON_INLINE int __Pyx_unpack_tuple2(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int is_tuple, int has_known_size, int decref_tuple) { Py_ssize_t index; PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; if (!is_tuple && unlikely(!PyTuple_Check(tuple))) { iternextfunc iternext; iter = PyObject_GetIter(tuple); if (unlikely(!iter)) goto bad; if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } iternext = Py_TYPE(iter)->tp_iternext; value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; Py_DECREF(iter); } else { if (!has_known_size && unlikely(PyTuple_GET_SIZE(tuple) != 2)) { __Pyx_UnpackTupleError(tuple, 2); goto bad; } #if CYTHON_COMPILING_IN_PYPY value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; #else value1 = PyTuple_GET_ITEM(tuple, 0); value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value1); Py_INCREF(value2); #endif if (decref_tuple) { Py_DECREF(tuple); } } *pvalue1 = value1; *pvalue2 = value2; return 0; unpacking_failed: if (!has_known_size && __Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); bad: Py_XDECREF(iter); Py_XDECREF(value1); Py_XDECREF(value2); if (decref_tuple) { Py_XDECREF(tuple); } return -1; } /////////////// IterNext.proto /////////////// #define __Pyx_PyIter_Next(obj) __Pyx_PyIter_Next2(obj, NULL) static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject *, PyObject *); /*proto*/ /////////////// IterNext /////////////// // originally copied from Py3's builtin_next() static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject* defval) { PyObject* next; iternextfunc iternext = Py_TYPE(iterator)->tp_iternext; #if CYTHON_USE_TYPE_SLOTS if (unlikely(!iternext)) { #else if (unlikely(!iternext) || unlikely(!PyIter_Check(iterator))) { #endif PyErr_Format(PyExc_TypeError, "%.200s object is not an iterator", Py_TYPE(iterator)->tp_name); return 
NULL; } next = iternext(iterator); if (likely(next)) return next; #if CYTHON_USE_TYPE_SLOTS #if PY_VERSION_HEX >= 0x02070000 if (unlikely(iternext == &_PyObject_NextNotImplemented)) return NULL; #endif #endif if (defval) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (unlikely(exc_type != PyExc_StopIteration) && !PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) return NULL; PyErr_Clear(); } Py_INCREF(defval); return defval; } if (!PyErr_Occurred()) PyErr_SetNone(PyExc_StopIteration); return NULL; } /////////////// IterFinish.proto /////////////// static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ /////////////// IterFinish /////////////// // When PyIter_Next(iter) has returned NULL in order to signal termination, // this function does the right cleanup and returns 0 on success. If it // detects an error that occurred in the iterator, it returns -1. static CYTHON_INLINE int __Pyx_IterFinish(void) { #if CYTHON_FAST_THREAD_STATE PyThreadState *tstate = PyThreadState_GET(); PyObject* exc_type = tstate->curexc_type; if (unlikely(exc_type)) { if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { PyObject *exc_value, *exc_tb; exc_value = tstate->curexc_value; exc_tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; Py_DECREF(exc_type); Py_XDECREF(exc_value); Py_XDECREF(exc_tb); return 0; } else { return -1; } } return 0; #else if (unlikely(PyErr_Occurred())) { if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { PyErr_Clear(); return 0; } else { return -1; } } return 0; #endif } /////////////// DictGetItem.proto /////////////// #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { PyObject *value; value = PyDict_GetItemWithError(d, key); if (unlikely(!value)) { if (!PyErr_Occurred()) { PyObject* args = PyTuple_Pack(1, key); if (likely(args)) PyErr_SetObject(PyExc_KeyError, args); Py_XDECREF(args); } return NULL; } Py_INCREF(value); return value; } #else #define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) #endif /////////////// GetItemInt.proto /////////////// #define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) : \ (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) : \ __Pyx_GetItemInt_Generic(o, to_py_func(i)))) {{for type in ['List', 'Tuple']}} #define __Pyx_GetItemInt_{{type}}(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? 
\ __Pyx_GetItemInt_{{type}}_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ (PyErr_SetString(PyExc_IndexError, "{{ type.lower() }} index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); {{endfor}} static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, int wraparound, int boundscheck); /////////////// GetItemInt /////////////// static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; if (!j) return NULL; r = PyObject_GetItem(o, j); Py_DECREF(j); return r; } {{for type in ['List', 'Tuple']}} static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += Py{{type}}_GET_SIZE(o); } if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < Py{{type}}_GET_SIZE(o)))) { PyObject *r = Py{{type}}_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } {{endfor}} static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS if (is_list || PyList_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { PyObject *r = PyList_GET_ITEM(o, n); Py_INCREF(r); return r; } } else if (PyTuple_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, n); Py_INCREF(r); return r; } } else { // inlined PySequence_GetItem() + special cased length overflow PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; if (likely(m && m->sq_item)) { if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { Py_ssize_t l = m->sq_length(o); if (likely(l >= 0)) { i += l; } else { // if length > max(Py_ssize_t), maybe the object can wrap around itself? if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return NULL; PyErr_Clear(); } } return m->sq_item(o, i); } } #else if (is_list || PySequence_Check(o)) { return PySequence_GetItem(o, i); } #endif return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); } /////////////// SetItemInt.proto /////////////// #define __Pyx_SetItemInt(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ __Pyx_SetItemInt_Fast(o, (Py_ssize_t)i, v, is_list, wraparound, boundscheck) : \ (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \ __Pyx_SetItemInt_Generic(o, to_py_func(i), v))) static CYTHON_INLINE int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v); static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, int is_list, int wraparound, int boundscheck); /////////////// SetItemInt /////////////// static CYTHON_INLINE int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) { int r; if (!j) return -1; r = PyObject_SetItem(o, j, v); Py_DECREF(j); return r; } static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, int is_list, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS if (is_list || PyList_CheckExact(o)) { Py_ssize_t n = (!wraparound) ? i : ((likely(i >= 0)) ? i : i + PyList_GET_SIZE(o)); if ((!boundscheck) || likely((n >= 0) & (n < PyList_GET_SIZE(o)))) { PyObject* old = PyList_GET_ITEM(o, n); Py_INCREF(v); PyList_SET_ITEM(o, n, v); Py_DECREF(old); return 1; } } else { // inlined PySequence_SetItem() + special cased length overflow PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; if (likely(m && m->sq_ass_item)) { if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { Py_ssize_t l = m->sq_length(o); if (likely(l >= 0)) { i += l; } else { // if length > max(Py_ssize_t), maybe the object can wrap around itself? if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return -1; PyErr_Clear(); } } return m->sq_ass_item(o, i, v); } } #else #if CYTHON_COMPILING_IN_PYPY if (is_list || (PySequence_Check(o) && !PyDict_Check(o))) { #else if (is_list || PySequence_Check(o)) { #endif return PySequence_SetItem(o, i, v); } #endif return __Pyx_SetItemInt_Generic(o, PyInt_FromSsize_t(i), v); } /////////////// DelItemInt.proto /////////////// #define __Pyx_DelItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ __Pyx_DelItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound) : \ (is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \ __Pyx_DelItem_Generic(o, to_py_func(i)))) static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j); static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, int wraparound); /////////////// DelItemInt /////////////// static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) { int r; if (!j) return -1; r = PyObject_DelItem(o, j); Py_DECREF(j); return r; } static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, CYTHON_UNUSED int is_list, CYTHON_NCP_UNUSED int wraparound) { #if !CYTHON_USE_TYPE_SLOTS if (is_list || PySequence_Check(o)) { return PySequence_DelItem(o, i); } #else // inlined PySequence_DelItem() + special cased length overflow PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; if (likely(m && m->sq_ass_item)) { if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { Py_ssize_t l = m->sq_length(o); if (likely(l >= 0)) { i += l; } else { // if length > max(Py_ssize_t), maybe the object can wrap around itself? 
if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return -1; PyErr_Clear(); } } return m->sq_ass_item(o, i, (PyObject *)NULL); } #endif return __Pyx_DelItem_Generic(o, PyInt_FromSsize_t(i)); } /////////////// SliceObject.proto /////////////// // we pass pointer addresses to show the C compiler what is NULL and what isn't {{if access == 'Get'}} static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, PyObject** py_start, PyObject** py_stop, PyObject** py_slice, int has_cstart, int has_cstop, int wraparound); {{else}} #define __Pyx_PyObject_DelSlice(obj, cstart, cstop, py_start, py_stop, py_slice, has_cstart, has_cstop, wraparound) \ __Pyx_PyObject_SetSlice(obj, (PyObject*)NULL, cstart, cstop, py_start, py_stop, py_slice, has_cstart, has_cstop, wraparound) // we pass pointer addresses to show the C compiler what is NULL and what isn't static CYTHON_INLINE int __Pyx_PyObject_SetSlice( PyObject* obj, PyObject* value, Py_ssize_t cstart, Py_ssize_t cstop, PyObject** py_start, PyObject** py_stop, PyObject** py_slice, int has_cstart, int has_cstop, int wraparound); {{endif}} /////////////// SliceObject /////////////// {{if access == 'Get'}} static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, {{else}} static CYTHON_INLINE int __Pyx_PyObject_SetSlice(PyObject* obj, PyObject* value, {{endif}} Py_ssize_t cstart, Py_ssize_t cstop, PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { #if CYTHON_USE_TYPE_SLOTS PyMappingMethods* mp; #if PY_MAJOR_VERSION < 3 PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; if (likely(ms && ms->sq_{{if access == 'Set'}}ass_{{endif}}slice)) { if (!has_cstart) { if (_py_start && (*_py_start != Py_None)) { cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; } else cstart = 0; } if (!has_cstop) { if (_py_stop && (*_py_stop != Py_None)) { cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; } else cstop = PY_SSIZE_T_MAX; } if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { Py_ssize_t l = ms->sq_length(obj); if (likely(l >= 0)) { if (cstop < 0) { cstop += l; if (cstop < 0) cstop = 0; } if (cstart < 0) { cstart += l; if (cstart < 0) cstart = 0; } } else { // if length > max(Py_ssize_t), maybe the object can wrap around itself? 
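// (descriptive note: only an OverflowError raised by sq_length() is tolerated
//  here; it is cleared and the original negative bounds are passed through
//  unchanged so the type's own slice slot can interpret them, while any other
//  exception aborts the operation)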
if (!PyErr_ExceptionMatches(PyExc_OverflowError)) goto bad; PyErr_Clear(); } } {{if access == 'Get'}} return ms->sq_slice(obj, cstart, cstop); {{else}} return ms->sq_ass_slice(obj, cstart, cstop, value); {{endif}} } #endif mp = Py_TYPE(obj)->tp_as_mapping; {{if access == 'Get'}} if (likely(mp && mp->mp_subscript)) {{else}} if (likely(mp && mp->mp_ass_subscript)) {{endif}} #endif { {{if access == 'Get'}}PyObject*{{else}}int{{endif}} result; PyObject *py_slice, *py_start, *py_stop; if (_py_slice) { py_slice = *_py_slice; } else { PyObject* owned_start = NULL; PyObject* owned_stop = NULL; if (_py_start) { py_start = *_py_start; } else { if (has_cstart) { owned_start = py_start = PyInt_FromSsize_t(cstart); if (unlikely(!py_start)) goto bad; } else py_start = Py_None; } if (_py_stop) { py_stop = *_py_stop; } else { if (has_cstop) { owned_stop = py_stop = PyInt_FromSsize_t(cstop); if (unlikely(!py_stop)) { Py_XDECREF(owned_start); goto bad; } } else py_stop = Py_None; } py_slice = PySlice_New(py_start, py_stop, Py_None); Py_XDECREF(owned_start); Py_XDECREF(owned_stop); if (unlikely(!py_slice)) goto bad; } #if CYTHON_USE_TYPE_SLOTS {{if access == 'Get'}} result = mp->mp_subscript(obj, py_slice); #else result = PyObject_GetItem(obj, py_slice); {{else}} result = mp->mp_ass_subscript(obj, py_slice, value); #else result = value ? PyObject_SetItem(obj, py_slice, value) : PyObject_DelItem(obj, py_slice); {{endif}} #endif if (!_py_slice) { Py_DECREF(py_slice); } return result; } PyErr_Format(PyExc_TypeError, {{if access == 'Get'}} "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); {{else}} "'%.200s' object does not support slice %.10s", Py_TYPE(obj)->tp_name, value ? "assignment" : "deletion"); {{endif}} bad: return {{if access == 'Get'}}NULL{{else}}-1{{endif}}; } /////////////// SliceTupleAndList.proto /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyList_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); #else #define __Pyx_PyList_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) #define __Pyx_PyTuple_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) #endif /////////////// SliceTupleAndList /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE void __Pyx_crop_slice(Py_ssize_t* _start, Py_ssize_t* _stop, Py_ssize_t* _length) { Py_ssize_t start = *_start, stop = *_stop, length = *_length; if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; else if (stop > length) stop = length; *_length = stop - start; *_start = start; *_stop = stop; } static CYTHON_INLINE void __Pyx_copy_object_array(PyObject** CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { PyObject *v; Py_ssize_t i; for (i = 0; i < length; i++) { v = dest[i] = src[i]; Py_INCREF(v); } } {{for type in ['List', 'Tuple']}} static CYTHON_INLINE PyObject* __Pyx_Py{{type}}_GetSlice( PyObject* src, Py_ssize_t start, Py_ssize_t stop) { PyObject* dest; Py_ssize_t length = Py{{type}}_GET_SIZE(src); __Pyx_crop_slice(&start, &stop, &length); if (unlikely(length <= 0)) return Py{{type}}_New(0); dest = Py{{type}}_New(length); if (unlikely(!dest)) return NULL; __Pyx_copy_object_array( ((Py{{type}}Object*)src)->ob_item + start, ((Py{{type}}Object*)dest)->ob_item, length); return dest; } {{endfor}} #endif /////////////// CalculateMetaclass.proto /////////////// static PyObject 
*__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); /////////////// CalculateMetaclass /////////////// static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { Py_ssize_t i, nbases = PyTuple_GET_SIZE(bases); for (i=0; i < nbases; i++) { PyTypeObject *tmptype; PyObject *tmp = PyTuple_GET_ITEM(bases, i); tmptype = Py_TYPE(tmp); #if PY_MAJOR_VERSION < 3 if (tmptype == &PyClass_Type) continue; #endif if (!metaclass) { metaclass = tmptype; continue; } if (PyType_IsSubtype(metaclass, tmptype)) continue; if (PyType_IsSubtype(tmptype, metaclass)) { metaclass = tmptype; continue; } // else: PyErr_SetString(PyExc_TypeError, "metaclass conflict: " "the metaclass of a derived class " "must be a (non-strict) subclass " "of the metaclasses of all its bases"); return NULL; } if (!metaclass) { #if PY_MAJOR_VERSION < 3 metaclass = &PyClass_Type; #else metaclass = &PyType_Type; #endif } // make owned reference Py_INCREF((PyObject*) metaclass); return (PyObject*) metaclass; } /////////////// FindInheritedMetaclass.proto /////////////// static PyObject *__Pyx_FindInheritedMetaclass(PyObject *bases); /*proto*/ /////////////// FindInheritedMetaclass /////////////// //@requires: PyObjectGetAttrStr //@requires: CalculateMetaclass static PyObject *__Pyx_FindInheritedMetaclass(PyObject *bases) { PyObject *metaclass; if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) { PyTypeObject *metatype; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *base = PyTuple_GET_ITEM(bases, 0); #else PyObject *base = PySequence_ITEM(bases, 0); #endif #if PY_MAJOR_VERSION < 3 PyObject* basetype = __Pyx_PyObject_GetAttrStr(base, PYIDENT("__class__")); if (basetype) { metatype = (PyType_Check(basetype)) ? ((PyTypeObject*) basetype) : NULL; } else { PyErr_Clear(); metatype = Py_TYPE(base); basetype = (PyObject*) metatype; Py_INCREF(basetype); } #else metatype = Py_TYPE(base); #endif metaclass = __Pyx_CalculateMetaclass(metatype, bases); #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_DECREF(base); #endif #if PY_MAJOR_VERSION < 3 Py_DECREF(basetype); #endif } else { // no bases => use default metaclass #if PY_MAJOR_VERSION < 3 metaclass = (PyObject *) &PyClass_Type; #else metaclass = (PyObject *) &PyType_Type; #endif Py_INCREF(metaclass); } return metaclass; } /////////////// Py3MetaclassGet.proto /////////////// static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw); /*proto*/ /////////////// Py3MetaclassGet /////////////// //@requires: FindInheritedMetaclass //@requires: CalculateMetaclass static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw) { PyObject *metaclass = mkw ? 
PyDict_GetItem(mkw, PYIDENT("metaclass")) : NULL; if (metaclass) { Py_INCREF(metaclass); if (PyDict_DelItem(mkw, PYIDENT("metaclass")) < 0) { Py_DECREF(metaclass); return NULL; } if (PyType_Check(metaclass)) { PyObject* orig = metaclass; metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); Py_DECREF(orig); } return metaclass; } return __Pyx_FindInheritedMetaclass(bases); } /////////////// CreateClass.proto /////////////// static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, PyObject *qualname, PyObject *modname); /*proto*/ /////////////// CreateClass /////////////// //@requires: FindInheritedMetaclass //@requires: CalculateMetaclass static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, PyObject *qualname, PyObject *modname) { PyObject *result; PyObject *metaclass; if (PyDict_SetItem(dict, PYIDENT("__module__"), modname) < 0) return NULL; if (PyDict_SetItem(dict, PYIDENT("__qualname__"), qualname) < 0) return NULL; /* Python2 __metaclass__ */ metaclass = PyDict_GetItem(dict, PYIDENT("__metaclass__")); if (metaclass) { Py_INCREF(metaclass); if (PyType_Check(metaclass)) { PyObject* orig = metaclass; metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); Py_DECREF(orig); } } else { metaclass = __Pyx_FindInheritedMetaclass(bases); } if (unlikely(!metaclass)) return NULL; result = PyObject_CallFunctionObjArgs(metaclass, name, bases, dict, NULL); Py_DECREF(metaclass); return result; } /////////////// Py3ClassCreate.proto /////////////// static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc); /*proto*/ static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); /*proto*/ /////////////// Py3ClassCreate /////////////// //@requires: PyObjectGetAttrStr //@requires: CalculateMetaclass static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { PyObject *ns; if (metaclass) { PyObject *prep = __Pyx_PyObject_GetAttrStr(metaclass, PYIDENT("__prepare__")); if (prep) { PyObject *pargs = PyTuple_Pack(2, name, bases); if (unlikely(!pargs)) { Py_DECREF(prep); return NULL; } ns = PyObject_Call(prep, pargs, mkw); Py_DECREF(prep); Py_DECREF(pargs); } else { if (unlikely(!PyErr_ExceptionMatches(PyExc_AttributeError))) return NULL; PyErr_Clear(); ns = PyDict_New(); } } else { ns = PyDict_New(); } if (unlikely(!ns)) return NULL; /* Required here to emulate assignment order */ if (unlikely(PyObject_SetItem(ns, PYIDENT("__module__"), modname) < 0)) goto bad; if (unlikely(PyObject_SetItem(ns, PYIDENT("__qualname__"), qualname) < 0)) goto bad; if (unlikely(doc && PyObject_SetItem(ns, PYIDENT("__doc__"), doc) < 0)) goto bad; return ns; bad: Py_DECREF(ns); return NULL; } static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass) { PyObject *result, *margs; PyObject *owned_metaclass = NULL; if (allow_py2_metaclass) { /* honour Python2 __metaclass__ for backward compatibility */ owned_metaclass = PyObject_GetItem(dict, PYIDENT("__metaclass__")); if (owned_metaclass) { metaclass = owned_metaclass; } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { PyErr_Clear(); } else { return 
NULL; } } if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); Py_XDECREF(owned_metaclass); if (unlikely(!metaclass)) return NULL; owned_metaclass = metaclass; } margs = PyTuple_Pack(3, name, bases, dict); if (unlikely(!margs)) { result = NULL; } else { result = PyObject_Call(metaclass, margs, mkw); Py_DECREF(margs); } Py_XDECREF(owned_metaclass); return result; } /////////////// ExtTypeTest.proto /////////////// static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ /////////////// ExtTypeTest /////////////// static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { if (unlikely(!type)) { PyErr_SetString(PyExc_SystemError, "Missing type object"); return 0; } if (likely(PyObject_TypeCheck(obj, type))) return 1; PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", Py_TYPE(obj)->tp_name, type->tp_name); return 0; } /////////////// CallableCheck.proto /////////////// #if CYTHON_USE_TYPE_SLOTS && PY_MAJOR_VERSION >= 3 #define __Pyx_PyCallable_Check(obj) ((obj)->ob_type->tp_call != NULL) #else #define __Pyx_PyCallable_Check(obj) PyCallable_Check(obj) #endif /////////////// PyDictContains.proto /////////////// static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { int result = PyDict_Contains(dict, item); return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); } /////////////// PySequenceContains.proto /////////////// static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { int result = PySequence_Contains(seq, item); return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); } /////////////// PyBoolOrNullFromLong.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_PyBoolOrNull_FromLong(long b) { return unlikely(b < 0) ? 
NULL : __Pyx_PyBool_FromLong(b); } /////////////// GetBuiltinName.proto /////////////// static PyObject *__Pyx_GetBuiltinName(PyObject *name); /*proto*/ /////////////// GetBuiltinName /////////////// //@requires: PyObjectGetAttrStr //@substitute: naming static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStr($builtins_cname, name); if (unlikely(!result)) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /////////////// GetNameInClass.proto /////////////// static PyObject *__Pyx_GetNameInClass(PyObject *nmspace, PyObject *name); /*proto*/ /////////////// GetNameInClass /////////////// //@requires: PyObjectGetAttrStr //@requires: GetModuleGlobalName static PyObject *__Pyx_GetNameInClass(PyObject *nmspace, PyObject *name) { PyObject *result; result = __Pyx_PyObject_GetAttrStr(nmspace, name); if (!result) result = __Pyx_GetModuleGlobalName(name); return result; } /////////////// GetModuleGlobalName.proto /////////////// static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); /*proto*/ /////////////// GetModuleGlobalName /////////////// //@requires: GetBuiltinName //@substitute: naming static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { PyObject *result; #if !CYTHON_AVOID_BORROWED_REFS result = PyDict_GetItem($moddict_cname, name); if (likely(result)) { Py_INCREF(result); } else { #else result = PyObject_GetItem($moddict_cname, name); if (!result) { PyErr_Clear(); #endif result = __Pyx_GetBuiltinName(name); } return result; } //////////////////// GetAttr.proto //////////////////// static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); /*proto*/ //////////////////// GetAttr //////////////////// //@requires: PyObjectGetAttrStr static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { #if CYTHON_COMPILING_IN_CPYTHON #if PY_MAJOR_VERSION >= 3 if (likely(PyUnicode_Check(n))) #else if (likely(PyString_Check(n))) #endif return __Pyx_PyObject_GetAttrStr(o, n); #endif return PyObject_GetAttr(o, n); } /////////////// PyObjectLookupSpecial.proto /////////////// //@requires: PyObjectGetAttrStr #if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) { PyObject *res; PyTypeObject *tp = Py_TYPE(obj); #if PY_MAJOR_VERSION < 3 if (unlikely(PyInstance_Check(obj))) return __Pyx_PyObject_GetAttrStr(obj, attr_name); #endif // adapted from CPython's special_lookup() in ceval.c res = _PyType_Lookup(tp, attr_name); if (likely(res)) { descrgetfunc f = Py_TYPE(res)->tp_descr_get; if (!f) { Py_INCREF(res); } else { res = f(res, obj, (PyObject *)tp); } } else { PyErr_SetObject(PyExc_AttributeError, attr_name); } return res; } #else #define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) #endif /////////////// PyObjectGetAttrStr.proto /////////////// #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /////////////// PyObjectSetAttrStr.proto /////////////// #if 
CYTHON_USE_TYPE_SLOTS #define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o,n,NULL) static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_setattro)) return tp->tp_setattro(obj, attr_name, value); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_setattr)) return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); #endif return PyObject_SetAttr(obj, attr_name, value); } #else #define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) #define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) #endif /////////////// UnpackUnboundCMethod.proto /////////////// typedef struct { PyObject *type; PyObject **method_name; // "func" is set on first access (direct C function pointer) PyCFunction func; // "method" is set on first access (fallback) PyObject *method; int flag; } __Pyx_CachedCFunction; /////////////// UnpackUnboundCMethod /////////////// //@requires: PyObjectGetAttrStr static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { PyObject *method; method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); if (unlikely(!method)) return -1; target->method = method; #if CYTHON_COMPILING_IN_CPYTHON #if PY_MAJOR_VERSION >= 3 // method dscriptor type isn't exported in Py2.x, cannot easily check the type there if (likely(PyObject_TypeCheck(method, &PyMethodDescr_Type))) #endif { PyMethodDescrObject *descr = (PyMethodDescrObject*) method; target->func = descr->d_method->ml_meth; target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST); } #endif return 0; } /////////////// CallUnboundCMethod0.proto /////////////// //@substitute: naming static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); /*proto*/ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_CallUnboundCMethod0(cfunc, self) \ ((likely((cfunc)->func)) ? \ (likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) : \ (likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(cfunc)->func)(self, $empty_tuple, NULL)) : \ ((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, $empty_tuple) : \ (PY_VERSION_HEX >= 0x030600B1 && (cfunc)->flag == METH_FASTCALL ? \ (PY_VERSION_HEX >= 0x030700A0 ? \ (*(__Pyx_PyCFunctionFast)(cfunc)->func)(self, &PyTuple_GET_ITEM($empty_tuple, 0), 0) : \ (*(__Pyx_PyCFunctionFastWithKeywords)(cfunc)->func)(self, &PyTuple_GET_ITEM($empty_tuple, 0), 0, NULL)) : \ (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ? 
\ (*(__Pyx_PyCFunctionFastWithKeywords)(cfunc)->func)(self, &PyTuple_GET_ITEM($empty_tuple, 0), 0, NULL) : \ __Pyx__CallUnboundCMethod0(cfunc, self)))))) : \ __Pyx__CallUnboundCMethod0(cfunc, self)) #else #define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self) #endif /////////////// CallUnboundCMethod0 /////////////// //@requires: UnpackUnboundCMethod //@requires: PyObjectCall static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { PyObject *args, *result = NULL; if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; #if CYTHON_ASSUME_SAFE_MACROS args = PyTuple_New(1); if (unlikely(!args)) goto bad; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); #else args = PyTuple_Pack(1, self); if (unlikely(!args)) goto bad; #endif result = __Pyx_PyObject_Call(cfunc->method, args, NULL); Py_DECREF(args); bad: return result; } /////////////// CallUnboundCMethod1.proto /////////////// static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); /*proto*/ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_CallUnboundCMethod1(cfunc, self, arg) \ ((likely((cfunc)->func && (cfunc)->flag == METH_O)) ? (*((cfunc)->func))(self, arg) : \ ((PY_VERSION_HEX >= 0x030600B1 && (cfunc)->func && (cfunc)->flag == METH_FASTCALL) ? \ (PY_VERSION_HEX >= 0x030700A0 ? \ (*(__Pyx_PyCFunctionFast)(cfunc)->func)(self, &arg, 1) : \ (*(__Pyx_PyCFunctionFastWithKeywords)(cfunc)->func)(self, &arg, 1, NULL)) : \ (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->func && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ? \ (*(__Pyx_PyCFunctionFastWithKeywords)(cfunc)->func)(self, &arg, 1, NULL) : \ __Pyx__CallUnboundCMethod1(cfunc, self, arg)))) #else #define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) #endif /////////////// CallUnboundCMethod1 /////////////// //@requires: UnpackUnboundCMethod //@requires: PyObjectCall static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ PyObject *args, *result = NULL; if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; #if CYTHON_COMPILING_IN_CPYTHON if (cfunc->func && (cfunc->flag & METH_VARARGS)) { args = PyTuple_New(1); if (unlikely(!args)) goto bad; Py_INCREF(arg); PyTuple_SET_ITEM(args, 0, arg); if (cfunc->flag & METH_KEYWORDS) result = (*(PyCFunctionWithKeywords)cfunc->func)(self, args, NULL); else result = (*cfunc->func)(self, args); } else { args = PyTuple_New(2); if (unlikely(!args)) goto bad; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); Py_INCREF(arg); PyTuple_SET_ITEM(args, 1, arg); result = __Pyx_PyObject_Call(cfunc->method, args, NULL); } #else args = PyTuple_Pack(2, self, arg); if (unlikely(!args)) goto bad; result = __Pyx_PyObject_Call(cfunc->method, args, NULL); #endif bad: Py_XDECREF(args); return result; } /////////////// PyObjectCallMethod0.proto /////////////// static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); /*proto*/ /////////////// PyObjectCallMethod0 /////////////// //@requires: PyObjectGetAttrStr //@requires: PyObjectCallOneArg //@requires: PyObjectCallNoArg static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { PyObject *method, *result = NULL; method = __Pyx_PyObject_GetAttrStr(obj, method_name); if (unlikely(!method)) goto bad; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method))) { PyObject *self = 
PyMethod_GET_SELF(method); if (likely(self)) { PyObject *function = PyMethod_GET_FUNCTION(method); result = __Pyx_PyObject_CallOneArg(function, self); Py_DECREF(method); return result; } } #endif result = __Pyx_PyObject_CallNoArg(method); Py_DECREF(method); bad: return result; } /////////////// PyObjectCallMethod1.proto /////////////// static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); /*proto*/ /////////////// PyObjectCallMethod1 /////////////// //@requires: PyObjectGetAttrStr //@requires: PyObjectCallOneArg //@requires: PyFunctionFastCall //@requires: PyCFunctionFastCall static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { PyObject *method, *result = NULL; method = __Pyx_PyObject_GetAttrStr(obj, method_name); if (unlikely(!method)) goto done; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method))) { PyObject *self = PyMethod_GET_SELF(method); if (likely(self)) { PyObject *args; PyObject *function = PyMethod_GET_FUNCTION(method); #if CYTHON_FAST_PYCALL if (PyFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyFunction_FastCall(function, args, 2); goto done; } #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyCFunction_FastCall(function, args, 2); goto done; } #endif args = PyTuple_New(2); if (unlikely(!args)) goto done; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); Py_INCREF(arg); PyTuple_SET_ITEM(args, 1, arg); Py_INCREF(function); Py_DECREF(method); method = NULL; result = __Pyx_PyObject_Call(function, args, NULL); Py_DECREF(args); Py_DECREF(function); return result; } } #endif result = __Pyx_PyObject_CallOneArg(method, arg); done: Py_XDECREF(method); return result; } /////////////// PyObjectCallMethod2.proto /////////////// static PyObject* __Pyx_PyObject_CallMethod2(PyObject* obj, PyObject* method_name, PyObject* arg1, PyObject* arg2); /*proto*/ /////////////// PyObjectCallMethod2 /////////////// //@requires: PyObjectGetAttrStr //@requires: PyObjectCall //@requires: PyFunctionFastCall //@requires: PyCFunctionFastCall static PyObject* __Pyx_PyObject_CallMethod2(PyObject* obj, PyObject* method_name, PyObject* arg1, PyObject* arg2) { PyObject *args, *method, *result = NULL; method = __Pyx_PyObject_GetAttrStr(obj, method_name); if (unlikely(!method)) return NULL; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method)) && likely(PyMethod_GET_SELF(method))) { PyObject *self, *function; self = PyMethod_GET_SELF(method); function = PyMethod_GET_FUNCTION(method); #if CYTHON_FAST_PYCALL if (PyFunction_Check(function)) { PyObject *args[3] = {self, arg1, arg2}; result = __Pyx_PyFunction_FastCall(function, args, 3); goto done; } #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(function)) { PyObject *args[3] = {self, arg1, arg2}; result = __Pyx_PyFunction_FastCall(function, args, 3); goto done; } #endif args = PyTuple_New(3); if (unlikely(!args)) goto done; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); Py_INCREF(arg1); PyTuple_SET_ITEM(args, 1, arg1); Py_INCREF(arg2); PyTuple_SET_ITEM(args, 2, arg2); Py_INCREF(function); Py_DECREF(method); method = function; } else #endif #if CYTHON_FAST_PYCALL if (PyFunction_Check(method)) { PyObject *args[2] = {arg1, arg2}; result = __Pyx_PyFunction_FastCall(method, args, 2); goto done; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(method)) { PyObject *args[2] = {arg1, arg2}; result = 
__Pyx_PyCFunction_FastCall(method, args, 2); goto done; } else #endif { args = PyTuple_New(2); if (unlikely(!args)) goto done; Py_INCREF(arg1); PyTuple_SET_ITEM(args, 0, arg1); Py_INCREF(arg2); PyTuple_SET_ITEM(args, 1, arg2); } result = __Pyx_PyObject_Call(method, args, NULL); Py_DECREF(args); done: Py_DECREF(method); return result; } /////////////// tp_new.proto /////////////// #define __Pyx_tp_new(type_obj, args) __Pyx_tp_new_kwargs(type_obj, args, NULL) static CYTHON_INLINE PyObject* __Pyx_tp_new_kwargs(PyObject* type_obj, PyObject* args, PyObject* kwargs) { return (PyObject*) (((PyTypeObject*)type_obj)->tp_new((PyTypeObject*)type_obj, args, kwargs)); } /////////////// PyObjectCall.proto /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); /*proto*/ #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /////////////// PyObjectCall /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = func->ob_type->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /////////////// PyObjectCallMethO.proto /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); /*proto*/ #endif /////////////// PyObjectCallMethO /////////////// #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { PyObject *self, *result; PyCFunction cfunc; cfunc = PyCFunction_GET_FUNCTION(func); self = PyCFunction_GET_SELF(func); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = cfunc(self, arg); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /////////////// PyFunctionFastCall.proto /////////////// #if CYTHON_FAST_PYCALL #define __Pyx_PyFunction_FastCall(func, args, nargs) \ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) // let's assume that the non-public C-API function might still change during the 3.6 beta phase #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); #else #define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) #endif #endif /////////////// PyFunctionFastCall /////////////// // copied from CPython 3.6 ceval.c #if CYTHON_FAST_PYCALL #include "frameobject.h" static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, PyObject *globals) { PyFrameObject *f; PyThreadState *tstate = PyThreadState_GET(); PyObject **fastlocals; Py_ssize_t i; PyObject *result; assert(globals != NULL); /* XXX Perhaps we should create a specialized PyFrame_New() that doesn't take locals, but does take builtins without sanity checking them. 
*/ assert(tstate != NULL); f = PyFrame_New(tstate, co, globals, NULL); if (f == NULL) { return NULL; } fastlocals = f->f_localsplus; for (i = 0; i < na; i++) { Py_INCREF(*args); fastlocals[i] = *args++; } result = PyEval_EvalFrameEx(f,0); ++tstate->recursion_depth; Py_DECREF(f); --tstate->recursion_depth; return result; } #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); PyObject *globals = PyFunction_GET_GLOBALS(func); PyObject *argdefs = PyFunction_GET_DEFAULTS(func); PyObject *closure; #if PY_MAJOR_VERSION >= 3 PyObject *kwdefs; //#if PY_VERSION_HEX >= 0x03050000 //PyObject *name, *qualname; //#endif #endif PyObject *kwtuple, **k; PyObject **d; Py_ssize_t nd; Py_ssize_t nk; PyObject *result; assert(kwargs == NULL || PyDict_Check(kwargs)); nk = kwargs ? PyDict_Size(kwargs) : 0; if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { return NULL; } if ( #if PY_MAJOR_VERSION >= 3 co->co_kwonlyargcount == 0 && #endif likely(kwargs == NULL || nk == 0) && co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { /* Fast paths */ if (argdefs == NULL && co->co_argcount == nargs) { result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); goto done; } else if (nargs == 0 && argdefs != NULL && co->co_argcount == Py_SIZE(argdefs)) { /* function called with no arguments, but all parameters have a default value: use default values as arguments .*/ args = &PyTuple_GET_ITEM(argdefs, 0); result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); goto done; } } if (kwargs != NULL) { Py_ssize_t pos, i; kwtuple = PyTuple_New(2 * nk); if (kwtuple == NULL) { result = NULL; goto done; } k = &PyTuple_GET_ITEM(kwtuple, 0); pos = i = 0; while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { Py_INCREF(k[i]); Py_INCREF(k[i+1]); i += 2; } nk = i / 2; } else { kwtuple = NULL; k = NULL; } closure = PyFunction_GET_CLOSURE(func); #if PY_MAJOR_VERSION >= 3 kwdefs = PyFunction_GET_KW_DEFAULTS(func); //#if PY_VERSION_HEX >= 0x03050000 //name = ((PyFunctionObject *)func) -> func_name; //qualname = ((PyFunctionObject *)func) -> func_qualname; //#endif #endif if (argdefs != NULL) { d = &PyTuple_GET_ITEM(argdefs, 0); nd = Py_SIZE(argdefs); } else { d = NULL; nd = 0; } //#if PY_VERSION_HEX >= 0x03050000 //return _PyEval_EvalCodeWithName((PyObject*)co, globals, (PyObject *)NULL, // args, nargs, // NULL, 0, // d, nd, kwdefs, // closure, name, qualname); //#elif PY_MAJOR_VERSION >= 3 #if PY_MAJOR_VERSION >= 3 result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, kwdefs, closure); #else result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, closure); #endif Py_XDECREF(kwtuple); done: Py_LeaveRecursiveCall(); return result; } #endif /* CPython < 3.6 */ #endif /* CYTHON_FAST_PYCALL */ /////////////// PyCFunctionFastCall.proto /////////////// #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); #else #define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) #endif /////////////// PyCFunctionFastCall /////////////// #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { PyCFunctionObject *func = (PyCFunctionObject*)func_obj; PyCFunction meth = 
PyCFunction_GET_FUNCTION(func); PyObject *self = PyCFunction_GET_SELF(func); int flags = PyCFunction_GET_FLAGS(func); assert(PyCFunction_Check(func)); assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); assert(nargs >= 0); assert(nargs == 0 || args != NULL); /* _PyCFunction_FastCallDict() must not be called with an exception set, because it may clear it (directly or indirectly) and so the caller loses its exception */ assert(!PyErr_Occurred()); if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); } else { return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); } } #endif /* CYTHON_FAST_PYCCALL */ /////////////// PyObjectCallOneArg.proto /////////////// static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); /*proto*/ /////////////// PyObjectCallOneArg /////////////// //@requires: PyObjectCallMethO //@requires: PyObjectCall //@requires: PyFunctionFastCall //@requires: PyCFunctionFastCall #if CYTHON_COMPILING_IN_CPYTHON static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_New(1); if (unlikely(!args)) return NULL; Py_INCREF(arg); PyTuple_SET_ITEM(args, 0, arg); result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, &arg, 1); } #endif if (likely(PyCFunction_Check(func))) { if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { // fast and simple case that we are optimising for return __Pyx_PyObject_CallMethO(func, arg); #if CYTHON_FAST_PYCCALL } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { return __Pyx_PyCFunction_FastCall(func, &arg, 1); #endif } } return __Pyx__PyObject_CallOneArg(func, arg); } #else static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_Pack(1, arg); if (unlikely(!args)) return NULL; result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } #endif /////////////// PyObjectCallNoArg.proto /////////////// //@requires: PyObjectCall //@substitute: naming #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); /*proto*/ #else #define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, $empty_tuple, NULL) #endif /////////////// PyObjectCallNoArg /////////////// //@requires: PyObjectCallMethO //@requires: PyObjectCall //@requires: PyFunctionFastCall //@substitute: naming #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, NULL, 0); } #endif #ifdef __Pyx_CyFunction_USED if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { #else if (likely(PyCFunction_Check(func))) { #endif if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { // fast and simple case that we are optimising for return __Pyx_PyObject_CallMethO(func, NULL); } } return __Pyx_PyObject_Call(func, $empty_tuple, NULL); } #endif /////////////// MatrixMultiply.proto /////////////// #if PY_VERSION_HEX >= 0x03050000 #define __Pyx_PyNumber_MatrixMultiply(x,y) PyNumber_MatrixMultiply(x,y) #define __Pyx_PyNumber_InPlaceMatrixMultiply(x,y) 
PyNumber_InPlaceMatrixMultiply(x,y) #else #define __Pyx_PyNumber_MatrixMultiply(x,y) __Pyx__PyNumber_MatrixMultiply(x, y, "@") static PyObject* __Pyx__PyNumber_MatrixMultiply(PyObject* x, PyObject* y, const char* op_name); static PyObject* __Pyx_PyNumber_InPlaceMatrixMultiply(PyObject* x, PyObject* y); #endif /////////////// MatrixMultiply /////////////// //@requires: PyObjectGetAttrStr //@requires: PyObjectCallOneArg //@requires: PyFunctionFastCall //@requires: PyCFunctionFastCall #if PY_VERSION_HEX < 0x03050000 static PyObject* __Pyx_PyObject_CallMatrixMethod(PyObject* method, PyObject* arg) { // NOTE: eats the method reference PyObject *result = NULL; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method))) { PyObject *self = PyMethod_GET_SELF(method); if (likely(self)) { PyObject *args; PyObject *function = PyMethod_GET_FUNCTION(method); #if CYTHON_FAST_PYCALL if (PyFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyFunction_FastCall(function, args, 2); goto done; } #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyCFunction_FastCall(function, args, 2); goto done; } #endif args = PyTuple_New(2); if (unlikely(!args)) goto done; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); Py_INCREF(arg); PyTuple_SET_ITEM(args, 1, arg); Py_INCREF(function); Py_DECREF(method); method = NULL; result = __Pyx_PyObject_Call(function, args, NULL); Py_DECREF(args); Py_DECREF(function); return result; } } #endif result = __Pyx_PyObject_CallOneArg(method, arg); done: Py_DECREF(method); return result; } #define __Pyx_TryMatrixMethod(x, y, py_method_name) { \ PyObject *func = __Pyx_PyObject_GetAttrStr(x, py_method_name); \ if (func) { \ PyObject *result = __Pyx_PyObject_CallMatrixMethod(func, y); \ if (result != Py_NotImplemented) \ return result; \ Py_DECREF(result); \ } else { \ if (!PyErr_ExceptionMatches(PyExc_AttributeError)) \ return NULL; \ PyErr_Clear(); \ } \ } static PyObject* __Pyx__PyNumber_MatrixMultiply(PyObject* x, PyObject* y, const char* op_name) { int right_is_subtype = PyObject_IsSubclass((PyObject*)Py_TYPE(y), (PyObject*)Py_TYPE(x)); if (unlikely(right_is_subtype == -1)) return NULL; if (right_is_subtype) { // to allow subtypes to override parent behaviour, try reversed operation first // see note at https://docs.python.org/3/reference/datamodel.html#emulating-numeric-types __Pyx_TryMatrixMethod(y, x, PYIDENT("__rmatmul__")) } __Pyx_TryMatrixMethod(x, y, PYIDENT("__matmul__")) if (!right_is_subtype) { __Pyx_TryMatrixMethod(y, x, PYIDENT("__rmatmul__")) } PyErr_Format(PyExc_TypeError, "unsupported operand type(s) for %.2s: '%.100s' and '%.100s'", op_name, Py_TYPE(x)->tp_name, Py_TYPE(y)->tp_name); return NULL; } static PyObject* __Pyx_PyNumber_InPlaceMatrixMultiply(PyObject* x, PyObject* y) { __Pyx_TryMatrixMethod(x, y, PYIDENT("__imatmul__")) return __Pyx__PyNumber_MatrixMultiply(x, y, "@="); } #undef __Pyx_TryMatrixMethod #endif Cython-0.26.1/Cython/Utility/CConvert.pyx0000664000175000017500000001036213143605603021027 0ustar stefanstefan00000000000000#################### FromPyStructUtility #################### cdef extern from *: ctypedef struct PyTypeObject: char* tp_name PyTypeObject *Py_TYPE(obj) bint PyMapping_Check(obj) object PyErr_Format(exc, const char *format, ...) 
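# A rough sketch (illustration only, not part of the template) of what the
# conversion function below expands to for a hypothetical struct with members
# 'x' and 'y'; the names Point and Point_from_py stand in for {{struct_type}}
# and {{funcname}}, which are substituted at code-generation time:
#
#     cdef struct Point:
#         double x
#         double y
#
#     cdef Point Point_from_py(obj) except *:
#         cdef Point result
#         if not PyMapping_Check(obj):
#             PyErr_Format(TypeError, b"Expected %.16s, got %.200s",
#                          b"a mapping", Py_TYPE(obj).tp_name)
#         try:
#             value = obj['x']
#         except KeyError:
#             raise ValueError("No value specified for struct attribute 'x'")
#         result.x = value
#         try:
#             value = obj['y']
#         except KeyError:
#             raise ValueError("No value specified for struct attribute 'y'")
#         result.y = value
#         return result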
@cname("{{funcname}}") cdef {{struct_type}} {{funcname}}(obj) except *: cdef {{struct_type}} result if not PyMapping_Check(obj): PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name) {{for member in var_entries:}} try: value = obj['{{member.name}}'] except KeyError: raise ValueError("No value specified for struct attribute '{{member.name}}'") result.{{member.cname}} = value {{endfor}} return result #################### FromPyUnionUtility #################### cdef extern from *: ctypedef struct PyTypeObject: char* tp_name PyTypeObject *Py_TYPE(obj) bint PyMapping_Check(obj) object PyErr_Format(exc, const char *format, ...) @cname("{{funcname}}") cdef {{struct_type}} {{funcname}}(obj) except *: cdef {{struct_type}} result cdef Py_ssize_t length if not PyMapping_Check(obj): PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name) last_found = None length = len(obj) if length: {{for member in var_entries:}} if '{{member.name}}' in obj: if last_found is not None: raise ValueError("More than one union attribute passed: '%s' and '%s'" % (last_found, '{{member.name}}')) last_found = '{{member.name}}' result.{{member.cname}} = obj['{{member.name}}'] length -= 1 if not length: return result {{endfor}} if last_found is None: raise ValueError("No value specified for any of the union attributes (%s)" % '{{", ".join(member.name for member in var_entries)}}') return result #################### cfunc.to_py #################### @cname("{{cname}}") cdef object {{cname}}({{return_type.ctype}} (*f)({{ ', '.join(arg.type_cname for arg in args) }}) {{except_clause}}): def wrap({{ ', '.join('{arg.ctype} {arg.name}'.format(arg=arg) for arg in args) }}): """wrap({{', '.join(('{arg.name}: {arg.type_displayname}'.format(arg=arg) if arg.type_displayname else arg.name) for arg in args)}}){{if return_type.type_displayname}} -> {{return_type.type_displayname}}{{endif}}""" {{'' if return_type.type.is_void else 'return '}}f({{ ', '.join(arg.name for arg in args) }}) return wrap #################### carray.from_py #################### cdef extern from *: object PyErr_Format(exc, const char *format, ...) 
@cname("{{cname}}") cdef int {{cname}}(object o, {{base_type}} *v, Py_ssize_t length) except -1: cdef Py_ssize_t i = length try: i = len(o) except (TypeError, OverflowError): pass if i == length: for i, item in enumerate(o): if i >= length: break v[i] = item else: i += 1 # convert index to length if i == length: return 0 PyErr_Format( IndexError, ("too many values found during array assignment, expected %zd" if i >= length else "not enough values found during array assignment, expected %zd, got %zd"), length, i) #################### carray.to_py #################### cdef extern from *: void Py_INCREF(object o) tuple PyTuple_New(Py_ssize_t size) list PyList_New(Py_ssize_t size) void PyTuple_SET_ITEM(object p, Py_ssize_t pos, object o) void PyList_SET_ITEM(object p, Py_ssize_t pos, object o) @cname("{{cname}}") cdef inline list {{cname}}({{base_type}} *v, Py_ssize_t length): cdef size_t i cdef object value l = PyList_New(length) for i in range(length): value = v[i] Py_INCREF(value) PyList_SET_ITEM(l, i, value) return l @cname("{{to_tuple_cname}}") cdef inline tuple {{to_tuple_cname}}({{base_type}} *v, Py_ssize_t length): cdef size_t i cdef object value t = PyTuple_New(length) for i in range(length): value = v[i] Py_INCREF(value) PyTuple_SET_ITEM(t, i, value) return t Cython-0.26.1/Cython/Utility/CppConvert.pyx0000664000175000017500000001371613143605603021375 0ustar stefanstefan00000000000000# TODO: Figure out how many of the pass-by-value copies the compiler can eliminate. #################### string.from_py #################### cdef extern from *: cdef cppclass string "{{type}}": string() string(char* c_str, size_t size) cdef const char* __Pyx_PyObject_AsStringAndSize(object, Py_ssize_t*) except NULL @cname("{{cname}}") cdef string {{cname}}(object o) except *: cdef Py_ssize_t length cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) return string(data, length) #################### string.to_py #################### #cimport cython #from libcpp.string cimport string cdef extern from *: cdef cppclass string "{{type}}": char* data() size_t size() {{for py_type in ['PyObject', 'PyUnicode', 'PyStr', 'PyBytes', 'PyByteArray']}} cdef extern from *: cdef object __Pyx_{{py_type}}_FromStringAndSize(const char*, size_t) @cname("{{cname.replace("PyObject", py_type, 1)}}") cdef inline object {{cname.replace("PyObject", py_type, 1)}}(const string& s): return __Pyx_{{py_type}}_FromStringAndSize(s.data(), s.size()) {{endfor}} #################### vector.from_py #################### cdef extern from *: cdef cppclass vector "std::vector" [T]: void push_back(T&) @cname("{{cname}}") cdef vector[X] {{cname}}(object o) except *: cdef vector[X] v for item in o: v.push_back(item) return v #################### vector.to_py #################### cdef extern from *: cdef cppclass vector "const std::vector" [T]: size_t size() T& operator[](size_t) @cname("{{cname}}") cdef object {{cname}}(vector[X]& v): return [v[i] for i in range(v.size())] #################### list.from_py #################### cdef extern from *: cdef cppclass cpp_list "std::list" [T]: void push_back(T&) @cname("{{cname}}") cdef cpp_list[X] {{cname}}(object o) except *: cdef cpp_list[X] l for item in o: l.push_back(item) return l #################### list.to_py #################### cimport cython cdef extern from *: cdef cppclass cpp_list "std::list" [T]: cppclass const_iterator: T& operator*() const_iterator operator++() bint operator!=(const_iterator) const_iterator begin() const_iterator end() @cname("{{cname}}") cdef object 
{{cname}}(const cpp_list[X]& v): o = [] cdef cpp_list[X].const_iterator iter = v.begin() while iter != v.end(): o.append(cython.operator.dereference(iter)) cython.operator.preincrement(iter) return o #################### set.from_py #################### cdef extern from *: cdef cppclass set "std::{{maybe_unordered}}set" [T]: void insert(T&) @cname("{{cname}}") cdef set[X] {{cname}}(object o) except *: cdef set[X] s for item in o: s.insert(item) return s #################### set.to_py #################### cimport cython cdef extern from *: cdef cppclass cpp_set "std::{{maybe_unordered}}set" [T]: cppclass const_iterator: T& operator*() const_iterator operator++() bint operator!=(const_iterator) const_iterator begin() const_iterator end() @cname("{{cname}}") cdef object {{cname}}(const cpp_set[X]& s): o = set() cdef cpp_set[X].const_iterator iter = s.begin() while iter != s.end(): o.add(cython.operator.dereference(iter)) cython.operator.preincrement(iter) return o #################### pair.from_py #################### cdef extern from *: cdef cppclass pair "std::pair" [T, U]: pair() pair(T&, U&) @cname("{{cname}}") cdef pair[X,Y] {{cname}}(object o) except *: x, y = o return pair[X,Y](x, y) #################### pair.to_py #################### cdef extern from *: cdef cppclass pair "std::pair" [T, U]: T first U second @cname("{{cname}}") cdef object {{cname}}(const pair[X,Y]& p): return p.first, p.second #################### map.from_py #################### cdef extern from *: cdef cppclass pair "std::pair" [T, U]: pair(T&, U&) cdef cppclass map "std::{{maybe_unordered}}map" [T, U]: void insert(pair[T, U]&) cdef cppclass vector "std::vector" [T]: pass @cname("{{cname}}") cdef map[X,Y] {{cname}}(object o) except *: cdef dict d = o cdef map[X,Y] m for key, value in d.iteritems(): m.insert(pair[X,Y](key, value)) return m #################### map.to_py #################### # TODO: Work out const so that this can take a const # reference rather than pass by value. cimport cython cdef extern from *: cdef cppclass map "std::{{maybe_unordered}}map" [T, U]: cppclass value_type: T first U second cppclass const_iterator: value_type& operator*() const_iterator operator++() bint operator!=(const_iterator) const_iterator begin() const_iterator end() @cname("{{cname}}") cdef object {{cname}}(const map[X,Y]& s): o = {} cdef const map[X,Y].value_type *key_value cdef map[X,Y].const_iterator iter = s.begin() while iter != s.end(): key_value = &cython.operator.dereference(iter) o[key_value.first] = key_value.second cython.operator.preincrement(iter) return o #################### complex.from_py #################### cdef extern from *: cdef cppclass std_complex "std::complex" [T]: std_complex() std_complex(T, T) except + @cname("{{cname}}") cdef std_complex[X] {{cname}}(object o) except *: cdef double complex z = o return std_complex[X](z.real, z.imag) #################### complex.to_py #################### cdef extern from *: cdef cppclass std_complex "std::complex" [T]: X real() X imag() @cname("{{cname}}") cdef object {{cname}}(const std_complex[X]& z): cdef double complex tmp tmp.real = z.real() tmp.imag = z.imag() return tmp Cython-0.26.1/Cython/Utility/Exceptions.c0000664000175000017500000005355413150045407021037 0ustar stefanstefan00000000000000// Exception raising code // // Exceptions are raised by __Pyx_Raise() and stored as plain // type/value/tb in PyThreadState->curexc_*. 
When being caught by an // 'except' statement, curexc_* is moved over to exc_* by // __Pyx_GetException() /////////////// PyThreadStateGet.proto /////////////// //@substitute: naming #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *$local_tstate_cname; #define __Pyx_PyThreadState_assign $local_tstate_cname = PyThreadState_GET(); #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #endif /////////////// PyErrExceptionMatches.proto /////////////// //@substitute: naming #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState($local_tstate_cname, err) static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); #else #define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) #endif /////////////// PyErrExceptionMatches /////////////// #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { PyObject *exc_type = tstate->curexc_type; if (exc_type == err) return 1; if (unlikely(!exc_type)) return 0; return PyErr_GivenExceptionMatches(exc_type, err); } #endif /////////////// PyErrFetchRestore.proto /////////////// //@substitute: naming //@requires: PyThreadStateGet #if CYTHON_FAST_THREAD_STATE #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState($local_tstate_cname, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState($local_tstate_cname, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); /*proto*/ static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #else #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /////////////// PyErrFetchRestore /////////////// #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; } #endif /////////////// RaiseException.proto /////////////// static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ /////////////// RaiseException /////////////// //@requires: PyErrFetchRestore //@requires: PyThreadStateGet // The following function is based on do_raise() from ceval.c. 
There // are separate versions for Python2 and Python3 as exception handling // has changed quite a lot between the two versions. #if PY_MAJOR_VERSION < 3 static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, CYTHON_UNUSED PyObject *cause) { __Pyx_PyThreadState_declare /* 'cause' is only used in Py3 */ Py_XINCREF(type); if (!value || value == Py_None) value = NULL; else Py_INCREF(value); if (!tb || tb == Py_None) tb = NULL; else { Py_INCREF(tb); if (!PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto raise_error; } } if (PyType_Check(type)) { /* instantiate the type now (we don't know when and how it will be caught) */ #if CYTHON_COMPILING_IN_PYPY /* PyPy can't handle value == NULL */ if (!value) { Py_INCREF(Py_None); value = Py_None; } #endif PyErr_NormalizeException(&type, &value, &tb); } else { /* Raising an instance. The value should be a dummy. */ if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto raise_error; } /* Normalize to raise , */ value = type; type = (PyObject*) Py_TYPE(type); Py_INCREF(type); if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto raise_error; } } __Pyx_PyThreadState_assign __Pyx_ErrRestore(type, value, tb); return; raise_error: Py_XDECREF(value); Py_XDECREF(type); Py_XDECREF(tb); return; } #else /* Python 3+ */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { PyObject* owned_instance = NULL; if (tb == Py_None) { tb = 0; } else if (tb && !PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto bad; } if (value == Py_None) value = 0; if (PyExceptionInstance_Check(type)) { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto bad; } value = type; type = (PyObject*) Py_TYPE(value); } else if (PyExceptionClass_Check(type)) { // make sure value is an exception instance of type PyObject *instance_class = NULL; if (value && PyExceptionInstance_Check(value)) { instance_class = (PyObject*) Py_TYPE(value); if (instance_class != type) { int is_subclass = PyObject_IsSubclass(instance_class, type); if (!is_subclass) { instance_class = NULL; } else if (unlikely(is_subclass == -1)) { // error on subclass test goto bad; } else { // believe the instance type = instance_class; } } } if (!instance_class) { // instantiate the type now (we don't know when and how it will be caught) // assuming that 'value' is an argument to the type's constructor // not using PyErr_NormalizeException() to avoid ref-counting problems PyObject *args; if (!value) args = PyTuple_New(0); else if (PyTuple_Check(value)) { Py_INCREF(value); args = value; } else args = PyTuple_Pack(1, value); if (!args) goto bad; owned_instance = PyObject_Call(type, args, NULL); Py_DECREF(args); if (!owned_instance) goto bad; value = owned_instance; if (!PyExceptionInstance_Check(value)) { PyErr_Format(PyExc_TypeError, "calling %R should have returned an instance of " "BaseException, not %R", type, Py_TYPE(value)); goto bad; } } } else { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto bad; } #if PY_VERSION_HEX >= 0x03030000 if (cause) { #else if (cause && cause != Py_None) { #endif PyObject *fixed_cause; if (cause == Py_None) { // raise ... 
from None fixed_cause = NULL; } else if (PyExceptionClass_Check(cause)) { fixed_cause = PyObject_CallObject(cause, NULL); if (fixed_cause == NULL) goto bad; } else if (PyExceptionInstance_Check(cause)) { fixed_cause = cause; Py_INCREF(fixed_cause); } else { PyErr_SetString(PyExc_TypeError, "exception causes must derive from " "BaseException"); goto bad; } PyException_SetCause(value, fixed_cause); } PyErr_SetObject(type, value); if (tb) { #if CYTHON_COMPILING_IN_PYPY PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); Py_INCREF(tb); PyErr_Restore(tmp_type, tmp_value, tb); Py_XDECREF(tmp_tb); #else PyThreadState *tstate = PyThreadState_GET(); PyObject* tmp_tb = tstate->curexc_traceback; if (tb != tmp_tb) { Py_INCREF(tb); tstate->curexc_traceback = tb; Py_XDECREF(tmp_tb); } #endif } bad: Py_XDECREF(owned_instance); return; } #endif /////////////// GetException.proto /////////////// //@substitute: naming //@requires: PyThreadStateGet #if CYTHON_FAST_THREAD_STATE #define __Pyx_GetException(type, value, tb) __Pyx__GetException($local_tstate_cname, type, value, tb) static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #endif /////////////// GetException /////////////// #if CYTHON_FAST_THREAD_STATE static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { #endif PyObject *local_type, *local_value, *local_tb; #if CYTHON_FAST_THREAD_STATE PyObject *tmp_type, *tmp_value, *tmp_tb; local_type = tstate->curexc_type; local_value = tstate->curexc_value; local_tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; #else PyErr_Fetch(&local_type, &local_value, &local_tb); #endif PyErr_NormalizeException(&local_type, &local_value, &local_tb); #if CYTHON_FAST_THREAD_STATE if (unlikely(tstate->curexc_type)) #else if (unlikely(PyErr_Occurred())) #endif goto bad; #if PY_MAJOR_VERSION >= 3 if (local_tb) { if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) goto bad; } #endif // traceback may be NULL for freshly raised exceptions Py_XINCREF(local_tb); // exception state may be temporarily empty in parallel loops (race condition) Py_XINCREF(local_type); Py_XINCREF(local_value); *type = local_type; *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = local_type; tstate->exc_value = local_value; tstate->exc_traceback = local_tb; // Make sure tstate is in a consistent state when we XDECREF // these objects (DECREF may run arbitrary code). 
Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #else PyErr_SetExcInfo(local_type, local_value, local_tb); #endif return 0; bad: *type = 0; *value = 0; *tb = 0; Py_XDECREF(local_type); Py_XDECREF(local_value); Py_XDECREF(local_tb); return -1; } /////////////// ReRaiseException.proto /////////////// static CYTHON_INLINE void __Pyx_ReraiseException(void); /*proto*/ /////////////// ReRaiseException.proto /////////////// static CYTHON_INLINE void __Pyx_ReraiseException(void) { PyObject *type = NULL, *value = NULL, *tb = NULL; #if CYTHON_FAST_THREAD_STATE PyThreadState *tstate = PyThreadState_GET(); type = tstate->exc_type; value = tstate->exc_value; tb = tstate->exc_traceback; #else PyErr_GetExcInfo(&type, &value, &tb); #endif if (!type || type == Py_None) { #if !CYTHON_FAST_THREAD_STATE Py_XDECREF(type); Py_XDECREF(value); Py_XDECREF(tb); #endif // message copied from Py3 PyErr_SetString(PyExc_RuntimeError, "No active exception to reraise"); } else { #if CYTHON_FAST_THREAD_STATE Py_INCREF(type); Py_XINCREF(value); Py_XINCREF(tb); #endif PyErr_Restore(type, value, tb); } } /////////////// SaveResetException.proto /////////////// //@substitute: naming //@requires: PyThreadStateGet #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave($local_tstate_cname, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset($local_tstate_cname, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); /*proto*/ #else #define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) #define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) #endif /////////////// SaveResetException /////////////// #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->exc_type; *value = tstate->exc_value; *tb = tstate->exc_traceback; Py_XINCREF(*type); Py_XINCREF(*value); Py_XINCREF(*tb); } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = type; tstate->exc_value = value; tstate->exc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } #endif /////////////// SwapException.proto /////////////// //@substitute: naming //@requires: PyThreadStateGet #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap($local_tstate_cname, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ #endif /////////////// SwapException /////////////// #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = *type; tstate->exc_value = *value; tstate->exc_traceback = *tb; *type = tmp_type; 
*value = tmp_value; *tb = tmp_tb; } #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); PyErr_SetExcInfo(*type, *value, *tb); *type = tmp_type; *value = tmp_value; *tb = tmp_tb; } #endif /////////////// WriteUnraisableException.proto /////////////// static void __Pyx_WriteUnraisable(const char *name, int clineno, int lineno, const char *filename, int full_traceback, int nogil); /*proto*/ /////////////// WriteUnraisableException /////////////// //@requires: PyErrFetchRestore //@requires: PyThreadStateGet static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, int full_traceback, CYTHON_UNUSED int nogil) { PyObject *old_exc, *old_val, *old_tb; PyObject *ctx; __Pyx_PyThreadState_declare #ifdef WITH_THREAD PyGILState_STATE state; if (nogil) state = PyGILState_Ensure(); #ifdef _MSC_VER /* arbitrary, to suppress warning */ else state = (PyGILState_STATE)-1; #endif #endif __Pyx_PyThreadState_assign __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); if (full_traceback) { Py_XINCREF(old_exc); Py_XINCREF(old_val); Py_XINCREF(old_tb); __Pyx_ErrRestore(old_exc, old_val, old_tb); PyErr_PrintEx(1); } #if PY_MAJOR_VERSION < 3 ctx = PyString_FromString(name); #else ctx = PyUnicode_FromString(name); #endif __Pyx_ErrRestore(old_exc, old_val, old_tb); if (!ctx) { PyErr_WriteUnraisable(Py_None); } else { PyErr_WriteUnraisable(ctx); Py_DECREF(ctx); } #ifdef WITH_THREAD if (nogil) PyGILState_Release(state); #endif } /////////////// CLineInTraceback.proto /////////////// static int __Pyx_CLineForTraceback(int c_line); /////////////// CLineInTraceback /////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr //@substitute: naming static int __Pyx_CLineForTraceback(int c_line) { #ifdef CYTHON_CLINE_IN_TRACEBACK /* 0 or 1 to disable/enable C line display in tracebacks at C compile time */ return ((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0; #else PyObject *use_cline; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict = _PyObject_GetDictPtr(${cython_runtime_cname}); if (likely(cython_runtime_dict)) { use_cline = PyDict_GetItem(*cython_runtime_dict, PYIDENT("cline_in_traceback")); } else #endif { PyObject *ptype, *pvalue, *ptraceback; PyObject *use_cline_obj; PyErr_Fetch(&ptype, &pvalue, &ptraceback); use_cline_obj = __Pyx_PyObject_GetAttrStr(${cython_runtime_cname}, PYIDENT("cline_in_traceback")); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; Py_DECREF(use_cline_obj); } else { use_cline = NULL; } PyErr_Restore(ptype, pvalue, ptraceback); } if (!use_cline) { c_line = 0; PyObject_SetAttr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"), Py_False); } else if (PyObject_Not(use_cline) != 0) { c_line = 0; } return c_line; #endif } /////////////// AddTraceback.proto /////////////// static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /*proto*/ /////////////// AddTraceback /////////////// //@requires: ModuleSetupCode.c::CodeObjectCache //@requires: CLineInTraceback //@substitute: naming #include "compile.h" #include "frameobject.h" #include "traceback.h" static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyObject *py_srcfile = 0; PyObject *py_funcname = 0; #if PY_MAJOR_VERSION < 3 py_srcfile = PyString_FromString(filename); #else py_srcfile = PyUnicode_FromString(filename); #endif if (!py_srcfile) goto bad; if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); #else py_funcname = PyUnicode_FromString(funcname); #endif } if (!py_funcname) goto bad; py_code = __Pyx_PyCode_New( 0, /*int argcount,*/ 0, /*int kwonlyargcount,*/ 0, /*int nlocals,*/ 0, /*int stacksize,*/ 0, /*int flags,*/ $empty_bytes, /*PyObject *code,*/ $empty_tuple, /*PyObject *consts,*/ $empty_tuple, /*PyObject *names,*/ $empty_tuple, /*PyObject *varnames,*/ $empty_tuple, /*PyObject *freevars,*/ $empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, /*int firstlineno,*/ $empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); Py_DECREF(py_funcname); return py_code; bad: Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; if (c_line) { c_line = __Pyx_CLineForTraceback(c_line); } // Negate to avoid collisions between py and c lines. py_code = $global_code_object_cache_find(c_line ? -c_line : py_line); if (!py_code) { py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) goto bad; $global_code_object_cache_insert(c_line ? -c_line : py_line, py_code); } py_frame = PyFrame_New( PyThreadState_GET(), /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ $moddict_cname, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } Cython-0.26.1/Cython/Utility/MemoryView.pyx0000664000175000017500000013752113150045407021414 0ustar stefanstefan00000000000000#################### View.MemoryView #################### # This utility provides cython.array and cython.view.memoryview from __future__ import absolute_import cimport cython # from cpython cimport ... 
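# A minimal usage sketch of what this utility provides, written as user-level
# Cython code (variable names are illustrative; `view.array` and the typed
# memoryview syntax are the documented public interface):
#
#     from cython cimport view
#
#     arr = view.array(shape=(10,), itemsize=sizeof(int), format="i")
#     cdef int[:] mv = arr      # typed memoryview backed by this utility
#     mv[0] = 42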
cdef extern from "Python.h": int PyIndex_Check(object) object PyLong_FromVoidPtr(void *) cdef extern from "pythread.h": ctypedef void *PyThread_type_lock PyThread_type_lock PyThread_allocate_lock() void PyThread_free_lock(PyThread_type_lock) int PyThread_acquire_lock(PyThread_type_lock, int mode) nogil void PyThread_release_lock(PyThread_type_lock) nogil cdef extern from "": void *memset(void *b, int c, size_t len) cdef extern from *: int __Pyx_GetBuffer(object, Py_buffer *, int) except -1 void __Pyx_ReleaseBuffer(Py_buffer *) ctypedef struct PyObject ctypedef Py_ssize_t Py_intptr_t void Py_INCREF(PyObject *) void Py_DECREF(PyObject *) void* PyMem_Malloc(size_t n) void PyMem_Free(void *p) void* PyObject_Malloc(size_t n) void PyObject_Free(void *p) cdef struct __pyx_memoryview "__pyx_memoryview_obj": Py_buffer view PyObject *obj __Pyx_TypeInfo *typeinfo ctypedef struct {{memviewslice_name}}: __pyx_memoryview *memview char *data Py_ssize_t shape[{{max_dims}}] Py_ssize_t strides[{{max_dims}}] Py_ssize_t suboffsets[{{max_dims}}] void __PYX_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil) void __PYX_XDEC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil) ctypedef struct __pyx_buffer "Py_buffer": PyObject *obj PyObject *Py_None cdef enum: PyBUF_C_CONTIGUOUS, PyBUF_F_CONTIGUOUS, PyBUF_ANY_CONTIGUOUS PyBUF_FORMAT PyBUF_WRITABLE PyBUF_STRIDES PyBUF_INDIRECT PyBUF_RECORDS ctypedef struct __Pyx_TypeInfo: pass cdef object capsule "__pyx_capsule_create" (void *p, char *sig) cdef int __pyx_array_getbuffer(PyObject *obj, Py_buffer view, int flags) cdef int __pyx_memoryview_getbuffer(PyObject *obj, Py_buffer view, int flags) cdef extern from *: ctypedef int __pyx_atomic_int {{memviewslice_name}} slice_copy_contig "__pyx_memoryview_copy_new_contig"( __Pyx_memviewslice *from_mvs, char *mode, int ndim, size_t sizeof_dtype, int contig_flag, bint dtype_is_object) nogil except * bint slice_is_contig "__pyx_memviewslice_is_contig" ( {{memviewslice_name}} mvs, char order, int ndim) nogil bint slices_overlap "__pyx_slices_overlap" ({{memviewslice_name}} *slice1, {{memviewslice_name}} *slice2, int ndim, size_t itemsize) nogil cdef extern from "": void *malloc(size_t) nogil void free(void *) nogil void *memcpy(void *dest, void *src, size_t n) nogil # ### cython.array class # @cname("__pyx_array") cdef class array: cdef: char *data Py_ssize_t len char *format int ndim Py_ssize_t *_shape Py_ssize_t *_strides Py_ssize_t itemsize unicode mode # FIXME: this should have been a simple 'char' bytes _format void (*callback_free_data)(void *data) # cdef object _memview cdef bint free_data cdef bint dtype_is_object def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None, mode="c", bint allocate_buffer=True): cdef int idx cdef Py_ssize_t i, dim cdef PyObject **p self.ndim = len(shape) self.itemsize = itemsize if not self.ndim: raise ValueError("Empty shape tuple for cython.array") if itemsize <= 0: raise ValueError("itemsize <= 0 for cython.array") if not isinstance(format, bytes): format = format.encode('ASCII') self._format = format # keep a reference to the byte string self.format = self._format # use single malloc() for both shape and strides self._shape = PyObject_Malloc(sizeof(Py_ssize_t)*self.ndim*2) self._strides = self._shape + self.ndim if not self._shape: raise MemoryError("unable to allocate shape and strides.") # cdef Py_ssize_t dim, stride for idx, dim in enumerate(shape): if dim <= 0: raise ValueError("Invalid shape in axis %d: %d." 
% (idx, dim)) self._shape[idx] = dim cdef char order if mode == 'fortran': order = b'F' self.mode = u'fortran' elif mode == 'c': order = b'C' self.mode = u'c' else: raise ValueError("Invalid mode, expected 'c' or 'fortran', got %s" % mode) self.len = fill_contig_strides_array(self._shape, self._strides, itemsize, self.ndim, order) self.free_data = allocate_buffer self.dtype_is_object = format == b'O' if allocate_buffer: # use malloc() for backwards compatibility # in case external code wants to change the data pointer self.data = malloc(self.len) if not self.data: raise MemoryError("unable to allocate array data.") if self.dtype_is_object: p = self.data for i in range(self.len / itemsize): p[i] = Py_None Py_INCREF(Py_None) @cname('getbuffer') def __getbuffer__(self, Py_buffer *info, int flags): cdef int bufmode = -1 if self.mode == u"c": bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS elif self.mode == u"fortran": bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS if not (flags & bufmode): raise ValueError("Can only create a buffer that is contiguous in memory.") info.buf = self.data info.len = self.len info.ndim = self.ndim info.shape = self._shape info.strides = self._strides info.suboffsets = NULL info.itemsize = self.itemsize info.readonly = 0 if flags & PyBUF_FORMAT: info.format = self.format else: info.format = NULL info.obj = self __pyx_getbuffer = capsule( &__pyx_array_getbuffer, "getbuffer(obj, view, flags)") def __dealloc__(array self): if self.callback_free_data != NULL: self.callback_free_data(self.data) elif self.free_data: if self.dtype_is_object: refcount_objects_in_slice(self.data, self._shape, self._strides, self.ndim, False) free(self.data) PyObject_Free(self._shape) @property def memview(self): return self.get_memview() @cname('get_memview') cdef get_memview(self): flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE return memoryview(self, flags, self.dtype_is_object) def __len__(self): return self._shape[0] def __getattr__(self, attr): return getattr(self.memview, attr) def __getitem__(self, item): return self.memview[item] def __setitem__(self, item, value): self.memview[item] = value @cname("__pyx_array_new") cdef array array_cwrapper(tuple shape, Py_ssize_t itemsize, char *format, char *mode, char *buf): cdef array result if buf == NULL: result = array(shape, itemsize, format, mode.decode('ASCII')) else: result = array(shape, itemsize, format, mode.decode('ASCII'), allocate_buffer=False) result.data = buf return result # ### Memoryview constants and cython.view.memoryview class # # Disable generic_contiguous, as it makes trouble verifying contiguity: # - 'contiguous' or '::1' means the dimension is contiguous with dtype # - 'indirect_contiguous' means a contiguous list of pointers # - dtype contiguous must be contiguous in the first or last dimension # from the start, or from the dimension following the last indirect dimension # # e.g. # int[::indirect_contiguous, ::contiguous, :] # # is valid (list of pointers to 2d fortran-contiguous array), but # # int[::generic_contiguous, ::contiguous, :] # # would mean you'd have assert dimension 0 to be indirect (and pointer contiguous) at runtime. # So it doesn't bring any performance benefit, and it's only confusing. 
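# A short sketch of how these layout specifiers are spelled in user code
# (assuming `from cython cimport view` in the user module):
#
#     cdef int[:, ::1] a                           # C-contiguous in the last dimension
#     cdef int[::1, :] b                           # Fortran-contiguous in the first dimension
#     cdef int[::view.indirect_contiguous, ::1] c  # contiguous array of pointers to
#                                                  # contiguous rows of ints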
@cname('__pyx_MemviewEnum') cdef class Enum(object): cdef object name def __init__(self, name): self.name = name def __repr__(self): return self.name cdef generic = Enum("") cdef strided = Enum("") # default cdef indirect = Enum("") # Disable generic_contiguous, as it is a troublemaker #cdef generic_contiguous = Enum("") cdef contiguous = Enum("") cdef indirect_contiguous = Enum("") # 'follow' is implied when the first or last axis is ::1 @cname('__pyx_align_pointer') cdef void *align_pointer(void *memory, size_t alignment) nogil: "Align pointer memory on a given boundary" cdef Py_intptr_t aligned_p = memory cdef size_t offset with cython.cdivision(True): offset = aligned_p % alignment if offset > 0: aligned_p += alignment - offset return aligned_p # pre-allocate thread locks for reuse ## note that this could be implemented in a more beautiful way in "normal" Cython, ## but this code gets merged into the user module and not everything works there. DEF THREAD_LOCKS_PREALLOCATED = 8 cdef int __pyx_memoryview_thread_locks_used = 0 cdef PyThread_type_lock[THREAD_LOCKS_PREALLOCATED] __pyx_memoryview_thread_locks = [ PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), PyThread_allocate_lock(), ] @cname('__pyx_memoryview') cdef class memoryview(object): cdef object obj cdef object _size cdef object _array_interface cdef PyThread_type_lock lock # the following array will contain a single __pyx_atomic int with # suitable alignment cdef __pyx_atomic_int acquisition_count[2] cdef __pyx_atomic_int *acquisition_count_aligned_p cdef Py_buffer view cdef int flags cdef bint dtype_is_object cdef __Pyx_TypeInfo *typeinfo def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False): self.obj = obj self.flags = flags if type(self) is memoryview or obj is not None: __Pyx_GetBuffer(obj, &self.view, flags) if self.view.obj == NULL: (<__pyx_buffer *> &self.view).obj = Py_None Py_INCREF(Py_None) global __pyx_memoryview_thread_locks_used if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED: self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] __pyx_memoryview_thread_locks_used += 1 if self.lock is NULL: self.lock = PyThread_allocate_lock() if self.lock is NULL: raise MemoryError if flags & PyBUF_FORMAT: self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0') else: self.dtype_is_object = dtype_is_object self.acquisition_count_aligned_p = <__pyx_atomic_int *> align_pointer( &self.acquisition_count[0], sizeof(__pyx_atomic_int)) self.typeinfo = NULL def __dealloc__(memoryview self): if self.obj is not None: __Pyx_ReleaseBuffer(&self.view) cdef int i global __pyx_memoryview_thread_locks_used if self.lock != NULL: for i in range(__pyx_memoryview_thread_locks_used): if __pyx_memoryview_thread_locks[i] is self.lock: __pyx_memoryview_thread_locks_used -= 1 if i != __pyx_memoryview_thread_locks_used: __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = ( __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i]) break else: PyThread_free_lock(self.lock) cdef char *get_item_pointer(memoryview self, object index) except NULL: cdef Py_ssize_t dim cdef char *itemp = self.view.buf for dim, idx in enumerate(index): itemp = pybuffer_index(&self.view, itemp, idx, dim) return itemp #@cname('__pyx_memoryview_getitem') def 
__getitem__(memoryview self, object index): if index is Ellipsis: return self have_slices, indices = _unellipsify(index, self.view.ndim) cdef char *itemp if have_slices: return memview_slice(self, indices) else: itemp = self.get_item_pointer(indices) return self.convert_item_to_object(itemp) def __setitem__(memoryview self, object index, object value): have_slices, index = _unellipsify(index, self.view.ndim) if have_slices: obj = self.is_slice(value) if obj: self.setitem_slice_assignment(self[index], obj) else: self.setitem_slice_assign_scalar(self[index], value) else: self.setitem_indexed(index, value) cdef is_slice(self, obj): if not isinstance(obj, memoryview): try: obj = memoryview(obj, self.flags|PyBUF_ANY_CONTIGUOUS, self.dtype_is_object) except TypeError: return None return obj cdef setitem_slice_assignment(self, dst, src): cdef {{memviewslice_name}} dst_slice cdef {{memviewslice_name}} src_slice memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0], get_slice_from_memview(dst, &dst_slice)[0], src.ndim, dst.ndim, self.dtype_is_object) cdef setitem_slice_assign_scalar(self, memoryview dst, value): cdef int array[128] cdef void *tmp = NULL cdef void *item cdef {{memviewslice_name}} *dst_slice cdef {{memviewslice_name}} tmp_slice dst_slice = get_slice_from_memview(dst, &tmp_slice) if self.view.itemsize > sizeof(array): tmp = PyMem_Malloc(self.view.itemsize) if tmp == NULL: raise MemoryError item = tmp else: item = array try: if self.dtype_is_object: ( item)[0] = value else: self.assign_item_from_object( item, value) # It would be easy to support indirect dimensions, but it's easier # to disallow :) if self.view.suboffsets != NULL: assert_direct_dimensions(self.view.suboffsets, self.view.ndim) slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize, item, self.dtype_is_object) finally: PyMem_Free(tmp) cdef setitem_indexed(self, index, value): cdef char *itemp = self.get_item_pointer(index) self.assign_item_from_object(itemp, value) cdef convert_item_to_object(self, char *itemp): """Only used if instantiated manually by the user, or if Cython doesn't know how to convert the type""" import struct cdef bytes bytesitem # Do a manual and complete check here instead of this easy hack bytesitem = itemp[:self.view.itemsize] try: result = struct.unpack(self.view.format, bytesitem) except struct.error: raise ValueError("Unable to convert item to object") else: if len(self.view.format) == 1: return result[0] return result cdef assign_item_from_object(self, char *itemp, object value): """Only used if instantiated manually by the user, or if Cython doesn't know how to convert the type""" import struct cdef char c cdef bytes bytesvalue cdef Py_ssize_t i if isinstance(value, tuple): bytesvalue = struct.pack(self.view.format, *value) else: bytesvalue = struct.pack(self.view.format, value) for i, c in enumerate(bytesvalue): itemp[i] = c @cname('getbuffer') def __getbuffer__(self, Py_buffer *info, int flags): if flags & PyBUF_STRIDES: info.shape = self.view.shape else: info.shape = NULL if flags & PyBUF_STRIDES: info.strides = self.view.strides else: info.strides = NULL if flags & PyBUF_INDIRECT: info.suboffsets = self.view.suboffsets else: info.suboffsets = NULL if flags & PyBUF_FORMAT: info.format = self.view.format else: info.format = NULL info.buf = self.view.buf info.ndim = self.view.ndim info.itemsize = self.view.itemsize info.len = self.view.len info.readonly = 0 info.obj = self __pyx_getbuffer = capsule( &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)") # Some 
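align_pointer() above rounds the address of the acquisition counter up to the next alignment boundary so the atomic int is properly aligned. The same arithmetic as a standalone C sketch, with illustrative names:

/* Sketch: round a pointer up to the next multiple of `alignment`. */
#include <stdio.h>
#include <stdint.h>

static void *align_up(void *memory, size_t alignment)
{
    uintptr_t p = (uintptr_t)memory;
    uintptr_t offset = p % alignment;    /* distance past the previous boundary */
    if (offset > 0)
        p += alignment - offset;         /* advance to the next boundary */
    return (void *)p;
}

int main(void)
{
    char buffer[64];
    void *p = align_up(buffer + 1, 16);
    printf("%p -> %p (16-byte aligned: %d)\n",
           (void *)(buffer + 1), p, (int)(((uintptr_t)p % 16) == 0));
    return 0;
}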
properties that have the same sematics as in NumPy @property def T(self): cdef _memoryviewslice result = memoryview_copy(self) transpose_memslice(&result.from_slice) return result @property def base(self): return self.obj @property def shape(self): return tuple([length for length in self.view.shape[:self.view.ndim]]) @property def strides(self): if self.view.strides == NULL: # Note: we always ask for strides, so if this is not set it's a bug raise ValueError("Buffer view does not expose strides") return tuple([stride for stride in self.view.strides[:self.view.ndim]]) @property def suboffsets(self): if self.view.suboffsets == NULL: return (-1,) * self.view.ndim return tuple([suboffset for suboffset in self.view.suboffsets[:self.view.ndim]]) @property def ndim(self): return self.view.ndim @property def itemsize(self): return self.view.itemsize @property def nbytes(self): return self.size * self.view.itemsize @property def size(self): if self._size is None: result = 1 for length in self.view.shape[:self.view.ndim]: result *= length self._size = result return self._size def __len__(self): if self.view.ndim >= 1: return self.view.shape[0] return 0 def __repr__(self): return "" % (self.base.__class__.__name__, id(self)) def __str__(self): return "" % (self.base.__class__.__name__,) # Support the same attributes as memoryview slices def is_c_contig(self): cdef {{memviewslice_name}} *mslice cdef {{memviewslice_name}} tmp mslice = get_slice_from_memview(self, &tmp) return slice_is_contig(mslice[0], 'C', self.view.ndim) def is_f_contig(self): cdef {{memviewslice_name}} *mslice cdef {{memviewslice_name}} tmp mslice = get_slice_from_memview(self, &tmp) return slice_is_contig(mslice[0], 'F', self.view.ndim) def copy(self): cdef {{memviewslice_name}} mslice cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS slice_copy(self, &mslice) mslice = slice_copy_contig(&mslice, "c", self.view.ndim, self.view.itemsize, flags|PyBUF_C_CONTIGUOUS, self.dtype_is_object) return memoryview_copy_from_slice(self, &mslice) def copy_fortran(self): cdef {{memviewslice_name}} src, dst cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS slice_copy(self, &src) dst = slice_copy_contig(&src, "fortran", self.view.ndim, self.view.itemsize, flags|PyBUF_F_CONTIGUOUS, self.dtype_is_object) return memoryview_copy_from_slice(self, &dst) @cname('__pyx_memoryview_new') cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo): cdef memoryview result = memoryview(o, flags, dtype_is_object) result.typeinfo = typeinfo return result @cname('__pyx_memoryview_check') cdef inline bint memoryview_check(object o): return isinstance(o, memoryview) cdef tuple _unellipsify(object index, int ndim): """ Replace all ellipses with full slices and fill incomplete indices with full slices. 
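is_c_contig() and is_f_contig() above delegate to slice_is_contig(), which in essence verifies that the strides describe a densely packed layout in the requested order. A simplified standalone check for direct (non-indirect) buffers; the names and demo shapes are illustrative:

/* Sketch: does shape/strides describe a packed C- or F-contiguous layout? */
#include <stdio.h>
#include <stddef.h>

static int is_contig(const ptrdiff_t *shape, const ptrdiff_t *strides,
                     ptrdiff_t itemsize, int ndim, char order)
{
    ptrdiff_t expected = itemsize;
    int i;
    if (order == 'F') {
        for (i = 0; i < ndim; i++) {              /* first axis varies fastest */
            if (shape[i] > 1 && strides[i] != expected)
                return 0;
            expected *= shape[i];
        }
    } else {
        for (i = ndim - 1; i >= 0; i--) {         /* 'C': last axis varies fastest */
            if (shape[i] > 1 && strides[i] != expected)
                return 0;
            expected *= shape[i];
        }
    }
    return 1;
}

int main(void)
{
    ptrdiff_t shape[2] = {3, 4};
    ptrdiff_t c_strides[2] = {4 * sizeof(int), sizeof(int)};
    ptrdiff_t f_strides[2] = {sizeof(int), 3 * sizeof(int)};
    printf("C-ordered strides: c_contig=%d f_contig=%d\n",
           is_contig(shape, c_strides, sizeof(int), 2, 'C'),
           is_contig(shape, c_strides, sizeof(int), 2, 'F'));
    printf("F-ordered strides: c_contig=%d f_contig=%d\n",
           is_contig(shape, f_strides, sizeof(int), 2, 'C'),
           is_contig(shape, f_strides, sizeof(int), 2, 'F'));
    return 0;
}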
""" if not isinstance(index, tuple): tup = (index,) else: tup = index result = [] have_slices = False seen_ellipsis = False for idx, item in enumerate(tup): if item is Ellipsis: if not seen_ellipsis: result.extend([slice(None)] * (ndim - len(tup) + 1)) seen_ellipsis = True else: result.append(slice(None)) have_slices = True else: if not isinstance(item, slice) and not PyIndex_Check(item): raise TypeError("Cannot index with type '%s'" % type(item)) have_slices = have_slices or isinstance(item, slice) result.append(item) nslices = ndim - len(result) if nslices: result.extend([slice(None)] * nslices) return have_slices or nslices, tuple(result) cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim): for suboffset in suboffsets[:ndim]: if suboffset >= 0: raise ValueError("Indirect dimensions not supported") # ### Slicing a memoryview # @cname('__pyx_memview_slice') cdef memoryview memview_slice(memoryview memview, object indices): cdef int new_ndim = 0, suboffset_dim = -1, dim cdef bint negative_step cdef {{memviewslice_name}} src, dst cdef {{memviewslice_name}} *p_src # dst is copied by value in memoryview_fromslice -- initialize it # src is never copied memset(&dst, 0, sizeof(dst)) cdef _memoryviewslice memviewsliceobj assert memview.view.ndim > 0 if isinstance(memview, _memoryviewslice): memviewsliceobj = memview p_src = &memviewsliceobj.from_slice else: slice_copy(memview, &src) p_src = &src # Note: don't use variable src at this point # SubNote: we should be able to declare variables in blocks... # memoryview_fromslice() will inc our dst slice dst.memview = p_src.memview dst.data = p_src.data # Put everything in temps to avoid this bloody warning: # "Argument evaluation order in C function call is undefined and # may not be as expected" cdef {{memviewslice_name}} *p_dst = &dst cdef int *p_suboffset_dim = &suboffset_dim cdef Py_ssize_t start, stop, step cdef bint have_start, have_stop, have_step for dim, index in enumerate(indices): if PyIndex_Check(index): slice_memviewslice( p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim], dim, new_ndim, p_suboffset_dim, index, 0, 0, # start, stop, step 0, 0, 0, # have_{start,stop,step} False) elif index is None: p_dst.shape[new_ndim] = 1 p_dst.strides[new_ndim] = 0 p_dst.suboffsets[new_ndim] = -1 new_ndim += 1 else: start = index.start or 0 stop = index.stop or 0 step = index.step or 0 have_start = index.start is not None have_stop = index.stop is not None have_step = index.step is not None slice_memviewslice( p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim], dim, new_ndim, p_suboffset_dim, start, stop, step, have_start, have_stop, have_step, True) new_ndim += 1 if isinstance(memview, _memoryviewslice): return memoryview_fromslice(dst, new_ndim, memviewsliceobj.to_object_func, memviewsliceobj.to_dtype_func, memview.dtype_is_object) else: return memoryview_fromslice(dst, new_ndim, NULL, NULL, memview.dtype_is_object) # ### Slicing in a single dimension of a memoryviewslice # cdef extern from "": void abort() nogil void printf(char *s, ...) nogil cdef extern from "": ctypedef struct FILE FILE *stderr int fputs(char *s, FILE *stream) cdef extern from "pystate.h": void PyThreadState_Get() nogil # These are not actually nogil, but we check for the GIL before calling them void PyErr_SetString(PyObject *type, char *msg) nogil PyObject *PyErr_Format(PyObject *exc, char *msg, ...) 
nogil @cname('__pyx_memoryview_slice_memviewslice') cdef int slice_memviewslice( {{memviewslice_name}} *dst, Py_ssize_t shape, Py_ssize_t stride, Py_ssize_t suboffset, int dim, int new_ndim, int *suboffset_dim, Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step, int have_start, int have_stop, int have_step, bint is_slice) nogil except -1: """ Create a new slice dst given slice src. dim - the current src dimension (indexing will make dimensions disappear) new_dim - the new dst dimension suboffset_dim - pointer to a single int initialized to -1 to keep track of where slicing offsets should be added """ cdef Py_ssize_t new_shape cdef bint negative_step if not is_slice: # index is a normal integer-like index if start < 0: start += shape if not 0 <= start < shape: _err_dim(IndexError, "Index out of bounds (axis %d)", dim) else: # index is a slice negative_step = have_step != 0 and step < 0 if have_step and step == 0: _err_dim(ValueError, "Step may not be zero (axis %d)", dim) # check our bounds and set defaults if have_start: if start < 0: start += shape if start < 0: start = 0 elif start >= shape: if negative_step: start = shape - 1 else: start = shape else: if negative_step: start = shape - 1 else: start = 0 if have_stop: if stop < 0: stop += shape if stop < 0: stop = 0 elif stop > shape: stop = shape else: if negative_step: stop = -1 else: stop = shape if not have_step: step = 1 # len = ceil( (stop - start) / step ) with cython.cdivision(True): new_shape = (stop - start) // step if (stop - start) - step * new_shape: new_shape += 1 if new_shape < 0: new_shape = 0 # shape/strides/suboffsets dst.strides[new_ndim] = stride * step dst.shape[new_ndim] = new_shape dst.suboffsets[new_ndim] = suboffset # Add the slicing or idexing offsets to the right suboffset or base data * if suboffset_dim[0] < 0: dst.data += start * stride else: dst.suboffsets[suboffset_dim[0]] += start * stride if suboffset >= 0: if not is_slice: if new_ndim == 0: dst.data = ( dst.data)[0] + suboffset else: _err_dim(IndexError, "All dimensions preceding dimension %d " "must be indexed and not sliced", dim) else: suboffset_dim[0] = new_ndim return 0 # ### Index a memoryview # @cname('__pyx_pybuffer_index') cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index, Py_ssize_t dim) except NULL: cdef Py_ssize_t shape, stride, suboffset = -1 cdef Py_ssize_t itemsize = view.itemsize cdef char *resultp if view.ndim == 0: shape = view.len / itemsize stride = itemsize else: shape = view.shape[dim] stride = view.strides[dim] if view.suboffsets != NULL: suboffset = view.suboffsets[dim] if index < 0: index += view.shape[dim] if index < 0: raise IndexError("Out of bounds on buffer access (axis %d)" % dim) if index >= shape: raise IndexError("Out of bounds on buffer access (axis %d)" % dim) resultp = bufp + index * stride if suboffset >= 0: resultp = ( resultp)[0] + suboffset return resultp # ### Transposing a memoryviewslice # @cname('__pyx_memslice_transpose') cdef int transpose_memslice({{memviewslice_name}} *memslice) nogil except 0: cdef int ndim = memslice.memview.view.ndim cdef Py_ssize_t *shape = memslice.shape cdef Py_ssize_t *strides = memslice.strides # reverse strides and shape cdef int i, j for i in range(ndim / 2): j = ndim - 1 - i strides[i], strides[j] = strides[j], strides[i] shape[i], shape[j] = shape[j], shape[i] if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0: _err(ValueError, "Cannot transpose memoryview with indirect dimensions") return 1 # ### Creating new memoryview objects from slices and 
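slice_memviewslice() above normalises one slice per dimension: negative start/stop values wrap around, out-of-range values are clamped with the step's sign taken into account, and the new extent is a ceiling division. A standalone sketch of the same bounds logic; the have_* flags mirror the missing-value handling and the names are illustrative:

/* Sketch: normalise a Python-style slice against one axis and compute its length. */
#include <stdio.h>

static long slice_length(long shape, long start, long stop, long step,
                         int have_start, int have_stop, int have_step)
{
    int negative_step = have_step && step < 0;
    long length;

    if (!have_step)
        step = 1;

    if (have_start) {
        if (start < 0) {
            start += shape;
            if (start < 0)
                start = 0;
        } else if (start >= shape) {
            start = negative_step ? shape - 1 : shape;
        }
    } else {
        start = negative_step ? shape - 1 : 0;
    }

    if (have_stop) {
        if (stop < 0) {
            stop += shape;
            if (stop < 0)
                stop = 0;
        } else if (stop > shape) {
            stop = shape;
        }
    } else {
        stop = negative_step ? -1 : shape;
    }

    /* length = ceil((stop - start) / step), never negative */
    length = (stop - start) / step;
    if ((stop - start) - step * length)
        length += 1;
    if (length < 0)
        length = 0;
    return length;
}

int main(void)
{
    /* analogous to x[2:8:3], x[::-1] and x[5:] on a 10-element axis */
    printf("%ld\n", slice_length(10, 2, 8, 3, 1, 1, 1));   /* 2  */
    printf("%ld\n", slice_length(10, 0, 0, -1, 0, 0, 1));  /* 10 */
    printf("%ld\n", slice_length(10, 5, 0, 0, 1, 0, 0));   /* 5  */
    return 0;
}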
memoryviews # @cname('__pyx_memoryviewslice') cdef class _memoryviewslice(memoryview): "Internal class for passing memoryview slices to Python" # We need this to keep our shape/strides/suboffset pointers valid cdef {{memviewslice_name}} from_slice # We need this only to print it's class' name cdef object from_object cdef object (*to_object_func)(char *) cdef int (*to_dtype_func)(char *, object) except 0 def __dealloc__(self): __PYX_XDEC_MEMVIEW(&self.from_slice, 1) cdef convert_item_to_object(self, char *itemp): if self.to_object_func != NULL: return self.to_object_func(itemp) else: return memoryview.convert_item_to_object(self, itemp) cdef assign_item_from_object(self, char *itemp, object value): if self.to_dtype_func != NULL: self.to_dtype_func(itemp, value) else: memoryview.assign_item_from_object(self, itemp, value) @property def base(self): return self.from_object __pyx_getbuffer = capsule( &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)") @cname('__pyx_memoryview_fromslice') cdef memoryview_fromslice({{memviewslice_name}} memviewslice, int ndim, object (*to_object_func)(char *), int (*to_dtype_func)(char *, object) except 0, bint dtype_is_object): cdef _memoryviewslice result if memviewslice.memview == Py_None: return None # assert 0 < ndim <= memviewslice.memview.view.ndim, ( # ndim, memviewslice.memview.view.ndim) result = _memoryviewslice(None, 0, dtype_is_object) result.from_slice = memviewslice __PYX_INC_MEMVIEW(&memviewslice, 1) result.from_object = ( memviewslice.memview).base result.typeinfo = memviewslice.memview.typeinfo result.view = memviewslice.memview.view result.view.buf = memviewslice.data result.view.ndim = ndim (<__pyx_buffer *> &result.view).obj = Py_None Py_INCREF(Py_None) result.flags = PyBUF_RECORDS result.view.shape = result.from_slice.shape result.view.strides = result.from_slice.strides # only set suboffsets if actually used, otherwise set to NULL to improve compatibility result.view.suboffsets = NULL for suboffset in result.from_slice.suboffsets[:ndim]: if suboffset >= 0: result.view.suboffsets = result.from_slice.suboffsets break result.view.len = result.view.itemsize for length in result.view.shape[:ndim]: result.view.len *= length result.to_object_func = to_object_func result.to_dtype_func = to_dtype_func return result @cname('__pyx_memoryview_get_slice_from_memoryview') cdef {{memviewslice_name}} *get_slice_from_memview(memoryview memview, {{memviewslice_name}} *mslice): cdef _memoryviewslice obj if isinstance(memview, _memoryviewslice): obj = memview return &obj.from_slice else: slice_copy(memview, mslice) return mslice @cname('__pyx_memoryview_slice_copy') cdef void slice_copy(memoryview memview, {{memviewslice_name}} *dst): cdef int dim cdef (Py_ssize_t*) shape, strides, suboffsets shape = memview.view.shape strides = memview.view.strides suboffsets = memview.view.suboffsets dst.memview = <__pyx_memoryview *> memview dst.data = memview.view.buf for dim in range(memview.view.ndim): dst.shape[dim] = shape[dim] dst.strides[dim] = strides[dim] dst.suboffsets[dim] = suboffsets[dim] if suboffsets else -1 @cname('__pyx_memoryview_copy_object') cdef memoryview_copy(memoryview memview): "Create a new memoryview object" cdef {{memviewslice_name}} memviewslice slice_copy(memview, &memviewslice) return memoryview_copy_from_slice(memview, &memviewslice) @cname('__pyx_memoryview_copy_object_from_slice') cdef memoryview_copy_from_slice(memoryview memview, {{memviewslice_name}} *memviewslice): """ Create a new memoryview object from a given memoryview 
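_unellipsify() above expands the first Ellipsis in an index tuple into (ndim - len(tuple) + 1) full slices, turns any later Ellipsis into a single full slice, and pads missing trailing dimensions. A rough sketch of just that expansion arithmetic; index kinds are encoded as characters here, and the type checking and have_slices bookkeeping are omitted:

/* Sketch: expand an Ellipsis the way _unellipsify does.
 * 'i' = integer index, ':' = full slice, '.' = Ellipsis. */
#include <stdio.h>
#include <string.h>

static void unellipsify(const char *tup, int ndim, char *out)
{
    int len = (int)strlen(tup);
    int pos = 0, seen_ellipsis = 0, i, k;
    for (i = 0; i < len; i++) {
        if (tup[i] == '.') {
            int count = seen_ellipsis ? 1 : ndim - len + 1;
            seen_ellipsis = 1;
            for (k = 0; k < count; k++)
                out[pos++] = ':';
        } else {
            out[pos++] = tup[i];
        }
    }
    while (pos < ndim)              /* pad missing trailing dimensions */
        out[pos++] = ':';
    out[pos] = '\0';
}

int main(void)
{
    char out[32];
    unellipsify("i.", 4, out);      /* like a[i, ...] on a 4-d view */
    printf("i. -> %s\n", out);      /* i::: */
    unellipsify(".i", 4, out);      /* like a[..., i] on a 4-d view */
    printf(".i -> %s\n", out);      /* :::i */
    unellipsify("i", 3, out);       /* like a[i] on a 3-d view */
    printf("i  -> %s\n", out);      /* i::  */
    return 0;
}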
object and slice. """ cdef object (*to_object_func)(char *) cdef int (*to_dtype_func)(char *, object) except 0 if isinstance(memview, _memoryviewslice): to_object_func = (<_memoryviewslice> memview).to_object_func to_dtype_func = (<_memoryviewslice> memview).to_dtype_func else: to_object_func = NULL to_dtype_func = NULL return memoryview_fromslice(memviewslice[0], memview.view.ndim, to_object_func, to_dtype_func, memview.dtype_is_object) # ### Copy the contents of a memoryview slices # cdef Py_ssize_t abs_py_ssize_t(Py_ssize_t arg) nogil: if arg < 0: return -arg else: return arg @cname('__pyx_get_best_slice_order') cdef char get_best_order({{memviewslice_name}} *mslice, int ndim) nogil: """ Figure out the best memory access order for a given slice. """ cdef int i cdef Py_ssize_t c_stride = 0 cdef Py_ssize_t f_stride = 0 for i in range(ndim - 1, -1, -1): if mslice.shape[i] > 1: c_stride = mslice.strides[i] break for i in range(ndim): if mslice.shape[i] > 1: f_stride = mslice.strides[i] break if abs_py_ssize_t(c_stride) <= abs_py_ssize_t(f_stride): return 'C' else: return 'F' @cython.cdivision(True) cdef void _copy_strided_to_strided(char *src_data, Py_ssize_t *src_strides, char *dst_data, Py_ssize_t *dst_strides, Py_ssize_t *src_shape, Py_ssize_t *dst_shape, int ndim, size_t itemsize) nogil: # Note: src_extent is 1 if we're broadcasting # dst_extent always >= src_extent as we don't do reductions cdef Py_ssize_t i cdef Py_ssize_t src_extent = src_shape[0] cdef Py_ssize_t dst_extent = dst_shape[0] cdef Py_ssize_t src_stride = src_strides[0] cdef Py_ssize_t dst_stride = dst_strides[0] if ndim == 1: if (src_stride > 0 and dst_stride > 0 and src_stride == itemsize == dst_stride): memcpy(dst_data, src_data, itemsize * dst_extent) else: for i in range(dst_extent): memcpy(dst_data, src_data, itemsize) src_data += src_stride dst_data += dst_stride else: for i in range(dst_extent): _copy_strided_to_strided(src_data, src_strides + 1, dst_data, dst_strides + 1, src_shape + 1, dst_shape + 1, ndim - 1, itemsize) src_data += src_stride dst_data += dst_stride cdef void copy_strided_to_strided({{memviewslice_name}} *src, {{memviewslice_name}} *dst, int ndim, size_t itemsize) nogil: _copy_strided_to_strided(src.data, src.strides, dst.data, dst.strides, src.shape, dst.shape, ndim, itemsize) @cname('__pyx_memoryview_slice_get_size') cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil: "Return the size of the memory occupied by the slice in number of bytes" cdef int i cdef Py_ssize_t size = src.memview.view.itemsize for i in range(ndim): size *= src.shape[i] return size @cname('__pyx_fill_contig_strides_array') cdef Py_ssize_t fill_contig_strides_array( Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t stride, int ndim, char order) nogil: """ Fill the strides array for a slice with C or F contiguous strides. This is like PyBuffer_FillContiguousStrides, but compatible with py < 2.6 """ cdef int idx if order == 'F': for idx in range(ndim): strides[idx] = stride stride = stride * shape[idx] else: for idx in range(ndim - 1, -1, -1): strides[idx] = stride stride = stride * shape[idx] return stride @cname('__pyx_memoryview_copy_data_to_temp') cdef void *copy_data_to_temp({{memviewslice_name}} *src, {{memviewslice_name}} *tmpslice, char order, int ndim) nogil except NULL: """ Copy a direct slice to temporary contiguous memory. The caller should free the result when done. 
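_copy_strided_to_strided() above recurses over the leading dimension and falls back to a single memcpy() only when both innermost strides equal the itemsize. The same idea as a standalone sketch over plain buffers; the names and the demo shape are illustrative:

/* Sketch: copy between strided buffers, memcpy-ing whole rows when packed. */
#include <stdio.h>
#include <string.h>
#include <stddef.h>

static void copy_strided(const char *src, const ptrdiff_t *src_strides,
                         char *dst, const ptrdiff_t *dst_strides,
                         const ptrdiff_t *shape, int ndim, size_t itemsize)
{
    ptrdiff_t i, extent = shape[0];
    ptrdiff_t src_stride = src_strides[0], dst_stride = dst_strides[0];

    if (ndim == 1) {
        if (src_stride == (ptrdiff_t)itemsize && dst_stride == (ptrdiff_t)itemsize) {
            memcpy(dst, src, itemsize * (size_t)extent);   /* packed row */
        } else {
            for (i = 0; i < extent; i++) {                 /* element by element */
                memcpy(dst, src, itemsize);
                src += src_stride;
                dst += dst_stride;
            }
        }
    } else {
        for (i = 0; i < extent; i++) {
            copy_strided(src, src_strides + 1, dst, dst_strides + 1,
                         shape + 1, ndim - 1, itemsize);
            src += src_stride;
            dst += dst_stride;
        }
    }
}

int main(void)
{
    int src[2][3] = {{1, 2, 3}, {4, 5, 6}};
    int dst[2][3] = {{0, 0, 0}, {0, 0, 0}};
    ptrdiff_t shape[2] = {2, 3};
    ptrdiff_t strides[2] = {3 * sizeof(int), sizeof(int)};  /* C-contiguous */
    copy_strided((const char *)src, strides, (char *)dst, strides,
                 shape, 2, sizeof(int));
    printf("%d %d %d / %d %d %d\n",
           dst[0][0], dst[0][1], dst[0][2], dst[1][0], dst[1][1], dst[1][2]);
    return 0;
}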
""" cdef int i cdef void *result cdef size_t itemsize = src.memview.view.itemsize cdef size_t size = slice_get_size(src, ndim) result = malloc(size) if not result: _err(MemoryError, NULL) # tmpslice[0] = src tmpslice.data = result tmpslice.memview = src.memview for i in range(ndim): tmpslice.shape[i] = src.shape[i] tmpslice.suboffsets[i] = -1 fill_contig_strides_array(&tmpslice.shape[0], &tmpslice.strides[0], itemsize, ndim, order) # We need to broadcast strides again for i in range(ndim): if tmpslice.shape[i] == 1: tmpslice.strides[i] = 0 if slice_is_contig(src[0], order, ndim): memcpy(result, src.data, size) else: copy_strided_to_strided(src, tmpslice, ndim, itemsize) return result # Use 'with gil' functions and avoid 'with gil' blocks, as the code within the blocks # has temporaries that need the GIL to clean up @cname('__pyx_memoryview_err_extents') cdef int _err_extents(int i, Py_ssize_t extent1, Py_ssize_t extent2) except -1 with gil: raise ValueError("got differing extents in dimension %d (got %d and %d)" % (i, extent1, extent2)) @cname('__pyx_memoryview_err_dim') cdef int _err_dim(object error, char *msg, int dim) except -1 with gil: raise error(msg.decode('ascii') % dim) @cname('__pyx_memoryview_err') cdef int _err(object error, char *msg) except -1 with gil: if msg != NULL: raise error(msg.decode('ascii')) else: raise error @cname('__pyx_memoryview_copy_contents') cdef int memoryview_copy_contents({{memviewslice_name}} src, {{memviewslice_name}} dst, int src_ndim, int dst_ndim, bint dtype_is_object) nogil except -1: """ Copy memory from slice src to slice dst. Check for overlapping memory and verify the shapes. """ cdef void *tmpdata = NULL cdef size_t itemsize = src.memview.view.itemsize cdef int i cdef char order = get_best_order(&src, src_ndim) cdef bint broadcasting = False cdef bint direct_copy = False cdef {{memviewslice_name}} tmp if src_ndim < dst_ndim: broadcast_leading(&src, src_ndim, dst_ndim) elif dst_ndim < src_ndim: broadcast_leading(&dst, dst_ndim, src_ndim) cdef int ndim = max(src_ndim, dst_ndim) for i in range(ndim): if src.shape[i] != dst.shape[i]: if src.shape[i] == 1: broadcasting = True src.strides[i] = 0 else: _err_extents(i, dst.shape[i], src.shape[i]) if src.suboffsets[i] >= 0: _err_dim(ValueError, "Dimension %d is not direct", i) if slices_overlap(&src, &dst, ndim, itemsize): # slices overlap, copy to temp, copy temp to dst if not slice_is_contig(src, order, ndim): order = get_best_order(&dst, ndim) tmpdata = copy_data_to_temp(&src, &tmp, order, ndim) src = tmp if not broadcasting: # See if both slices have equal contiguity, in that case perform a # direct copy. This only works when we are not broadcasting. 
if slice_is_contig(src, 'C', ndim): direct_copy = slice_is_contig(dst, 'C', ndim) elif slice_is_contig(src, 'F', ndim): direct_copy = slice_is_contig(dst, 'F', ndim) if direct_copy: # Contiguous slices with same order refcount_copying(&dst, dtype_is_object, ndim, False) memcpy(dst.data, src.data, slice_get_size(&src, ndim)) refcount_copying(&dst, dtype_is_object, ndim, True) free(tmpdata) return 0 if order == 'F' == get_best_order(&dst, ndim): # see if both slices have Fortran order, transpose them to match our # C-style indexing order transpose_memslice(&src) transpose_memslice(&dst) refcount_copying(&dst, dtype_is_object, ndim, False) copy_strided_to_strided(&src, &dst, ndim, itemsize) refcount_copying(&dst, dtype_is_object, ndim, True) free(tmpdata) return 0 @cname('__pyx_memoryview_broadcast_leading') cdef void broadcast_leading({{memviewslice_name}} *mslice, int ndim, int ndim_other) nogil: cdef int i cdef int offset = ndim_other - ndim for i in range(ndim - 1, -1, -1): mslice.shape[i + offset] = mslice.shape[i] mslice.strides[i + offset] = mslice.strides[i] mslice.suboffsets[i + offset] = mslice.suboffsets[i] for i in range(offset): mslice.shape[i] = 1 mslice.strides[i] = mslice.strides[0] mslice.suboffsets[i] = -1 # ### Take care of refcounting the objects in slices. Do this seperately from any copying, ### to minimize acquiring the GIL # @cname('__pyx_memoryview_refcount_copying') cdef void refcount_copying({{memviewslice_name}} *dst, bint dtype_is_object, int ndim, bint inc) nogil: # incref or decref the objects in the destination slice if the dtype is # object if dtype_is_object: refcount_objects_in_slice_with_gil(dst.data, dst.shape, dst.strides, ndim, inc) @cname('__pyx_memoryview_refcount_objects_in_slice_with_gil') cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape, Py_ssize_t *strides, int ndim, bint inc) with gil: refcount_objects_in_slice(data, shape, strides, ndim, inc) @cname('__pyx_memoryview_refcount_objects_in_slice') cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape, Py_ssize_t *strides, int ndim, bint inc): cdef Py_ssize_t i for i in range(shape[0]): if ndim == 1: if inc: Py_INCREF(( data)[0]) else: Py_DECREF(( data)[0]) else: refcount_objects_in_slice(data, shape + 1, strides + 1, ndim - 1, inc) data += strides[0] # ### Scalar to slice assignment # @cname('__pyx_memoryview_slice_assign_scalar') cdef void slice_assign_scalar({{memviewslice_name}} *dst, int ndim, size_t itemsize, void *item, bint dtype_is_object) nogil: refcount_copying(dst, dtype_is_object, ndim, False) _slice_assign_scalar(dst.data, dst.shape, dst.strides, ndim, itemsize, item) refcount_copying(dst, dtype_is_object, ndim, True) @cname('__pyx_memoryview__slice_assign_scalar') cdef void _slice_assign_scalar(char *data, Py_ssize_t *shape, Py_ssize_t *strides, int ndim, size_t itemsize, void *item) nogil: cdef Py_ssize_t i cdef Py_ssize_t stride = strides[0] cdef Py_ssize_t extent = shape[0] if ndim == 1: for i in range(extent): memcpy(data, item, itemsize) data += stride else: for i in range(extent): _slice_assign_scalar(data, shape + 1, strides + 1, ndim - 1, itemsize, item) data += stride ############### BufferFormatFromTypeInfo ############### cdef extern from *: ctypedef struct __Pyx_StructField cdef enum: __PYX_BUF_FLAGS_PACKED_STRUCT __PYX_BUF_FLAGS_INTEGER_COMPLEX ctypedef struct __Pyx_TypeInfo: char* name __Pyx_StructField* fields size_t size size_t arraysize[8] int ndim char typegroup char is_unsigned int flags ctypedef struct __Pyx_StructField: 
__Pyx_TypeInfo* type char* name size_t offset ctypedef struct __Pyx_BufFmt_StackElem: __Pyx_StructField* field size_t parent_offset #ctypedef struct __Pyx_BufFmt_Context: # __Pyx_StructField root __Pyx_BufFmt_StackElem* head struct __pyx_typeinfo_string: char string[3] __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *) @cname('__pyx_format_from_typeinfo') cdef bytes format_from_typeinfo(__Pyx_TypeInfo *type): cdef __Pyx_StructField *field cdef __pyx_typeinfo_string fmt cdef bytes part, result if type.typegroup == 'S': assert type.fields != NULL and type.fields.type != NULL if type.flags & __PYX_BUF_FLAGS_PACKED_STRUCT: alignment = b'^' else: alignment = b'' parts = [b"T{"] field = type.fields while field.type: part = format_from_typeinfo(field.type) parts.append(part + b':' + field.name + b':') field += 1 result = alignment.join(parts) + b'}' else: fmt = __Pyx_TypeInfoToFormat(type) if type.arraysize[0]: extents = [unicode(type.arraysize[i]) for i in range(type.ndim)] result = (u"(%s)" % u','.join(extents)).encode('ascii') + fmt.string else: result = fmt.string return result Cython-0.26.1/Cython/Utility/CMath.c0000664000175000017500000000471612542002467017711 0ustar stefanstefan00000000000000 /////////////// CDivisionWarning.proto /////////////// static int __Pyx_cdivision_warning(const char *, int); /* proto */ /////////////// CDivisionWarning /////////////// static int __Pyx_cdivision_warning(const char *filename, int lineno) { #if CYTHON_COMPILING_IN_PYPY // avoid compiler warnings filename++; lineno++; return PyErr_Warn(PyExc_RuntimeWarning, "division with oppositely signed operands, C and Python semantics differ"); #else return PyErr_WarnExplicit(PyExc_RuntimeWarning, "division with oppositely signed operands, C and Python semantics differ", filename, lineno, __Pyx_MODULE_NAME, NULL); #endif } /////////////// DivInt.proto /////////////// static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s, %(type)s); /* proto */ /////////////// DivInt /////////////// static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s a, %(type)s b) { %(type)s q = a / b; %(type)s r = a - q*b; q -= ((r != 0) & ((r ^ b) < 0)); return q; } /////////////// ModInt.proto /////////////// static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */ /////////////// ModInt /////////////// static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) { %(type)s r = a %% b; r += ((r != 0) & ((r ^ b) < 0)) * b; return r; } /////////////// ModFloat.proto /////////////// static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */ /////////////// ModFloat /////////////// static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) { %(type)s r = fmod%(math_h_modifier)s(a, b); r += ((r != 0) & ((r < 0) ^ (b < 0))) * b; return r; } /////////////// IntPow.proto /////////////// static CYTHON_INLINE %(type)s %(func_name)s(%(type)s, %(type)s); /* proto */ /////////////// IntPow /////////////// static CYTHON_INLINE %(type)s %(func_name)s(%(type)s b, %(type)s e) { %(type)s t = b; switch (e) { case 3: t *= b; case 2: t *= b; case 1: return t; case 0: return 1; } #if %(signed)s if (unlikely(e<0)) return 0; #endif t = 1; while (likely(e)) { t *= (b * (e&1)) | ((~e)&1); /* 1 or b */ b *= b; e >>= 1; } return t; } Cython-0.26.1/Cython/Utility/CythonFunction.c0000664000175000017500000012255713143605603021672 0ustar stefanstefan00000000000000 //////////////////// CythonFunction.proto //////////////////// #define 
__Pyx_CyFunction_USED 1 #include #define __Pyx_CYFUNCTION_STATICMETHOD 0x01 #define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 #define __Pyx_CYFUNCTION_CCLASS 0x04 #define __Pyx_CyFunction_GetClosure(f) \ (((__pyx_CyFunctionObject *) (f))->func_closure) #define __Pyx_CyFunction_GetClassObj(f) \ (((__pyx_CyFunctionObject *) (f))->func_classobj) #define __Pyx_CyFunction_Defaults(type, f) \ ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) #define __Pyx_CyFunction_SetDefaultsGetter(f, g) \ ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) typedef struct { PyCFunctionObject func; #if PY_VERSION_HEX < 0x030500A0 PyObject *func_weakreflist; #endif PyObject *func_dict; PyObject *func_name; PyObject *func_qualname; PyObject *func_doc; PyObject *func_globals; PyObject *func_code; PyObject *func_closure; // No-args super() class cell PyObject *func_classobj; // Dynamic default args and annotations void *defaults; int defaults_pyobjects; int flags; // Defaults info PyObject *defaults_tuple; /* Const defaults tuple */ PyObject *defaults_kwdict; /* Const kwonly defaults dict */ PyObject *(*defaults_getter)(PyObject *); PyObject *func_annotations; /* function annotations dict */ } __pyx_CyFunctionObject; static PyTypeObject *__pyx_CyFunctionType = 0; #define __Pyx_CyFunction_NewEx(ml, flags, qualname, self, module, globals, code) \ __Pyx_CyFunction_New(__pyx_CyFunctionType, ml, flags, qualname, self, module, globals, code) static PyObject *__Pyx_CyFunction_New(PyTypeObject *, PyMethodDef *ml, int flags, PyObject* qualname, PyObject *self, PyObject *module, PyObject *globals, PyObject* code); static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, size_t size, int pyobjects); static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, PyObject *tuple); static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, PyObject *dict); static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, PyObject *dict); static int __pyx_CyFunction_init(void); //////////////////// CythonFunction //////////////////// //@substitute: naming //@requires: CommonStructures.c::FetchCommonType ////@requires: ObjectHandling.c::PyObjectGetAttrStr static PyObject * __Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *closure) { if (unlikely(op->func_doc == NULL)) { if (op->func.m_ml->ml_doc) { #if PY_MAJOR_VERSION >= 3 op->func_doc = PyUnicode_FromString(op->func.m_ml->ml_doc); #else op->func_doc = PyString_FromString(op->func.m_ml->ml_doc); #endif if (unlikely(op->func_doc == NULL)) return NULL; } else { Py_INCREF(Py_None); return Py_None; } } Py_INCREF(op->func_doc); return op->func_doc; } static int __Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value) { PyObject *tmp = op->func_doc; if (value == NULL) { // Mark as deleted value = Py_None; } Py_INCREF(value); op->func_doc = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op) { if (unlikely(op->func_name == NULL)) { #if PY_MAJOR_VERSION >= 3 op->func_name = PyUnicode_InternFromString(op->func.m_ml->ml_name); #else op->func_name = PyString_InternFromString(op->func.m_ml->ml_name); #endif if (unlikely(op->func_name == NULL)) return NULL; } Py_INCREF(op->func_name); return op->func_name; } static int __Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value) { PyObject *tmp; #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) { #else if (unlikely(value == NULL || !PyString_Check(value))) { 
#endif PyErr_SetString(PyExc_TypeError, "__name__ must be set to a string object"); return -1; } tmp = op->func_name; Py_INCREF(value); op->func_name = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op) { Py_INCREF(op->func_qualname); return op->func_qualname; } static int __Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value) { PyObject *tmp; #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) { #else if (unlikely(value == NULL || !PyString_Check(value))) { #endif PyErr_SetString(PyExc_TypeError, "__qualname__ must be set to a string object"); return -1; } tmp = op->func_qualname; Py_INCREF(value); op->func_qualname = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_self(__pyx_CyFunctionObject *m, CYTHON_UNUSED void *closure) { PyObject *self; self = m->func_closure; if (self == NULL) self = Py_None; Py_INCREF(self); return self; } static PyObject * __Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op) { if (unlikely(op->func_dict == NULL)) { op->func_dict = PyDict_New(); if (unlikely(op->func_dict == NULL)) return NULL; } Py_INCREF(op->func_dict); return op->func_dict; } static int __Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value) { PyObject *tmp; if (unlikely(value == NULL)) { PyErr_SetString(PyExc_TypeError, "function's dictionary may not be deleted"); return -1; } if (unlikely(!PyDict_Check(value))) { PyErr_SetString(PyExc_TypeError, "setting function's dictionary to a non-dict"); return -1; } tmp = op->func_dict; Py_INCREF(value); op->func_dict = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op) { Py_INCREF(op->func_globals); return op->func_globals; } static PyObject * __Pyx_CyFunction_get_closure(CYTHON_UNUSED __pyx_CyFunctionObject *op) { Py_INCREF(Py_None); return Py_None; } static PyObject * __Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op) { PyObject* result = (op->func_code) ? 
op->func_code : Py_None; Py_INCREF(result); return result; } static int __Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { int result = 0; PyObject *res = op->defaults_getter((PyObject *) op); if (unlikely(!res)) return -1; // Cache result #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS op->defaults_tuple = PyTuple_GET_ITEM(res, 0); Py_INCREF(op->defaults_tuple); op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); Py_INCREF(op->defaults_kwdict); #else op->defaults_tuple = PySequence_ITEM(res, 0); if (unlikely(!op->defaults_tuple)) result = -1; else { op->defaults_kwdict = PySequence_ITEM(res, 1); if (unlikely(!op->defaults_kwdict)) result = -1; } #endif Py_DECREF(res); return result; } static int __Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value) { PyObject* tmp; if (!value) { // del => explicit None to prevent rebuilding value = Py_None; } else if (value != Py_None && !PyTuple_Check(value)) { PyErr_SetString(PyExc_TypeError, "__defaults__ must be set to a tuple object"); return -1; } Py_INCREF(value); tmp = op->defaults_tuple; op->defaults_tuple = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op) { PyObject* result = op->defaults_tuple; if (unlikely(!result)) { if (op->defaults_getter) { if (__Pyx_CyFunction_init_defaults(op) < 0) return NULL; result = op->defaults_tuple; } else { result = Py_None; } } Py_INCREF(result); return result; } static int __Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value) { PyObject* tmp; if (!value) { // del => explicit None to prevent rebuilding value = Py_None; } else if (value != Py_None && !PyDict_Check(value)) { PyErr_SetString(PyExc_TypeError, "__kwdefaults__ must be set to a dict object"); return -1; } Py_INCREF(value); tmp = op->defaults_kwdict; op->defaults_kwdict = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op) { PyObject* result = op->defaults_kwdict; if (unlikely(!result)) { if (op->defaults_getter) { if (__Pyx_CyFunction_init_defaults(op) < 0) return NULL; result = op->defaults_kwdict; } else { result = Py_None; } } Py_INCREF(result); return result; } static int __Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value) { PyObject* tmp; if (!value || value == Py_None) { value = NULL; } else if (!PyDict_Check(value)) { PyErr_SetString(PyExc_TypeError, "__annotations__ must be set to a dict object"); return -1; } Py_XINCREF(value); tmp = op->func_annotations; op->func_annotations = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op) { PyObject* result = op->func_annotations; if (unlikely(!result)) { result = PyDict_New(); if (unlikely(!result)) return NULL; op->func_annotations = result; } Py_INCREF(result); return result; } //#if PY_VERSION_HEX >= 0x030400C1 //static PyObject * //__Pyx_CyFunction_get_signature(__pyx_CyFunctionObject *op) { // PyObject *inspect_module, *signature_class, *signature; // // from inspect import Signature // inspect_module = PyImport_ImportModuleLevelObject(PYIDENT("inspect"), NULL, NULL, NULL, 0); // if (unlikely(!inspect_module)) // goto bad; // signature_class = __Pyx_PyObject_GetAttrStr(inspect_module, PYIDENT("Signature")); // Py_DECREF(inspect_module); // if (unlikely(!signature_class)) // goto bad; // // return Signature.from_function(op) // signature = PyObject_CallMethodObjArgs(signature_class, PYIDENT("from_function"), op, NULL); // 
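__Pyx_CyFunction_get_defaults() above builds the defaults tuple lazily through defaults_getter on first access and caches it on the function object. A generic sketch of that compute-once pattern in plain C; the struct, getter and values are invented for illustration and do not model CPython reference counting:

/* Sketch: compute a value lazily via a getter and cache it for later accesses. */
#include <stdio.h>
#include <stdlib.h>

typedef struct {
    int *defaults;                  /* cached result, NULL until first access */
    int *(*defaults_getter)(void);  /* builds the result on demand */
} func_like;

static int *build_defaults(void)
{
    int *vals = malloc(2 * sizeof(int));
    if (!vals)
        return NULL;
    vals[0] = 10;                   /* pretend these are argument defaults */
    vals[1] = 20;
    printf("(building defaults)\n");
    return vals;
}

static int *get_defaults(func_like *f)
{
    if (!f->defaults && f->defaults_getter)
        f->defaults = f->defaults_getter();   /* first access: build and cache */
    return f->defaults;
}

int main(void)
{
    func_like f = {NULL, build_defaults};
    int *d = get_defaults(&f);      /* builds */
    get_defaults(&f);               /* second access hits the cache */
    if (d)
        printf("defaults = %d, %d\n", d[0], d[1]);
    free(f.defaults);
    return 0;
}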
Py_DECREF(signature_class); // if (likely(signature)) // return signature; //bad: // // make sure we raise an AttributeError from this property on any errors // if (!PyErr_ExceptionMatches(PyExc_AttributeError)) // PyErr_SetString(PyExc_AttributeError, "failed to calculate __signature__"); // return NULL; //} //#endif static PyGetSetDef __pyx_CyFunction_getsets[] = { {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, {(char *) "__self__", (getter)__Pyx_CyFunction_get_self, 0, 0, 0}, {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, //#if PY_VERSION_HEX >= 0x030400C1 // {(char *) "__signature__", (getter)__Pyx_CyFunction_get_signature, 0, 0, 0}, //#endif {0, 0, 0, 0, 0} }; static PyMemberDef __pyx_CyFunction_members[] = { {(char *) "__module__", T_OBJECT, offsetof(__pyx_CyFunctionObject, func.m_module), PY_WRITE_RESTRICTED, 0}, {0, 0, 0, 0, 0} }; static PyObject * __Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, CYTHON_UNUSED PyObject *args) { #if PY_MAJOR_VERSION >= 3 return PyUnicode_FromString(m->func.m_ml->ml_name); #else return PyString_FromString(m->func.m_ml->ml_name); #endif } static PyMethodDef __pyx_CyFunction_methods[] = { {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, {0, 0, 0, 0} }; #if PY_VERSION_HEX < 0x030500A0 #define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) #else #define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func.m_weakreflist) #endif static PyObject *__Pyx_CyFunction_New(PyTypeObject *type, PyMethodDef *ml, int flags, PyObject* qualname, PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { __pyx_CyFunctionObject *op = PyObject_GC_New(__pyx_CyFunctionObject, type); if (op == NULL) return NULL; op->flags = flags; __Pyx_CyFunction_weakreflist(op) = NULL; op->func.m_ml = ml; op->func.m_self = (PyObject *) op; Py_XINCREF(closure); op->func_closure = closure; Py_XINCREF(module); op->func.m_module = module; op->func_dict = NULL; op->func_name = NULL; Py_INCREF(qualname); op->func_qualname = qualname; op->func_doc = NULL; 
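Referring back to CMath.c above: the __Pyx_div_*/__Pyx_mod_* helpers adjust C's truncating division so the result follows Python's floor semantics when the operands have opposite signs, and the IntPow helper raises an integer to a power by squaring. A hedged standalone sketch of both, specialised to long operands with illustrative names:

/* Sketch: Python-style floor division/modulo and exponentiation by squaring. */
#include <stdio.h>

static long py_div(long a, long b)
{
    long q = a / b;                 /* C truncates toward zero */
    long r = a - q * b;
    q -= (r != 0) & ((r ^ b) < 0);  /* subtract 1 if r is nonzero and its sign differs from b */
    return q;
}

static long py_mod(long a, long b)
{
    long r = a % b;
    r += ((r != 0) & ((r ^ b) < 0)) * b;   /* pull the remainder onto b's side of zero */
    return r;
}

static long int_pow(long b, long e)
{
    long t = 1;
    if (e < 0)
        return 0;                   /* mirror the helper: 0 for negative exponents */
    while (e) {
        if (e & 1)
            t *= b;
        b *= b;
        e >>= 1;
    }
    return t;
}

int main(void)
{
    printf("-7 / 2: C=%ld  Python-style=%ld\n", -7L / 2L, py_div(-7, 2));   /* -3 vs -4 */
    printf("-7 %% 2: C=%ld  Python-style=%ld\n", -7L % 2L, py_mod(-7, 2));  /* -1 vs  1 */
    printf("3 ** 10 = %ld\n", int_pow(3, 10));                              /* 59049 */
    return 0;
}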
op->func_classobj = NULL; op->func_globals = globals; Py_INCREF(op->func_globals); Py_XINCREF(code); op->func_code = code; // Dynamic Default args op->defaults_pyobjects = 0; op->defaults = NULL; op->defaults_tuple = NULL; op->defaults_kwdict = NULL; op->defaults_getter = NULL; op->func_annotations = NULL; PyObject_GC_Track(op); return (PyObject *) op; } static int __Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) { Py_CLEAR(m->func_closure); Py_CLEAR(m->func.m_module); Py_CLEAR(m->func_dict); Py_CLEAR(m->func_name); Py_CLEAR(m->func_qualname); Py_CLEAR(m->func_doc); Py_CLEAR(m->func_globals); Py_CLEAR(m->func_code); Py_CLEAR(m->func_classobj); Py_CLEAR(m->defaults_tuple); Py_CLEAR(m->defaults_kwdict); Py_CLEAR(m->func_annotations); if (m->defaults) { PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); int i; for (i = 0; i < m->defaults_pyobjects; i++) Py_XDECREF(pydefaults[i]); PyObject_Free(m->defaults); m->defaults = NULL; } return 0; } static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) { PyObject_GC_UnTrack(m); if (__Pyx_CyFunction_weakreflist(m) != NULL) PyObject_ClearWeakRefs((PyObject *) m); __Pyx_CyFunction_clear(m); PyObject_GC_Del(m); } static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) { Py_VISIT(m->func_closure); Py_VISIT(m->func.m_module); Py_VISIT(m->func_dict); Py_VISIT(m->func_name); Py_VISIT(m->func_qualname); Py_VISIT(m->func_doc); Py_VISIT(m->func_globals); Py_VISIT(m->func_code); Py_VISIT(m->func_classobj); Py_VISIT(m->defaults_tuple); Py_VISIT(m->defaults_kwdict); if (m->defaults) { PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); int i; for (i = 0; i < m->defaults_pyobjects; i++) Py_VISIT(pydefaults[i]); } return 0; } static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObject *type) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; if (m->flags & __Pyx_CYFUNCTION_STATICMETHOD) { Py_INCREF(func); return func; } if (m->flags & __Pyx_CYFUNCTION_CLASSMETHOD) { if (type == NULL) type = (PyObject *)(Py_TYPE(obj)); return __Pyx_PyMethod_New(func, type, (PyObject *)(Py_TYPE(type))); } if (obj == Py_None) obj = NULL; return __Pyx_PyMethod_New(func, obj, type); } static PyObject* __Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) { #if PY_MAJOR_VERSION >= 3 return PyUnicode_FromFormat("", op->func_qualname, (void *)op); #else return PyString_FromFormat("", PyString_AsString(op->func_qualname), (void *)op); #endif } static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { // originally copied from PyCFunction_Call() in CPython's Objects/methodobject.c PyCFunctionObject* f = (PyCFunctionObject*)func; PyCFunction meth = f->m_ml->ml_meth; Py_ssize_t size; switch (f->m_ml->ml_flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { case METH_VARARGS: if (likely(kw == NULL || PyDict_Size(kw) == 0)) return (*meth)(self, arg); break; case METH_VARARGS | METH_KEYWORDS: return (*(PyCFunctionWithKeywords)meth)(self, arg, kw); case METH_NOARGS: if (likely(kw == NULL || PyDict_Size(kw) == 0)) { size = PyTuple_GET_SIZE(arg); if (likely(size == 0)) return (*meth)(self, NULL); PyErr_Format(PyExc_TypeError, "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", f->m_ml->ml_name, size); return NULL; } break; case METH_O: if (likely(kw == NULL || PyDict_Size(kw) == 0)) { size = PyTuple_GET_SIZE(arg); if (likely(size == 1)) { PyObject *result, *arg0 = PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return 
NULL; result = (*meth)(self, arg0); Py_DECREF(arg0); return result; } PyErr_Format(PyExc_TypeError, "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", f->m_ml->ml_name, size); return NULL; } break; default: PyErr_SetString(PyExc_SystemError, "Bad call flags in " "__Pyx_CyFunction_Call. METH_OLDARGS is no " "longer supported!"); return NULL; } PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", f->m_ml->ml_name); return NULL; } static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { return __Pyx_CyFunction_CallMethod(func, ((PyCFunctionObject*)func)->m_self, arg, kw); } static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { PyObject *result; __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { Py_ssize_t argc; PyObject *new_args; PyObject *self; argc = PyTuple_GET_SIZE(args); new_args = PyTuple_GetSlice(args, 1, argc); if (unlikely(!new_args)) return NULL; self = PyTuple_GetItem(args, 0); if (unlikely(!self)) { Py_DECREF(new_args); return NULL; } result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); Py_DECREF(new_args); } else { result = __Pyx_CyFunction_Call(func, args, kw); } return result; } static PyTypeObject __pyx_CyFunctionType_type = { PyVarObject_HEAD_INIT(0, 0) "cython_function_or_method", /*tp_name*/ sizeof(__pyx_CyFunctionObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor) __Pyx_CyFunction_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, /*tp_compare*/ #else 0, /*reserved*/ #endif (reprfunc) __Pyx_CyFunction_repr, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ __Pyx_CyFunction_CallAsMethod, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ 0, /*tp_doc*/ (traverseproc) __Pyx_CyFunction_traverse, /*tp_traverse*/ (inquiry) __Pyx_CyFunction_clear, /*tp_clear*/ 0, /*tp_richcompare*/ #if PY_VERSION_HEX < 0x030500A0 offsetof(__pyx_CyFunctionObject, func_weakreflist), /*tp_weaklistoffset*/ #else offsetof(PyCFunctionObject, m_weakreflist), /*tp_weaklistoffset*/ #endif 0, /*tp_iter*/ 0, /*tp_iternext*/ __pyx_CyFunction_methods, /*tp_methods*/ __pyx_CyFunction_members, /*tp_members*/ __pyx_CyFunction_getsets, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ __Pyx_CyFunction_descr_get, /*tp_descr_get*/ 0, /*tp_descr_set*/ offsetof(__pyx_CyFunctionObject, func_dict),/*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ 0, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static int __pyx_CyFunction_init(void) { __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); if (__pyx_CyFunctionType == NULL) { return -1; } return 0; } static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->defaults = PyObject_Malloc(size); if (!m->defaults) return PyErr_NoMemory(); memset(m->defaults, 0, size); m->defaults_pyobjects = pyobjects; return m->defaults; } static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { __pyx_CyFunctionObject *m = 
(__pyx_CyFunctionObject *) func; m->defaults_tuple = tuple; Py_INCREF(tuple); } static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->defaults_kwdict = dict; Py_INCREF(dict); } static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->func_annotations = dict; Py_INCREF(dict); } //////////////////// CyFunctionClassCell.proto //////////////////// static CYTHON_INLINE int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj); //////////////////// CyFunctionClassCell //////////////////// //@requires: CythonFunction static CYTHON_INLINE int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) { Py_ssize_t i, count = PyList_GET_SIZE(cyfunctions); for (i = 0; i < count; i++) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyList_GET_ITEM(cyfunctions, i); #else PySequence_ITEM(cyfunctions, i); if (unlikely(!m)) return -1; #endif Py_INCREF(classobj); m->func_classobj = classobj; #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_DECREF((PyObject*)m); #endif } return 0; } //////////////////// FusedFunction.proto //////////////////// typedef struct { __pyx_CyFunctionObject func; PyObject *__signatures__; PyObject *type; PyObject *self; } __pyx_FusedFunctionObject; #define __pyx_FusedFunction_NewEx(ml, flags, qualname, self, module, globals, code) \ __pyx_FusedFunction_New(__pyx_FusedFunctionType, ml, flags, qualname, self, module, globals, code) static PyObject *__pyx_FusedFunction_New(PyTypeObject *type, PyMethodDef *ml, int flags, PyObject *qualname, PyObject *self, PyObject *module, PyObject *globals, PyObject *code); static int __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self); static PyTypeObject *__pyx_FusedFunctionType = NULL; static int __pyx_FusedFunction_init(void); #define __Pyx_FusedFunction_USED //////////////////// FusedFunction //////////////////// //@requires: CythonFunction static PyObject * __pyx_FusedFunction_New(PyTypeObject *type, PyMethodDef *ml, int flags, PyObject *qualname, PyObject *self, PyObject *module, PyObject *globals, PyObject *code) { __pyx_FusedFunctionObject *fusedfunc = (__pyx_FusedFunctionObject *) __Pyx_CyFunction_New(type, ml, flags, qualname, self, module, globals, code); if (!fusedfunc) return NULL; fusedfunc->__signatures__ = NULL; fusedfunc->type = NULL; fusedfunc->self = NULL; return (PyObject *) fusedfunc; } static void __pyx_FusedFunction_dealloc(__pyx_FusedFunctionObject *self) { __pyx_FusedFunction_clear(self); __pyx_FusedFunctionType->tp_free((PyObject *) self); } static int __pyx_FusedFunction_traverse(__pyx_FusedFunctionObject *self, visitproc visit, void *arg) { Py_VISIT(self->self); Py_VISIT(self->type); Py_VISIT(self->__signatures__); return __Pyx_CyFunction_traverse((__pyx_CyFunctionObject *) self, visit, arg); } static int __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self) { Py_CLEAR(self->self); Py_CLEAR(self->type); Py_CLEAR(self->__signatures__); return __Pyx_CyFunction_clear((__pyx_CyFunctionObject *) self); } static PyObject * __pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type) { __pyx_FusedFunctionObject *func, *meth; func = (__pyx_FusedFunctionObject *) self; if (func->self || func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD) { // Do not allow rebinding and don't do 
anything for static methods Py_INCREF(self); return self; } if (obj == Py_None) obj = NULL; meth = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_NewEx( ((PyCFunctionObject *) func)->m_ml, ((__pyx_CyFunctionObject *) func)->flags, ((__pyx_CyFunctionObject *) func)->func_qualname, ((__pyx_CyFunctionObject *) func)->func_closure, ((PyCFunctionObject *) func)->m_module, ((__pyx_CyFunctionObject *) func)->func_globals, ((__pyx_CyFunctionObject *) func)->func_code); if (!meth) return NULL; Py_XINCREF(func->func.func_classobj); meth->func.func_classobj = func->func.func_classobj; Py_XINCREF(func->__signatures__); meth->__signatures__ = func->__signatures__; Py_XINCREF(type); meth->type = type; Py_XINCREF(func->func.defaults_tuple); meth->func.defaults_tuple = func->func.defaults_tuple; if (func->func.flags & __Pyx_CYFUNCTION_CLASSMETHOD) obj = type; Py_XINCREF(obj); meth->self = obj; return (PyObject *) meth; } static PyObject * _obj_to_str(PyObject *obj) { if (PyType_Check(obj)) return PyObject_GetAttr(obj, PYIDENT("__name__")); else return PyObject_Str(obj); } static PyObject * __pyx_FusedFunction_getitem(__pyx_FusedFunctionObject *self, PyObject *idx) { PyObject *signature = NULL; PyObject *unbound_result_func; PyObject *result_func = NULL; if (self->__signatures__ == NULL) { PyErr_SetString(PyExc_TypeError, "Function is not fused"); return NULL; } if (PyTuple_Check(idx)) { PyObject *list = PyList_New(0); Py_ssize_t n = PyTuple_GET_SIZE(idx); PyObject *string = NULL; PyObject *sep = NULL; int i; if (!list) return NULL; for (i = 0; i < n; i++) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *item = PyTuple_GET_ITEM(idx, i); #else PyObject *item = PySequence_ITEM(idx, i); #endif string = _obj_to_str(item); #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_DECREF(item); #endif if (!string || PyList_Append(list, string) < 0) goto __pyx_err; Py_DECREF(string); } sep = PyUnicode_FromString("|"); if (sep) signature = PyUnicode_Join(sep, list); __pyx_err: ; Py_DECREF(list); Py_XDECREF(sep); } else { signature = _obj_to_str(idx); } if (!signature) return NULL; unbound_result_func = PyObject_GetItem(self->__signatures__, signature); if (unbound_result_func) { if (self->self || self->type) { __pyx_FusedFunctionObject *unbound = (__pyx_FusedFunctionObject *) unbound_result_func; // TODO: move this to InitClassCell Py_CLEAR(unbound->func.func_classobj); Py_XINCREF(self->func.func_classobj); unbound->func.func_classobj = self->func.func_classobj; result_func = __pyx_FusedFunction_descr_get(unbound_result_func, self->self, self->type); } else { result_func = unbound_result_func; Py_INCREF(result_func); } } Py_DECREF(signature); Py_XDECREF(unbound_result_func); return result_func; } static PyObject * __pyx_FusedFunction_callfunction(PyObject *func, PyObject *args, PyObject *kw) { __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; int static_specialized = (cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD && !((__pyx_FusedFunctionObject *) func)->__signatures__); if (cyfunc->flags & __Pyx_CYFUNCTION_CCLASS && !static_specialized) { return __Pyx_CyFunction_CallAsMethod(func, args, kw); } else { return __Pyx_CyFunction_Call(func, args, kw); } } // Note: the 'self' from method binding is passed in in the args tuple, // whereas PyCFunctionObject's m_self is passed in as the first // argument to the C function. For extension methods we need // to pass 'self' as 'm_self' and not as the first element of the // args tuple. 
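__Pyx_CyFunction_CallMethod() further up dispatches on the calling-convention bits of ml_flags before forwarding to the underlying C function. A standalone sketch of that dispatch shape; the FLAG_* values here are illustrative stand-ins, not CPython's METH_* constants:

/* Sketch: choose a call strategy from calling-convention flags. */
#include <stdio.h>

#define FLAG_VARARGS  0x01
#define FLAG_KEYWORDS 0x02
#define FLAG_NOARGS   0x04
#define FLAG_O        0x08

static const char *describe_call(int flags, int nargs, int has_kwargs)
{
    switch (flags & (FLAG_VARARGS | FLAG_KEYWORDS | FLAG_NOARGS | FLAG_O)) {
    case FLAG_VARARGS:
        return has_kwargs ? "error: takes no keyword arguments"
                          : "call with the args tuple";
    case FLAG_VARARGS | FLAG_KEYWORDS:
        return "call with args tuple and kwargs dict";
    case FLAG_NOARGS:
        if (has_kwargs) return "error: takes no keyword arguments";
        return nargs == 0 ? "call with no arguments"
                          : "error: takes no arguments";
    case FLAG_O:
        if (has_kwargs) return "error: takes no keyword arguments";
        return nargs == 1 ? "call with the single argument"
                          : "error: takes exactly one argument";
    default:
        return "error: bad call flags";
    }
}

int main(void)
{
    printf("%s\n", describe_call(FLAG_VARARGS | FLAG_KEYWORDS, 2, 1));
    printf("%s\n", describe_call(FLAG_O, 2, 0));
    printf("%s\n", describe_call(FLAG_NOARGS, 0, 0));
    return 0;
}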
static PyObject * __pyx_FusedFunction_call(PyObject *func, PyObject *args, PyObject *kw) { __pyx_FusedFunctionObject *binding_func = (__pyx_FusedFunctionObject *) func; Py_ssize_t argc = PyTuple_GET_SIZE(args); PyObject *new_args = NULL; __pyx_FusedFunctionObject *new_func = NULL; PyObject *result = NULL; PyObject *self = NULL; int is_staticmethod = binding_func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD; int is_classmethod = binding_func->func.flags & __Pyx_CYFUNCTION_CLASSMETHOD; if (binding_func->self) { // Bound method call, put 'self' in the args tuple Py_ssize_t i; new_args = PyTuple_New(argc + 1); if (!new_args) return NULL; self = binding_func->self; #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_INCREF(self); #endif Py_INCREF(self); PyTuple_SET_ITEM(new_args, 0, self); for (i = 0; i < argc; i++) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *item = PyTuple_GET_ITEM(args, i); Py_INCREF(item); #else PyObject *item = PySequence_ITEM(args, i); if (unlikely(!item)) goto bad; #endif PyTuple_SET_ITEM(new_args, i + 1, item); } args = new_args; } else if (binding_func->type) { // Unbound method call if (argc < 1) { PyErr_SetString(PyExc_TypeError, "Need at least one argument, 0 given."); return NULL; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS self = PyTuple_GET_ITEM(args, 0); #else self = PySequence_ITEM(args, 0); if (unlikely(!self)) return NULL; #endif } if (self && !is_classmethod && !is_staticmethod) { int is_instance = PyObject_IsInstance(self, binding_func->type); if (unlikely(!is_instance)) { PyErr_Format(PyExc_TypeError, "First argument should be of type %.200s, got %.200s.", ((PyTypeObject *) binding_func->type)->tp_name, self->ob_type->tp_name); goto bad; } else if (unlikely(is_instance == -1)) { goto bad; } } #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_XDECREF(self); self = NULL; #endif if (binding_func->__signatures__) { PyObject *tup; if (is_staticmethod && binding_func->func.flags & __Pyx_CYFUNCTION_CCLASS) { // FIXME: this seems wrong, but we must currently pass the signatures dict as 'self' argument tup = PyTuple_Pack(3, args, kw == NULL ? Py_None : kw, binding_func->func.defaults_tuple); if (unlikely(!tup)) goto bad; new_func = (__pyx_FusedFunctionObject *) __Pyx_CyFunction_CallMethod( func, binding_func->__signatures__, tup, NULL); } else { tup = PyTuple_Pack(4, binding_func->__signatures__, args, kw == NULL ? 
Py_None : kw, binding_func->func.defaults_tuple); if (unlikely(!tup)) goto bad; new_func = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_callfunction(func, tup, NULL); } Py_DECREF(tup); if (unlikely(!new_func)) goto bad; Py_XINCREF(binding_func->func.func_classobj); Py_CLEAR(new_func->func.func_classobj); new_func->func.func_classobj = binding_func->func.func_classobj; func = (PyObject *) new_func; } result = __pyx_FusedFunction_callfunction(func, args, kw); bad: #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_XDECREF(self); #endif Py_XDECREF(new_args); Py_XDECREF((PyObject *) new_func); return result; } static PyMemberDef __pyx_FusedFunction_members[] = { {(char *) "__signatures__", T_OBJECT, offsetof(__pyx_FusedFunctionObject, __signatures__), READONLY, 0}, {0, 0, 0, 0, 0}, }; static PyMappingMethods __pyx_FusedFunction_mapping_methods = { 0, (binaryfunc) __pyx_FusedFunction_getitem, 0, }; static PyTypeObject __pyx_FusedFunctionType_type = { PyVarObject_HEAD_INIT(0, 0) "fused_cython_function", /*tp_name*/ sizeof(__pyx_FusedFunctionObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor) __pyx_FusedFunction_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, /*tp_compare*/ #else 0, /*reserved*/ #endif 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ &__pyx_FusedFunction_mapping_methods, /*tp_as_mapping*/ 0, /*tp_hash*/ (ternaryfunc) __pyx_FusedFunction_call, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/ 0, /*tp_doc*/ (traverseproc) __pyx_FusedFunction_traverse, /*tp_traverse*/ (inquiry) __pyx_FusedFunction_clear,/*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ 0, /*tp_iter*/ 0, /*tp_iternext*/ 0, /*tp_methods*/ __pyx_FusedFunction_members, /*tp_members*/ // __doc__ is None for the fused function type, but we need it to be // a descriptor for the instance's __doc__, so rebuild descriptors in our subclass __pyx_CyFunction_getsets, /*tp_getset*/ &__pyx_CyFunctionType_type, /*tp_base*/ 0, /*tp_dict*/ __pyx_FusedFunction_descr_get, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ 0, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static int __pyx_FusedFunction_init(void) { __pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type); if (__pyx_FusedFunctionType == NULL) { return -1; } return 0; } //////////////////// ClassMethod.proto //////////////////// #include "descrobject.h" static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/ //////////////////// ClassMethod //////////////////// static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { #if CYTHON_COMPILING_IN_PYPY if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) { // cdef classes return PyClassMethod_New(method); } #else #if CYTHON_COMPILING_IN_PYSTON // special C-API function only in Pyston if (PyMethodDescr_Check(method)) { #else // It appears that PyMethodDescr_Type is not exposed anywhere in the CPython C-API static PyTypeObject *methoddescr_type = NULL; if (methoddescr_type == NULL) { PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); if (!meth) return NULL; methoddescr_type = Py_TYPE(meth); Py_DECREF(meth); } if (PyObject_TypeCheck(method, 
methoddescr_type)) { #endif // cdef classes PyMethodDescrObject *descr = (PyMethodDescrObject *)method; #if PY_VERSION_HEX < 0x03020000 PyTypeObject *d_type = descr->d_type; #else PyTypeObject *d_type = descr->d_common.d_type; #endif return PyDescr_NewClassMethod(d_type, descr->d_method); } #endif else if (PyMethod_Check(method)) { // python classes return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); } else if (PyCFunction_Check(method)) { return PyClassMethod_New(method); } #ifdef __Pyx_CyFunction_USED else if (PyObject_TypeCheck(method, __pyx_CyFunctionType)) { return PyClassMethod_New(method); } #endif PyErr_SetString(PyExc_TypeError, "Class-level classmethod() can only be called on " "a method_descriptor or instance method."); return NULL; } Cython-0.26.1/Cython/Utility/FunctionArguments.c0000664000175000017500000002774512542002467022377 0ustar stefanstefan00000000000000//////////////////// ArgTypeTest.proto //////////////////// static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, const char *name, int exact); /*proto*/ //////////////////// ArgTypeTest //////////////////// static void __Pyx_RaiseArgumentTypeInvalid(const char* name, PyObject *obj, PyTypeObject *type) { PyErr_Format(PyExc_TypeError, "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", name, type->tp_name, Py_TYPE(obj)->tp_name); } static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, const char *name, int exact) { if (unlikely(!type)) { PyErr_SetString(PyExc_SystemError, "Missing type object"); return 0; } if (none_allowed && obj == Py_None) return 1; else if (exact) { if (likely(Py_TYPE(obj) == type)) return 1; #if PY_MAJOR_VERSION == 2 else if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; #endif } else { if (likely(PyObject_TypeCheck(obj, type))) return 1; } __Pyx_RaiseArgumentTypeInvalid(name, obj, type); return 0; } //////////////////// RaiseArgTupleInvalid.proto //////////////////// static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ //////////////////// RaiseArgTupleInvalid //////////////////// // __Pyx_RaiseArgtupleInvalid raises the correct exception when too // many or too few positional arguments were found. This handles // Py_ssize_t formatting correctly. static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found) { Py_ssize_t num_expected; const char *more_or_less; if (num_found < num_min) { num_expected = num_min; more_or_less = "at least"; } else { num_expected = num_max; more_or_less = "at most"; } if (exact) { more_or_less = "exactly"; } PyErr_Format(PyExc_TypeError, "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", func_name, more_or_less, num_expected, (num_expected == 1) ? 
"" : "s", num_found); } //////////////////// RaiseKeywordRequired.proto //////////////////// static CYTHON_INLINE void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/ //////////////////// RaiseKeywordRequired //////////////////// static CYTHON_INLINE void __Pyx_RaiseKeywordRequired( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() needs keyword-only argument %U", func_name, kw_name); #else "%s() needs keyword-only argument %s", func_name, PyString_AS_STRING(kw_name)); #endif } //////////////////// RaiseDoubleKeywords.proto //////////////////// static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ //////////////////// RaiseDoubleKeywords //////////////////// static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() got multiple values for keyword argument '%U'", func_name, kw_name); #else "%s() got multiple values for keyword argument '%s'", func_name, PyString_AsString(kw_name)); #endif } //////////////////// RaiseMappingExpected.proto //////////////////// static void __Pyx_RaiseMappingExpectedError(PyObject* arg); /*proto*/ //////////////////// RaiseMappingExpected //////////////////// static void __Pyx_RaiseMappingExpectedError(PyObject* arg) { PyErr_Format(PyExc_TypeError, "'%.200s' object is not a mapping", Py_TYPE(arg)->tp_name); } //////////////////// KeywordStringCheck.proto //////////////////// static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); /*proto*/ //////////////////// KeywordStringCheck //////////////////// // __Pyx_CheckKeywordStrings raises an error if non-string keywords // were passed to a function, or if any keywords were passed to a // function that does not accept them. static CYTHON_INLINE int __Pyx_CheckKeywordStrings( PyObject *kwdict, const char* function_name, int kw_allowed) { PyObject* key = 0; Py_ssize_t pos = 0; #if CYTHON_COMPILING_IN_PYPY /* PyPy appears to check keywords at call time, not at unpacking time => not much to do here */ if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) goto invalid_keyword; return 1; #else while (PyDict_Next(kwdict, &pos, &key, 0)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) #endif if (unlikely(!PyUnicode_Check(key))) goto invalid_keyword_type; } if ((!kw_allowed) && unlikely(key)) goto invalid_keyword; return 1; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); return 0; #endif invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif return 0; } //////////////////// ParseKeywords.proto //////////////////// static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ const char* function_name); /*proto*/ //////////////////// ParseKeywords //////////////////// //@requires: RaiseDoubleKeywords // __Pyx_ParseOptionalKeywords copies the optional/unknown keyword // arguments from the kwds dict into kwds2. If kwds2 is NULL, unknown // keywords will raise an invalid keyword error. 
// // Three kinds of errors are checked: 1) non-string keywords, 2) // unexpected keywords and 3) overlap with positional arguments. // // If num_posargs is greater 0, it denotes the number of positional // arguments that were passed and that must therefore not appear // amongst the keywords as well. // // This method does not check for required keyword arguments. static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name) { PyObject *key = 0, *value = 0; Py_ssize_t pos = 0; PyObject*** name; PyObject*** first_kw_arg = argnames + num_pos_args; while (PyDict_Next(kwds, &pos, &key, &value)) { name = first_kw_arg; while (*name && (**name != key)) name++; if (*name) { values[name-argnames] = value; continue; } name = first_kw_arg; #if PY_MAJOR_VERSION < 3 if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { // not found after positional args, check for duplicate PyObject*** argname = argnames; while (argname != first_kw_arg) { if ((**argname == key) || ( (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) && _PyString_Eq(**argname, key))) { goto arg_passed_twice; } argname++; } } } else #endif if (likely(PyUnicode_Check(key))) { while (*name) { int cmp = (**name == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : #endif // need to convert argument name from bytes to unicode for comparison PyUnicode_Compare(**name, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { // not found after positional args, check for duplicate PyObject*** argname = argnames; while (argname != first_kw_arg) { int cmp = (**argname == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : #endif // need to convert argument name from bytes to unicode for comparison PyUnicode_Compare(**argname, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) goto arg_passed_twice; argname++; } } } else goto invalid_keyword_type; if (kwds2) { if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; } else { goto invalid_keyword; } } return 0; arg_passed_twice: __Pyx_RaiseDoubleKeywordsError(function_name, key); goto bad; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); goto bad; invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif bad: return -1; } //////////////////// MergeKeywords.proto //////////////////// static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping); /*proto*/ //////////////////// MergeKeywords //////////////////// //@requires: RaiseDoubleKeywords //@requires: Optimize.c::dict_iter static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping) { PyObject *iter, *key = NULL, *value = NULL; int source_is_dict, result; Py_ssize_t orig_length, ppos = 0; iter = __Pyx_dict_iterator(source_mapping, 0, PYIDENT("items"), &orig_length, &source_is_dict); if (unlikely(!iter)) { // slow fallback: try converting to dict, then iterate PyObject *args; if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); args = PyTuple_Pack(1, source_mapping); if (likely(args)) { PyObject *fallback = PyObject_Call((PyObject*)&PyDict_Type, args, NULL); Py_DECREF(args); if (likely(fallback)) { iter = __Pyx_dict_iterator(fallback, 1, PYIDENT("items"), &orig_length, &source_is_dict); Py_DECREF(fallback); } } if (unlikely(!iter)) goto bad; } while (1) { result = __Pyx_dict_iter_next(iter, orig_length, &ppos, &key, &value, NULL, source_is_dict); if (unlikely(result < 0)) goto bad; if (!result) break; if (unlikely(PyDict_Contains(kwdict, key))) { __Pyx_RaiseDoubleKeywordsError("function", key); result = -1; } else { result = PyDict_SetItem(kwdict, key, value); } Py_DECREF(key); Py_DECREF(value); if (unlikely(result < 0)) goto bad; } Py_XDECREF(iter); return 0; bad: Py_XDECREF(iter); return -1; } Cython-0.26.1/Cython/Utility/__init__.py0000664000175000017500000000220713023021033020634 0ustar stefanstefan00000000000000 def pylong_join(count, digits_ptr='digits', join_type='unsigned long'): """ Generate an unrolled shift-then-or loop over the first 'count' digits. Assumes that they fit into 'join_type'. (((d[2] << n) | d[1]) << n) | d[0] """ return ('(' * (count * 2) + ' | '.join( "(%s)%s[%d])%s)" % (join_type, digits_ptr, _i, " << PyLong_SHIFT" if _i else '') for _i in range(count-1, -1, -1))) # although it could potentially make use of data independence, # this implementation is a bit slower than the simpler one above def _pylong_join(count, digits_ptr='digits', join_type='unsigned long'): """ Generate an or-ed series of shifts for the first 'count' digits. Assumes that they fit into 'join_type'. (d[2] << 2*n) | (d[1] << 1*n) | d[0] """ def shift(n): # avoid compiler warnings for overly large shifts that will be discarded anyway return " << (%d * PyLong_SHIFT < 8 * sizeof(%s) ? 
%d * PyLong_SHIFT : 0)" % (n, join_type, n) if n else '' return '(%s)' % ' | '.join( "(((%s)%s[%d])%s)" % (join_type, digits_ptr, i, shift(i)) for i in range(count-1, -1, -1)) Cython-0.26.1/Cython/Utility/ImportExport.c0000664000175000017500000004752313143605603021373 0ustar stefanstefan00000000000000/////////////// PyIdentifierFromString.proto /////////////// #if !defined(__Pyx_PyIdentifier_FromString) #if PY_MAJOR_VERSION < 3 #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) #else #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) #endif #endif /////////////// Import.proto /////////////// static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/ /////////////// Import /////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr //@substitute: naming static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { PyObject *empty_list = 0; PyObject *module = 0; PyObject *global_dict = 0; PyObject *empty_dict = 0; PyObject *list; #if PY_VERSION_HEX < 0x03030000 PyObject *py_import; py_import = __Pyx_PyObject_GetAttrStr($builtins_cname, PYIDENT("__import__")); if (!py_import) goto bad; #endif if (from_list) list = from_list; else { empty_list = PyList_New(0); if (!empty_list) goto bad; list = empty_list; } global_dict = PyModule_GetDict($module_cname); if (!global_dict) goto bad; empty_dict = PyDict_New(); if (!empty_dict) goto bad; { #if PY_MAJOR_VERSION >= 3 if (level == -1) { if (strchr(__Pyx_MODULE_NAME, '.')) { /* try package relative import first */ #if PY_VERSION_HEX < 0x03030000 PyObject *py_level = PyInt_FromLong(1); if (!py_level) goto bad; module = PyObject_CallFunctionObjArgs(py_import, name, global_dict, empty_dict, list, py_level, NULL); Py_DECREF(py_level); #else module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, 1); #endif if (!module) { if (!PyErr_ExceptionMatches(PyExc_ImportError)) goto bad; PyErr_Clear(); } } level = 0; /* try absolute import on failure */ } #endif if (!module) { #if PY_VERSION_HEX < 0x03030000 PyObject *py_level = PyInt_FromLong(level); if (!py_level) goto bad; module = PyObject_CallFunctionObjArgs(py_import, name, global_dict, empty_dict, list, py_level, NULL); Py_DECREF(py_level); #else module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, level); #endif } } bad: #if PY_VERSION_HEX < 0x03030000 Py_XDECREF(py_import); #endif Py_XDECREF(empty_list); Py_XDECREF(empty_dict); return module; } /////////////// ImportFrom.proto /////////////// static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); /*proto*/ /////////////// ImportFrom /////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Format(PyExc_ImportError, #if PY_MAJOR_VERSION < 3 "cannot import name %.230s", PyString_AS_STRING(name)); #else "cannot import name %S", name); #endif } return value; } /////////////// ImportStar /////////////// //@substitute: naming /* import_all_from is an unexposed function from ceval.c */ static int __Pyx_import_all_from(PyObject *locals, PyObject *v) { PyObject *all = PyObject_GetAttrString(v, "__all__"); PyObject *dict, *name, *value; int skip_leading_underscores = 0; int pos, err; if (all == NULL) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) return -1; /* Unexpected error */ PyErr_Clear(); dict = 
PyObject_GetAttrString(v, "__dict__"); if (dict == NULL) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) return -1; PyErr_SetString(PyExc_ImportError, "from-import-* object has no __dict__ and no __all__"); return -1; } #if PY_MAJOR_VERSION < 3 all = PyObject_CallMethod(dict, (char *)"keys", NULL); #else all = PyMapping_Keys(dict); #endif Py_DECREF(dict); if (all == NULL) return -1; skip_leading_underscores = 1; } for (pos = 0, err = 0; ; pos++) { name = PySequence_GetItem(all, pos); if (name == NULL) { if (!PyErr_ExceptionMatches(PyExc_IndexError)) err = -1; else PyErr_Clear(); break; } if (skip_leading_underscores && #if PY_MAJOR_VERSION < 3 PyString_Check(name) && PyString_AS_STRING(name)[0] == '_') #else PyUnicode_Check(name) && PyUnicode_AS_UNICODE(name)[0] == '_') #endif { Py_DECREF(name); continue; } value = PyObject_GetAttr(v, name); if (value == NULL) err = -1; else if (PyDict_CheckExact(locals)) err = PyDict_SetItem(locals, name, value); else err = PyObject_SetItem(locals, name, value); Py_DECREF(name); Py_XDECREF(value); if (err != 0) break; } Py_DECREF(all); return err; } static int ${import_star}(PyObject* m) { int i; int ret = -1; char* s; PyObject *locals = 0; PyObject *list = 0; #if PY_MAJOR_VERSION >= 3 PyObject *utf8_name = 0; #endif PyObject *name; PyObject *item; locals = PyDict_New(); if (!locals) goto bad; if (__Pyx_import_all_from(locals, m) < 0) goto bad; list = PyDict_Items(locals); if (!list) goto bad; for(i=0; i= 3 utf8_name = PyUnicode_AsUTF8String(name); if (!utf8_name) goto bad; s = PyBytes_AS_STRING(utf8_name); if (${import_star_set}(item, name, s) < 0) goto bad; Py_DECREF(utf8_name); utf8_name = 0; #else s = PyString_AsString(name); if (!s) goto bad; if (${import_star_set}(item, name, s) < 0) goto bad; #endif } ret = 0; bad: Py_XDECREF(locals); Py_XDECREF(list); #if PY_MAJOR_VERSION >= 3 Py_XDECREF(utf8_name); #endif return ret; } /////////////// ModuleImport.proto /////////////// static PyObject *__Pyx_ImportModule(const char *name); /*proto*/ /////////////// ModuleImport /////////////// //@requires: PyIdentifierFromString #ifndef __PYX_HAVE_RT_ImportModule #define __PYX_HAVE_RT_ImportModule static PyObject *__Pyx_ImportModule(const char *name) { PyObject *py_name = 0; PyObject *py_module = 0; py_name = __Pyx_PyIdentifier_FromString(name); if (!py_name) goto bad; py_module = PyImport_Import(py_name); Py_DECREF(py_name); return py_module; bad: Py_XDECREF(py_name); return 0; } #endif /////////////// SetPackagePathFromImportLib.proto /////////////// #if PY_VERSION_HEX >= 0x03030000 static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name); #else #define __Pyx_SetPackagePathFromImportLib(a, b) 0 #endif /////////////// SetPackagePathFromImportLib /////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr //@substitute: naming #if PY_VERSION_HEX >= 0x03030000 static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name) { PyObject *importlib, *loader, *osmod, *ossep, *parts, *package_path; PyObject *path = NULL, *file_path = NULL; int result; if (parent_package_name) { PyObject *package = PyImport_ImportModule(parent_package_name); if (unlikely(!package)) goto bad; path = PyObject_GetAttrString(package, "__path__"); Py_DECREF(package); if (unlikely(!path) || unlikely(path == Py_None)) goto bad; } else { path = Py_None; Py_INCREF(Py_None); } // package_path = [importlib.find_loader(module_name, path).path.rsplit(os.sep, 1)[0]] importlib = 
PyImport_ImportModule("importlib"); if (unlikely(!importlib)) goto bad; loader = PyObject_CallMethod(importlib, "find_loader", "(OO)", module_name, path); Py_DECREF(importlib); Py_DECREF(path); path = NULL; if (unlikely(!loader)) goto bad; file_path = PyObject_GetAttrString(loader, "path"); Py_DECREF(loader); if (unlikely(!file_path)) goto bad; if (unlikely(PyObject_SetAttrString($module_cname, "__file__", file_path) < 0)) goto bad; osmod = PyImport_ImportModule("os"); if (unlikely(!osmod)) goto bad; ossep = PyObject_GetAttrString(osmod, "sep"); Py_DECREF(osmod); if (unlikely(!ossep)) goto bad; parts = PyObject_CallMethod(file_path, "rsplit", "(Oi)", ossep, 1); Py_DECREF(file_path); file_path = NULL; Py_DECREF(ossep); if (unlikely(!parts)) goto bad; package_path = Py_BuildValue("[O]", PyList_GET_ITEM(parts, 0)); Py_DECREF(parts); if (unlikely(!package_path)) goto bad; goto set_path; bad: PyErr_WriteUnraisable(module_name); Py_XDECREF(path); Py_XDECREF(file_path); // set an empty path list on failure PyErr_Clear(); package_path = PyList_New(0); if (unlikely(!package_path)) return -1; set_path: result = PyObject_SetAttrString($module_cname, "__path__", package_path); Py_DECREF(package_path); return result; } #endif /////////////// TypeImport.proto /////////////// static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/ /////////////// TypeImport /////////////// //@requires: PyIdentifierFromString //@requires: ModuleImport #ifndef __PYX_HAVE_RT_ImportType #define __PYX_HAVE_RT_ImportType static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict) { PyObject *py_module = 0; PyObject *result = 0; PyObject *py_name = 0; char warning[200]; Py_ssize_t basicsize; #ifdef Py_LIMITED_API PyObject *py_basicsize; #endif py_module = __Pyx_ImportModule(module_name); if (!py_module) goto bad; py_name = __Pyx_PyIdentifier_FromString(class_name); if (!py_name) goto bad; result = PyObject_GetAttr(py_module, py_name); Py_DECREF(py_name); py_name = 0; Py_DECREF(py_module); py_module = 0; if (!result) goto bad; if (!PyType_Check(result)) { PyErr_Format(PyExc_TypeError, "%.200s.%.200s is not a type object", module_name, class_name); goto bad; } #ifndef Py_LIMITED_API basicsize = ((PyTypeObject *)result)->tp_basicsize; #else py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) goto bad; basicsize = PyLong_AsSsize_t(py_basicsize); Py_DECREF(py_basicsize); py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; #endif if (!strict && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", module_name, class_name, basicsize, size); if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; } else if ((size_t)basicsize != size) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s has the wrong size, try recompiling. 
Expected %zd, got %zd", module_name, class_name, basicsize, size); goto bad; } return (PyTypeObject *)result; bad: Py_XDECREF(py_module); Py_XDECREF(result); return NULL; } #endif /////////////// FunctionImport.proto /////////////// static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig); /*proto*/ /////////////// FunctionImport /////////////// //@substitute: naming #ifndef __PYX_HAVE_RT_ImportFunction #define __PYX_HAVE_RT_ImportFunction static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { PyObject *d = 0; PyObject *cobj = 0; union { void (*fp)(void); void *p; } tmp; d = PyObject_GetAttrString(module, (char *)"$api_name"); if (!d) goto bad; cobj = PyDict_GetItemString(d, funcname); if (!cobj) { PyErr_Format(PyExc_ImportError, "%.200s does not export expected C function %.200s", PyModule_GetName(module), funcname); goto bad; } #if PY_VERSION_HEX >= 0x02070000 if (!PyCapsule_IsValid(cobj, sig)) { PyErr_Format(PyExc_TypeError, "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); goto bad; } tmp.p = PyCapsule_GetPointer(cobj, sig); #else {const char *desc, *s1, *s2; desc = (const char *)PyCObject_GetDesc(cobj); if (!desc) goto bad; s1 = desc; s2 = sig; while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } if (*s1 != *s2) { PyErr_Format(PyExc_TypeError, "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", PyModule_GetName(module), funcname, sig, desc); goto bad; } tmp.p = PyCObject_AsVoidPtr(cobj);} #endif *f = tmp.fp; if (!(*f)) goto bad; Py_DECREF(d); return 0; bad: Py_XDECREF(d); return -1; } #endif /////////////// FunctionExport.proto /////////////// static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); /*proto*/ /////////////// FunctionExport /////////////// //@substitute: naming static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { PyObject *d = 0; PyObject *cobj = 0; union { void (*fp)(void); void *p; } tmp; d = PyObject_GetAttrString($module_cname, (char *)"$api_name"); if (!d) { PyErr_Clear(); d = PyDict_New(); if (!d) goto bad; Py_INCREF(d); if (PyModule_AddObject($module_cname, (char *)"$api_name", d) < 0) goto bad; } tmp.fp = f; #if PY_VERSION_HEX >= 0x02070000 cobj = PyCapsule_New(tmp.p, sig, 0); #else cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); #endif if (!cobj) goto bad; if (PyDict_SetItemString(d, name, cobj) < 0) goto bad; Py_DECREF(cobj); Py_DECREF(d); return 0; bad: Py_XDECREF(cobj); Py_XDECREF(d); return -1; } /////////////// VoidPtrImport.proto /////////////// static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig); /*proto*/ /////////////// VoidPtrImport /////////////// //@substitute: naming #ifndef __PYX_HAVE_RT_ImportVoidPtr #define __PYX_HAVE_RT_ImportVoidPtr static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig) { PyObject *d = 0; PyObject *cobj = 0; d = PyObject_GetAttrString(module, (char *)"$api_name"); if (!d) goto bad; cobj = PyDict_GetItemString(d, name); if (!cobj) { PyErr_Format(PyExc_ImportError, "%.200s does not export expected C variable %.200s", PyModule_GetName(module), name); goto bad; } #if PY_VERSION_HEX >= 0x02070000 if (!PyCapsule_IsValid(cobj, sig)) { PyErr_Format(PyExc_TypeError, "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", 
PyModule_GetName(module), name, sig, PyCapsule_GetName(cobj)); goto bad; } *p = PyCapsule_GetPointer(cobj, sig); #else {const char *desc, *s1, *s2; desc = (const char *)PyCObject_GetDesc(cobj); if (!desc) goto bad; s1 = desc; s2 = sig; while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } if (*s1 != *s2) { PyErr_Format(PyExc_TypeError, "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", PyModule_GetName(module), name, sig, desc); goto bad; } *p = PyCObject_AsVoidPtr(cobj);} #endif if (!(*p)) goto bad; Py_DECREF(d); return 0; bad: Py_XDECREF(d); return -1; } #endif /////////////// VoidPtrExport.proto /////////////// static int __Pyx_ExportVoidPtr(PyObject *name, void *p, const char *sig); /*proto*/ /////////////// VoidPtrExport /////////////// //@substitute: naming //@requires: ObjectHandling.c::PyObjectSetAttrStr static int __Pyx_ExportVoidPtr(PyObject *name, void *p, const char *sig) { PyObject *d; PyObject *cobj = 0; d = PyDict_GetItem($moddict_cname, PYIDENT("$api_name")); Py_XINCREF(d); if (!d) { d = PyDict_New(); if (!d) goto bad; if (__Pyx_PyObject_SetAttrStr($module_cname, PYIDENT("$api_name"), d) < 0) goto bad; } #if PY_VERSION_HEX >= 0x02070000 cobj = PyCapsule_New(p, sig, 0); #else cobj = PyCObject_FromVoidPtrAndDesc(p, (void *)sig, 0); #endif if (!cobj) goto bad; if (PyDict_SetItem(d, name, cobj) < 0) goto bad; Py_DECREF(cobj); Py_DECREF(d); return 0; bad: Py_XDECREF(cobj); Py_XDECREF(d); return -1; } /////////////// SetVTable.proto /////////////// static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/ /////////////// SetVTable /////////////// static int __Pyx_SetVtable(PyObject *dict, void *vtable) { #if PY_VERSION_HEX >= 0x02070000 PyObject *ob = PyCapsule_New(vtable, 0, 0); #else PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); #endif if (!ob) goto bad; if (PyDict_SetItem(dict, PYIDENT("__pyx_vtable__"), ob) < 0) goto bad; Py_DECREF(ob); return 0; bad: Py_XDECREF(ob); return -1; } /////////////// GetVTable.proto /////////////// static void* __Pyx_GetVtable(PyObject *dict); /*proto*/ /////////////// GetVTable /////////////// static void* __Pyx_GetVtable(PyObject *dict) { void* ptr; PyObject *ob = PyObject_GetItem(dict, PYIDENT("__pyx_vtable__")); if (!ob) goto bad; #if PY_VERSION_HEX >= 0x02070000 ptr = PyCapsule_GetPointer(ob, 0); #else ptr = PyCObject_AsVoidPtr(ob); #endif if (!ptr && !PyErr_Occurred()) PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); Py_DECREF(ob); return ptr; bad: Py_XDECREF(ob); return NULL; } /////////////// ImportNumPyArray.proto /////////////// static PyObject *__pyx_numpy_ndarray = NULL; static PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void); /*proto*/ /////////////// ImportNumPyArray.cleanup /////////////// Py_CLEAR(__pyx_numpy_ndarray); /////////////// ImportNumPyArray /////////////// //@requires: ImportExport.c::Import static PyObject* __Pyx__ImportNumPyArray(void) { PyObject *numpy_module, *ndarray_object = NULL; numpy_module = __Pyx_Import(PYIDENT("numpy"), NULL, 0); if (likely(numpy_module)) { ndarray_object = PyObject_GetAttrString(numpy_module, "ndarray"); Py_DECREF(numpy_module); } if (unlikely(!ndarray_object)) { // ImportError, AttributeError, ... 
PyErr_Clear(); } if (unlikely(!ndarray_object || !PyObject_TypeCheck(ndarray_object, &PyType_Type))) { Py_XDECREF(ndarray_object); Py_INCREF(Py_None); ndarray_object = Py_None; } return ndarray_object; } static CYTHON_INLINE PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void) { if (unlikely(!__pyx_numpy_ndarray)) { __pyx_numpy_ndarray = __Pyx__ImportNumPyArray(); } Py_INCREF(__pyx_numpy_ndarray); return __pyx_numpy_ndarray; } Cython-0.26.1/Cython/Utility/Printing.c0000664000175000017500000001175712542002467020512 0ustar stefanstefan00000000000000////////////////////// Print.proto ////////////////////// //@substitute: naming static int __Pyx_Print(PyObject*, PyObject *, int); /*proto*/ #if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 static PyObject* $print_function = 0; static PyObject* $print_function_kwargs = 0; #endif ////////////////////// Print.cleanup ////////////////////// //@substitute: naming #if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 Py_CLEAR($print_function); Py_CLEAR($print_function_kwargs); #endif ////////////////////// Print ////////////////////// //@substitute: naming #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 static PyObject *__Pyx_GetStdout(void) { PyObject *f = PySys_GetObject((char *)"stdout"); if (!f) { PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout"); } return f; } static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) { int i; if (!f) { if (!(f = __Pyx_GetStdout())) return -1; } Py_INCREF(f); for (i=0; i < PyTuple_GET_SIZE(arg_tuple); i++) { PyObject* v; if (PyFile_SoftSpace(f, 1)) { if (PyFile_WriteString(" ", f) < 0) goto error; } v = PyTuple_GET_ITEM(arg_tuple, i); if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0) goto error; if (PyString_Check(v)) { char *s = PyString_AsString(v); Py_ssize_t len = PyString_Size(v); if (len > 0) { // append soft-space if necessary (not using isspace() due to C/C++ problem on MacOS-X) switch (s[len-1]) { case ' ': break; case '\f': case '\r': case '\n': case '\t': case '\v': PyFile_SoftSpace(f, 0); break; default: break; } } } } if (newline) { if (PyFile_WriteString("\n", f) < 0) goto error; PyFile_SoftSpace(f, 0); } Py_DECREF(f); return 0; error: Py_DECREF(f); return -1; } #else /* Python 3 has a print function */ static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { PyObject* kwargs = 0; PyObject* result = 0; PyObject* end_string; if (unlikely(!$print_function)) { $print_function = PyObject_GetAttr($builtins_cname, PYIDENT("print")); if (!$print_function) return -1; } if (stream) { kwargs = PyDict_New(); if (unlikely(!kwargs)) return -1; if (unlikely(PyDict_SetItem(kwargs, PYIDENT("file"), stream) < 0)) goto bad; if (!newline) { end_string = PyUnicode_FromStringAndSize(" ", 1); if (unlikely(!end_string)) goto bad; if (PyDict_SetItem(kwargs, PYIDENT("end"), end_string) < 0) { Py_DECREF(end_string); goto bad; } Py_DECREF(end_string); } } else if (!newline) { if (unlikely(!$print_function_kwargs)) { $print_function_kwargs = PyDict_New(); if (unlikely(!$print_function_kwargs)) return -1; end_string = PyUnicode_FromStringAndSize(" ", 1); if (unlikely(!end_string)) return -1; if (PyDict_SetItem($print_function_kwargs, PYIDENT("end"), end_string) < 0) { Py_DECREF(end_string); return -1; } Py_DECREF(end_string); } kwargs = $print_function_kwargs; } result = PyObject_Call($print_function, arg_tuple, kwargs); if (unlikely(kwargs) && (kwargs != $print_function_kwargs)) Py_DECREF(kwargs); if (!result) return -1; Py_DECREF(result); return 0; bad: if (kwargs != 
$print_function_kwargs) Py_XDECREF(kwargs); return -1; } #endif ////////////////////// PrintOne.proto ////////////////////// //@requires: Print static int __Pyx_PrintOne(PyObject* stream, PyObject *o); /*proto*/ ////////////////////// PrintOne ////////////////////// #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 static int __Pyx_PrintOne(PyObject* f, PyObject *o) { if (!f) { if (!(f = __Pyx_GetStdout())) return -1; } Py_INCREF(f); if (PyFile_SoftSpace(f, 0)) { if (PyFile_WriteString(" ", f) < 0) goto error; } if (PyFile_WriteObject(o, f, Py_PRINT_RAW) < 0) goto error; if (PyFile_WriteString("\n", f) < 0) goto error; Py_DECREF(f); return 0; error: Py_DECREF(f); return -1; /* the line below is just to avoid C compiler * warnings about unused functions */ return __Pyx_Print(f, NULL, 0); } #else /* Python 3 has a print function */ static int __Pyx_PrintOne(PyObject* stream, PyObject *o) { int res; PyObject* arg_tuple = PyTuple_Pack(1, o); if (unlikely(!arg_tuple)) return -1; res = __Pyx_Print(stream, arg_tuple, 1); Py_DECREF(arg_tuple); return res; } #endif Cython-0.26.1/Cython/Utility/Coroutine.c0000664000175000017500000020347113150045407020660 0ustar stefanstefan00000000000000//////////////////// GeneratorYieldFrom.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject *gen, PyObject *source); //////////////////// GeneratorYieldFrom //////////////////// //@requires: Generator static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) { PyObject *source_gen, *retval; #ifdef __Pyx_Coroutine_USED if (__Pyx_Coroutine_CheckExact(source)) { // TODO: this should only happen for types.coroutine()ed generators, but we can't determine that here Py_INCREF(source); source_gen = source; retval = __Pyx_Generator_Next(source); } else #endif { #if CYTHON_USE_TYPE_SLOTS if (likely(Py_TYPE(source)->tp_iter)) { source_gen = Py_TYPE(source)->tp_iter(source); if (unlikely(!source_gen)) return NULL; if (unlikely(!PyIter_Check(source_gen))) { PyErr_Format(PyExc_TypeError, "iter() returned non-iterator of type '%.100s'", Py_TYPE(source_gen)->tp_name); Py_DECREF(source_gen); return NULL; } } else #endif source_gen = PyObject_GetIter(source); // source_gen is now the iterator, make the first next() call retval = Py_TYPE(source_gen)->tp_iternext(source_gen); } if (likely(retval)) { gen->yieldfrom = source_gen; return retval; } Py_DECREF(source_gen); return NULL; } //////////////////// CoroutineYieldFrom.proto //////////////////// #define __Pyx_Coroutine_Yield_From(gen, source) __Pyx__Coroutine_Yield_From(gen, source, 0) static CYTHON_INLINE PyObject* __Pyx__Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source, int warn); //////////////////// CoroutineYieldFrom //////////////////// //@requires: Coroutine //@requires: GetAwaitIter static int __Pyx_WarnAIterDeprecation(PyObject *aiter) { int result; #if PY_MAJOR_VERSION >= 3 result = PyErr_WarnFormat( PyExc_PendingDeprecationWarning, 1, "'%.100s' implements legacy __aiter__ protocol; " "__aiter__ should return an asynchronous " "iterator, not awaitable", Py_TYPE(aiter)->tp_name); #else result = PyErr_WarnEx( PyExc_PendingDeprecationWarning, "object implements legacy __aiter__ protocol; " "__aiter__ should return an asynchronous " "iterator, not awaitable", 1); #endif return result != 0; } static CYTHON_INLINE PyObject* __Pyx__Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source, int warn) { PyObject *retval; if 
(__Pyx_Coroutine_CheckExact(source)) { if (warn && unlikely(__Pyx_WarnAIterDeprecation(source))) { /* Warning was converted to an error. */ return NULL; } retval = __Pyx_Generator_Next(source); if (retval) { Py_INCREF(source); gen->yieldfrom = source; return retval; } } else { PyObject *source_gen = __Pyx__Coroutine_GetAwaitableIter(source); if (unlikely(!source_gen)) return NULL; if (warn && unlikely(__Pyx_WarnAIterDeprecation(source))) { /* Warning was converted to an error. */ Py_DECREF(source_gen); return NULL; } // source_gen is now the iterator, make the first next() call if (__Pyx_Coroutine_CheckExact(source_gen)) { retval = __Pyx_Generator_Next(source_gen); } else { retval = Py_TYPE(source_gen)->tp_iternext(source_gen); } if (retval) { gen->yieldfrom = source_gen; return retval; } Py_DECREF(source_gen); } return NULL; } //////////////////// CoroutineAIterYieldFrom.proto //////////////////// static CYTHON_INLINE PyObject* __Pyx_Coroutine_AIter_Yield_From(__pyx_CoroutineObject *gen, PyObject *source); //////////////////// CoroutineAIterYieldFrom //////////////////// //@requires: CoroutineYieldFrom static CYTHON_INLINE PyObject* __Pyx_Coroutine_AIter_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) { #if CYTHON_USE_ASYNC_SLOTS __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(source); if (likely(am && am->am_anext)) { // Starting with CPython 3.5.2, __aiter__ should return // asynchronous iterators directly (not awaitables that // resolve to asynchronous iterators.) // // Therefore, we check if the object that was returned // from __aiter__ has an __anext__ method. If it does, // we return it directly as StopIteration result, // which avoids yielding. // // See http://bugs.python.org/issue27243 for more // details. PyErr_SetObject(PyExc_StopIteration, source); return NULL; } #endif #if PY_VERSION_HEX < 0x030500B2 if (!__Pyx_PyType_AsAsync(source)) { #ifdef __Pyx_Coroutine_USED if (!__Pyx_Coroutine_CheckExact(source)) /* quickly rule out a likely case */ #endif { // same as above in slow PyObject *method = __Pyx_PyObject_GetAttrStr(source, PYIDENT("__anext__")); if (method) { Py_DECREF(method); PyErr_SetObject(PyExc_StopIteration, source); return NULL; } PyErr_Clear(); } } #endif return __Pyx__Coroutine_Yield_From(gen, source, 1); } //////////////////// GetAwaitIter.proto //////////////////// static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o); /*proto*/ static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *o); /*proto*/ //////////////////// GetAwaitIter //////////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr //@requires: ObjectHandling.c::PyObjectCallNoArg //@requires: ObjectHandling.c::PyObjectCallOneArg static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o) { #ifdef __Pyx_Coroutine_USED if (__Pyx_Coroutine_CheckExact(o)) { Py_INCREF(o); return o; } #endif return __Pyx__Coroutine_GetAwaitableIter(o); } // adapted from genobject.c in Py3.5 static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) { PyObject *res; #if CYTHON_USE_ASYNC_SLOTS __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); if (likely(am && am->am_await)) { res = (*am->am_await)(obj); } else #endif #if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact) if (PyCoro_CheckExact(obj)) { Py_INCREF(obj); return obj; } else #endif #if CYTHON_COMPILING_IN_CPYTHON && defined(CO_ITERABLE_COROUTINE) if (PyGen_CheckExact(obj) && ((PyGenObject*)obj)->gi_code && ((PyCodeObject *)((PyGenObject*)obj)->gi_code)->co_flags & 
CO_ITERABLE_COROUTINE) { // Python generator marked with "@types.coroutine" decorator Py_INCREF(obj); return obj; } else #endif { PyObject *method = __Pyx_PyObject_GetAttrStr(obj, PYIDENT("__await__")); if (unlikely(!method)) goto slot_error; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method))) { PyObject *self = PyMethod_GET_SELF(method); if (likely(self)) { PyObject *function = PyMethod_GET_FUNCTION(method); res = __Pyx_PyObject_CallOneArg(function, self); } else res = __Pyx_PyObject_CallNoArg(method); } else #endif res = __Pyx_PyObject_CallNoArg(method); Py_DECREF(method); } if (unlikely(!res)) goto bad; if (!PyIter_Check(res)) { PyErr_Format(PyExc_TypeError, "__await__() returned non-iterator of type '%.100s'", Py_TYPE(res)->tp_name); Py_CLEAR(res); } else { int is_coroutine = 0; #ifdef __Pyx_Coroutine_USED is_coroutine |= __Pyx_Coroutine_CheckExact(res); #endif #if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact) is_coroutine |= PyCoro_CheckExact(res); #endif if (unlikely(is_coroutine)) { /* __await__ must return an *iterator*, not a coroutine or another awaitable (see PEP 492) */ PyErr_SetString(PyExc_TypeError, "__await__() returned a coroutine"); Py_CLEAR(res); } } return res; slot_error: PyErr_Format(PyExc_TypeError, "object %.100s can't be used in 'await' expression", Py_TYPE(obj)->tp_name); bad: return NULL; } //////////////////// AsyncIter.proto //////////////////// static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *o); /*proto*/ static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *o); /*proto*/ //////////////////// AsyncIter //////////////////// //@requires: GetAwaitIter //@requires: ObjectHandling.c::PyObjectCallMethod0 static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *obj) { #if CYTHON_USE_ASYNC_SLOTS __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); if (likely(am && am->am_aiter)) { return (*am->am_aiter)(obj); } #endif #if PY_VERSION_HEX < 0x030500B1 { PyObject *iter = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__")); if (likely(iter)) return iter; // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__aiter__' if (!PyErr_ExceptionMatches(PyExc_AttributeError)) return NULL; } #else // avoid C warning about 'unused function' if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__")); #endif PyErr_Format(PyExc_TypeError, "'async for' requires an object with __aiter__ method, got %.100s", Py_TYPE(obj)->tp_name); return NULL; } static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *obj) { #if CYTHON_USE_ASYNC_SLOTS __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); if (likely(am && am->am_anext)) { return (*am->am_anext)(obj); } #endif #if PY_VERSION_HEX < 0x030500B1 { PyObject *value = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__anext__")); if (likely(value)) return value; } // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__anext__' if (PyErr_ExceptionMatches(PyExc_AttributeError)) #endif PyErr_Format(PyExc_TypeError, "'async for' requires an object with __anext__ method, got %.100s", Py_TYPE(obj)->tp_name); return NULL; } //////////////////// pep479.proto //////////////////// static void __Pyx_Generator_Replace_StopIteration(void); /*proto*/ //////////////////// pep479 //////////////////// //@requires: Exceptions.c::GetException static void __Pyx_Generator_Replace_StopIteration(void) { PyObject *exc, *val, *tb; // Chain exceptions by moving StopIteration to 
exc_info before creating the RuntimeError. // In Py2.x, no chaining happens, but the exception still stays visible in exc_info. __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_GetException(&exc, &val, &tb); Py_XDECREF(exc); Py_XDECREF(val); Py_XDECREF(tb); PyErr_SetString(PyExc_RuntimeError, "generator raised StopIteration"); } //////////////////// CoroutineBase.proto //////////////////// typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyObject *); typedef struct { PyObject_HEAD __pyx_coroutine_body_t body; PyObject *closure; PyObject *exc_type; PyObject *exc_value; PyObject *exc_traceback; PyObject *gi_weakreflist; PyObject *classobj; PyObject *yieldfrom; PyObject *gi_name; PyObject *gi_qualname; PyObject *gi_modulename; int resume_label; // using T_BOOL for property below requires char value char is_running; } __pyx_CoroutineObject; static __pyx_CoroutineObject *__Pyx__Coroutine_New( PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *closure, PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/ static int __Pyx_Coroutine_clear(PyObject *self); /*proto*/ #if 1 || PY_VERSION_HEX < 0x030300B0 static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue); /*proto*/ #else #define __Pyx_PyGen_FetchStopIterationValue(pvalue) PyGen_FetchStopIterationValue(pvalue) #endif //////////////////// Coroutine.proto //////////////////// #define __Pyx_Coroutine_USED static PyTypeObject *__pyx_CoroutineType = 0; static PyTypeObject *__pyx_CoroutineAwaitType = 0; #define __Pyx_Coroutine_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineType) #define __Pyx_Coroutine_New(body, closure, name, qualname, module_name) \ __Pyx__Coroutine_New(__pyx_CoroutineType, body, closure, name, qualname, module_name) static int __pyx_Coroutine_init(void); /*proto*/ static PyObject *__Pyx__Coroutine_await(PyObject *coroutine); /*proto*/ //////////////////// Generator.proto //////////////////// #define __Pyx_Generator_USED static PyTypeObject *__pyx_GeneratorType = 0; #define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType) #define __Pyx_Generator_New(body, closure, name, qualname, module_name) \ __Pyx__Coroutine_New(__pyx_GeneratorType, body, closure, name, qualname, module_name) static PyObject *__Pyx_Generator_Next(PyObject *self); static int __pyx_Generator_init(void); /*proto*/ //////////////////// CoroutineBase //////////////////// //@substitute: naming //@requires: Exceptions.c::PyErrFetchRestore //@requires: Exceptions.c::PyThreadStateGet //@requires: Exceptions.c::SwapException //@requires: Exceptions.c::RaiseException //@requires: ObjectHandling.c::PyObjectCallMethod1 //@requires: ObjectHandling.c::PyObjectGetAttrStr //@requires: CommonStructures.c::FetchCommonType #include #include static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); static PyObject *__Pyx_Coroutine_Close(PyObject *self); static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); #define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom) // If StopIteration exception is set, fetches its 'value' // attribute if any, otherwise sets pvalue to None. // // Returns 0 if no exception or StopIteration is set. // If any other exception is set, returns -1 and leaves // pvalue unchanged. 
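// Minimal usage sketch (the helper below is hypothetical and only illustrates
// the calling convention; the real caller is __Pyx_Coroutine_FinishDelegation
// further down): fetch the StopIteration value once a delegated iterator is
// exhausted, and let any other exception propagate unchanged.
static CYTHON_INLINE PyObject *__pyx_example_result_or_error(void)
{
    PyObject *value = NULL;
    if (__Pyx_PyGen_FetchStopIterationValue(&value) < 0)
        return NULL;   /* a non-StopIteration exception is still set */
    return value;      /* new reference: StopIteration.value, or Py_None */
}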
#if 1 || PY_VERSION_HEX < 0x030300B0 static int __Pyx_PyGen_FetchStopIterationValue(PyObject **pvalue) { PyObject *et, *ev, *tb; PyObject *value = NULL; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&et, &ev, &tb); if (!et) { Py_XDECREF(tb); Py_XDECREF(ev); Py_INCREF(Py_None); *pvalue = Py_None; return 0; } // most common case: plain StopIteration without or with separate argument if (likely(et == PyExc_StopIteration)) { if (!ev) { Py_INCREF(Py_None); value = Py_None; } #if PY_VERSION_HEX >= 0x030300A0 else if (Py_TYPE(ev) == (PyTypeObject*)PyExc_StopIteration) { value = ((PyStopIterationObject *)ev)->value; Py_INCREF(value); Py_DECREF(ev); } #endif // PyErr_SetObject() and friends put the value directly into ev else if (unlikely(PyTuple_Check(ev))) { // if it's a tuple, it is interpreted as separate constructor arguments (surprise!) if (PyTuple_GET_SIZE(ev) >= 1) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS value = PyTuple_GET_ITEM(ev, 0); Py_INCREF(value); #else value = PySequence_ITEM(ev, 0); #endif } else { Py_INCREF(Py_None); value = Py_None; } Py_DECREF(ev); } else if (!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) { // 'steal' reference to ev value = ev; } if (likely(value)) { Py_XDECREF(tb); Py_DECREF(et); *pvalue = value; return 0; } } else if (!PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) { __Pyx_ErrRestore(et, ev, tb); return -1; } // otherwise: normalise and check what that gives us PyErr_NormalizeException(&et, &ev, &tb); if (unlikely(!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration))) { // looks like normalisation failed - raise the new exception __Pyx_ErrRestore(et, ev, tb); return -1; } Py_XDECREF(tb); Py_DECREF(et); #if PY_VERSION_HEX >= 0x030300A0 value = ((PyStopIterationObject *)ev)->value; Py_INCREF(value); Py_DECREF(ev); #else { PyObject* args = __Pyx_PyObject_GetAttrStr(ev, PYIDENT("args")); Py_DECREF(ev); if (likely(args)) { value = PySequence_GetItem(args, 0); Py_DECREF(args); } if (unlikely(!value)) { __Pyx_ErrRestore(NULL, NULL, NULL); Py_INCREF(Py_None); value = Py_None; } } #endif *pvalue = value; return 0; } #endif static CYTHON_INLINE void __Pyx_Coroutine_ExceptionClear(__pyx_CoroutineObject *self) { PyObject *exc_type = self->exc_type; PyObject *exc_value = self->exc_value; PyObject *exc_traceback = self->exc_traceback; self->exc_type = NULL; self->exc_value = NULL; self->exc_traceback = NULL; Py_XDECREF(exc_type); Py_XDECREF(exc_value); Py_XDECREF(exc_traceback); } static CYTHON_INLINE int __Pyx_Coroutine_CheckRunning(__pyx_CoroutineObject *gen) { if (unlikely(gen->is_running)) { PyErr_SetString(PyExc_ValueError, "generator already executing"); return 1; } return 0; } static CYTHON_INLINE PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value) { PyObject *retval; __Pyx_PyThreadState_declare assert(!self->is_running); if (unlikely(self->resume_label == 0)) { if (unlikely(value && value != Py_None)) { PyErr_SetString(PyExc_TypeError, "can't send non-None value to a " "just-started generator"); return NULL; } } if (unlikely(self->resume_label == -1)) { PyErr_SetNone(PyExc_StopIteration); return NULL; } __Pyx_PyThreadState_assign if (value) { #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON // FIXME: what to do in PyPy? #else // Generators always return to their most recent caller, not // necessarily their creator. 
if (self->exc_traceback) { PyTracebackObject *tb = (PyTracebackObject *) self->exc_traceback; PyFrameObject *f = tb->tb_frame; Py_XINCREF($local_tstate_cname->frame); assert(f->f_back == NULL); f->f_back = $local_tstate_cname->frame; } #endif __Pyx_ExceptionSwap(&self->exc_type, &self->exc_value, &self->exc_traceback); } else { __Pyx_Coroutine_ExceptionClear(self); } self->is_running = 1; retval = self->body((PyObject *) self, value); self->is_running = 0; if (retval) { __Pyx_ExceptionSwap(&self->exc_type, &self->exc_value, &self->exc_traceback); #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON // FIXME: what to do in PyPy? #else // Don't keep the reference to f_back any longer than necessary. It // may keep a chain of frames alive or it could create a reference // cycle. if (self->exc_traceback) { PyTracebackObject *tb = (PyTracebackObject *) self->exc_traceback; PyFrameObject *f = tb->tb_frame; Py_CLEAR(f->f_back); } #endif } else { __Pyx_Coroutine_ExceptionClear(self); } return retval; } static CYTHON_INLINE PyObject *__Pyx_Coroutine_MethodReturn(PyObject *retval) { if (unlikely(!retval && !PyErr_Occurred())) { // method call must not terminate with NULL without setting an exception PyErr_SetNone(PyExc_StopIteration); } return retval; } static CYTHON_INLINE PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { PyObject *ret; PyObject *val = NULL; __Pyx_Coroutine_Undelegate(gen); __Pyx_PyGen_FetchStopIterationValue(&val); // val == NULL on failure => pass on exception ret = __Pyx_Coroutine_SendEx(gen, val); Py_XDECREF(val); return ret; } static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { PyObject *retval; __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; PyObject *yf = gen->yieldfrom; if (unlikely(__Pyx_Coroutine_CheckRunning(gen))) return NULL; if (yf) { PyObject *ret; // FIXME: does this really need an INCREF() ? //Py_INCREF(yf); gen->is_running = 1; #ifdef __Pyx_Generator_USED if (__Pyx_Generator_CheckExact(yf)) { ret = __Pyx_Coroutine_Send(yf, value); } else #endif #ifdef __Pyx_Coroutine_USED if (__Pyx_Coroutine_CheckExact(yf)) { ret = __Pyx_Coroutine_Send(yf, value); } else #endif { if (value == Py_None) ret = Py_TYPE(yf)->tp_iternext(yf); else ret = __Pyx_PyObject_CallMethod1(yf, PYIDENT("send"), value); } gen->is_running = 0; //Py_DECREF(yf); if (likely(ret)) { return ret; } retval = __Pyx_Coroutine_FinishDelegation(gen); } else { retval = __Pyx_Coroutine_SendEx(gen, value); } return __Pyx_Coroutine_MethodReturn(retval); } // This helper function is used by gen_close and gen_throw to // close a subiterator being delegated to by yield-from. 
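// Sketch of the same behaviour at the plain C-API level (hypothetical helper,
// not part of the original file): look up "close" on the sub-iterator, call it
// if present, and treat a missing attribute as "nothing to close".  The real
// implementation below additionally short-cuts Cython's own generator and
// coroutine types and tracks the 'is_running' flag.
static int __pyx_example_close_subiter(PyObject *yf)
{
    PyObject *meth = PyObject_GetAttrString(yf, "close");
    if (!meth) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return -1;      /* unexpected error: leave it set for the caller */
        PyErr_Clear();      /* no close() method: nothing to do */
        return 0;
    }
    {
        PyObject *res = PyObject_CallObject(meth, NULL);
        Py_DECREF(meth);
        if (!res) return -1;  /* close() itself raised */
        Py_DECREF(res);
    }
    return 0;
}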
static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) { PyObject *retval = NULL; int err = 0; #ifdef __Pyx_Generator_USED if (__Pyx_Generator_CheckExact(yf)) { retval = __Pyx_Coroutine_Close(yf); if (!retval) return -1; } else #endif #ifdef __Pyx_Coroutine_USED if (__Pyx_Coroutine_CheckExact(yf)) { retval = __Pyx_Coroutine_Close(yf); if (!retval) return -1; } else #endif { PyObject *meth; gen->is_running = 1; meth = __Pyx_PyObject_GetAttrStr(yf, PYIDENT("close")); if (unlikely(!meth)) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_WriteUnraisable(yf); } PyErr_Clear(); } else { retval = PyObject_CallFunction(meth, NULL); Py_DECREF(meth); if (!retval) err = -1; } gen->is_running = 0; } Py_XDECREF(retval); return err; } static PyObject *__Pyx_Generator_Next(PyObject *self) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; PyObject *yf = gen->yieldfrom; if (unlikely(__Pyx_Coroutine_CheckRunning(gen))) return NULL; if (yf) { PyObject *ret; // FIXME: does this really need an INCREF() ? //Py_INCREF(yf); // YieldFrom code ensures that yf is an iterator gen->is_running = 1; #ifdef __Pyx_Generator_USED if (__Pyx_Generator_CheckExact(yf)) { ret = __Pyx_Generator_Next(yf); } else #endif ret = Py_TYPE(yf)->tp_iternext(yf); gen->is_running = 0; //Py_DECREF(yf); if (likely(ret)) { return ret; } return __Pyx_Coroutine_FinishDelegation(gen); } return __Pyx_Coroutine_SendEx(gen, Py_None); } static PyObject *__Pyx_Coroutine_Close(PyObject *self) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; PyObject *retval, *raised_exception; PyObject *yf = gen->yieldfrom; int err = 0; if (unlikely(__Pyx_Coroutine_CheckRunning(gen))) return NULL; if (yf) { Py_INCREF(yf); err = __Pyx_Coroutine_CloseIter(gen, yf); __Pyx_Coroutine_Undelegate(gen); Py_DECREF(yf); } if (err == 0) PyErr_SetNone(PyExc_GeneratorExit); retval = __Pyx_Coroutine_SendEx(gen, NULL); if (retval) { Py_DECREF(retval); PyErr_SetString(PyExc_RuntimeError, "generator ignored GeneratorExit"); return NULL; } raised_exception = PyErr_Occurred(); if (!raised_exception || raised_exception == PyExc_StopIteration || raised_exception == PyExc_GeneratorExit || PyErr_GivenExceptionMatches(raised_exception, PyExc_GeneratorExit) || PyErr_GivenExceptionMatches(raised_exception, PyExc_StopIteration)) { // ignore these errors if (raised_exception) PyErr_Clear(); Py_INCREF(Py_None); return Py_None; } return NULL; } static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; PyObject *typ; PyObject *tb = NULL; PyObject *val = NULL; PyObject *yf = gen->yieldfrom; if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb)) return NULL; if (unlikely(__Pyx_Coroutine_CheckRunning(gen))) return NULL; if (yf) { PyObject *ret; Py_INCREF(yf); if (PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit)) { int err = __Pyx_Coroutine_CloseIter(gen, yf); Py_DECREF(yf); __Pyx_Coroutine_Undelegate(gen); if (err < 0) return __Pyx_Coroutine_MethodReturn(__Pyx_Coroutine_SendEx(gen, NULL)); goto throw_here; } gen->is_running = 1; #ifdef __Pyx_Generator_USED if (__Pyx_Generator_CheckExact(yf)) { ret = __Pyx_Coroutine_Throw(yf, args); } else #endif #ifdef __Pyx_Coroutine_USED if (__Pyx_Coroutine_CheckExact(yf)) { ret = __Pyx_Coroutine_Throw(yf, args); } else #endif { PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, PYIDENT("throw")); if (unlikely(!meth)) { Py_DECREF(yf); if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { gen->is_running = 0; return 
NULL; } PyErr_Clear(); __Pyx_Coroutine_Undelegate(gen); gen->is_running = 0; goto throw_here; } ret = PyObject_CallObject(meth, args); Py_DECREF(meth); } gen->is_running = 0; Py_DECREF(yf); if (!ret) { ret = __Pyx_Coroutine_FinishDelegation(gen); } return __Pyx_Coroutine_MethodReturn(ret); } throw_here: __Pyx_Raise(typ, val, tb, NULL); return __Pyx_Coroutine_MethodReturn(__Pyx_Coroutine_SendEx(gen, NULL)); } static int __Pyx_Coroutine_traverse(PyObject *self, visitproc visit, void *arg) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; Py_VISIT(gen->closure); Py_VISIT(gen->classobj); Py_VISIT(gen->yieldfrom); Py_VISIT(gen->exc_type); Py_VISIT(gen->exc_value); Py_VISIT(gen->exc_traceback); return 0; } static int __Pyx_Coroutine_clear(PyObject *self) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; Py_CLEAR(gen->closure); Py_CLEAR(gen->classobj); Py_CLEAR(gen->yieldfrom); Py_CLEAR(gen->exc_type); Py_CLEAR(gen->exc_value); Py_CLEAR(gen->exc_traceback); Py_CLEAR(gen->gi_name); Py_CLEAR(gen->gi_qualname); return 0; } static void __Pyx_Coroutine_dealloc(PyObject *self) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; PyObject_GC_UnTrack(gen); if (gen->gi_weakreflist != NULL) PyObject_ClearWeakRefs(self); if (gen->resume_label > 0) { // Generator is paused, so we need to close PyObject_GC_Track(self); #if PY_VERSION_HEX >= 0x030400a1 if (PyObject_CallFinalizerFromDealloc(self)) #else Py_TYPE(gen)->tp_del(self); if (self->ob_refcnt > 0) #endif { // resurrected. :( return; } PyObject_GC_UnTrack(self); } __Pyx_Coroutine_clear(self); PyObject_GC_Del(gen); } static void __Pyx_Coroutine_del(PyObject *self) { PyObject *res; PyObject *error_type, *error_value, *error_traceback; __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; __Pyx_PyThreadState_declare if (gen->resume_label <= 0) return ; #if PY_VERSION_HEX < 0x030400a1 // Temporarily resurrect the object. assert(self->ob_refcnt == 0); self->ob_refcnt = 1; #endif // Save the current exception, if any. __Pyx_PyThreadState_assign __Pyx_ErrFetch(&error_type, &error_value, &error_traceback); res = __Pyx_Coroutine_Close(self); if (res == NULL) PyErr_WriteUnraisable(self); else Py_DECREF(res); // Restore the saved exception. __Pyx_ErrRestore(error_type, error_value, error_traceback); #if PY_VERSION_HEX < 0x030400a1 // Undo the temporary resurrection; can't use DECREF here, it would // cause a recursive call. assert(self->ob_refcnt > 0); if (--self->ob_refcnt == 0) { // this is the normal path out return; } // close() resurrected it! Make it look like the original Py_DECREF // never happened. { Py_ssize_t refcnt = self->ob_refcnt; _Py_NewReference(self); self->ob_refcnt = refcnt; } #if CYTHON_COMPILING_IN_CPYTHON assert(PyType_IS_GC(self->ob_type) && _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); // If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so // we need to undo that. _Py_DEC_REFTOTAL; #endif // If Py_TRACE_REFS, _Py_NewReference re-added self to the object // chain, so no more to do there. // If COUNT_ALLOCS, the original decref bumped tp_frees, and // _Py_NewReference bumped tp_allocs: both of those need to be // undone. 
#ifdef COUNT_ALLOCS --Py_TYPE(self)->tp_frees; --Py_TYPE(self)->tp_allocs; #endif #endif } static PyObject * __Pyx_Coroutine_get_name(__pyx_CoroutineObject *self) { PyObject *name = self->gi_name; // avoid NULL pointer dereference during garbage collection if (unlikely(!name)) name = Py_None; Py_INCREF(name); return name; } static int __Pyx_Coroutine_set_name(__pyx_CoroutineObject *self, PyObject *value) { PyObject *tmp; #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) { #else if (unlikely(value == NULL || !PyString_Check(value))) { #endif PyErr_SetString(PyExc_TypeError, "__name__ must be set to a string object"); return -1; } tmp = self->gi_name; Py_INCREF(value); self->gi_name = value; Py_XDECREF(tmp); return 0; } static PyObject * __Pyx_Coroutine_get_qualname(__pyx_CoroutineObject *self) { PyObject *name = self->gi_qualname; // avoid NULL pointer dereference during garbage collection if (unlikely(!name)) name = Py_None; Py_INCREF(name); return name; } static int __Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value) { PyObject *tmp; #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) { #else if (unlikely(value == NULL || !PyString_Check(value))) { #endif PyErr_SetString(PyExc_TypeError, "__qualname__ must be set to a string object"); return -1; } tmp = self->gi_qualname; Py_INCREF(value); self->gi_qualname = value; Py_XDECREF(tmp); return 0; } static __pyx_CoroutineObject *__Pyx__Coroutine_New( PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *closure, PyObject *name, PyObject *qualname, PyObject *module_name) { __pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type); if (gen == NULL) return NULL; gen->body = body; gen->closure = closure; Py_XINCREF(closure); gen->is_running = 0; gen->resume_label = 0; gen->classobj = NULL; gen->yieldfrom = NULL; gen->exc_type = NULL; gen->exc_value = NULL; gen->exc_traceback = NULL; gen->gi_weakreflist = NULL; Py_XINCREF(qualname); gen->gi_qualname = qualname; Py_XINCREF(name); gen->gi_name = name; Py_XINCREF(module_name); gen->gi_modulename = module_name; PyObject_GC_Track(gen); return gen; } //////////////////// Coroutine //////////////////// //@requires: CoroutineBase //@requires: PatchGeneratorABC typedef struct { PyObject_HEAD PyObject *coroutine; } __pyx_CoroutineAwaitObject; static void __Pyx_CoroutineAwait_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); Py_CLEAR(((__pyx_CoroutineAwaitObject*)self)->coroutine); PyObject_GC_Del(self); } static int __Pyx_CoroutineAwait_traverse(__pyx_CoroutineAwaitObject *self, visitproc visit, void *arg) { Py_VISIT(self->coroutine); return 0; } static int __Pyx_CoroutineAwait_clear(__pyx_CoroutineAwaitObject *self) { Py_CLEAR(self->coroutine); return 0; } static PyObject *__Pyx_CoroutineAwait_Next(__pyx_CoroutineAwaitObject *self) { return __Pyx_Generator_Next(self->coroutine); } static PyObject *__Pyx_CoroutineAwait_Send(__pyx_CoroutineAwaitObject *self, PyObject *value) { return __Pyx_Coroutine_Send(self->coroutine, value); } static PyObject *__Pyx_CoroutineAwait_Throw(__pyx_CoroutineAwaitObject *self, PyObject *args) { return __Pyx_Coroutine_Throw(self->coroutine, args); } static PyObject *__Pyx_CoroutineAwait_Close(__pyx_CoroutineAwaitObject *self) { return __Pyx_Coroutine_Close(self->coroutine); } static PyObject *__Pyx_CoroutineAwait_self(PyObject *self) { Py_INCREF(self); return self; } #if !CYTHON_COMPILING_IN_PYPY static PyObject *__Pyx_CoroutineAwait_no_new(CYTHON_UNUSED PyTypeObject 
*type, CYTHON_UNUSED PyObject *args, CYTHON_UNUSED PyObject *kwargs) { PyErr_SetString(PyExc_TypeError, "cannot instantiate type, use 'await coroutine' instead"); return NULL; } #endif static PyMethodDef __pyx_CoroutineAwait_methods[] = { {"send", (PyCFunction) __Pyx_CoroutineAwait_Send, METH_O, (char*) PyDoc_STR("send(arg) -> send 'arg' into coroutine,\nreturn next yielded value or raise StopIteration.")}, {"throw", (PyCFunction) __Pyx_CoroutineAwait_Throw, METH_VARARGS, (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in coroutine,\nreturn next yielded value or raise StopIteration.")}, {"close", (PyCFunction) __Pyx_CoroutineAwait_Close, METH_NOARGS, (char*) PyDoc_STR("close() -> raise GeneratorExit inside coroutine.")}, {0, 0, 0, 0} }; static PyTypeObject __pyx_CoroutineAwaitType_type = { PyVarObject_HEAD_INIT(0, 0) "coroutine_wrapper", /*tp_name*/ sizeof(__pyx_CoroutineAwaitObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor) __Pyx_CoroutineAwait_dealloc,/*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ 0, /*tp_as_async resp. tp_compare*/ 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ PyDoc_STR("A wrapper object implementing __await__ for coroutines."), /*tp_doc*/ (traverseproc) __Pyx_CoroutineAwait_traverse, /*tp_traverse*/ (inquiry) __Pyx_CoroutineAwait_clear, /*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ __Pyx_CoroutineAwait_self, /*tp_iter*/ (iternextfunc) __Pyx_CoroutineAwait_Next, /*tp_iternext*/ __pyx_CoroutineAwait_methods, /*tp_methods*/ 0 , /*tp_members*/ 0 , /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ #if !CYTHON_COMPILING_IN_PYPY __Pyx_CoroutineAwait_no_new, /*tp_new*/ #else 0, /*tp_new*/ #endif 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static CYTHON_INLINE PyObject *__Pyx__Coroutine_await(PyObject *coroutine) { __pyx_CoroutineAwaitObject *await = PyObject_GC_New(__pyx_CoroutineAwaitObject, __pyx_CoroutineAwaitType); if (unlikely(!await)) return NULL; Py_INCREF(coroutine); await->coroutine = coroutine; PyObject_GC_Track(await); return (PyObject*)await; } static PyObject *__Pyx_Coroutine_await(PyObject *coroutine) { if (unlikely(!coroutine || !__Pyx_Coroutine_CheckExact(coroutine))) { PyErr_SetString(PyExc_TypeError, "invalid input, expected coroutine"); return NULL; } return __Pyx__Coroutine_await(coroutine); } static void __Pyx_Coroutine_check_and_dealloc(PyObject *self) { __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; if (gen->resume_label == 0 && !PyErr_Occurred()) { // untrack dead object as we are executing Python code (which might trigger GC) PyObject_GC_UnTrack(self); #if PY_VERSION_HEX >= 0x03030000 || defined(PyErr_WarnFormat) PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname); PyErr_Clear(); /* just in case, must not keep a live exception during GC */ #else {PyObject *msg; char *cmsg; #if CYTHON_COMPILING_IN_PYPY msg = NULL; cmsg = (char*) "coroutine was never awaited"; #else char *cname; PyObject *qualname; #if PY_MAJOR_VERSION >= 3 qualname = PyUnicode_AsUTF8String(gen->gi_qualname); if (likely(qualname)) { cname 
= PyBytes_AS_STRING(qualname); } else { PyErr_Clear(); cname = (char*) "?"; } msg = PyBytes_FromFormat( #else qualname = gen->gi_qualname; cname = PyString_AS_STRING(qualname); msg = PyString_FromFormat( #endif "coroutine '%.50s' was never awaited", cname); #if PY_MAJOR_VERSION >= 3 Py_XDECREF(qualname); #endif if (unlikely(!msg)) { PyErr_Clear(); cmsg = (char*) "coroutine was never awaited"; } else { #if PY_MAJOR_VERSION >= 3 cmsg = PyBytes_AS_STRING(msg); #else cmsg = PyString_AS_STRING(msg); #endif } #endif if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0)) PyErr_WriteUnraisable(self); Py_XDECREF(msg);} #endif PyObject_GC_Track(self); } __Pyx_Coroutine_dealloc(self); } #if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 static PyObject *__Pyx_Coroutine_compare(PyObject *obj, PyObject *other, int op) { PyObject* result; switch (op) { case Py_EQ: result = (other == obj) ? Py_True : Py_False; break; case Py_NE: result = (other != obj) ? Py_True : Py_False; break; default: result = Py_NotImplemented; } Py_INCREF(result); return result; } #endif static PyMethodDef __pyx_Coroutine_methods[] = { {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, (char*) PyDoc_STR("send(arg) -> send 'arg' into coroutine,\nreturn next iterated value or raise StopIteration.")}, {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in coroutine,\nreturn next iterated value or raise StopIteration.")}, {"close", (PyCFunction) __Pyx_Coroutine_Close, METH_NOARGS, (char*) PyDoc_STR("close() -> raise GeneratorExit inside coroutine.")}, #if PY_VERSION_HEX < 0x030500B1 {"__await__", (PyCFunction) __Pyx_Coroutine_await, METH_NOARGS, (char*) PyDoc_STR("__await__() -> return an iterator to be used in await expression.")}, #endif {0, 0, 0, 0} }; static PyMemberDef __pyx_Coroutine_memberlist[] = { {(char *) "cr_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, {(char*) "cr_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, (char*) PyDoc_STR("object being awaited, or None")}, {(char *) "__module__", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_modulename), PY_WRITE_RESTRICTED, 0}, {0, 0, 0, 0, 0} }; static PyGetSetDef __pyx_Coroutine_getsets[] = { {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, (char*) PyDoc_STR("name of the coroutine"), 0}, {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, (char*) PyDoc_STR("qualified name of the coroutine"), 0}, {0, 0, 0, 0, 0} }; #if CYTHON_USE_ASYNC_SLOTS static __Pyx_PyAsyncMethodsStruct __pyx_Coroutine_as_async = { __Pyx_Coroutine_await, /*am_await*/ 0, /*am_aiter*/ 0, /*am_anext*/ }; #endif static PyTypeObject __pyx_CoroutineType_type = { PyVarObject_HEAD_INIT(0, 0) "coroutine", /*tp_name*/ sizeof(__pyx_CoroutineObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor) __Pyx_Coroutine_check_and_dealloc,/*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if CYTHON_USE_ASYNC_SLOTS &__pyx_Coroutine_as_async, /*tp_as_async (tp_reserved) - Py3 only! 
*/ #else 0, /*tp_reserved*/ #endif 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ 0, /*tp_doc*/ (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/ 0, /*tp_clear*/ #if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare __Pyx_Coroutine_compare, /*tp_richcompare*/ #else 0, /*tp_richcompare*/ #endif offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/ // no tp_iter() as iterator is only available through __await__() 0, /*tp_iter*/ 0, /*tp_iternext*/ __pyx_Coroutine_methods, /*tp_methods*/ __pyx_Coroutine_memberlist, /*tp_members*/ __pyx_Coroutine_getsets, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ 0, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_del*/ #else __Pyx_Coroutine_del, /*tp_del*/ #endif 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 __Pyx_Coroutine_del, /*tp_finalize*/ #endif }; static int __pyx_Coroutine_init(void) { // on Windows, C-API functions can't be used in slots statically __pyx_CoroutineType_type.tp_getattro = PyObject_GenericGetAttr; __pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type); if (unlikely(!__pyx_CoroutineType)) return -1; __pyx_CoroutineAwaitType = __Pyx_FetchCommonType(&__pyx_CoroutineAwaitType_type); if (unlikely(!__pyx_CoroutineAwaitType)) return -1; return 0; } //////////////////// Generator //////////////////// //@requires: CoroutineBase //@requires: PatchGeneratorABC static PyMethodDef __pyx_Generator_methods[] = { {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, (char*) PyDoc_STR("send(arg) -> send 'arg' into generator,\nreturn next yielded value or raise StopIteration.")}, {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in generator,\nreturn next yielded value or raise StopIteration.")}, {"close", (PyCFunction) __Pyx_Coroutine_Close, METH_NOARGS, (char*) PyDoc_STR("close() -> raise GeneratorExit inside generator.")}, {0, 0, 0, 0} }; static PyMemberDef __pyx_Generator_memberlist[] = { {(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, {(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, (char*) PyDoc_STR("object being iterated by 'yield from', or None")}, {0, 0, 0, 0, 0} }; static PyGetSetDef __pyx_Generator_getsets[] = { {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, (char*) PyDoc_STR("name of the generator"), 0}, {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, (char*) PyDoc_STR("qualified name of the generator"), 0}, {0, 0, 0, 0, 0} }; static PyTypeObject __pyx_GeneratorType_type = { PyVarObject_HEAD_INIT(0, 0) "generator", /*tp_name*/ sizeof(__pyx_CoroutineObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ 0, /*tp_compare / tp_as_async*/ 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, 
/*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ 0, /*tp_doc*/ (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/ 0, /*tp_clear*/ 0, /*tp_richcompare*/ offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/ 0, /*tp_iter*/ (iternextfunc) __Pyx_Generator_Next, /*tp_iternext*/ __pyx_Generator_methods, /*tp_methods*/ __pyx_Generator_memberlist, /*tp_members*/ __pyx_Generator_getsets, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ 0, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_del*/ #else __Pyx_Coroutine_del, /*tp_del*/ #endif 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 __Pyx_Coroutine_del, /*tp_finalize*/ #endif }; static int __pyx_Generator_init(void) { // on Windows, C-API functions can't be used in slots statically __pyx_GeneratorType_type.tp_getattro = PyObject_GenericGetAttr; __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; __pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type); if (unlikely(!__pyx_GeneratorType)) { return -1; } return 0; } /////////////// ReturnWithStopIteration.proto /////////////// #define __Pyx_ReturnWithStopIteration(value) \ if (value == Py_None) PyErr_SetNone(PyExc_StopIteration); else __Pyx__ReturnWithStopIteration(value) static void __Pyx__ReturnWithStopIteration(PyObject* value); /*proto*/ /////////////// ReturnWithStopIteration /////////////// //@requires: Exceptions.c::PyErrFetchRestore //@requires: Exceptions.c::PyThreadStateGet //@substitute: naming // 1) Instantiating an exception just to pass back a value is costly. // 2) CPython 3.3 <= x < 3.5b1 crash in yield-from when the StopIteration is not instantiated. // 3) Passing a tuple as value into PyErr_SetObject() passes its items on as arguments. // 4) If there is currently an exception being handled, we need to chain it. 
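// [Added reading aid, not from the original source] At the Python level, the
// effect that __Pyx__ReturnWithStopIteration() has to reproduce is roughly
// the following, for a plain generator that ends in a "return" statement:
//
//     def gen():
//         yield 1
//         return "done"              # surfaces as StopIteration("done")
//
//     it = gen()
//     next(it)                       # -> 1
//     try:
//         next(it)
//     except StopIteration as exc:
//         assert exc.value == "done"
//
// Points 1-4 above explain the special cases handled below: the helper skips
// instantiating the exception where the target CPython version allows it,
// wraps tuple return values so that PyErr_SetObject() does not treat their
// items as separate constructor arguments, and falls back to regular
// exception setting (which chains) when another exception is already being
// handled.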
static void __Pyx__ReturnWithStopIteration(PyObject* value) { PyObject *exc, *args; #if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_PYSTON __Pyx_PyThreadState_declare if ((PY_VERSION_HEX >= 0x03030000 && PY_VERSION_HEX < 0x030500B1) || unlikely(PyTuple_Check(value))) { args = PyTuple_New(1); if (unlikely(!args)) return; Py_INCREF(value); PyTuple_SET_ITEM(args, 0, value); exc = PyType_Type.tp_call(PyExc_StopIteration, args, NULL); Py_DECREF(args); if (!exc) return; } else { // it's safe to avoid instantiating the exception Py_INCREF(value); exc = value; } __Pyx_PyThreadState_assign if (!$local_tstate_cname->exc_type) { // no chaining needed => avoid the overhead in PyErr_SetObject() Py_INCREF(PyExc_StopIteration); __Pyx_ErrRestore(PyExc_StopIteration, exc, NULL); return; } #else args = PyTuple_Pack(1, value); if (unlikely(!args)) return; exc = PyObject_Call(PyExc_StopIteration, args, NULL); Py_DECREF(args); if (unlikely(!exc)) return; #endif PyErr_SetObject(PyExc_StopIteration, exc); Py_DECREF(exc); } //////////////////// PatchModuleWithCoroutine.proto //////////////////// static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); /*proto*/ //////////////////// PatchModuleWithCoroutine //////////////////// //@substitute: naming static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) int result; PyObject *globals, *result_obj; globals = PyDict_New(); if (unlikely(!globals)) goto ignore; result = PyDict_SetItemString(globals, "_cython_coroutine_type", #ifdef __Pyx_Coroutine_USED (PyObject*)__pyx_CoroutineType); #else Py_None); #endif if (unlikely(result < 0)) goto ignore; result = PyDict_SetItemString(globals, "_cython_generator_type", #ifdef __Pyx_Generator_USED (PyObject*)__pyx_GeneratorType); #else Py_None); #endif if (unlikely(result < 0)) goto ignore; if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; if (unlikely(PyDict_SetItemString(globals, "__builtins__", $builtins_cname) < 0)) goto ignore; result_obj = PyRun_String(py_code, Py_file_input, globals, globals); if (unlikely(!result_obj)) goto ignore; Py_DECREF(result_obj); Py_DECREF(globals); return module; ignore: Py_XDECREF(globals); PyErr_WriteUnraisable(module); if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 0)) { Py_DECREF(module); module = NULL; } #else // avoid "unused" warning py_code++; #endif return module; } //////////////////// PatchGeneratorABC.proto //////////////////// // register with Generator/Coroutine ABCs in 'collections.abc' // see https://bugs.python.org/issue24018 static int __Pyx_patch_abc(void); /*proto*/ //////////////////// PatchGeneratorABC //////////////////// //@requires: PatchModuleWithCoroutine #ifndef CYTHON_REGISTER_ABCS #define CYTHON_REGISTER_ABCS 1 #endif #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) static PyObject* __Pyx_patch_abc_module(PyObject *module); /*proto*/ static PyObject* __Pyx_patch_abc_module(PyObject *module) { module = __Pyx_Coroutine_patch_module( module, CSTRING("""\ if _cython_generator_type is not None: try: Generator = _module.Generator except AttributeError: pass else: Generator.register(_cython_generator_type) if _cython_coroutine_type is not None: try: Coroutine = _module.Coroutine except AttributeError: pass else: Coroutine.register(_cython_coroutine_type) """) ); return module; } #endif static int __Pyx_patch_abc(void) { #if 
defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) static int abc_patched = 0; if (CYTHON_REGISTER_ABCS && !abc_patched) { PyObject *module; module = PyImport_ImportModule((PY_VERSION_HEX >= 0x03030000) ? "collections.abc" : "collections"); if (!module) { PyErr_WriteUnraisable(NULL); if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, ((PY_VERSION_HEX >= 0x03030000) ? "Cython module failed to register with collections.abc module" : "Cython module failed to register with collections module"), 1) < 0)) { return -1; } } else { module = __Pyx_patch_abc_module(module); abc_patched = 1; if (unlikely(!module)) return -1; Py_DECREF(module); } // also register with "backports_abc" module if available, just in case module = PyImport_ImportModule("backports_abc"); if (module) { module = __Pyx_patch_abc_module(module); Py_XDECREF(module); } if (!module) { PyErr_Clear(); } } #else // avoid "unused" warning for __Pyx_Coroutine_patch_module() if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL); #endif return 0; } //////////////////// PatchAsyncIO.proto //////////////////// // run after importing "asyncio" to patch Cython generator support into it static PyObject* __Pyx_patch_asyncio(PyObject* module); /*proto*/ //////////////////// PatchAsyncIO //////////////////// //@requires: ImportExport.c::Import //@requires: PatchModuleWithCoroutine //@requires: PatchInspect static PyObject* __Pyx_patch_asyncio(PyObject* module) { #if PY_VERSION_HEX < 0x030500B2 && \ (defined(__Pyx_Coroutine_USED) || defined(__Pyx_Generator_USED)) && \ (!defined(CYTHON_PATCH_ASYNCIO) || CYTHON_PATCH_ASYNCIO) PyObject *patch_module = NULL; static int asyncio_patched = 0; if (unlikely((!asyncio_patched) && module)) { PyObject *package; package = __Pyx_Import(PYIDENT("asyncio.coroutines"), NULL, 0); if (package) { patch_module = __Pyx_Coroutine_patch_module( PyObject_GetAttrString(package, "coroutines"), CSTRING("""\ try: coro_types = _module._COROUTINE_TYPES except AttributeError: pass else: if _cython_coroutine_type is not None and _cython_coroutine_type not in coro_types: coro_types = tuple(coro_types) + (_cython_coroutine_type,) if _cython_generator_type is not None and _cython_generator_type not in coro_types: coro_types = tuple(coro_types) + (_cython_generator_type,) _module._COROUTINE_TYPES = coro_types """) ); } else { PyErr_Clear(); #if PY_VERSION_HEX < 0x03040200 // Py3.4.1 used to have asyncio.tasks instead of asyncio.coroutines package = __Pyx_Import(PYIDENT("asyncio.tasks"), NULL, 0); if (unlikely(!package)) goto asyncio_done; patch_module = __Pyx_Coroutine_patch_module( PyObject_GetAttrString(package, "tasks"), CSTRING("""\ if hasattr(_module, 'iscoroutine'): old_types = getattr(_module.iscoroutine, '_cython_coroutine_types', None) if old_types is None or not isinstance(old_types, set): old_types = set() def cy_wrap(orig_func, type=type, cython_coroutine_types=old_types): def cy_iscoroutine(obj): return type(obj) in cython_coroutine_types or orig_func(obj) cy_iscoroutine._cython_coroutine_types = cython_coroutine_types return cy_iscoroutine _module.iscoroutine = cy_wrap(_module.iscoroutine) if _cython_coroutine_type is not None: old_types.add(_cython_coroutine_type) if _cython_generator_type is not None: old_types.add(_cython_generator_type) """) ); #endif // Py < 0x03040200 } Py_DECREF(package); if (unlikely(!patch_module)) goto ignore; #if PY_VERSION_HEX < 0x03040200 asyncio_done: PyErr_Clear(); #endif asyncio_patched = 1; #ifdef __Pyx_Generator_USED // now patch inspect.isgenerator() by looking up the imported 
module in the patched asyncio module { PyObject *inspect_module; if (patch_module) { inspect_module = PyObject_GetAttr(patch_module, PYIDENT("inspect")); Py_DECREF(patch_module); } else { inspect_module = __Pyx_Import(PYIDENT("inspect"), NULL, 0); } if (unlikely(!inspect_module)) goto ignore; inspect_module = __Pyx_patch_inspect(inspect_module); if (unlikely(!inspect_module)) { Py_DECREF(module); module = NULL; } Py_XDECREF(inspect_module); } #else // avoid "unused" warning for __Pyx_patch_inspect() if ((0)) return __Pyx_patch_inspect(module); #endif } return module; ignore: PyErr_WriteUnraisable(module); if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch asyncio package with custom generator type", 1) < 0)) { Py_DECREF(module); module = NULL; } #else // avoid "unused" warning for __Pyx_Coroutine_patch_module() if ((0)) return __Pyx_patch_inspect(__Pyx_Coroutine_patch_module(module, NULL)); #endif return module; } //////////////////// PatchInspect.proto //////////////////// // run after importing "inspect" to patch Cython generator support into it static PyObject* __Pyx_patch_inspect(PyObject* module); /*proto*/ //////////////////// PatchInspect //////////////////// //@requires: PatchModuleWithCoroutine static PyObject* __Pyx_patch_inspect(PyObject* module) { #if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_INSPECT) || CYTHON_PATCH_INSPECT) static int inspect_patched = 0; if (unlikely((!inspect_patched) && module)) { module = __Pyx_Coroutine_patch_module( module, CSTRING("""\ old_types = getattr(_module.isgenerator, '_cython_generator_types', None) if old_types is None or not isinstance(old_types, set): old_types = set() def cy_wrap(orig_func, type=type, cython_generator_types=old_types): def cy_isgenerator(obj): return type(obj) in cython_generator_types or orig_func(obj) cy_isgenerator._cython_generator_types = cython_generator_types return cy_isgenerator _module.isgenerator = cy_wrap(_module.isgenerator) old_types.add(_cython_generator_type) """) ); inspect_patched = 1; } #else // avoid "unused" warning for __Pyx_Coroutine_patch_module() if ((0)) return __Pyx_Coroutine_patch_module(module, NULL); #endif return module; } //////////////////// StopAsyncIteration.proto //////////////////// #define __Pyx_StopAsyncIteration_USED static PyObject *__Pyx_PyExc_StopAsyncIteration; static int __pyx_StopAsyncIteration_init(void); /*proto*/ //////////////////// StopAsyncIteration //////////////////// #if PY_VERSION_HEX < 0x030500B1 static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = { PyVarObject_HEAD_INIT(0, 0) "StopAsyncIteration", /*tp_name*/ sizeof(PyBaseExceptionObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ 0, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ 0, /*tp_compare / reserved*/ 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ PyDoc_STR("Signal the end from iterator.__anext__()."), /*tp_doc*/ 0, /*tp_traverse*/ 0, /*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ 0, /*tp_iter*/ 0, /*tp_iternext*/ 0, /*tp_methods*/ 0, /*tp_members*/ 0, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ 0, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 
0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; #endif static int __pyx_StopAsyncIteration_init(void) { #if PY_VERSION_HEX >= 0x030500B1 __Pyx_PyExc_StopAsyncIteration = PyExc_StopAsyncIteration; #else PyObject *builtins = PyEval_GetBuiltins(); if (likely(builtins)) { PyObject *exc = PyMapping_GetItemString(builtins, (char*) "StopAsyncIteration"); if (exc) { __Pyx_PyExc_StopAsyncIteration = exc; return 0; } } PyErr_Clear(); __Pyx__PyExc_StopAsyncIteration_type.tp_traverse = ((PyTypeObject*)PyExc_BaseException)->tp_traverse; __Pyx__PyExc_StopAsyncIteration_type.tp_clear = ((PyTypeObject*)PyExc_BaseException)->tp_clear; __Pyx__PyExc_StopAsyncIteration_type.tp_dictoffset = ((PyTypeObject*)PyExc_BaseException)->tp_dictoffset; __Pyx__PyExc_StopAsyncIteration_type.tp_base = (PyTypeObject*)PyExc_Exception; __Pyx_PyExc_StopAsyncIteration = (PyObject*) __Pyx_FetchCommonType(&__Pyx__PyExc_StopAsyncIteration_type); if (unlikely(!__Pyx_PyExc_StopAsyncIteration)) return -1; if (builtins && unlikely(PyMapping_SetItemString(builtins, (char*) "StopAsyncIteration", __Pyx_PyExc_StopAsyncIteration) < 0)) return -1; #endif return 0; } Cython-0.26.1/Cython/Utility/TestCythonScope.pyx0000664000175000017500000000307312542002467022404 0ustar stefanstefan00000000000000########## TestClass ########## # These utilities are for testing purposes cdef extern from *: cdef object __pyx_test_dep(object) @cname('__pyx_TestClass') cdef class TestClass(object): cdef public int value def __init__(self, int value): self.value = value def __str__(self): return 'TestClass(%d)' % self.value cdef cdef_method(self, int value): print 'Hello from cdef_method', value cpdef cpdef_method(self, int value): print 'Hello from cpdef_method', value def def_method(self, int value): print 'Hello from def_method', value @cname('cdef_cname') cdef cdef_cname_method(self, int value): print "Hello from cdef_cname_method", value @cname('cpdef_cname') cpdef cpdef_cname_method(self, int value): print "Hello from cpdef_cname_method", value @cname('def_cname') def def_cname_method(self, int value): print "Hello from def_cname_method", value @cname('__pyx_test_call_other_cy_util') cdef test_call(obj): print 'test_call' __pyx_test_dep(obj) @cname('__pyx_TestClass_New') cdef _testclass_new(int value): return TestClass(value) ########### TestDep ########## @cname('__pyx_test_dep') cdef test_dep(obj): print 'test_dep', obj ########## TestScope ########## @cname('__pyx_testscope') cdef object _testscope(int value): return "hello from cython scope, value=%d" % value ########## View.TestScope ########## @cname('__pyx_view_testscope') cdef object _testscope(int value): return "hello from cython.view scope, value=%d" % value Cython-0.26.1/Cython/Utility/CpdefEnums.pyx0000664000175000017500000000354513023021033021324 0ustar stefanstefan00000000000000#################### EnumBase #################### cimport cython cdef extern from *: int PY_VERSION_HEX cdef object __Pyx_OrderedDict if PY_VERSION_HEX >= 0x02070000: from collections import OrderedDict as __Pyx_OrderedDict else: __Pyx_OrderedDict = dict @cython.internal cdef class __Pyx_EnumMeta(type): def __init__(cls, name, parents, dct): type.__init__(cls, name, parents, dct) cls.__members__ = __Pyx_OrderedDict() def __iter__(cls): return iter(cls.__members__.values()) def __getitem__(cls, name): return cls.__members__[name] # @cython.internal cdef object __Pyx_EnumBase class __Pyx_EnumBase(int): __metaclass__ = __Pyx_EnumMeta def __new__(cls, value, name=None): for v in cls: if 
v == value: return v if name is None: raise ValueError("Unknown enum value: '%s'" % value) res = int.__new__(cls, value) res.name = name setattr(cls, name, res) cls.__members__[name] = res return res def __repr__(self): return "<%s.%s: %d>" % (self.__class__.__name__, self.name, self) def __str__(self): return "%s.%s" % (self.__class__.__name__, self.name) if PY_VERSION_HEX >= 0x03040000: from enum import IntEnum as __Pyx_EnumBase #################### EnumType #################### #@requires: EnumBase cdef dict __Pyx_globals = globals() if PY_VERSION_HEX >= 0x03040000: # create new IntEnum() {{name}} = __Pyx_EnumBase('{{name}}', __Pyx_OrderedDict([ {{for item in items}} ('{{item}}', {{item}}), {{endfor}} ])) {{for item in items}} __Pyx_globals['{{item}}'] = {{name}}.{{item}} {{endfor}} else: class {{name}}(__Pyx_EnumBase): pass {{for item in items}} __Pyx_globals['{{item}}'] = {{name}}({{item}}, '{{item}}') {{endfor}} Cython-0.26.1/Cython/Utility/TypeConversion.c0000664000175000017500000007735013143605603021707 0ustar stefanstefan00000000000000/////////////// TypeConversions.proto /////////////// /* Type Conversion Predeclarations */ #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) ( \ (sizeof(type) < sizeof(Py_ssize_t)) || \ (sizeof(type) > sizeof(Py_ssize_t) && \ likely(v < (type)PY_SSIZE_T_MAX || \ v == (type)PY_SSIZE_T_MAX) && \ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN || \ v == (type)PY_SSIZE_T_MIN))) || \ (sizeof(type) == sizeof(Py_ssize_t) && \ (is_signed || likely(v < (type)PY_SSIZE_T_MAX || \ v == (type)PY_SSIZE_T_MAX))) ) // fast and unsafe abs(Py_ssize_t) that ignores the overflow for (-PY_SSIZE_T_MAX-1) #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) && defined (_M_X64) // abs() is defined for long, but 64-bits type on MSVC is long long. // Use MS-specific _abs64 instead. #define __Pyx_sst_abs(value) _abs64(value) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) // gcc or clang on 64 bit windows. #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) #if PY_MAJOR_VERSION < 3 static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #else #define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen #endif #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) #define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) // __PYX_DEFAULT_STRING_ENCODING is either a user provided string constant // or we need to look it up here #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /////////////// TypeConversions /////////////// /* Type Conversion Functions */ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } // Py3.7 returns a "const char*" for unicode strings static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } // Py3.7 returns a "const char*" for unicode strings static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 
__Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { #if PY_VERSION_HEX < 0x03030000 char* defenc_c; // borrowed reference, cached internally in 'o' by CPython PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { // raise the error PyUnicode_AsASCIIString(o); return NULL; } } } #endif /*__PYX_DEFAULT_STRING_ENCODING_IS_ASCII*/ *length = PyBytes_GET_SIZE(defenc); return defenc_c; #else /* PY_VERSION_HEX < 0x03030000 */ if (__Pyx_PyUnicode_READY(o) == -1) return NULL; #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (PyUnicode_IS_ASCII(o)) { // cached for the lifetime of the object *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { // raise the error PyUnicode_AsASCIIString(o); return NULL; } #else /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ return PyUnicode_AsUTF8AndSize(o, length); #endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ #endif /* PY_VERSION_HEX < 0x03030000 */ } else #endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */ #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } /* Note: __Pyx_PyObject_IsTrue is written to minimize branching. */ static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (PyInt_Check(x) || PyLong_Check(x)) #else if (PyLong_Check(x)) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = PyNumber_Int(x); } else if (m && m->nb_long) { name = "long"; res = PyNumber_Long(x); } #else if (m && m->nb_int) { name = "int"; res = PyNumber_Long(x); } #endif #else res = PyNumber_Int(x); #endif if (res) { #if PY_MAJOR_VERSION < 3 if (!PyInt_Check(res) && !PyLong_Check(res)) { #else if (!PyLong_Check(res)) { #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type %.200s)", name, name, Py_TYPE(res)->tp_name); Py_DECREF(res); return NULL; } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } {{py: from Cython.Utility import pylong_join }} static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(x); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)b)->ob_digit; const Py_ssize_t size = Py_SIZE(b); // handle most common case first to avoid indirect branch and optimise branch prediction if (likely(__Pyx_sst_abs(size) <= 1)) { ival = likely(size) ? 
digits[0] : 0; if (size == -1) ival = -ival; return ival; } else { switch (size) { {{for _size in (2, 3, 4)}} {{for _case in (_size, -_size)}} case {{_case}}: if (8 * sizeof(Py_ssize_t) > {{_size}} * PyLong_SHIFT) { return {{'-' if _case < 0 else ''}}(Py_ssize_t) {{pylong_join(_size, 'digits', 'size_t')}}; } break; {{endfor}} {{endfor}} } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } /////////////// ToPyCTupleUtility.proto /////////////// static PyObject* {{funcname}}({{struct_type_decl}}); /////////////// ToPyCTupleUtility /////////////// static PyObject* {{funcname}}({{struct_type_decl}} value) { PyObject* item = NULL; PyObject* result = PyTuple_New({{size}}); if (!result) goto bad; {{for ix, component in enumerate(components):}} {{py:attr = "value.f%s" % ix}} item = {{component.to_py_function}}({{attr}}); if (!item) goto bad; PyTuple_SET_ITEM(result, {{ix}}, item); {{endfor}} return result; bad: Py_XDECREF(item); Py_XDECREF(result); return NULL; } /////////////// FromPyCTupleUtility.proto /////////////// static {{struct_type_decl}} {{funcname}}(PyObject *); /////////////// FromPyCTupleUtility /////////////// static {{struct_type_decl}} {{funcname}}(PyObject * o) { {{struct_type_decl}} result; if (!PyTuple_Check(o) || PyTuple_GET_SIZE(o) != {{size}}) { PyErr_Format(PyExc_TypeError, "Expected %.16s of size %d, got %.200s", "a tuple", {{size}}, Py_TYPE(o)->tp_name); goto bad; } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS {{for ix, component in enumerate(components):}} {{py:attr = "result.f%s" % ix}} {{attr}} = {{component.from_py_function}}(PyTuple_GET_ITEM(o, {{ix}})); if ({{component.error_condition(attr)}}) goto bad; {{endfor}} #else { PyObject *item; {{for ix, component in enumerate(components):}} {{py:attr = "result.f%s" % ix}} item = PySequence_ITEM(o, {{ix}}); if (unlikely(!item)) goto bad; {{attr}} = {{component.from_py_function}}(item); Py_DECREF(item); if ({{component.error_condition(attr)}}) goto bad; {{endfor}} } #endif return result; bad: return result; } /////////////// UnicodeAsUCS4.proto /////////////// static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject*); /////////////// UnicodeAsUCS4 /////////////// static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject* x) { Py_ssize_t length; #if CYTHON_PEP393_ENABLED length = PyUnicode_GET_LENGTH(x); if (likely(length == 1)) { return PyUnicode_READ_CHAR(x, 0); } #else length = PyUnicode_GET_SIZE(x); if (likely(length == 1)) { return PyUnicode_AS_UNICODE(x)[0]; } #if Py_UNICODE_SIZE == 2 else if (PyUnicode_GET_SIZE(x) == 2) { Py_UCS4 high_val = PyUnicode_AS_UNICODE(x)[0]; if (high_val >= 0xD800 && high_val <= 0xDBFF) { Py_UCS4 low_val = PyUnicode_AS_UNICODE(x)[1]; if (low_val >= 0xDC00 && low_val <= 0xDFFF) { return 0x10000 + (((high_val & ((1<<10)-1)) << 10) | (low_val & ((1<<10)-1))); } } } #endif #endif PyErr_Format(PyExc_ValueError, "only single character unicode strings can be converted to Py_UCS4, " "got length %" CYTHON_FORMAT_SSIZE_T "d", length); return (Py_UCS4)-1; } /////////////// ObjectAsUCS4.proto /////////////// //@requires: UnicodeAsUCS4 #define __Pyx_PyObject_AsPy_UCS4(x) \ (likely(PyUnicode_Check(x)) ? 
__Pyx_PyUnicode_AsPy_UCS4(x) : __Pyx__PyObject_AsPy_UCS4(x)) static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject*); /////////////// ObjectAsUCS4 /////////////// static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject* x) { long ival; ival = __Pyx_PyInt_As_long(x); if (unlikely(ival < 0)) { if (!PyErr_Occurred()) PyErr_SetString(PyExc_OverflowError, "cannot convert negative value to Py_UCS4"); return (Py_UCS4)-1; } else if (unlikely(ival > 1114111)) { PyErr_SetString(PyExc_OverflowError, "value too large to convert to Py_UCS4"); return (Py_UCS4)-1; } return (Py_UCS4)ival; } /////////////// ObjectAsPyUnicode.proto /////////////// static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject*); /////////////// ObjectAsPyUnicode /////////////// static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject* x) { long ival; #if CYTHON_PEP393_ENABLED #if Py_UNICODE_SIZE > 2 const long maxval = 1114111; #else const long maxval = 65535; #endif #else static long maxval = 0; #endif if (PyUnicode_Check(x)) { if (unlikely(__Pyx_PyUnicode_GET_LENGTH(x) != 1)) { PyErr_Format(PyExc_ValueError, "only single character unicode strings can be converted to Py_UNICODE, " "got length %" CYTHON_FORMAT_SSIZE_T "d", __Pyx_PyUnicode_GET_LENGTH(x)); return (Py_UNICODE)-1; } #if CYTHON_PEP393_ENABLED ival = PyUnicode_READ_CHAR(x, 0); #else return PyUnicode_AS_UNICODE(x)[0]; #endif } else { #if !CYTHON_PEP393_ENABLED if (unlikely(!maxval)) maxval = (long)PyUnicode_GetMax(); #endif ival = __Pyx_PyInt_As_long(x); } if (unlikely(ival < 0)) { if (!PyErr_Occurred()) PyErr_SetString(PyExc_OverflowError, "cannot convert negative value to Py_UNICODE"); return (Py_UNICODE)-1; } else if (unlikely(ival > maxval)) { PyErr_SetString(PyExc_OverflowError, "value too large to convert to Py_UNICODE"); return (Py_UNICODE)-1; } return (Py_UNICODE)ival; } /////////////// CIntToPy.proto /////////////// static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value); /////////////// CIntToPy /////////////// static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value) { const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof({{TYPE}}) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof({{TYPE}}) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof({{TYPE}}) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof({{TYPE}}), little, !is_unsigned); } } /////////////// CIntToDigits /////////////// static const char DIGIT_PAIRS_10[2*10*10+1] = { "00010203040506070809" "10111213141516171819" "20212223242526272829" "30313233343536373839" "40414243444546474849" "50515253545556575859" "60616263646566676869" "70717273747576777879" "80818283848586878889" "90919293949596979899" }; static const char DIGIT_PAIRS_8[2*8*8+1] = { "0001020304050607" "1011121314151617" "2021222324252627" "3031323334353637" "4041424344454647" "5051525354555657" "6061626364656667" "7071727374757677" }; static const char DIGITS_HEX[2*16+1] = { 
"0123456789abcdef0123456789ABCDEF" }; /////////////// CIntToPyUnicode.proto /////////////// static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t width, char padding_char, char format_char); /////////////// CIntToPyUnicode /////////////// //@requires: StringTools.c::BuildPyUnicode //@requires: CIntToDigits #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned short uint16_t; #else typedef unsigned __int16 uint16_t; #endif #endif #else #include #endif // NOTE: inlining because most arguments are constant, which collapses lots of code below static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t width, char padding_char, char format_char) { // simple and conservative C string allocation on the stack: each byte gives at most 3 digits, plus sign char digits[sizeof({{TYPE}})*3+2]; // 'dpos' points to end of digits array + 1 initially to allow for pre-decrement looping char *dpos, *end = digits + sizeof({{TYPE}})*3+2; const char *hex_digits = DIGITS_HEX; Py_ssize_t ulength; int length, prepend_sign, last_one_off; {{TYPE}} remaining; const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0; const int is_unsigned = neg_one > const_zero; if (format_char == 'X') { hex_digits += 16; format_char = 'x'; }; // surprise: even trivial sprintf() calls don't get optimised in gcc (4.8) remaining = value; /* not using abs(value) to avoid overflow problems */ last_one_off = 0; dpos = end; while (remaining != 0) { int digit_pos; switch (format_char) { case 'o': digit_pos = abs((int)(remaining % (8*8))); remaining = remaining / (8*8); dpos -= 2; *(uint16_t*)dpos = ((uint16_t*)DIGIT_PAIRS_8)[digit_pos]; /* copy 2 digits at a time */ last_one_off = (digit_pos < 8); break; case 'd': digit_pos = abs((int)(remaining % (10*10))); remaining = remaining / (10*10); dpos -= 2; *(uint16_t*)dpos = ((uint16_t*)DIGIT_PAIRS_10)[digit_pos]; /* copy 2 digits at a time */ last_one_off = (digit_pos < 10); break; case 'x': *(--dpos) = hex_digits[abs((int)(remaining % 16))]; remaining = remaining / 16; break; default: assert(0); break; } } if (last_one_off) { assert(*dpos == '0'); dpos++; } else if (unlikely(dpos == end)) { *(--dpos) = '0'; } length = end - dpos; ulength = length; prepend_sign = 0; if (!is_unsigned && value <= neg_one) { if (padding_char == ' ' || width <= length + 1) { *(--dpos) = '-'; ++length; } else { prepend_sign = 1; } ++ulength; } if (width > ulength) { ulength = width; } // single character unicode strings are cached in CPython => use PyUnicode_FromOrdinal() for them if (ulength == 1) { return PyUnicode_FromOrdinal(*dpos); } return __Pyx_PyUnicode_BuildFromAscii(ulength, dpos, length, prepend_sign, padding_char); } /////////////// CBIntToPyUnicode.proto /////////////// #define {{TO_PY_FUNCTION}}(value) \ ((value) ? 
__Pyx_NewRef({{TRUE_CONST}}) : __Pyx_NewRef({{FALSE_CONST}})) /////////////// PyIntFromDouble.proto /////////////// #if PY_MAJOR_VERSION < 3 static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value); #else #define __Pyx_PyInt_FromDouble(value) PyLong_FromDouble(value) #endif /////////////// PyIntFromDouble /////////////// #if PY_MAJOR_VERSION < 3 static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value) { if (value >= (double)LONG_MIN && value <= (double)LONG_MAX) { return PyInt_FromLong((long)value); } return PyLong_FromDouble(value); } #endif /////////////// CIntFromPyVerify /////////////// // see CIntFromPy #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value) \ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value) \ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc) \ { \ func_type value = func_value; \ if (sizeof(target_type) < sizeof(func_type)) { \ if (unlikely(value != (func_type) (target_type) value)) { \ func_type zero = 0; \ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred())) \ return (target_type) -1; \ if (is_unsigned && unlikely(value < zero)) \ goto raise_neg_overflow; \ else \ goto raise_overflow; \ } \ } \ return (target_type) value; \ } /////////////// CIntFromPy.proto /////////////// static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *); /////////////// CIntFromPy /////////////// //@requires: CIntFromPyVerify {{py: from Cython.Utility import pylong_join }} static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *x) { const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof({{TYPE}}) < sizeof(long)) { __PYX_VERIFY_RETURN_INT({{TYPE}}, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return ({{TYPE}}) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return ({{TYPE}}) 0; case 1: __PYX_VERIFY_RETURN_INT({{TYPE}}, digit, digits[0]) {{for _size in (2, 3, 4)}} case {{_size}}: if (8 * sizeof({{TYPE}}) > {{_size-1}} * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT({{TYPE}}, unsigned long, {{pylong_join(_size, 'digits')}}) } else if (8 * sizeof({{TYPE}}) >= {{_size}} * PyLong_SHIFT) { return ({{TYPE}}) {{pylong_join(_size, 'digits', TYPE)}}; } } break; {{endfor}} } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { // misuse Py_False as a quick way to compare to a '0' int object in PyPy int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return ({{TYPE}}) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof({{TYPE}}) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { // signed #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return ({{TYPE}}) 0; case -1: 
__PYX_VERIFY_RETURN_INT({{TYPE}}, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT({{TYPE}}, digit, +digits[0]) {{for _size in (2, 3, 4)}} {{for _case in (-_size, _size)}} case {{_case}}: if (8 * sizeof({{TYPE}}){{' - 1' if _case < 0 else ''}} > {{_size-1}} * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT({{TYPE}}, {{'long' if _case < 0 else 'unsigned long'}}, {{'-(long) ' if _case < 0 else ''}}{{pylong_join(_size, 'digits')}}) } else if (8 * sizeof({{TYPE}}) - 1 > {{_size}} * PyLong_SHIFT) { return ({{TYPE}}) ({{'((%s)-1)*' % TYPE if _case < 0 else ''}}{{pylong_join(_size, 'digits', TYPE)}}); } } break; {{endfor}} {{endfor}} } #endif if (sizeof({{TYPE}}) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else {{TYPE}} val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return ({{TYPE}}) -1; } } else { {{TYPE}} val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return ({{TYPE}}) -1; val = {{FROM_PY_FUNCTION}}(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to {{TYPE}}"); return ({{TYPE}}) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to {{TYPE}}"); return ({{TYPE}}) -1; } Cython-0.26.1/Cython/Utility/Complex.c0000664000175000017500000002346413023021033020306 0ustar stefanstefan00000000000000/////////////// Header.proto.h_code /////////////// #if !defined(CYTHON_CCOMPLEX) #if defined(__cplusplus) #define CYTHON_CCOMPLEX 1 #elif defined(_Complex_I) #define CYTHON_CCOMPLEX 1 #else #define CYTHON_CCOMPLEX 0 #endif #endif #if CYTHON_CCOMPLEX #ifdef __cplusplus #include <complex> #else #include <complex.h> #endif #endif #if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) #undef _Complex_I #define _Complex_I 1.0fj #endif /////////////// RealImag.proto /////////////// #if CYTHON_CCOMPLEX #ifdef __cplusplus #define __Pyx_CREAL(z) ((z).real()) #define __Pyx_CIMAG(z) ((z).imag()) #else #define __Pyx_CREAL(z) (__real__(z)) #define __Pyx_CIMAG(z) (__imag__(z)) #endif #else #define __Pyx_CREAL(z) ((z).real) #define __Pyx_CIMAG(z) ((z).imag) #endif #if defined(__cplusplus) && CYTHON_CCOMPLEX \ && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) #define __Pyx_SET_CREAL(z,x) ((z).real(x)) #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) #else #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) #endif /////////////// Declarations.proto.complex_type_declarations /////////////// #if CYTHON_CCOMPLEX #ifdef __cplusplus typedef ::std::complex< {{real_type}} > {{type_name}}; #else typedef {{real_type}} _Complex 
{{type_name}}; #endif #else typedef struct { {{real_type}} real, imag; } {{type_name}}; #endif static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}}, {{real_type}}); /////////////// Declarations /////////////// #if CYTHON_CCOMPLEX #ifdef __cplusplus static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { return ::std::complex< {{real_type}} >(x, y); } #else static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { return x + y*({{type}})_Complex_I; } #endif #else static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { {{type}} z; z.real = x; z.imag = y; return z; } #endif /////////////// ToPy.proto /////////////// #define __pyx_PyComplex_FromComplex(z) \ PyComplex_FromDoubles((double)__Pyx_CREAL(z), \ (double)__Pyx_CIMAG(z)) /////////////// FromPy.proto /////////////// static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject*); /////////////// FromPy /////////////// static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) { Py_complex cval; #if !CYTHON_COMPILING_IN_PYPY if (PyComplex_CheckExact(o)) cval = ((PyComplexObject *)o)->cval; else #endif cval = PyComplex_AsCComplex(o); return {{type_name}}_from_parts( ({{real_type}})cval.real, ({{real_type}})cval.imag); } /////////////// Arithmetic.proto /////////////// #if CYTHON_CCOMPLEX #define __Pyx_c_eq{{func_suffix}}(a, b) ((a)==(b)) #define __Pyx_c_sum{{func_suffix}}(a, b) ((a)+(b)) #define __Pyx_c_diff{{func_suffix}}(a, b) ((a)-(b)) #define __Pyx_c_prod{{func_suffix}}(a, b) ((a)*(b)) #define __Pyx_c_quot{{func_suffix}}(a, b) ((a)/(b)) #define __Pyx_c_neg{{func_suffix}}(a) (-(a)) #ifdef __cplusplus #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==({{real_type}})0) #define __Pyx_c_conj{{func_suffix}}(z) (::std::conj(z)) #if {{is_float}} #define __Pyx_c_abs{{func_suffix}}(z) (::std::abs(z)) #define __Pyx_c_pow{{func_suffix}}(a, b) (::std::pow(a, b)) #endif #else #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==0) #define __Pyx_c_conj{{func_suffix}}(z) (conj{{m}}(z)) #if {{is_float}} #define __Pyx_c_abs{{func_suffix}}(z) (cabs{{m}}(z)) #define __Pyx_c_pow{{func_suffix}}(a, b) (cpow{{m}}(a, b)) #endif #endif #else static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}}, {{type}}); static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}}, {{type}}); static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}}, {{type}}); static CYTHON_INLINE {{type}} __Pyx_c_prod{{func_suffix}}({{type}}, {{type}}); static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}}, {{type}}); static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}}); static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}}); static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}}); #if {{is_float}} static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}}); static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}}, {{type}}); #endif #endif /////////////// Arithmetic /////////////// #if CYTHON_CCOMPLEX #else static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}} a, {{type}} b) { return (a.real == b.real) && (a.imag == b.imag); } static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}} a, {{type}} b) { {{type}} z; z.real = a.real + b.real; z.imag = a.imag + b.imag; return z; } static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}} a, {{type}} b) { {{type}} z; z.real = a.real - b.real; z.imag = a.imag - b.imag; return z; } static CYTHON_INLINE {{type}} 
__Pyx_c_prod{{func_suffix}}({{type}} a, {{type}} b) { {{type}} z; z.real = a.real * b.real - a.imag * b.imag; z.imag = a.real * b.imag + a.imag * b.real; return z; } #if {{is_float}} static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) { if (b.imag == 0) { return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real); } else if (fabs{{m}}(b.real) >= fabs{{m}}(b.imag)) { if (b.real == 0 && b.imag == 0) { return {{type_name}}_from_parts(a.real / b.real, a.imag / b.imag); } else { {{real_type}} r = b.imag / b.real; {{real_type}} s = 1.0 / (b.real + b.imag * r); return {{type_name}}_from_parts( (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); } } else { {{real_type}} r = b.real / b.imag; {{real_type}} s = 1.0 / (b.imag + b.real * r); return {{type_name}}_from_parts( (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); } } #else static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) { if (b.imag == 0) { return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real); } else { {{real_type}} denom = b.real * b.real + b.imag * b.imag; return {{type_name}}_from_parts( (a.real * b.real + a.imag * b.imag) / denom, (a.imag * b.real - a.real * b.imag) / denom); } } #endif static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}} a) { {{type}} z; z.real = -a.real; z.imag = -a.imag; return z; } static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}} a) { return (a.real == 0) && (a.imag == 0); } static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}} a) { {{type}} z; z.real = a.real; z.imag = -a.imag; return z; } #if {{is_float}} static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}} z) { #if !defined(HAVE_HYPOT) || defined(_MSC_VER) return sqrt{{m}}(z.real*z.real + z.imag*z.imag); #else return hypot{{m}}(z.real, z.imag); #endif } static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}} a, {{type}} b) { {{type}} z; {{real_type}} r, lnr, theta, z_r, z_theta; if (b.imag == 0 && b.real == (int)b.real) { if (b.real < 0) { {{real_type}} denom = a.real * a.real + a.imag * a.imag; a.real = a.real / denom; a.imag = -a.imag / denom; b.real = -b.real; } switch ((int)b.real) { case 0: z.real = 1; z.imag = 0; return z; case 1: return a; case 2: z = __Pyx_c_prod{{func_suffix}}(a, a); return __Pyx_c_prod{{func_suffix}}(a, a); case 3: z = __Pyx_c_prod{{func_suffix}}(a, a); return __Pyx_c_prod{{func_suffix}}(z, a); case 4: z = __Pyx_c_prod{{func_suffix}}(a, a); return __Pyx_c_prod{{func_suffix}}(z, z); } } if (a.imag == 0) { if (a.real == 0) { return a; } else if (b.imag == 0) { z.real = pow{{m}}(a.real, b.real); z.imag = 0; return z; } else if (a.real > 0) { r = a.real; theta = 0; } else { r = -a.real; theta = atan2{{m}}(0, -1); } } else { r = __Pyx_c_abs{{func_suffix}}(a); theta = atan2{{m}}(a.imag, a.real); } lnr = log{{m}}(r); z_r = exp{{m}}(lnr * b.real - theta * b.imag); z_theta = theta * b.real + lnr * b.imag; z.real = z_r * cos{{m}}(z_theta); z.imag = z_r * sin{{m}}(z_theta); return z; } #endif #endif Cython-0.26.1/Cython/Utility/ExtensionTypes.c0000664000175000017500000001315313150045407021706 0ustar stefanstefan00000000000000 /////////////// CallNextTpDealloc.proto /////////////// static void __Pyx_call_next_tp_dealloc(PyObject* obj, destructor current_tp_dealloc); /////////////// CallNextTpDealloc /////////////// static void __Pyx_call_next_tp_dealloc(PyObject* obj, destructor current_tp_dealloc) { PyTypeObject* type = Py_TYPE(obj); /* try to find the first parent type that has a different 
tp_dealloc() function */ while (type && type->tp_dealloc != current_tp_dealloc) type = type->tp_base; while (type && type->tp_dealloc == current_tp_dealloc) type = type->tp_base; if (type) type->tp_dealloc(obj); } /////////////// CallNextTpTraverse.proto /////////////// static int __Pyx_call_next_tp_traverse(PyObject* obj, visitproc v, void *a, traverseproc current_tp_traverse); /////////////// CallNextTpTraverse /////////////// static int __Pyx_call_next_tp_traverse(PyObject* obj, visitproc v, void *a, traverseproc current_tp_traverse) { PyTypeObject* type = Py_TYPE(obj); /* try to find the first parent type that has a different tp_traverse() function */ while (type && type->tp_traverse != current_tp_traverse) type = type->tp_base; while (type && type->tp_traverse == current_tp_traverse) type = type->tp_base; if (type && type->tp_traverse) return type->tp_traverse(obj, v, a); // FIXME: really ignore? return 0; } /////////////// CallNextTpClear.proto /////////////// static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_dealloc); /////////////// CallNextTpClear /////////////// static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_clear) { PyTypeObject* type = Py_TYPE(obj); /* try to find the first parent type that has a different tp_clear() function */ while (type && type->tp_clear != current_tp_clear) type = type->tp_base; while (type && type->tp_clear == current_tp_clear) type = type->tp_base; if (type && type->tp_clear) type->tp_clear(obj); } /////////////// SetupReduce.proto /////////////// static int __Pyx_setup_reduce(PyObject* type_obj); /////////////// SetupReduce /////////////// //@requires: ObjectHandling.c::PyObjectGetAttrStr //@substitute: naming static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { int ret; PyObject *name_attr; name_attr = __Pyx_PyObject_GetAttrStr(meth, PYIDENT("__name__")); if (likely(name_attr)) { ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); } else { ret = -1; } if (unlikely(ret < 0)) { PyErr_Clear(); ret = 0; } Py_XDECREF(name_attr); return ret; } static int __Pyx_setup_reduce(PyObject* type_obj) { int ret = 0; PyObject *object_reduce = NULL; PyObject *object_reduce_ex = NULL; PyObject *reduce = NULL; PyObject *reduce_ex = NULL; PyObject *reduce_cython = NULL; PyObject *setstate = NULL; PyObject *setstate_cython = NULL; #if CYTHON_USE_PYTYPE_LOOKUP if (_PyType_Lookup((PyTypeObject*)type_obj, PYIDENT("__getstate__"))) goto GOOD; #else if (PyObject_HasAttr(type_obj, PYIDENT("__getstate__"))) goto GOOD; #endif #if CYTHON_USE_PYTYPE_LOOKUP object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto BAD; #else object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto BAD; #endif reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_ex__")); if (unlikely(!reduce_ex)) goto BAD; if (reduce_ex == object_reduce_ex) { #if CYTHON_USE_PYTYPE_LOOKUP object_reduce = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto BAD; #else object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto BAD; #endif reduce = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce__")); if (unlikely(!reduce)) goto BAD; if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, PYIDENT("__reduce_cython__"))) { reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_cython__")); if 
(unlikely(!reduce_cython)) goto BAD; ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce__"), reduce_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce_cython__")); if (unlikely(ret < 0)) goto BAD; setstate = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate__")); if (!setstate) PyErr_Clear(); if (!setstate || __Pyx_setup_reduce_is_named(setstate, PYIDENT("__setstate_cython__"))) { setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate_cython__")); if (unlikely(!setstate_cython)) goto BAD; ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate__"), setstate_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate_cython__")); if (unlikely(ret < 0)) goto BAD; } PyType_Modified((PyTypeObject*)type_obj); } } goto GOOD; BAD: if (!PyErr_Occurred()) PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); ret = -1; GOOD: #if !CYTHON_COMPILING_IN_CPYTHON Py_XDECREF(object_reduce); Py_XDECREF(object_reduce_ex); #endif Py_XDECREF(reduce); Py_XDECREF(reduce_ex); Py_XDECREF(reduce_cython); Py_XDECREF(setstate); Py_XDECREF(setstate_cython); return ret; } Cython-0.26.1/Cython/Utility/arrayarray.h0000664000175000017500000000774113143605603021077 0ustar stefanstefan00000000000000/////////////// ArrayAPI.proto /////////////// // arrayarray.h // // Artificial C-API for Python's type, // used by array.pxd // // last changes: 2009-05-15 rk // 2012-05-02 andreasvc // (see revision control) // #ifndef _ARRAYARRAY_H #define _ARRAYARRAY_H // These two forward declarations are explicitly handled in the type // declaration code, as including them here is too late for cython-defined // types to use them. // struct arrayobject; // typedef struct arrayobject arrayobject; // All possible arraydescr values are defined in the vector "descriptors" // below. That's defined later because the appropriate get and set // functions aren't visible yet. 
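/* Illustrative note (a sketch, not taken from the original header): each entry of that
 * descriptors vector pairs a typecode with its item size and accessor functions, along the
 * lines of { 'd', sizeof(double), d_getitem, d_setitem } -- the names d_getitem/d_setitem
 * here are hypothetical; the real get/set functions live in the array implementation. */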
typedef struct arraydescr { int typecode; int itemsize; PyObject * (*getitem)(struct arrayobject *, Py_ssize_t); int (*setitem)(struct arrayobject *, Py_ssize_t, PyObject *); #if PY_MAJOR_VERSION >= 3 char *formats; #endif } arraydescr; struct arrayobject { PyObject_HEAD Py_ssize_t ob_size; union { char *ob_item; float *as_floats; double *as_doubles; int *as_ints; unsigned int *as_uints; unsigned char *as_uchars; signed char *as_schars; char *as_chars; unsigned long *as_ulongs; long *as_longs; #if PY_MAJOR_VERSION >= 3 unsigned long long *as_ulonglongs; long long *as_longlongs; #endif short *as_shorts; unsigned short *as_ushorts; Py_UNICODE *as_pyunicodes; void *as_voidptr; } data; Py_ssize_t allocated; struct arraydescr *ob_descr; PyObject *weakreflist; /* List of weak references */ #if PY_MAJOR_VERSION >= 3 int ob_exports; /* Number of exported buffers */ #endif }; #ifndef NO_NEWARRAY_INLINE // fast creation of a new array static CYTHON_INLINE PyObject * newarrayobject(PyTypeObject *type, Py_ssize_t size, struct arraydescr *descr) { arrayobject *op; size_t nbytes; if (size < 0) { PyErr_BadInternalCall(); return NULL; } nbytes = size * descr->itemsize; // Check for overflow if (nbytes / descr->itemsize != (size_t)size) { return PyErr_NoMemory(); } op = (arrayobject *) type->tp_alloc(type, 0); if (op == NULL) { return NULL; } op->ob_descr = descr; op->allocated = size; op->weakreflist = NULL; op->ob_size = size; if (size <= 0) { op->data.ob_item = NULL; } else { op->data.ob_item = PyMem_NEW(char, nbytes); if (op->data.ob_item == NULL) { Py_DECREF(op); return PyErr_NoMemory(); } } return (PyObject *) op; } #else PyObject* newarrayobject(PyTypeObject *type, Py_ssize_t size, struct arraydescr *descr); #endif /* ifndef NO_NEWARRAY_INLINE */ // fast resize (reallocation to the point) // not designed for filing small increments (but for fast opaque array apps) static CYTHON_INLINE int resize(arrayobject *self, Py_ssize_t n) { void *items = (void*) self->data.ob_item; PyMem_Resize(items, char, (size_t)(n * self->ob_descr->itemsize)); if (items == NULL) { PyErr_NoMemory(); return -1; } self->data.ob_item = (char*) items; self->ob_size = n; self->allocated = n; return 0; } // suitable for small increments; over allocation 50% ; static CYTHON_INLINE int resize_smart(arrayobject *self, Py_ssize_t n) { void *items = (void*) self->data.ob_item; Py_ssize_t newsize; if (n < self->allocated && n*4 > self->allocated) { self->ob_size = n; return 0; } newsize = n + (n / 2) + 1; if (newsize <= n) { /* overflow */ PyErr_NoMemory(); return -1; } PyMem_Resize(items, char, (size_t)(newsize * self->ob_descr->itemsize)); if (items == NULL) { PyErr_NoMemory(); return -1; } self->data.ob_item = (char*) items; self->ob_size = n; self->allocated = newsize; return 0; } #endif /* _ARRAYARRAY_H */ Cython-0.26.1/Cython/Utility/CommonStructures.c0000664000175000017500000000477613143605603022256 0ustar stefanstefan00000000000000/////////////// FetchCommonType.proto /////////////// static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); /////////////// FetchCommonType /////////////// static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { PyObject* fake_module; PyTypeObject* cached_type = NULL; fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI); if (!fake_module) return NULL; Py_INCREF(fake_module); cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name); if (cached_type) { if (!PyType_Check((PyObject*)cached_type)) { PyErr_Format(PyExc_TypeError, "Shared Cython type 
%.200s is not a type object", type->tp_name); goto bad; } if (cached_type->tp_basicsize != type->tp_basicsize) { PyErr_Format(PyExc_TypeError, "Shared Cython type %.200s has the wrong size, try recompiling", type->tp_name); goto bad; } } else { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); if (PyType_Ready(type) < 0) goto bad; if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0) goto bad; Py_INCREF(type); cached_type = type; } done: Py_DECREF(fake_module); // NOTE: always returns owned reference, or NULL on error return cached_type; bad: Py_XDECREF(cached_type); cached_type = NULL; goto done; } /////////////// FetchCommonPointer.proto /////////////// static void* __Pyx_FetchCommonPointer(void* pointer, const char* name); /////////////// FetchCommonPointer /////////////// static void* __Pyx_FetchCommonPointer(void* pointer, const char* name) { #if PY_VERSION_HEX >= 0x02070000 PyObject* fake_module = NULL; PyObject* capsule = NULL; void* value = NULL; fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI); if (!fake_module) return NULL; Py_INCREF(fake_module); capsule = PyObject_GetAttrString(fake_module, name); if (!capsule) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); capsule = PyCapsule_New(pointer, name, NULL); if (!capsule) goto bad; if (PyObject_SetAttrString(fake_module, name, capsule) < 0) goto bad; } value = PyCapsule_GetPointer(capsule, name); bad: Py_XDECREF(capsule); Py_DECREF(fake_module); return value; #else return pointer; #endif } Cython-0.26.1/Cython/Utility/Overflow.c0000664000175000017500000002736313143605603020522 0ustar stefanstefan00000000000000/* These functions provide integer arithmetic with integer checking. They do not actually raise an exception when an overflow is detected, but rather set a bit in the overflow parameter. (This parameter may be re-used accross several arithmetic operations, so should be or-ed rather than assigned to.) The implementation is divided into two parts, the signed and unsigned basecases, which is where the magic happens, and a generic template matching a specific type to an implementation based on its (c-compile-time) size and signedness. When possible, branching is avoided, and preference is given to speed over accuracy (a low rate of falsely "detected" overflows are acceptable, undetected overflows are not). TODO: Hook up checking. TODO: Conditionally support 128-bit with intmax_t? */ /////////////// Common.proto /////////////// static int __Pyx_check_twos_complement(void) { if (-1 != ~0) { PyErr_SetString(PyExc_RuntimeError, "Two's complement required for overflow checks."); return 1; } else if (sizeof(short) == sizeof(int)) { PyErr_SetString(PyExc_RuntimeError, "sizeof(short) < sizeof(int) required for overflow checks."); return 1; } else { return 0; } } #define __PYX_IS_UNSIGNED(type) (((type) -1) > 0) #define __PYX_SIGN_BIT(type) (((unsigned type) 1) << (sizeof(type) * 8 - 1)) #define __PYX_HALF_MAX(type) (((type) 1) << (sizeof(type) * 8 - 2)) #define __PYX_MIN(type) (__PYX_IS_UNSIGNED(type) ? 
(type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type)) #define __PYX_MAX(type) (~__PYX_MIN(type)) #define __Pyx_add_no_overflow(a, b, overflow) ((a) + (b)) #define __Pyx_add_const_no_overflow(a, b, overflow) ((a) + (b)) #define __Pyx_sub_no_overflow(a, b, overflow) ((a) - (b)) #define __Pyx_sub_const_no_overflow(a, b, overflow) ((a) - (b)) #define __Pyx_mul_no_overflow(a, b, overflow) ((a) * (b)) #define __Pyx_mul_const_no_overflow(a, b, overflow) ((a) * (b)) #define __Pyx_div_no_overflow(a, b, overflow) ((a) / (b)) #define __Pyx_div_const_no_overflow(a, b, overflow) ((a) / (b)) /////////////// Common.init /////////////// __Pyx_check_twos_complement(); /////////////// BaseCaseUnsigned.proto /////////////// static CYTHON_INLINE {{UINT}} __Pyx_add_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); static CYTHON_INLINE {{UINT}} __Pyx_div_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); // Use these when b is known at compile time. #define __Pyx_add_const_{{NAME}}_checking_overflow __Pyx_add_{{NAME}}_checking_overflow #define __Pyx_sub_const_{{NAME}}_checking_overflow __Pyx_sub_{{NAME}}_checking_overflow static CYTHON_INLINE {{UINT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} constant, int *overflow); #define __Pyx_div_const_{{NAME}}_checking_overflow __Pyx_div_{{NAME}}_checking_overflow /////////////// BaseCaseUnsigned /////////////// static CYTHON_INLINE {{UINT}} __Pyx_add_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { {{UINT}} r = a + b; *overflow |= r < a; return r; } static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { {{UINT}} r = a - b; *overflow |= r > a; return r; } static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { if (sizeof({{UINT}}) < sizeof(unsigned long)) { unsigned long big_r = ((unsigned long) a) * ((unsigned long) b); {{UINT}} r = ({{UINT}}) big_r; *overflow |= big_r != r; return r; #ifdef HAVE_LONG_LONG } else if (sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG)) { unsigned PY_LONG_LONG big_r = ((unsigned PY_LONG_LONG) a) * ((unsigned PY_LONG_LONG) b); {{UINT}} r = ({{UINT}}) big_r; *overflow |= big_r != r; return r; #endif } else { {{UINT}} prod = a * b; double dprod = ((double) a) * ((double) b); // Overflow results in an error of at least 2^sizeof(UINT), // whereas rounding represents an error on the order of 2^(sizeof(UINT)-53). 
*overflow |= fabs(dprod - prod) > (__PYX_MAX({{UINT}}) / 2); return prod; } } static CYTHON_INLINE {{UINT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { if (b > 1) { *overflow |= a > __PYX_MAX({{UINT}}) / b; } return a * b; } static CYTHON_INLINE {{UINT}} __Pyx_div_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { if (b == 0) { *overflow |= 1; return 0; } return a / b; } /////////////// BaseCaseSigned.proto /////////////// static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); static CYTHON_INLINE {{INT}} __Pyx_sub_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); static CYTHON_INLINE {{INT}} __Pyx_div_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); // Use when b is known at compile time. static CYTHON_INLINE {{INT}} __Pyx_add_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} constant, int *overflow); #define __Pyx_div_const_{{NAME}}_checking_overflow __Pyx_div_{{NAME}}_checking_overflow /////////////// BaseCaseSigned /////////////// static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { if (sizeof({{INT}}) < sizeof(long)) { long big_r = ((long) a) + ((long) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return r; #ifdef HAVE_LONG_LONG } else if (sizeof({{INT}}) < sizeof(PY_LONG_LONG)) { PY_LONG_LONG big_r = ((PY_LONG_LONG) a) + ((PY_LONG_LONG) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return r; #endif } else { // Signed overflow undefined, but unsigned overflow is well defined. {{INT}} r = ({{INT}}) ((unsigned {{INT}}) a + (unsigned {{INT}}) b); // Overflow happened if the operands have the same sign, but the result // has opposite sign. 
// sign(a) == sign(b) != sign(r) {{INT}} sign_a = __PYX_SIGN_BIT({{INT}}) & a; {{INT}} sign_b = __PYX_SIGN_BIT({{INT}}) & b; {{INT}} sign_r = __PYX_SIGN_BIT({{INT}}) & r; *overflow |= (sign_a == sign_b) & (sign_a != sign_r); return r; } } static CYTHON_INLINE {{INT}} __Pyx_add_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { if (b > 0) { *overflow |= a > __PYX_MAX({{INT}}) - b; } else if (b < 0) { *overflow |= a < __PYX_MIN({{INT}}) - b; } return a + b; } static CYTHON_INLINE {{INT}} __Pyx_sub_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { *overflow |= b == __PYX_MIN({{INT}}); return __Pyx_add_{{NAME}}_checking_overflow(a, -b, overflow); } static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { *overflow |= b == __PYX_MIN({{INT}}); return __Pyx_add_const_{{NAME}}_checking_overflow(a, -b, overflow); } static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { if (sizeof({{INT}}) < sizeof(long)) { long big_r = ((long) a) * ((long) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return ({{INT}}) r; #ifdef HAVE_LONG_LONG } else if (sizeof({{INT}}) < sizeof(PY_LONG_LONG)) { PY_LONG_LONG big_r = ((PY_LONG_LONG) a) * ((PY_LONG_LONG) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return ({{INT}}) r; #endif } else { {{INT}} prod = a * b; double dprod = ((double) a) * ((double) b); // Overflow results in an error of at least 2^sizeof(INT), // whereas rounding represents an error on the order of 2^(sizeof(INT)-53). *overflow |= fabs(dprod - prod) > (__PYX_MAX({{INT}}) / 2); return prod; } } static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { if (b > 1) { *overflow |= a > __PYX_MAX({{INT}}) / b; *overflow |= a < __PYX_MIN({{INT}}) / b; } else if (b == -1) { *overflow |= a == __PYX_MIN({{INT}}); } else if (b < -1) { *overflow |= a > __PYX_MIN({{INT}}) / b; *overflow |= a < __PYX_MAX({{INT}}) / b; } return a * b; } static CYTHON_INLINE {{INT}} __Pyx_div_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { if (b == 0) { *overflow |= 1; return 0; } *overflow |= (a == __PYX_MIN({{INT}})) & (b == -1); return a / b; } /////////////// SizeCheck.init /////////////// __Pyx_check_sane_{{NAME}}(); /////////////// SizeCheck.proto /////////////// static int __Pyx_check_sane_{{NAME}}(void) { if (sizeof({{TYPE}}) <= sizeof(int) || #ifdef HAVE_LONG_LONG sizeof({{TYPE}}) == sizeof(PY_LONG_LONG) || #endif sizeof({{TYPE}}) == sizeof(long)) { return 0; } else { PyErr_Format(PyExc_RuntimeError, \ "Bad size for int type %.{{max(60, len(TYPE))}}s: %d", "{{TYPE}}", (int) sizeof({{TYPE}})); return 1; } } /////////////// Binop.proto /////////////// static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow); /////////////// Binop /////////////// static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) { if (sizeof({{TYPE}}) < sizeof(int)) { return __Pyx_{{BINOP}}_no_overflow(a, b, overflow); } else if (__PYX_IS_UNSIGNED({{TYPE}})) { if (sizeof({{TYPE}}) == sizeof(unsigned int)) { return __Pyx_{{BINOP}}_unsigned_int_checking_overflow(a, b, overflow); } else if (sizeof({{TYPE}}) == sizeof(unsigned long)) { return __Pyx_{{BINOP}}_unsigned_long_checking_overflow(a, b, overflow); #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG)) { return 
__Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow); #endif } else { abort(); return 0; /* handled elsewhere */ } } else { if (sizeof({{TYPE}}) == sizeof(int)) { return __Pyx_{{BINOP}}_int_checking_overflow(a, b, overflow); } else if (sizeof({{TYPE}}) == sizeof(long)) { return __Pyx_{{BINOP}}_long_checking_overflow(a, b, overflow); #ifdef HAVE_LONG_LONG } else if (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) { return __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow); #endif } else { abort(); return 0; /* handled elsewhere */ } } } /////////////// LeftShift.proto /////////////// static CYTHON_INLINE {{TYPE}} __Pyx_lshift_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) { *overflow |= #if {{SIGNED}} (b < 0) | #endif (b > ({{TYPE}}) (8 * sizeof({{TYPE}}))) | (a > (__PYX_MAX({{TYPE}}) >> b)); return a << b; } #define __Pyx_lshift_const_{{NAME}}_checking_overflow __Pyx_lshift_{{NAME}}_checking_overflow /////////////// UnaryNegOverflows.proto /////////////// //FIXME: shouldn't the macro name be prefixed by "__Pyx_" ? Too late now, I guess... // from intobject.c #define UNARY_NEG_WOULD_OVERFLOW(x) \ (((x) < 0) & ((unsigned long)(x) == 0-(unsigned long)(x))) Cython-0.26.1/Cython/Utility/TestUtilityLoader.c0000664000175000017500000000042712542002467022342 0ustar stefanstefan00000000000000////////// TestUtilityLoader.proto ////////// test {{loader}} prototype ////////// TestUtilityLoader ////////// //@requires: OtherUtility test {{loader}} impl ////////// OtherUtility.proto ////////// req {{loader}} proto ////////// OtherUtility ////////// req {{loader}} impl Cython-0.26.1/Cython/Utility/MemoryView_C.c0000664000175000017500000006765413023021033021255 0ustar stefanstefan00000000000000////////// MemviewSliceStruct.proto ////////// /* memoryview slice struct */ struct {{memview_struct_name}}; typedef struct { struct {{memview_struct_name}} *memview; char *data; Py_ssize_t shape[{{max_dims}}]; Py_ssize_t strides[{{max_dims}}]; Py_ssize_t suboffsets[{{max_dims}}]; } {{memviewslice_name}}; /////////// Atomics.proto ///////////// #include <pythread.h> #ifndef CYTHON_ATOMICS #define CYTHON_ATOMICS 1 #endif #define __pyx_atomic_int_type int // todo: Portland pgcc, maybe OS X's OSAtomicIncrement32, // libatomic + autotools-like distutils support? Such a pain... 
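/* The atomics selected below back the __pyx_add/sub_acquisition_count() macros defined
 * further down, which track how many memoryview slices currently reference a given
 * __pyx_memoryview_obj.  __Pyx_INC_MEMVIEW() takes the owning reference only when the
 * count goes from 0 to 1, and __Pyx_XDEC_MEMVIEW() drops it again on the last release,
 * roughly:
 *
 *     if (__pyx_add_acquisition_count(memview) == 0)   // first slice acquired
 *         Py_INCREF((PyObject *) memview);
 *
 * When no suitable compiler atomics are detected, CYTHON_ATOMICS is forced to 0 and the
 * PyThread-lock based __pyx_add/sub_acquisition_count_locked() fallbacks are used instead. */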
#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 || \ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL >= 2)) && \ !defined(__i386__) /* gcc >= 4.1.2 */ #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1) #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1) #ifdef __PYX_DEBUG_ATOMICS #warning "Using GNU atomics" #endif #elif CYTHON_ATOMICS && defined(_MSC_VER) && 0 /* msvc */ #include #undef __pyx_atomic_int_type #define __pyx_atomic_int_type LONG #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value) #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value) #ifdef __PYX_DEBUG_ATOMICS #pragma message ("Using MSVC atomics") #endif #elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0 #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value) #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value) #ifdef __PYX_DEBUG_ATOMICS #warning "Using Intel atomics" #endif #else #undef CYTHON_ATOMICS #define CYTHON_ATOMICS 0 #ifdef __PYX_DEBUG_ATOMICS #warning "Not using atomics" #endif #endif typedef volatile __pyx_atomic_int_type __pyx_atomic_int; #if CYTHON_ATOMICS #define __pyx_add_acquisition_count(memview) \ __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock) #define __pyx_sub_acquisition_count(memview) \ __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock) #else #define __pyx_add_acquisition_count(memview) \ __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock) #define __pyx_sub_acquisition_count(memview) \ __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock) #endif /////////////// ObjectToMemviewSlice.proto /////////////// static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *); ////////// MemviewSliceInit.proto ////////// #define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d #define __Pyx_MEMVIEW_DIRECT 1 #define __Pyx_MEMVIEW_PTR 2 #define __Pyx_MEMVIEW_FULL 4 #define __Pyx_MEMVIEW_CONTIG 8 #define __Pyx_MEMVIEW_STRIDED 16 #define __Pyx_MEMVIEW_FOLLOW 32 #define __Pyx_IS_C_CONTIG 1 #define __Pyx_IS_F_CONTIG 2 static int __Pyx_init_memviewslice( struct __pyx_memoryview_obj *memview, int ndim, __Pyx_memviewslice *memviewslice, int memview_is_new_reference); static CYTHON_INLINE int __pyx_add_acquisition_count_locked( __pyx_atomic_int *acquisition_count, PyThread_type_lock lock); static CYTHON_INLINE int __pyx_sub_acquisition_count_locked( __pyx_atomic_int *acquisition_count, PyThread_type_lock lock); #define __pyx_get_slice_count_pointer(memview) (memview->acquisition_count_aligned_p) #define __pyx_get_slice_count(memview) (*__pyx_get_slice_count_pointer(memview)) #define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__) #define __PYX_XDEC_MEMVIEW(slice, have_gil) __Pyx_XDEC_MEMVIEW(slice, have_gil, __LINE__) static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *, int, int); static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *, int, int); /////////////// MemviewSliceIndex.proto /////////////// static CYTHON_INLINE char *__pyx_memviewslice_index_full( const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset); /////////////// ObjectToMemviewSlice /////////////// //@requires: MemviewSliceValidateAndInit static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj) { {{memviewslice_name}} result = {{memslice_init}}; __Pyx_BufFmt_StackElem 
stack[{{struct_nesting_depth}}]; int axes_specs[] = { {{axes_specs}} }; int retcode; if (obj == Py_None) { /* We don't bother to refcount None */ result.memview = (struct __pyx_memoryview_obj *) Py_None; return result; } retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}}, {{buf_flag}}, {{ndim}}, &{{dtype_typeinfo}}, stack, &result, obj); if (unlikely(retcode == -1)) goto __pyx_fail; return result; __pyx_fail: result.memview = NULL; result.data = NULL; return result; } /////////////// MemviewSliceValidateAndInit.proto /////////////// static int __Pyx_ValidateAndInit_memviewslice( int *axes_specs, int c_or_f_flag, int buf_flags, int ndim, __Pyx_TypeInfo *dtype, __Pyx_BufFmt_StackElem stack[], __Pyx_memviewslice *memviewslice, PyObject *original_obj); /////////////// MemviewSliceValidateAndInit /////////////// //@requires: Buffer.c::TypeInfoCompare static int __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec) { if (buf->shape[dim] <= 1) return 1; if (buf->strides) { if (spec & __Pyx_MEMVIEW_CONTIG) { if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) { if (buf->strides[dim] != sizeof(void *)) { PyErr_Format(PyExc_ValueError, "Buffer is not indirectly contiguous " "in dimension %d.", dim); goto fail; } } else if (buf->strides[dim] != buf->itemsize) { PyErr_SetString(PyExc_ValueError, "Buffer and memoryview are not contiguous " "in the same dimension."); goto fail; } } if (spec & __Pyx_MEMVIEW_FOLLOW) { Py_ssize_t stride = buf->strides[dim]; if (stride < 0) stride = -stride; if (stride < buf->itemsize) { PyErr_SetString(PyExc_ValueError, "Buffer and memoryview are not contiguous " "in the same dimension."); goto fail; } } } else { if (spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1) { PyErr_Format(PyExc_ValueError, "C-contiguous buffer is not contiguous in " "dimension %d", dim); goto fail; } else if (spec & (__Pyx_MEMVIEW_PTR)) { PyErr_Format(PyExc_ValueError, "C-contiguous buffer is not indirect in " "dimension %d", dim); goto fail; } else if (buf->suboffsets) { PyErr_SetString(PyExc_ValueError, "Buffer exposes suboffsets but no strides"); goto fail; } } return 1; fail: return 0; } static int __pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec) { // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the // ptr may not be set to NULL but may be uninitialized? 
if (spec & __Pyx_MEMVIEW_DIRECT) { if (buf->suboffsets && buf->suboffsets[dim] >= 0) { PyErr_Format(PyExc_ValueError, "Buffer not compatible with direct access " "in dimension %d.", dim); goto fail; } } if (spec & __Pyx_MEMVIEW_PTR) { if (!buf->suboffsets || (buf->suboffsets && buf->suboffsets[dim] < 0)) { PyErr_Format(PyExc_ValueError, "Buffer is not indirectly accessible " "in dimension %d.", dim); goto fail; } } return 1; fail: return 0; } static int __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag) { int i; if (c_or_f_flag & __Pyx_IS_F_CONTIG) { Py_ssize_t stride = 1; for (i = 0; i < ndim; i++) { if (stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1) { PyErr_SetString(PyExc_ValueError, "Buffer not fortran contiguous."); goto fail; } stride = stride * buf->shape[i]; } } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) { Py_ssize_t stride = 1; for (i = ndim - 1; i >- 1; i--) { if (stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1) { PyErr_SetString(PyExc_ValueError, "Buffer not C contiguous."); goto fail; } stride = stride * buf->shape[i]; } } return 1; fail: return 0; } static int __Pyx_ValidateAndInit_memviewslice( int *axes_specs, int c_or_f_flag, int buf_flags, int ndim, __Pyx_TypeInfo *dtype, __Pyx_BufFmt_StackElem stack[], __Pyx_memviewslice *memviewslice, PyObject *original_obj) { struct __pyx_memoryview_obj *memview, *new_memview; __Pyx_RefNannyDeclarations Py_buffer *buf; int i, spec = 0, retval = -1; __Pyx_BufFmt_Context ctx; int from_memoryview = __pyx_memoryview_check(original_obj); __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0); if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *) original_obj)->typeinfo)) { /* We have a matching dtype, skip format parsing */ memview = (struct __pyx_memoryview_obj *) original_obj; new_memview = NULL; } else { memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new( original_obj, buf_flags, 0, dtype); new_memview = memview; if (unlikely(!memview)) goto fail; } buf = &memview->view; if (buf->ndim != ndim) { PyErr_Format(PyExc_ValueError, "Buffer has wrong number of dimensions (expected %d, got %d)", ndim, buf->ndim); goto fail; } if (new_memview) { __Pyx_BufFmt_Init(&ctx, stack, dtype); if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; } if ((unsigned) buf->itemsize != dtype->size) { PyErr_Format(PyExc_ValueError, "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) " "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)", buf->itemsize, (buf->itemsize > 1) ? "s" : "", dtype->name, dtype->size, (dtype->size > 1) ? 
"s" : ""); goto fail; } /* Check axes */ for (i = 0; i < ndim; i++) { spec = axes_specs[i]; if (!__pyx_check_strides(buf, i, ndim, spec)) goto fail; if (!__pyx_check_suboffsets(buf, i, ndim, spec)) goto fail; } /* Check contiguity */ if (buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag)) goto fail; /* Initialize */ if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice, new_memview != NULL) == -1)) { goto fail; } retval = 0; goto no_fail; fail: Py_XDECREF(new_memview); retval = -1; no_fail: __Pyx_RefNannyFinishContext(); return retval; } ////////// MemviewSliceInit ////////// static int __Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview, int ndim, {{memviewslice_name}} *memviewslice, int memview_is_new_reference) { __Pyx_RefNannyDeclarations int i, retval=-1; Py_buffer *buf = &memview->view; __Pyx_RefNannySetupContext("init_memviewslice", 0); if (!buf) { PyErr_SetString(PyExc_ValueError, "buf is NULL."); goto fail; } else if (memviewslice->memview || memviewslice->data) { PyErr_SetString(PyExc_ValueError, "memviewslice is already initialized!"); goto fail; } if (buf->strides) { for (i = 0; i < ndim; i++) { memviewslice->strides[i] = buf->strides[i]; } } else { Py_ssize_t stride = buf->itemsize; for (i = ndim - 1; i >= 0; i--) { memviewslice->strides[i] = stride; stride *= buf->shape[i]; } } for (i = 0; i < ndim; i++) { memviewslice->shape[i] = buf->shape[i]; if (buf->suboffsets) { memviewslice->suboffsets[i] = buf->suboffsets[i]; } else { memviewslice->suboffsets[i] = -1; } } memviewslice->memview = memview; memviewslice->data = (char *)buf->buf; if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) { Py_INCREF(memview); } retval = 0; goto no_fail; fail: /* Don't decref, the memoryview may be borrowed. Let the caller do the cleanup */ /* __Pyx_XDECREF(memviewslice->memview); */ memviewslice->memview = 0; memviewslice->data = 0; retval = -1; no_fail: __Pyx_RefNannyFinishContext(); return retval; } static CYTHON_INLINE void __pyx_fatalerror(const char *fmt, ...) 
{ va_list vargs; char msg[200]; #ifdef HAVE_STDARG_PROTOTYPES va_start(vargs, fmt); #else va_start(vargs); #endif vsnprintf(msg, 200, fmt, vargs); Py_FatalError(msg); va_end(vargs); } static CYTHON_INLINE int __pyx_add_acquisition_count_locked(__pyx_atomic_int *acquisition_count, PyThread_type_lock lock) { int result; PyThread_acquire_lock(lock, 1); result = (*acquisition_count)++; PyThread_release_lock(lock); return result; } static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count, PyThread_type_lock lock) { int result; PyThread_acquire_lock(lock, 1); result = (*acquisition_count)--; PyThread_release_lock(lock); return result; } static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) { int first_time; struct {{memview_struct_name}} *memview = memslice->memview; if (!memview || (PyObject *) memview == Py_None) return; /* allow uninitialized memoryview assignment */ if (__pyx_get_slice_count(memview) < 0) __pyx_fatalerror("Acquisition count is %d (line %d)", __pyx_get_slice_count(memview), lineno); first_time = __pyx_add_acquisition_count(memview) == 0; if (first_time) { if (have_gil) { Py_INCREF((PyObject *) memview); } else { PyGILState_STATE _gilstate = PyGILState_Ensure(); Py_INCREF((PyObject *) memview); PyGILState_Release(_gilstate); } } } static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) { int last_time; struct {{memview_struct_name}} *memview = memslice->memview; if (!memview ) { return; } else if ((PyObject *) memview == Py_None) { memslice->memview = NULL; return; } if (__pyx_get_slice_count(memview) <= 0) __pyx_fatalerror("Acquisition count is %d (line %d)", __pyx_get_slice_count(memview), lineno); last_time = __pyx_sub_acquisition_count(memview) == 1; memslice->data = NULL; if (last_time) { if (have_gil) { Py_CLEAR(memslice->memview); } else { PyGILState_STATE _gilstate = PyGILState_Ensure(); Py_CLEAR(memslice->memview); PyGILState_Release(_gilstate); } } else { memslice->memview = NULL; } } ////////// MemviewSliceCopyTemplate.proto ////////// static {{memviewslice_name}} __pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs, const char *mode, int ndim, size_t sizeof_dtype, int contig_flag, int dtype_is_object); ////////// MemviewSliceCopyTemplate ////////// static {{memviewslice_name}} __pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs, const char *mode, int ndim, size_t sizeof_dtype, int contig_flag, int dtype_is_object) { __Pyx_RefNannyDeclarations int i; __Pyx_memviewslice new_mvs = {{memslice_init}}; struct __pyx_memoryview_obj *from_memview = from_mvs->memview; Py_buffer *buf = &from_memview->view; PyObject *shape_tuple = NULL; PyObject *temp_int = NULL; struct __pyx_array_obj *array_obj = NULL; struct __pyx_memoryview_obj *memview_obj = NULL; __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0); for (i = 0; i < ndim; i++) { if (from_mvs->suboffsets[i] >= 0) { PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with " "indirect dimensions (axis %d)", i); goto fail; } } shape_tuple = PyTuple_New(ndim); if (unlikely(!shape_tuple)) { goto fail; } __Pyx_GOTREF(shape_tuple); for(i = 0; i < ndim; i++) { temp_int = PyInt_FromSsize_t(from_mvs->shape[i]); if(unlikely(!temp_int)) { goto fail; } else { PyTuple_SET_ITEM(shape_tuple, i, temp_int); temp_int = NULL; } } array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL); if (unlikely(!array_obj)) { goto fail; 
} __Pyx_GOTREF(array_obj); memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new( (PyObject *) array_obj, contig_flag, dtype_is_object, from_mvs->memview->typeinfo); if (unlikely(!memview_obj)) goto fail; /* initialize new_mvs */ if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0)) goto fail; if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim, dtype_is_object) < 0)) goto fail; goto no_fail; fail: __Pyx_XDECREF(new_mvs.memview); new_mvs.memview = NULL; new_mvs.data = NULL; no_fail: __Pyx_XDECREF(shape_tuple); __Pyx_XDECREF(temp_int); __Pyx_XDECREF(array_obj); __Pyx_RefNannyFinishContext(); return new_mvs; } ////////// CopyContentsUtility.proto ///////// #define {{func_cname}}(slice) \ __pyx_memoryview_copy_new_contig(&slice, "{{mode}}", {{ndim}}, \ sizeof({{dtype_decl}}), {{contig_flag}}, \ {{dtype_is_object}}) ////////// OverlappingSlices.proto ////////// static int __pyx_slices_overlap({{memviewslice_name}} *slice1, {{memviewslice_name}} *slice2, int ndim, size_t itemsize); ////////// OverlappingSlices ////////// /* Based on numpy's core/src/multiarray/array_assign.c */ /* Gets a half-open range [start, end) which contains the array data */ static void __pyx_get_array_memory_extents({{memviewslice_name}} *slice, void **out_start, void **out_end, int ndim, size_t itemsize) { char *start, *end; int i; start = end = slice->data; for (i = 0; i < ndim; i++) { Py_ssize_t stride = slice->strides[i]; Py_ssize_t extent = slice->shape[i]; if (extent == 0) { *out_start = *out_end = start; return; } else { if (stride > 0) end += stride * (extent - 1); else start += stride * (extent - 1); } } /* Return a half-open range */ *out_start = start; *out_end = end + itemsize; } /* Returns 1 if the arrays have overlapping data, 0 otherwise */ static int __pyx_slices_overlap({{memviewslice_name}} *slice1, {{memviewslice_name}} *slice2, int ndim, size_t itemsize) { void *start1, *end1, *start2, *end2; __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize); __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize); return (start1 < end2) && (start2 < end1); } ////////// MemviewSliceIsCContig.proto ////////// #define __pyx_memviewslice_is_c_contig{{ndim}}(slice) \ __pyx_memviewslice_is_contig(slice, 'C', {{ndim}}) ////////// MemviewSliceIsFContig.proto ////////// #define __pyx_memviewslice_is_f_contig{{ndim}}(slice) \ __pyx_memviewslice_is_contig(slice, 'F', {{ndim}}) ////////// MemviewSliceIsContig.proto ////////// static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim); ////////// MemviewSliceIsContig ////////// static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim) { int i, index, step, start; Py_ssize_t itemsize = mvs.memview->view.itemsize; if (order == 'F') { step = 1; start = 0; } else { step = -1; start = ndim - 1; } for (i = 0; i < ndim; i++) { index = start + step * i; if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize) return 0; itemsize *= mvs.shape[index]; } return 1; } /////////////// MemviewSliceIndex /////////////// static CYTHON_INLINE char * __pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset) { bufp = bufp + idx * stride; if (suboffset >= 0) { bufp = *((char **) bufp) + suboffset; } return (char *) bufp; } /////////////// MemviewDtypeToObject.proto /////////////// {{if to_py_function}} static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */ 
{{endif}} {{if from_py_function}} static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */ {{endif}} /////////////// MemviewDtypeToObject /////////////// {{#__pyx_memview__to_object}} /* Convert a dtype to or from a Python object */ {{if to_py_function}} static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) { return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp); } {{endif}} {{if from_py_function}} static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) { {{dtype}} value = {{from_py_function}}(obj); if ({{error_condition}}) return 0; *({{dtype}} *) itemp = value; return 1; } {{endif}} /////////////// MemviewObjectToObject.proto /////////////// /* Function callbacks (for memoryview object) for dtype object */ static PyObject *{{get_function}}(const char *itemp); /* proto */ static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */ /////////////// MemviewObjectToObject /////////////// static PyObject *{{get_function}}(const char *itemp) { PyObject *result = *(PyObject **) itemp; Py_INCREF(result); return result; } static int {{set_function}}(const char *itemp, PyObject *obj) { Py_INCREF(obj); Py_DECREF(*(PyObject **) itemp); *(PyObject **) itemp = obj; return 1; } /////////// ToughSlice ////////// /* Dimension is indexed with 'start:stop:step' */ if (unlikely(__pyx_memoryview_slice_memviewslice( &{{dst}}, {{src}}.shape[{{dim}}], {{src}}.strides[{{dim}}], {{src}}.suboffsets[{{dim}}], {{dim}}, {{new_ndim}}, &{{get_suboffset_dim()}}, {{start}}, {{stop}}, {{step}}, {{int(have_start)}}, {{int(have_stop)}}, {{int(have_step)}}, 1) < 0)) { {{error_goto}} } ////////// SimpleSlice ////////// /* Dimension is indexed with ':' only */ {{dst}}.shape[{{new_ndim}}] = {{src}}.shape[{{dim}}]; {{dst}}.strides[{{new_ndim}}] = {{src}}.strides[{{dim}}]; {{if access == 'direct'}} {{dst}}.suboffsets[{{new_ndim}}] = -1; {{else}} {{dst}}.suboffsets[{{new_ndim}}] = {{src}}.suboffsets[{{dim}}]; if ({{src}}.suboffsets[{{dim}}] >= 0) {{get_suboffset_dim()}} = {{new_ndim}}; {{endif}} ////////// SliceIndex ////////// // Dimension is indexed with an integer, we could use the ToughSlice // approach, but this is faster { Py_ssize_t __pyx_tmp_idx = {{idx}}; Py_ssize_t __pyx_tmp_shape = {{src}}.shape[{{dim}}]; Py_ssize_t __pyx_tmp_stride = {{src}}.strides[{{dim}}]; if ({{wraparound}} && (__pyx_tmp_idx < 0)) __pyx_tmp_idx += __pyx_tmp_shape; if ({{boundscheck}} && (__pyx_tmp_idx < 0 || __pyx_tmp_idx >= __pyx_tmp_shape)) { {{if not have_gil}} #ifdef WITH_THREAD PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); #endif {{endif}} PyErr_SetString(PyExc_IndexError, "Index out of bounds (axis {{dim}})"); {{if not have_gil}} #ifdef WITH_THREAD PyGILState_Release(__pyx_gilstate_save); #endif {{endif}} {{error_goto}} } {{if all_dimensions_direct}} {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride; {{else}} if ({{get_suboffset_dim()}} < 0) { {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride; /* This dimension is the first dimension, or is preceded by */ /* direct or indirect dimensions that are indexed away. */ /* Hence suboffset_dim must be less than zero, and we can have */ /* our data pointer refer to another block by dereferencing. 
*/ /* slice.data -> B -> C becomes slice.data -> C */ {{if indirect}} { Py_ssize_t __pyx_tmp_suboffset = {{src}}.suboffsets[{{dim}}]; {{if generic}} if (__pyx_tmp_suboffset >= 0) {{endif}} {{dst}}.data = *((char **) {{dst}}.data) + __pyx_tmp_suboffset; } {{endif}} } else { {{dst}}.suboffsets[{{get_suboffset_dim()}}] += __pyx_tmp_idx * __pyx_tmp_stride; /* Note: dimension can not be indirect, the compiler will have */ /* issued an error */ } {{endif}} } ////////// FillStrided1DScalar.proto ////////// static void __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride, size_t itemsize, void *itemp); ////////// FillStrided1DScalar ////////// /* Fill a slice with a scalar value. The dimension is direct and strided or contiguous */ /* This can be used as a callback for the memoryview object to efficienty assign a scalar */ /* Currently unused */ static void __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride, size_t itemsize, void *itemp) { Py_ssize_t i; {{type_decl}} item = *(({{type_decl}} *) itemp); {{type_decl}} *endp; stride /= sizeof({{type_decl}}); endp = p + stride * extent; while (p < endp) { *p = item; p += stride; } } Cython-0.26.1/Cython/Plex/0000775000175000017500000000000013151203436016002 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Plex/Actions.py0000664000175000017500000000472512542002467017770 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Actions for use in token specifications # #======================================================================= class Action(object): def perform(self, token_stream, text): pass # abstract def same_as(self, other): return self is other class Return(Action): """ Internal Plex action which causes |value| to be returned as the value of the associated token """ def __init__(self, value): self.value = value def perform(self, token_stream, text): return self.value def same_as(self, other): return isinstance(other, Return) and self.value == other.value def __repr__(self): return "Return(%s)" % repr(self.value) class Call(Action): """ Internal Plex action which causes a function to be called. """ def __init__(self, function): self.function = function def perform(self, token_stream, text): return self.function(token_stream, text) def __repr__(self): return "Call(%s)" % self.function.__name__ def same_as(self, other): return isinstance(other, Call) and self.function is other.function class Begin(Action): """ Begin(state_name) is a Plex action which causes the Scanner to enter the state |state_name|. See the docstring of Plex.Lexicon for more information. """ def __init__(self, state_name): self.state_name = state_name def perform(self, token_stream, text): token_stream.begin(self.state_name) def __repr__(self): return "Begin(%s)" % self.state_name def same_as(self, other): return isinstance(other, Begin) and self.state_name == other.state_name class Ignore(Action): """ IGNORE is a Plex action which causes its associated token to be ignored. See the docstring of Plex.Lexicon for more information. """ def perform(self, token_stream, text): return None def __repr__(self): return "IGNORE" IGNORE = Ignore() #IGNORE.__doc__ = Ignore.__doc__ class Text(Action): """ TEXT is a Plex action which causes the text of a token to be returned as the value of the token. See the docstring of Plex.Lexicon for more information. 
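Concretely, the actions defined in this module behave as follows when the scanner calls perform(); the None passed for the scanner argument is just a placeholder here:

from Cython.Plex.Actions import Return, Call, IGNORE, TEXT

def upper(scanner, text):
    return text.upper()

assert Return('kw').perform(None, 'if') == 'kw'    # fixed token value
assert Call(upper).perform(None, 'if') == 'IF'     # delegate to a function
assert IGNORE.perform(None, '   ') is None         # token is dropped
assert TEXT.perform(None, 'abc') == 'abc'          # matched text becomes the value
assert Return('kw').same_as(Return('kw'))          # compared by value, not identity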
""" def perform(self, token_stream, text): return text def __repr__(self): return "TEXT" TEXT = Text() #TEXT.__doc__ = Text.__doc__ Cython-0.26.1/Cython/Plex/Machines.py0000664000175000017500000001712013023021033020071 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Classes for building NFAs and DFAs # #======================================================================= from __future__ import absolute_import import sys from .Transitions import TransitionMap try: from sys import maxsize as maxint except ImportError: from sys import maxint try: unichr except NameError: unichr = chr LOWEST_PRIORITY = -maxint class Machine(object): """A collection of Nodes representing an NFA or DFA.""" states = None # [Node] next_state_number = 1 initial_states = None # {(name, bol): Node} def __init__(self): self.states = [] self.initial_states = {} def __del__(self): #print "Destroying", self ### for state in self.states: state.destroy() def new_state(self): """Add a new state to the machine and return it.""" s = Node() n = self.next_state_number self.next_state_number = n + 1 s.number = n self.states.append(s) return s def new_initial_state(self, name): state = self.new_state() self.make_initial_state(name, state) return state def make_initial_state(self, name, state): self.initial_states[name] = state def get_initial_state(self, name): return self.initial_states[name] def dump(self, file): file.write("Plex.Machine:\n") if self.initial_states is not None: file.write(" Initial states:\n") for (name, state) in sorted(self.initial_states.items()): file.write(" '%s': %d\n" % (name, state.number)) for s in self.states: s.dump(file) class Node(object): """A state of an NFA or DFA.""" transitions = None # TransitionMap action = None # Action action_priority = None # integer number = 0 # for debug output epsilon_closure = None # used by nfa_to_dfa() def __init__(self): # Preinitialise the list of empty transitions, because # the nfa-to-dfa algorithm needs it #self.transitions = {'':[]} self.transitions = TransitionMap() self.action_priority = LOWEST_PRIORITY def destroy(self): #print "Destroying", self ### self.transitions = None self.action = None self.epsilon_closure = None def add_transition(self, event, new_state): self.transitions.add(event, new_state) def link_to(self, state): """Add an epsilon-move from this state to another state.""" self.add_transition('', state) def set_action(self, action, priority): """Make this an accepting state with the given action. If there is already an action, choose the action with highest priority.""" if priority > self.action_priority: self.action = action self.action_priority = priority def get_action(self): return self.action def get_action_priority(self): return self.action_priority def is_accepting(self): return self.action is not None def __str__(self): return "State %d" % self.number def dump(self, file): # Header file.write(" State %d:\n" % self.number) # Transitions # self.dump_transitions(file) self.transitions.dump(file) # Action action = self.action priority = self.action_priority if action is not None: file.write(" %s [priority %d]\n" % (action, priority)) def __lt__(self, other): return self.number < other.number class FastMachine(object): """ FastMachine is a deterministic machine represented in a way that allows fast scanning. 
""" initial_states = None # {state_name:state} states = None # [state] where state = {event:state, 'else':state, 'action':Action} next_number = 1 # for debugging new_state_template = { '': None, 'bol': None, 'eol': None, 'eof': None, 'else': None } def __init__(self): self.initial_states = {} self.states = [] def __del__(self): for state in self.states: state.clear() def new_state(self, action=None): number = self.next_number self.next_number = number + 1 result = self.new_state_template.copy() result['number'] = number result['action'] = action self.states.append(result) return result def make_initial_state(self, name, state): self.initial_states[name] = state def add_transitions(self, state, event, new_state, maxint=maxint): if type(event) is tuple: code0, code1 = event if code0 == -maxint: state['else'] = new_state elif code1 != maxint: while code0 < code1: state[unichr(code0)] = new_state code0 += 1 else: state[event] = new_state def get_initial_state(self, name): return self.initial_states[name] def dump(self, file): file.write("Plex.FastMachine:\n") file.write(" Initial states:\n") for name, state in sorted(self.initial_states.items()): file.write(" %s: %s\n" % (repr(name), state['number'])) for state in self.states: self.dump_state(state, file) def dump_state(self, state, file): # Header file.write(" State %d:\n" % state['number']) # Transitions self.dump_transitions(state, file) # Action action = state['action'] if action is not None: file.write(" %s\n" % action) def dump_transitions(self, state, file): chars_leading_to_state = {} special_to_state = {} for (c, s) in state.items(): if len(c) == 1: chars = chars_leading_to_state.get(id(s), None) if chars is None: chars = [] chars_leading_to_state[id(s)] = chars chars.append(c) elif len(c) <= 4: special_to_state[c] = s ranges_to_state = {} for state in self.states: char_list = chars_leading_to_state.get(id(state), None) if char_list: ranges = self.chars_to_ranges(char_list) ranges_to_state[ranges] = state ranges_list = ranges_to_state.keys() ranges_list.sort() for ranges in ranges_list: key = self.ranges_to_string(ranges) state = ranges_to_state[ranges] file.write(" %s --> State %d\n" % (key, state['number'])) for key in ('bol', 'eol', 'eof', 'else'): state = special_to_state.get(key, None) if state: file.write(" %s --> State %d\n" % (key, state['number'])) def chars_to_ranges(self, char_list): char_list.sort() i = 0 n = len(char_list) result = [] while i < n: c1 = ord(char_list[i]) c2 = c1 i += 1 while i < n and ord(char_list[i]) == c2 + 1: i += 1 c2 += 1 result.append((chr(c1), chr(c2))) return tuple(result) def ranges_to_string(self, range_list): return ','.join(map(self.range_to_string, range_list)) def range_to_string(self, range_tuple): (c1, c2) = range_tuple if c1 == c2: return repr(c1) else: return "%s..%s" % (repr(c1), repr(c2)) Cython-0.26.1/Cython/Plex/Lexicons.py0000664000175000017500000001537312574327400020157 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Lexical Analyser Specification # #======================================================================= from __future__ import absolute_import import types from . import Actions from . import DFA from . import Errors from . import Machines from . import Regexps # debug_flags for Lexicon constructor DUMP_NFA = 1 DUMP_DFA = 2 class State(object): """ This class is used as part of a Plex.Lexicon specification to introduce a user-defined state. 
Constructor: State(name, token_specifications) """ name = None tokens = None def __init__(self, name, tokens): self.name = name self.tokens = tokens class Lexicon(object): """ Lexicon(specification) builds a lexical analyser from the given |specification|. The specification consists of a list of specification items. Each specification item may be either: 1) A token definition, which is a tuple: (pattern, action) The |pattern| is a regular axpression built using the constructors defined in the Plex module. The |action| is the action to be performed when this pattern is recognised (see below). 2) A state definition: State(name, tokens) where |name| is a character string naming the state, and |tokens| is a list of token definitions as above. The meaning and usage of states is described below. Actions ------- The |action| in a token specication may be one of three things: 1) A function, which is called as follows: function(scanner, text) where |scanner| is the relevant Scanner instance, and |text| is the matched text. If the function returns anything other than None, that value is returned as the value of the token. If it returns None, scanning continues as if the IGNORE action were specified (see below). 2) One of the following special actions: IGNORE means that the recognised characters will be treated as white space and ignored. Scanning will continue until the next non-ignored token is recognised before returning. TEXT causes the scanned text itself to be returned as the value of the token. 3) Any other value, which is returned as the value of the token. States ------ At any given time, the scanner is in one of a number of states. Associated with each state is a set of possible tokens. When scanning, only tokens associated with the current state are recognised. There is a default state, whose name is the empty string. Token definitions which are not inside any State definition belong to the default state. The initial state of the scanner is the default state. The state can be changed in one of two ways: 1) Using Begin(state_name) as the action of a token. 2) Calling the begin(state_name) method of the Scanner. To change back to the default state, use '' as the state name. 
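Example ------- A small lexicon built from these pieces might look as follows; the token names and the 'comment' state are purely illustrative:

from Cython.Plex import (Lexicon, State, Str, Any, AnyBut, Rep1, Range,
                         IGNORE, Begin)

letter = Range("az") | Range("AZ") | Str("_")
digit = Range("09")

lexicon = Lexicon([
    (Rep1(letter), 'name'),          # plain value: returned as the token value
    (Rep1(digit), 'number'),
    (Rep1(Any(" \t")), IGNORE),      # whitespace is skipped
    (Str("#"), Begin('comment')),    # switch to the user-defined state below
    State('comment', [
        (Str("\n"), Begin('')),      # back to the default state
        (AnyBut("\n"), IGNORE),      # drop the rest of the comment line
    ]),
])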
""" machine = None # Machine tables = None # StateTableMachine def __init__(self, specifications, debug=None, debug_flags=7, timings=None): if not isinstance(specifications, list): raise Errors.InvalidScanner("Scanner definition is not a list") if timings: from .Timing import time total_time = 0.0 time1 = time() nfa = Machines.Machine() default_initial_state = nfa.new_initial_state('') token_number = 1 for spec in specifications: if isinstance(spec, State): user_initial_state = nfa.new_initial_state(spec.name) for token in spec.tokens: self.add_token_to_machine( nfa, user_initial_state, token, token_number) token_number += 1 elif isinstance(spec, tuple): self.add_token_to_machine( nfa, default_initial_state, spec, token_number) token_number += 1 else: raise Errors.InvalidToken( token_number, "Expected a token definition (tuple) or State instance") if timings: time2 = time() total_time = total_time + (time2 - time1) time3 = time() if debug and (debug_flags & 1): debug.write("\n============= NFA ===========\n") nfa.dump(debug) dfa = DFA.nfa_to_dfa(nfa, debug=(debug_flags & 3) == 3 and debug) if timings: time4 = time() total_time = total_time + (time4 - time3) if debug and (debug_flags & 2): debug.write("\n============= DFA ===========\n") dfa.dump(debug) if timings: timings.write("Constructing NFA : %5.2f\n" % (time2 - time1)) timings.write("Converting to DFA: %5.2f\n" % (time4 - time3)) timings.write("TOTAL : %5.2f\n" % total_time) self.machine = dfa def add_token_to_machine(self, machine, initial_state, token_spec, token_number): try: (re, action_spec) = self.parse_token_definition(token_spec) # Disabled this -- matching empty strings can be useful #if re.nullable: # raise Errors.InvalidToken( # token_number, "Pattern can match 0 input symbols") if isinstance(action_spec, Actions.Action): action = action_spec else: try: action_spec.__call__ except AttributeError: action = Actions.Return(action_spec) else: action = Actions.Call(action_spec) final_state = machine.new_state() re.build_machine(machine, initial_state, final_state, match_bol=1, nocase=0) final_state.set_action(action, priority=-token_number) except Errors.PlexError as e: raise e.__class__("Token number %d: %s" % (token_number, e)) def parse_token_definition(self, token_spec): if not isinstance(token_spec, tuple): raise Errors.InvalidToken("Token definition is not a tuple") if len(token_spec) != 2: raise Errors.InvalidToken("Wrong number of items in token definition") pattern, action = token_spec if not isinstance(pattern, Regexps.RE): raise Errors.InvalidToken("Pattern is not an RE instance") return (pattern, action) def get_initial_state(self, name): return self.machine.get_initial_state(name) Cython-0.26.1/Cython/Plex/Regexps.py0000664000175000017500000003752012574327400020006 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Regular Expressions # #======================================================================= from __future__ import absolute_import import types try: from sys import maxsize as maxint except ImportError: from sys import maxint from . import Errors # # Constants # BOL = 'bol' EOL = 'eol' EOF = 'eof' nl_code = ord('\n') # # Helper functions # def chars_to_ranges(s): """ Return a list of character codes consisting of pairs [code1a, code1b, code2a, code2b,...] which cover all the characters in |s|. 
""" char_list = list(s) char_list.sort() i = 0 n = len(char_list) result = [] while i < n: code1 = ord(char_list[i]) code2 = code1 + 1 i += 1 while i < n and code2 >= ord(char_list[i]): code2 += 1 i += 1 result.append(code1) result.append(code2) return result def uppercase_range(code1, code2): """ If the range of characters from code1 to code2-1 includes any lower case letters, return the corresponding upper case range. """ code3 = max(code1, ord('a')) code4 = min(code2, ord('z') + 1) if code3 < code4: d = ord('A') - ord('a') return (code3 + d, code4 + d) else: return None def lowercase_range(code1, code2): """ If the range of characters from code1 to code2-1 includes any upper case letters, return the corresponding lower case range. """ code3 = max(code1, ord('A')) code4 = min(code2, ord('Z') + 1) if code3 < code4: d = ord('a') - ord('A') return (code3 + d, code4 + d) else: return None def CodeRanges(code_list): """ Given a list of codes as returned by chars_to_ranges, return an RE which will match a character in any of the ranges. """ re_list = [CodeRange(code_list[i], code_list[i + 1]) for i in range(0, len(code_list), 2)] return Alt(*re_list) def CodeRange(code1, code2): """ CodeRange(code1, code2) is an RE which matches any character with a code |c| in the range |code1| <= |c| < |code2|. """ if code1 <= nl_code < code2: return Alt(RawCodeRange(code1, nl_code), RawNewline, RawCodeRange(nl_code + 1, code2)) else: return RawCodeRange(code1, code2) # # Abstract classes # class RE(object): """RE is the base class for regular expression constructors. The following operators are defined on REs: re1 + re2 is an RE which matches |re1| followed by |re2| re1 | re2 is an RE which matches either |re1| or |re2| """ nullable = 1 # True if this RE can match 0 input symbols match_nl = 1 # True if this RE can match a string ending with '\n' str = None # Set to a string to override the class's __str__ result def build_machine(self, machine, initial_state, final_state, match_bol, nocase): """ This method should add states to |machine| to implement this RE, starting at |initial_state| and ending at |final_state|. If |match_bol| is true, the RE must be able to match at the beginning of a line. If nocase is true, upper and lower case letters should be treated as equivalent. """ raise NotImplementedError("%s.build_machine not implemented" % self.__class__.__name__) def build_opt(self, m, initial_state, c): """ Given a state |s| of machine |m|, return a new state reachable from |s| on character |c| or epsilon. """ s = m.new_state() initial_state.link_to(s) initial_state.add_transition(c, s) return s def __add__(self, other): return Seq(self, other) def __or__(self, other): return Alt(self, other) def __str__(self): if self.str: return self.str else: return self.calc_str() def check_re(self, num, value): if not isinstance(value, RE): self.wrong_type(num, value, "Plex.RE instance") def check_string(self, num, value): if type(value) != type(''): self.wrong_type(num, value, "string") def check_char(self, num, value): self.check_string(num, value) if len(value) != 1: raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s." 
"Expected a string of length 1, got: %s" % ( num, self.__class__.__name__, repr(value))) def wrong_type(self, num, value, expected): if type(value) == types.InstanceType: got = "%s.%s instance" % ( value.__class__.__module__, value.__class__.__name__) else: got = type(value).__name__ raise Errors.PlexTypeError("Invalid type for argument %d of Plex.%s " "(expected %s, got %s" % ( num, self.__class__.__name__, expected, got)) # # Primitive RE constructors # ------------------------- # # These are the basic REs from which all others are built. # ## class Char(RE): ## """ ## Char(c) is an RE which matches the character |c|. ## """ ## nullable = 0 ## def __init__(self, char): ## self.char = char ## self.match_nl = char == '\n' ## def build_machine(self, m, initial_state, final_state, match_bol, nocase): ## c = self.char ## if match_bol and c != BOL: ## s1 = self.build_opt(m, initial_state, BOL) ## else: ## s1 = initial_state ## if c == '\n' or c == EOF: ## s1 = self.build_opt(m, s1, EOL) ## if len(c) == 1: ## code = ord(self.char) ## s1.add_transition((code, code+1), final_state) ## if nocase and is_letter_code(code): ## code2 = other_case_code(code) ## s1.add_transition((code2, code2+1), final_state) ## else: ## s1.add_transition(c, final_state) ## def calc_str(self): ## return "Char(%s)" % repr(self.char) def Char(c): """ Char(c) is an RE which matches the character |c|. """ if len(c) == 1: result = CodeRange(ord(c), ord(c) + 1) else: result = SpecialSymbol(c) result.str = "Char(%s)" % repr(c) return result class RawCodeRange(RE): """ RawCodeRange(code1, code2) is a low-level RE which matches any character with a code |c| in the range |code1| <= |c| < |code2|, where the range does not include newline. For internal use only. """ nullable = 0 match_nl = 0 range = None # (code, code) uppercase_range = None # (code, code) or None lowercase_range = None # (code, code) or None def __init__(self, code1, code2): self.range = (code1, code2) self.uppercase_range = uppercase_range(code1, code2) self.lowercase_range = lowercase_range(code1, code2) def build_machine(self, m, initial_state, final_state, match_bol, nocase): if match_bol: initial_state = self.build_opt(m, initial_state, BOL) initial_state.add_transition(self.range, final_state) if nocase: if self.uppercase_range: initial_state.add_transition(self.uppercase_range, final_state) if self.lowercase_range: initial_state.add_transition(self.lowercase_range, final_state) def calc_str(self): return "CodeRange(%d,%d)" % (self.code1, self.code2) class _RawNewline(RE): """ RawNewline is a low-level RE which matches a newline character. For internal use only. """ nullable = 0 match_nl = 1 def build_machine(self, m, initial_state, final_state, match_bol, nocase): if match_bol: initial_state = self.build_opt(m, initial_state, BOL) s = self.build_opt(m, initial_state, EOL) s.add_transition((nl_code, nl_code + 1), final_state) RawNewline = _RawNewline() class SpecialSymbol(RE): """ SpecialSymbol(sym) is an RE which matches the special input symbol |sym|, which is one of BOL, EOL or EOF. """ nullable = 0 match_nl = 0 sym = None def __init__(self, sym): self.sym = sym def build_machine(self, m, initial_state, final_state, match_bol, nocase): # Sequences 'bol bol' and 'bol eof' are impossible, so only need # to allow for bol if sym is eol if match_bol and self.sym == EOL: initial_state = self.build_opt(m, initial_state, BOL) initial_state.add_transition(self.sym, final_state) class Seq(RE): """Seq(re1, re2, re3...) 
is an RE which matches |re1| followed by |re2| followed by |re3|...""" def __init__(self, *re_list): nullable = 1 for i, re in enumerate(re_list): self.check_re(i, re) nullable = nullable and re.nullable self.re_list = re_list self.nullable = nullable i = len(re_list) match_nl = 0 while i: i -= 1 re = re_list[i] if re.match_nl: match_nl = 1 break if not re.nullable: break self.match_nl = match_nl def build_machine(self, m, initial_state, final_state, match_bol, nocase): re_list = self.re_list if len(re_list) == 0: initial_state.link_to(final_state) else: s1 = initial_state n = len(re_list) for i, re in enumerate(re_list): if i < n - 1: s2 = m.new_state() else: s2 = final_state re.build_machine(m, s1, s2, match_bol, nocase) s1 = s2 match_bol = re.match_nl or (match_bol and re.nullable) def calc_str(self): return "Seq(%s)" % ','.join(map(str, self.re_list)) class Alt(RE): """Alt(re1, re2, re3...) is an RE which matches either |re1| or |re2| or |re3|...""" def __init__(self, *re_list): self.re_list = re_list nullable = 0 match_nl = 0 nullable_res = [] non_nullable_res = [] i = 1 for re in re_list: self.check_re(i, re) if re.nullable: nullable_res.append(re) nullable = 1 else: non_nullable_res.append(re) if re.match_nl: match_nl = 1 i += 1 self.nullable_res = nullable_res self.non_nullable_res = non_nullable_res self.nullable = nullable self.match_nl = match_nl def build_machine(self, m, initial_state, final_state, match_bol, nocase): for re in self.nullable_res: re.build_machine(m, initial_state, final_state, match_bol, nocase) if self.non_nullable_res: if match_bol: initial_state = self.build_opt(m, initial_state, BOL) for re in self.non_nullable_res: re.build_machine(m, initial_state, final_state, 0, nocase) def calc_str(self): return "Alt(%s)" % ','.join(map(str, self.re_list)) class Rep1(RE): """Rep1(re) is an RE which matches one or more repetitions of |re|.""" def __init__(self, re): self.check_re(1, re) self.re = re self.nullable = re.nullable self.match_nl = re.match_nl def build_machine(self, m, initial_state, final_state, match_bol, nocase): s1 = m.new_state() s2 = m.new_state() initial_state.link_to(s1) self.re.build_machine(m, s1, s2, match_bol or self.re.match_nl, nocase) s2.link_to(s1) s2.link_to(final_state) def calc_str(self): return "Rep1(%s)" % self.re class SwitchCase(RE): """ SwitchCase(re, nocase) is an RE which matches the same strings as RE, but treating upper and lower case letters according to |nocase|. If |nocase| is true, case is ignored, otherwise it is not. """ re = None nocase = None def __init__(self, re, nocase): self.re = re self.nocase = nocase self.nullable = re.nullable self.match_nl = re.match_nl def build_machine(self, m, initial_state, final_state, match_bol, nocase): self.re.build_machine(m, initial_state, final_state, match_bol, self.nocase) def calc_str(self): if self.nocase: name = "NoCase" else: name = "Case" return "%s(%s)" % (name, self.re) # # Composite RE constructors # ------------------------- # # These REs are defined in terms of the primitive REs. # Empty = Seq() Empty.__doc__ = \ """ Empty is an RE which matches the empty string. """ Empty.str = "Empty" def Str1(s): """ Str1(s) is an RE which matches the literal string |s|. """ result = Seq(*tuple(map(Char, s))) result.str = "Str(%s)" % repr(s) return result def Str(*strs): """ Str(s) is an RE which matches the literal string |s|. Str(s1, s2, s3, ...) is an RE which matches any of |s1| or |s2| or |s3|... 
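In practice Seq and Alt are rarely spelled out, because the '+' and '|' operators defined on RE build them; a few illustrative combinations:

from Cython.Plex.Regexps import Str, Any, Rep1

keyword = Str("if", "else", "while")      # Alt of the literal strings
number = Rep1(Any("0123456789"))
assignment = Str("let") + Rep1(Any("abc")) + Str("=") + number   # '+' builds a Seq
token = keyword | number                  # '|' builds an Alt
assert str(keyword).startswith("Str(")    # constructors set a readable str form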
""" if len(strs) == 1: return Str1(strs[0]) else: result = Alt(*tuple(map(Str1, strs))) result.str = "Str(%s)" % ','.join(map(repr, strs)) return result def Any(s): """ Any(s) is an RE which matches any character in the string |s|. """ #result = apply(Alt, tuple(map(Char, s))) result = CodeRanges(chars_to_ranges(s)) result.str = "Any(%s)" % repr(s) return result def AnyBut(s): """ AnyBut(s) is an RE which matches any character (including newline) which is not in the string |s|. """ ranges = chars_to_ranges(s) ranges.insert(0, -maxint) ranges.append(maxint) result = CodeRanges(ranges) result.str = "AnyBut(%s)" % repr(s) return result AnyChar = AnyBut("") AnyChar.__doc__ = \ """ AnyChar is an RE which matches any single character (including a newline). """ AnyChar.str = "AnyChar" def Range(s1, s2=None): """ Range(c1, c2) is an RE which matches any single character in the range |c1| to |c2| inclusive. Range(s) where |s| is a string of even length is an RE which matches any single character in the ranges |s[0]| to |s[1]|, |s[2]| to |s[3]|,... """ if s2: result = CodeRange(ord(s1), ord(s2) + 1) result.str = "Range(%s,%s)" % (s1, s2) else: ranges = [] for i in range(0, len(s1), 2): ranges.append(CodeRange(ord(s1[i]), ord(s1[i + 1]) + 1)) result = Alt(*ranges) result.str = "Range(%s)" % repr(s1) return result def Opt(re): """ Opt(re) is an RE which matches either |re| or the empty string. """ result = Alt(re, Empty) result.str = "Opt(%s)" % re return result def Rep(re): """ Rep(re) is an RE which matches zero or more repetitions of |re|. """ result = Opt(Rep1(re)) result.str = "Rep(%s)" % re return result def NoCase(re): """ NoCase(re) is an RE which matches the same strings as RE, but treating upper and lower case letters as equivalent. """ return SwitchCase(re, nocase=1) def Case(re): """ Case(re) is an RE which matches the same strings as RE, but treating upper and lower case letters as distinct, i.e. it cancels the effect of any enclosing NoCase(). """ return SwitchCase(re, nocase=0) # # RE Constants # Bol = Char(BOL) Bol.__doc__ = \ """ Bol is an RE which matches the beginning of a line. """ Bol.str = "Bol" Eol = Char(EOL) Eol.__doc__ = \ """ Eol is an RE which matches the end of a line. """ Eol.str = "Eol" Eof = Char(EOF) Eof.__doc__ = \ """ Eof is an RE which matches the end of the file. """ Eof.str = "Eof" Cython-0.26.1/Cython/Plex/__init__.py0000664000175000017500000000240212542002467020115 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # #======================================================================= """ The Plex module provides lexical analysers with similar capabilities to GNU Flex. The following classes and functions are exported; see the attached docstrings for more information. Scanner For scanning a character stream under the direction of a Lexicon. Lexicon For constructing a lexical definition to be used by a Scanner. Str, Any, AnyBut, AnyChar, Seq, Alt, Opt, Rep, Rep1, Bol, Eol, Eof, Empty Regular expression constructors, for building pattern definitions for a Lexicon. State For defining scanner states when creating a Lexicon. TEXT, IGNORE, Begin Actions for associating with patterns when creating a Lexicon. 
""" from __future__ import absolute_import from .Actions import TEXT, IGNORE, Begin from .Lexicons import Lexicon, State from .Regexps import RE, Seq, Alt, Rep1, Empty, Str, Any, AnyBut, AnyChar, Range from .Regexps import Opt, Rep, Bol, Eol, Eof, Case, NoCase from .Scanners import Scanner Cython-0.26.1/Cython/Plex/DFA.py0000664000175000017500000001357412574327400016766 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Converting NFA to DFA # #======================================================================= from __future__ import absolute_import from . import Machines from .Machines import LOWEST_PRIORITY from .Transitions import TransitionMap def nfa_to_dfa(old_machine, debug=None): """ Given a nondeterministic Machine, return a new equivalent Machine which is deterministic. """ # We build a new machine whose states correspond to sets of states # in the old machine. Initially we add a new state corresponding to # the epsilon-closure of each initial old state. Then we give transitions # to each new state which are the union of all transitions out of any # of the corresponding old states. The new state reached on a given # character is the one corresponding to the set of states reachable # on that character from any of the old states. As new combinations of # old states are created, new states are added as needed until closure # is reached. new_machine = Machines.FastMachine() state_map = StateMap(new_machine) # Seed the process using the initial states of the old machine. # Make the corresponding new states into initial states of the new # machine with the same names. for (key, old_state) in old_machine.initial_states.items(): new_state = state_map.old_to_new(epsilon_closure(old_state)) new_machine.make_initial_state(key, new_state) # Tricky bit here: we add things to the end of this list while we're # iterating over it. The iteration stops when closure is achieved. for new_state in new_machine.states: transitions = TransitionMap() for old_state in state_map.new_to_old(new_state): for event, old_target_states in old_state.transitions.items(): if event and old_target_states: transitions.add_set(event, set_epsilon_closure(old_target_states)) for event, old_states in transitions.items(): new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states)) if debug: debug.write("\n===== State Mapping =====\n") state_map.dump(debug) return new_machine def set_epsilon_closure(state_set): """ Given a set of states, return the union of the epsilon closures of its member states. """ result = {} for state1 in state_set: for state2 in epsilon_closure(state1): result[state2] = 1 return result def epsilon_closure(state): """ Return the set of states reachable from the given state by epsilon moves. """ # Cache the result result = state.epsilon_closure if result is None: result = {} state.epsilon_closure = result add_to_epsilon_closure(result, state) return result def add_to_epsilon_closure(state_set, state): """ Recursively add to |state_set| states reachable from the given state by epsilon moves. """ if not state_set.get(state, 0): state_set[state] = 1 state_set_2 = state.transitions.get_epsilon() if state_set_2: for state2 in state_set_2: add_to_epsilon_closure(state_set, state2) class StateMap(object): """ Helper class used by nfa_to_dfa() to map back and forth between sets of states from the old machine and states of the new machine. 
""" new_machine = None # Machine old_to_new_dict = None # {(old_state,...) : new_state} new_to_old_dict = None # {id(new_state) : old_state_set} def __init__(self, new_machine): self.new_machine = new_machine self.old_to_new_dict = {} self.new_to_old_dict = {} def old_to_new(self, old_state_set): """ Return the state of the new machine corresponding to the set of old machine states represented by |state_set|. A new state will be created if necessary. If any of the old states are accepting states, the new state will be an accepting state with the highest priority action from the old states. """ key = self.make_key(old_state_set) new_state = self.old_to_new_dict.get(key, None) if not new_state: action = self.highest_priority_action(old_state_set) new_state = self.new_machine.new_state(action) self.old_to_new_dict[key] = new_state self.new_to_old_dict[id(new_state)] = old_state_set #for old_state in old_state_set.keys(): #new_state.merge_actions(old_state) return new_state def highest_priority_action(self, state_set): best_action = None best_priority = LOWEST_PRIORITY for state in state_set: priority = state.action_priority if priority > best_priority: best_action = state.action best_priority = priority return best_action # def old_to_new_set(self, old_state_set): # """ # Return the new state corresponding to a set of old states as # a singleton set. # """ # return {self.old_to_new(old_state_set):1} def new_to_old(self, new_state): """Given a new state, return a set of corresponding old states.""" return self.new_to_old_dict[id(new_state)] def make_key(self, state_set): """ Convert a set of states into a uniquified sorted tuple suitable for use as a dictionary key. """ lst = list(state_set) lst.sort() return tuple(lst) def dump(self, file): from .Transitions import state_set_str for new_state in self.new_machine.states: old_state_set = self.new_to_old_dict[id(new_state)] file.write(" State %s <-- %s\n" % ( new_state['number'], state_set_str(old_state_set))) Cython-0.26.1/Cython/Plex/Timing.py0000664000175000017500000000073012542002467017607 0ustar stefanstefan00000000000000# # Get time in platform-dependent way # from __future__ import absolute_import import os from sys import platform, exit, stderr if platform == 'mac': import MacOS def time(): return MacOS.GetTicks() / 60.0 timekind = "real" elif hasattr(os, 'times'): def time(): t = os.times() return t[0] + t[1] timekind = "cpu" else: stderr.write( "Don't know how to get time on platform %s\n" % repr(platform)) exit(1) Cython-0.26.1/Cython/Plex/Traditional.py0000664000175000017500000001003012574327400020626 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Traditional Regular Expression Syntax # #======================================================================= from __future__ import absolute_import from .Regexps import Alt, Seq, Rep, Rep1, Opt, Any, AnyBut, Bol, Eol, Char from .Errors import PlexError class RegexpSyntaxError(PlexError): pass def re(s): """ Convert traditional string representation of regular expression |s| into Plex representation. 
""" return REParser(s).parse_re() class REParser(object): def __init__(self, s): self.s = s self.i = -1 self.end = 0 self.next() def parse_re(self): re = self.parse_alt() if not self.end: self.error("Unexpected %s" % repr(self.c)) return re def parse_alt(self): """Parse a set of alternative regexps.""" re = self.parse_seq() if self.c == '|': re_list = [re] while self.c == '|': self.next() re_list.append(self.parse_seq()) re = Alt(*re_list) return re def parse_seq(self): """Parse a sequence of regexps.""" re_list = [] while not self.end and not self.c in "|)": re_list.append(self.parse_mod()) return Seq(*re_list) def parse_mod(self): """Parse a primitive regexp followed by *, +, ? modifiers.""" re = self.parse_prim() while not self.end and self.c in "*+?": if self.c == '*': re = Rep(re) elif self.c == '+': re = Rep1(re) else: # self.c == '?' re = Opt(re) self.next() return re def parse_prim(self): """Parse a primitive regexp.""" c = self.get() if c == '.': re = AnyBut("\n") elif c == '^': re = Bol elif c == '$': re = Eol elif c == '(': re = self.parse_alt() self.expect(')') elif c == '[': re = self.parse_charset() self.expect(']') else: if c == '\\': c = self.get() re = Char(c) return re def parse_charset(self): """Parse a charset. Does not include the surrounding [].""" char_list = [] invert = 0 if self.c == '^': invert = 1 self.next() if self.c == ']': char_list.append(']') self.next() while not self.end and self.c != ']': c1 = self.get() if self.c == '-' and self.lookahead(1) != ']': self.next() c2 = self.get() for a in range(ord(c1), ord(c2) + 1): char_list.append(chr(a)) else: char_list.append(c1) chars = ''.join(char_list) if invert: return AnyBut(chars) else: return Any(chars) def next(self): """Advance to the next char.""" s = self.s i = self.i = self.i + 1 if i < len(s): self.c = s[i] else: self.c = '' self.end = 1 def get(self): if self.end: self.error("Premature end of string") c = self.c self.next() return c def lookahead(self, n): """Look ahead n chars.""" j = self.i + n if j < len(self.s): return self.s[j] else: return '' def expect(self, c): """ Expect to find character |c| at current position. Raises an exception otherwise. """ if self.c == c: self.next() else: self.error("Missing %s" % repr(c)) def error(self, mess): """Raise exception to signal syntax error in regexp.""" raise RegexpSyntaxError("Syntax error in regexp %s at position %d: %s" % ( repr(self.s), self.i, mess)) Cython-0.26.1/Cython/Plex/Scanners.pxd0000664000175000017500000000243412542002467020302 0ustar stefanstefan00000000000000from __future__ import absolute_import import cython from Cython.Plex.Actions cimport Action cdef class Scanner: cdef public lexicon cdef public stream cdef public name cdef public unicode buffer cdef public Py_ssize_t buf_start_pos cdef public Py_ssize_t next_pos cdef public Py_ssize_t cur_pos cdef public Py_ssize_t cur_line cdef public Py_ssize_t cur_line_start cdef public Py_ssize_t start_pos cdef public Py_ssize_t start_line cdef public Py_ssize_t start_col cdef public text cdef public initial_state # int? 
cdef public state_name cdef public list queue cdef public bint trace cdef public cur_char cdef public long input_state cdef public level @cython.locals(input_state=long) cdef next_char(self) @cython.locals(action=Action) cpdef tuple read(self) cdef tuple scan_a_token(self) cdef tuple position(self) @cython.locals(cur_pos=long, cur_line=long, cur_line_start=long, input_state=long, next_pos=long, state=dict, buf_start_pos=long, buf_len=long, buf_index=long, trace=bint, discard=long, data=unicode, buffer=unicode) cdef run_machine_inlined(self) cdef begin(self, state) cdef produce(self, value, text = *) Cython-0.26.1/Cython/Plex/Errors.py0000664000175000017500000000222112542002467017631 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # Exception classes # #======================================================================= class PlexError(Exception): message = "" class PlexTypeError(PlexError, TypeError): pass class PlexValueError(PlexError, ValueError): pass class InvalidRegex(PlexError): pass class InvalidToken(PlexError): def __init__(self, token_number, message): PlexError.__init__(self, "Token number %d: %s" % (token_number, message)) class InvalidScanner(PlexError): pass class AmbiguousAction(PlexError): message = "Two tokens with different actions can match the same string" def __init__(self): pass class UnrecognizedInput(PlexError): scanner = None position = None state_name = None def __init__(self, scanner, state_name): self.scanner = scanner self.position = scanner.get_position() self.state_name = state_name def __str__(self): return ("'%s', line %d, char %d: Token not recognised in state %r" % ( self.position + (self.state_name,))) Cython-0.26.1/Cython/Plex/Scanners.py0000664000175000017500000002771512542002467020150 0ustar stefanstefan00000000000000#======================================================================= # # Python Lexical Analyser # # # Scanning an input stream # #======================================================================= from __future__ import absolute_import import cython cython.declare(BOL=object, EOL=object, EOF=object, NOT_FOUND=object) from . import Errors from .Regexps import BOL, EOL, EOF NOT_FOUND = object() class Scanner(object): """ A Scanner is used to read tokens from a stream of characters using the token set specified by a Plex.Lexicon. Constructor: Scanner(lexicon, stream, name = '') See the docstring of the __init__ method for details. Methods: See the docstrings of the individual methods for more information. read() --> (value, text) Reads the next lexical token from the stream. position() --> (name, line, col) Returns the position of the last token read using the read() method. begin(state_name) Causes scanner to change state. produce(value [, text]) Causes return of a token value to the caller of the Scanner. 
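A complete round trip through this interface; the lexicon is the same kind of toy specification used earlier:

from io import StringIO
from Cython.Plex import Lexicon, Scanner, Rep1, Range, Any, IGNORE

lexicon = Lexicon([
    (Rep1(Range("09")), 'number'),
    (Rep1(Any(" \t\n")), IGNORE),
])
scanner = Scanner(lexicon, StringIO(u"12 345"), name='demo')
while True:
    value, text = scanner.read()        # ('number', u'12'), then ('number', u'345')
    if value is None:                   # (None, '') signals end of input
        break
    print(scanner.position(), value, text)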
""" # lexicon = None # Lexicon # stream = None # file-like object # name = '' # buffer = '' # buf_start_pos = 0 # position in input of start of buffer # next_pos = 0 # position in input of next char to read # cur_pos = 0 # position in input of current char # cur_line = 1 # line number of current char # cur_line_start = 0 # position in input of start of current line # start_pos = 0 # position in input of start of token # start_line = 0 # line number of start of token # start_col = 0 # position in line of start of token # text = None # text of last token read # initial_state = None # Node # state_name = '' # Name of initial state # queue = None # list of tokens to be returned # trace = 0 def __init__(self, lexicon, stream, name='', initial_pos=None): """ Scanner(lexicon, stream, name = '') |lexicon| is a Plex.Lexicon instance specifying the lexical tokens to be recognised. |stream| can be a file object or anything which implements a compatible read() method. |name| is optional, and may be the name of the file being scanned or any other identifying string. """ self.trace = 0 self.buffer = u'' self.buf_start_pos = 0 self.next_pos = 0 self.cur_pos = 0 self.cur_line = 1 self.start_pos = 0 self.start_line = 0 self.start_col = 0 self.text = None self.state_name = None self.lexicon = lexicon self.stream = stream self.name = name self.queue = [] self.initial_state = None self.begin('') self.next_pos = 0 self.cur_pos = 0 self.cur_line_start = 0 self.cur_char = BOL self.input_state = 1 if initial_pos is not None: self.cur_line, self.cur_line_start = initial_pos[1], -initial_pos[2] def read(self): """ Read the next lexical token from the stream and return a tuple (value, text), where |value| is the value associated with the token as specified by the Lexicon, and |text| is the actual string read from the stream. Returns (None, '') on end of file. """ queue = self.queue while not queue: self.text, action = self.scan_a_token() if action is None: self.produce(None) self.eof() else: value = action.perform(self, self.text) if value is not None: self.produce(value) result = queue[0] del queue[0] return result def scan_a_token(self): """ Read the next input sequence recognised by the machine and return (text, action). Returns ('', None) on end of file. """ self.start_pos = self.cur_pos self.start_line = self.cur_line self.start_col = self.cur_pos - self.cur_line_start action = self.run_machine_inlined() if action is not None: if self.trace: print("Scanner: read: Performing %s %d:%d" % ( action, self.start_pos, self.cur_pos)) text = self.buffer[ self.start_pos - self.buf_start_pos: self.cur_pos - self.buf_start_pos] return (text, action) else: if self.cur_pos == self.start_pos: if self.cur_char is EOL: self.next_char() if self.cur_char is None or self.cur_char is EOF: return (u'', None) raise Errors.UnrecognizedInput(self, self.state_name) def run_machine_inlined(self): """ Inlined version of run_machine for speed. 
""" state = self.initial_state cur_pos = self.cur_pos cur_line = self.cur_line cur_line_start = self.cur_line_start cur_char = self.cur_char input_state = self.input_state next_pos = self.next_pos buffer = self.buffer buf_start_pos = self.buf_start_pos buf_len = len(buffer) b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \ None, 0, 0, 0, u'', 0, 0 trace = self.trace while 1: if trace: #TRACE# print("State %d, %d/%d:%s -->" % ( #TRACE# state['number'], input_state, cur_pos, repr(cur_char))) #TRACE# # Begin inlined self.save_for_backup() #action = state.action #@slow action = state['action'] #@fast if action is not None: b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \ action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos # End inlined self.save_for_backup() c = cur_char #new_state = state.new_state(c) #@slow new_state = state.get(c, NOT_FOUND) #@fast if new_state is NOT_FOUND: #@fast new_state = c and state.get('else') #@fast if new_state: if trace: #TRACE# print("State %d" % new_state['number']) #TRACE# state = new_state # Begin inlined: self.next_char() if input_state == 1: cur_pos = next_pos # Begin inlined: c = self.read_char() buf_index = next_pos - buf_start_pos if buf_index < buf_len: c = buffer[buf_index] next_pos += 1 else: discard = self.start_pos - buf_start_pos data = self.stream.read(0x1000) buffer = self.buffer[discard:] + data self.buffer = buffer buf_start_pos += discard self.buf_start_pos = buf_start_pos buf_len = len(buffer) buf_index -= discard if data: c = buffer[buf_index] next_pos += 1 else: c = u'' # End inlined: c = self.read_char() if c == u'\n': cur_char = EOL input_state = 2 elif not c: cur_char = EOL input_state = 4 else: cur_char = c elif input_state == 2: cur_char = u'\n' input_state = 3 elif input_state == 3: cur_line += 1 cur_line_start = cur_pos = next_pos cur_char = BOL input_state = 1 elif input_state == 4: cur_char = EOF input_state = 5 else: # input_state = 5 cur_char = u'' # End inlined self.next_char() else: # not new_state if trace: #TRACE# print("blocked") #TRACE# # Begin inlined: action = self.back_up() if b_action is not None: (action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos) = \ (b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos) else: action = None break # while 1 # End inlined: action = self.back_up() self.cur_pos = cur_pos self.cur_line = cur_line self.cur_line_start = cur_line_start self.cur_char = cur_char self.input_state = input_state self.next_pos = next_pos if trace: #TRACE# if action is not None: #TRACE# print("Doing %s" % action) #TRACE# return action def next_char(self): input_state = self.input_state if self.trace: print("Scanner: next: %s [%d] %d" % (" " * 20, input_state, self.cur_pos)) if input_state == 1: self.cur_pos = self.next_pos c = self.read_char() if c == u'\n': self.cur_char = EOL self.input_state = 2 elif not c: self.cur_char = EOL self.input_state = 4 else: self.cur_char = c elif input_state == 2: self.cur_char = u'\n' self.input_state = 3 elif input_state == 3: self.cur_line += 1 self.cur_line_start = self.cur_pos = self.next_pos self.cur_char = BOL self.input_state = 1 elif input_state == 4: self.cur_char = EOF self.input_state = 5 else: # input_state = 5 self.cur_char = u'' if self.trace: print("--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))) def position(self): """ Return a tuple (name, line, col) representing the location of 
the last token read using the read() method. |name| is the name that was provided to the Scanner constructor; |line| is the line number in the stream (1-based); |col| is the position within the line of the first character of the token (0-based). """ return (self.name, self.start_line, self.start_col) def get_position(self): """Python accessible wrapper around position(), only for error reporting. """ return self.position() def begin(self, state_name): """Set the current state of the scanner to the named state.""" self.initial_state = ( self.lexicon.get_initial_state(state_name)) self.state_name = state_name def produce(self, value, text=None): """ Called from an action procedure, causes |value| to be returned as the token value from read(). If |text| is supplied, it is returned in place of the scanned text. produce() can be called more than once during a single call to an action procedure, in which case the tokens are queued up and returned one at a time by subsequent calls to read(), until the queue is empty, whereupon scanning resumes. """ if text is None: text = self.text self.queue.append((value, text)) def eof(self): """ Override this method if you want something to be done at end of file. """ Cython-0.26.1/Cython/Plex/Actions.pxd0000664000175000017500000000111112542002467020115 0ustar stefanstefan00000000000000 cdef class Action: cdef perform(self, token_stream, text) cpdef same_as(self, other) cdef class Return(Action): cdef object value cdef perform(self, token_stream, text) cpdef same_as(self, other) cdef class Call(Action): cdef object function cdef perform(self, token_stream, text) cpdef same_as(self, other) cdef class Begin(Action): cdef object state_name cdef perform(self, token_stream, text) cpdef same_as(self, other) cdef class Ignore(Action): cdef perform(self, token_stream, text) cdef class Text(Action): cdef perform(self, token_stream, text) Cython-0.26.1/Cython/Plex/Transitions.py0000664000175000017500000001602312574327400020701 0ustar stefanstefan00000000000000# # Plex - Transition Maps # # This version represents state sets directly as dicts for speed. # from __future__ import absolute_import try: from sys import maxsize as maxint except ImportError: from sys import maxint class TransitionMap(object): """ A TransitionMap maps an input event to a set of states. An input event is one of: a range of character codes, the empty string (representing an epsilon move), or one of the special symbols BOL, EOL, EOF. For characters, this implementation compactly represents the map by means of a list: [code_0, states_0, code_1, states_1, code_2, states_2, ..., code_n-1, states_n-1, code_n] where |code_i| is a character code, and |states_i| is a set of states corresponding to characters with codes |c| in the range |code_i| <= |c| <= |code_i+1|. The following invariants hold: n >= 1 code_0 == -maxint code_n == maxint code_i < code_i+1 for i in 0..n-1 states_0 == states_n-1 Mappings for the special events '', BOL, EOL, EOF are kept separately in a dictionary. """ map = None # The list of codes and states special = None # Mapping for special events def __init__(self, map=None, special=None): if not map: map = [-maxint, {}, maxint] if not special: special = {} self.map = map self.special = special #self.check() ### def add(self, event, new_state, TupleType=tuple): """ Add transition to |new_state| on |event|. 
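The event passed to add() is either a (code0, code1) character-code range, the empty string for an epsilon move, or one of the special symbols; for example, with plain placeholder objects standing in for states:

from Cython.Plex.Transitions import TransitionMap

s1, s2 = object(), object()             # stand-ins for Machines.Node states
tm = TransitionMap()
tm.add((ord('a'), ord('z') + 1), s1)    # characters 'a'..'z' lead to s1
tm.add('eof', s2)                       # special event
tm.add('', s2)                          # epsilon move
assert tm.get_epsilon() == {s2: 1}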
""" if type(event) is TupleType: code0, code1 = event i = self.split(code0) j = self.split(code1) map = self.map while i < j: map[i + 1][new_state] = 1 i += 2 else: self.get_special(event)[new_state] = 1 def add_set(self, event, new_set, TupleType=tuple): """ Add transitions to the states in |new_set| on |event|. """ if type(event) is TupleType: code0, code1 = event i = self.split(code0) j = self.split(code1) map = self.map while i < j: map[i + 1].update(new_set) i += 2 else: self.get_special(event).update(new_set) def get_epsilon(self, none=None): """ Return the mapping for epsilon, or None. """ return self.special.get('', none) def iteritems(self, len=len): """ Return the mapping as an iterable of ((code1, code2), state_set) and (special_event, state_set) pairs. """ result = [] map = self.map else_set = map[1] i = 0 n = len(map) - 1 code0 = map[0] while i < n: set = map[i + 1] code1 = map[i + 2] if set or else_set: result.append(((code0, code1), set)) code0 = code1 i += 2 for event, set in self.special.items(): if set: result.append((event, set)) return iter(result) items = iteritems # ------------------- Private methods -------------------- def split(self, code, len=len, maxint=maxint): """ Search the list for the position of the split point for |code|, inserting a new split point if necessary. Returns index |i| such that |code| == |map[i]|. """ # We use a funky variation on binary search. map = self.map hi = len(map) - 1 # Special case: code == map[-1] if code == maxint: return hi # General case lo = 0 # loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2 while hi - lo >= 4: # Find midpoint truncated to even index mid = ((lo + hi) // 2) & ~1 if code < map[mid]: hi = mid else: lo = mid # map[lo] <= code < map[hi] and hi - lo == 2 if map[lo] == code: return lo else: map[hi:hi] = [code, map[hi - 1].copy()] #self.check() ### return hi def get_special(self, event): """ Get state set for special event, adding a new entry if necessary. 
""" special = self.special set = special.get(event, None) if not set: set = {} special[event] = set return set # --------------------- Conversion methods ----------------------- def __str__(self): map_strs = [] map = self.map n = len(map) i = 0 while i < n: code = map[i] if code == -maxint: code_str = "-inf" elif code == maxint: code_str = "inf" else: code_str = str(code) map_strs.append(code_str) i += 1 if i < n: map_strs.append(state_set_str(map[i])) i += 1 special_strs = {} for event, set in self.special.items(): special_strs[event] = state_set_str(set) return "[%s]+%s" % ( ','.join(map_strs), special_strs ) # --------------------- Debugging methods ----------------------- def check(self): """Check data structure integrity.""" if not self.map[-3] < self.map[-1]: print(self) assert 0 def dump(self, file): map = self.map i = 0 n = len(map) - 1 while i < n: self.dump_range(map[i], map[i + 2], map[i + 1], file) i += 2 for event, set in self.special.items(): if set: if not event: event = 'empty' self.dump_trans(event, set, file) def dump_range(self, code0, code1, set, file): if set: if code0 == -maxint: if code1 == maxint: k = "any" else: k = "< %s" % self.dump_char(code1) elif code1 == maxint: k = "> %s" % self.dump_char(code0 - 1) elif code0 == code1 - 1: k = self.dump_char(code0) else: k = "%s..%s" % (self.dump_char(code0), self.dump_char(code1 - 1)) self.dump_trans(k, set, file) def dump_char(self, code): if 0 <= code <= 255: return repr(chr(code)) else: return "chr(%d)" % code def dump_trans(self, key, set, file): file.write(" %s --> %s\n" % (key, self.dump_set(set))) def dump_set(self, set): return state_set_str(set) # # State set manipulation functions # #def merge_state_sets(set1, set2): # for state in set2.keys(): # set1[state] = 1 def state_set_str(set): return "[%s]" % ','.join(["S%d" % state.number for state in set]) Cython-0.26.1/Cython/Tests/0000775000175000017500000000000013151203436016174 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Tests/xmlrunner.py0000664000175000017500000003471712574327400020622 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- """unittest-xml-reporting is a PyUnit-based TestRunner that can export test results to XML files that can be consumed by a wide range of tools, such as build systems, IDEs and Continuous Integration servers. This module provides the XMLTestRunner class, which is heavily based on the default TextTestRunner. This makes the XMLTestRunner very simple to use. The script below, adapted from the unittest documentation, shows how to use XMLTestRunner in a very simple way. 
In fact, the only difference between this script and the original one is the last line: import random import unittest import xmlrunner class TestSequenceFunctions(unittest.TestCase): def setUp(self): self.seq = range(10) def test_shuffle(self): # make sure the shuffled sequence does not lose any elements random.shuffle(self.seq) self.seq.sort() self.assertEqual(self.seq, range(10)) def test_choice(self): element = random.choice(self.seq) self.assert_(element in self.seq) def test_sample(self): self.assertRaises(ValueError, random.sample, self.seq, 20) for element in random.sample(self.seq, 5): self.assert_(element in self.seq) if __name__ == '__main__': unittest.main(testRunner=xmlrunner.XMLTestRunner(output='test-reports')) """ from __future__ import absolute_import import os import sys import time from unittest import TestResult, _TextTestResult, TextTestRunner import xml.dom.minidom try: from StringIO import StringIO except ImportError: from io import StringIO # doesn't accept 'str' in Py2 class XMLDocument(xml.dom.minidom.Document): def createCDATAOrText(self, data): if ']]>' in data: return self.createTextNode(data) return self.createCDATASection(data) class _TestInfo(object): """This class is used to keep useful information about the execution of a test method. """ # Possible test outcomes (SUCCESS, FAILURE, ERROR) = range(3) def __init__(self, test_result, test_method, outcome=SUCCESS, err=None): "Create a new instance of _TestInfo." self.test_result = test_result self.test_method = test_method self.outcome = outcome self.err = err self.stdout = test_result.stdout and test_result.stdout.getvalue().strip() or '' self.stderr = test_result.stdout and test_result.stderr.getvalue().strip() or '' def get_elapsed_time(self): """Return the time that shows how long the test method took to execute. """ return self.test_result.stop_time - self.test_result.start_time def get_description(self): "Return a text representation of the test method." return self.test_result.getDescription(self.test_method) def get_error_info(self): """Return a text representation of an exception thrown by a test method. """ if not self.err: return '' return self.test_result._exc_info_to_string( self.err, self.test_method) class _XMLTestResult(_TextTestResult): """A test result class that can express test results in a XML report. Used by XMLTestRunner. """ def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1, elapsed_times=True): "Create a new instance of _XMLTestResult." _TextTestResult.__init__(self, stream, descriptions, verbosity) self.successes = [] self.callback = None self.elapsed_times = elapsed_times self.output_patched = False def _prepare_callback(self, test_info, target_list, verbose_str, short_str): """Append a _TestInfo to the given target list and sets a callback method to be called by stopTest method. """ target_list.append(test_info) def callback(): """This callback prints the test method outcome to the stream, as well as the elapsed time. """ # Ignore the elapsed times for a more reliable unit testing if not self.elapsed_times: self.start_time = self.stop_time = 0 if self.showAll: self.stream.writeln('(%.3fs) %s' % \ (test_info.get_elapsed_time(), verbose_str)) elif self.dots: self.stream.write(short_str) self.callback = callback def _patch_standard_output(self): """Replace the stdout and stderr streams with string-based streams in order to capture the tests' output. 
""" if not self.output_patched: (self.old_stdout, self.old_stderr) = (sys.stdout, sys.stderr) self.output_patched = True (sys.stdout, sys.stderr) = (self.stdout, self.stderr) = \ (StringIO(), StringIO()) def _restore_standard_output(self): "Restore the stdout and stderr streams." (sys.stdout, sys.stderr) = (self.old_stdout, self.old_stderr) self.output_patched = False def startTest(self, test): "Called before execute each test method." self._patch_standard_output() self.start_time = time.time() TestResult.startTest(self, test) if self.showAll: self.stream.write(' ' + self.getDescription(test)) self.stream.write(" ... ") def stopTest(self, test): "Called after execute each test method." self._restore_standard_output() _TextTestResult.stopTest(self, test) self.stop_time = time.time() if self.callback and callable(self.callback): self.callback() self.callback = None def addSuccess(self, test): "Called when a test executes successfully." self._prepare_callback(_TestInfo(self, test), self.successes, 'OK', '.') def addFailure(self, test, err): "Called when a test method fails." self._prepare_callback(_TestInfo(self, test, _TestInfo.FAILURE, err), self.failures, 'FAIL', 'F') def addError(self, test, err): "Called when a test method raises an error." self._prepare_callback(_TestInfo(self, test, _TestInfo.ERROR, err), self.errors, 'ERROR', 'E') def printErrorList(self, flavour, errors): "Write some information about the FAIL or ERROR to the stream." for test_info in errors: if isinstance(test_info, tuple): test_info, exc_info = test_info try: t = test_info.get_elapsed_time() except AttributeError: t = 0 try: descr = test_info.get_description() except AttributeError: try: descr = test_info.getDescription() except AttributeError: descr = str(test_info) try: err_info = test_info.get_error_info() except AttributeError: err_info = str(test_info) self.stream.writeln(self.separator1) self.stream.writeln('%s [%.3fs]: %s' % (flavour, t, descr)) self.stream.writeln(self.separator2) self.stream.writeln('%s' % err_info) def _get_info_by_testcase(self): """This method organizes test results by TestCase module. This information is used during the report generation, where a XML report will be generated for each TestCase. """ tests_by_testcase = {} for tests in (self.successes, self.failures, self.errors): for test_info in tests: if not isinstance(test_info, _TestInfo): print("Unexpected test result type: %r" % (test_info,)) continue testcase = type(test_info.test_method) # Ignore module name if it is '__main__' module = testcase.__module__ + '.' if module == '__main__.': module = '' testcase_name = module + testcase.__name__ if testcase_name not in tests_by_testcase: tests_by_testcase[testcase_name] = [] tests_by_testcase[testcase_name].append(test_info) return tests_by_testcase def _report_testsuite(suite_name, tests, xml_document): "Appends the testsuite section to the XML document." 
testsuite = xml_document.createElement('testsuite') xml_document.appendChild(testsuite) testsuite.setAttribute('name', str(suite_name)) testsuite.setAttribute('tests', str(len(tests))) testsuite.setAttribute('time', '%.3f' % sum([e.get_elapsed_time() for e in tests])) failures = len([1 for e in tests if e.outcome == _TestInfo.FAILURE]) testsuite.setAttribute('failures', str(failures)) errors = len([1 for e in tests if e.outcome == _TestInfo.ERROR]) testsuite.setAttribute('errors', str(errors)) return testsuite _report_testsuite = staticmethod(_report_testsuite) def _report_testcase(suite_name, test_result, xml_testsuite, xml_document): "Appends a testcase section to the XML document." testcase = xml_document.createElement('testcase') xml_testsuite.appendChild(testcase) testcase.setAttribute('classname', str(suite_name)) testcase.setAttribute('name', test_result.test_method.shortDescription() or getattr(test_result.test_method, '_testMethodName', str(test_result.test_method))) testcase.setAttribute('time', '%.3f' % test_result.get_elapsed_time()) if (test_result.outcome != _TestInfo.SUCCESS): elem_name = ('failure', 'error')[test_result.outcome-1] failure = xml_document.createElement(elem_name) testcase.appendChild(failure) failure.setAttribute('type', str(test_result.err[0].__name__)) failure.setAttribute('message', str(test_result.err[1])) error_info = test_result.get_error_info() failureText = xml_document.createCDATAOrText(error_info) failure.appendChild(failureText) _report_testcase = staticmethod(_report_testcase) def _report_output(test_runner, xml_testsuite, xml_document, stdout, stderr): "Appends the system-out and system-err sections to the XML document." systemout = xml_document.createElement('system-out') xml_testsuite.appendChild(systemout) systemout_text = xml_document.createCDATAOrText(stdout) systemout.appendChild(systemout_text) systemerr = xml_document.createElement('system-err') xml_testsuite.appendChild(systemerr) systemerr_text = xml_document.createCDATAOrText(stderr) systemerr.appendChild(systemerr_text) _report_output = staticmethod(_report_output) def generate_reports(self, test_runner): "Generates the XML reports to a given XMLTestRunner object." all_results = self._get_info_by_testcase() if type(test_runner.output) == str and not \ os.path.exists(test_runner.output): os.makedirs(test_runner.output) for suite, tests in all_results.items(): doc = XMLDocument() # Build the XML file testsuite = _XMLTestResult._report_testsuite(suite, tests, doc) stdout, stderr = [], [] for test in tests: _XMLTestResult._report_testcase(suite, test, testsuite, doc) if test.stdout: stdout.extend(['*****************', test.get_description(), test.stdout]) if test.stderr: stderr.extend(['*****************', test.get_description(), test.stderr]) _XMLTestResult._report_output(test_runner, testsuite, doc, '\n'.join(stdout), '\n'.join(stderr)) xml_content = doc.toprettyxml(indent='\t') if type(test_runner.output) is str: report_file = open('%s%sTEST-%s.xml' % \ (test_runner.output, os.sep, suite), 'w') try: report_file.write(xml_content) finally: report_file.close() else: # Assume that test_runner.output is a stream test_runner.output.write(xml_content) class XMLTestRunner(TextTestRunner): """A test runner class that outputs the results in JUnit like XML files. """ def __init__(self, output='.', stream=None, descriptions=True, verbose=False, elapsed_times=True): "Create a new instance of XMLTestRunner." 
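        # Typical usage (illustrative only; 'test-reports' is an arbitrary output
        # directory and any unittest suite/case can be passed to run()):
        #   runner = XMLTestRunner(output='test-reports', verbose=True)
        #   runner.run(unittest.defaultTestLoader.loadTestsFromTestCase(SomeTestCase))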
if stream is None: stream = sys.stderr verbosity = (1, 2)[verbose] TextTestRunner.__init__(self, stream, descriptions, verbosity) self.output = output self.elapsed_times = elapsed_times def _make_result(self): """Create the TestResult object which will be used to store information about the executed tests. """ return _XMLTestResult(self.stream, self.descriptions, \ self.verbosity, self.elapsed_times) def run(self, test): "Run the given test case or test suite." # Prepare the test execution result = self._make_result() # Print a nice header self.stream.writeln() self.stream.writeln('Running tests...') self.stream.writeln(result.separator2) # Execute tests start_time = time.time() test(result) stop_time = time.time() time_taken = stop_time - start_time # Generate reports self.stream.writeln() self.stream.writeln('Generating XML reports...') result.generate_reports(self) # Print results result.printErrors() self.stream.writeln(result.separator2) run = result.testsRun self.stream.writeln("Ran %d test%s in %.3fs" % (run, run != 1 and "s" or "", time_taken)) self.stream.writeln() # Error traces if not result.wasSuccessful(): self.stream.write("FAILED (") failed, errored = (len(result.failures), len(result.errors)) if failed: self.stream.write("failures=%d" % failed) if errored: if failed: self.stream.write(", ") self.stream.write("errors=%d" % errored) self.stream.writeln(")") else: self.stream.writeln("OK") return result Cython-0.26.1/Cython/Tests/TestJediTyper.py0000664000175000017500000001552413143605603021317 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- # tag: jedi from __future__ import absolute_import import sys import os.path from textwrap import dedent from contextlib import contextmanager from tempfile import NamedTemporaryFile from Cython.Compiler.ParseTreeTransforms import NormalizeTree, InterpretCompilerDirectives from Cython.Compiler import Main, Symtab, Visitor from Cython.TestUtils import TransformTest TOOLS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'Tools')) @contextmanager def _tempfile(code): code = dedent(code) if not isinstance(code, bytes): code = code.encode('utf8') with NamedTemporaryFile(suffix='.py') as f: f.write(code) f.seek(0) yield f def _test_typing(code, inject=False): sys.path.insert(0, TOOLS_DIR) try: import jedityper finally: sys.path.remove(TOOLS_DIR) lines = [] with _tempfile(code) as f: types = jedityper.analyse(f.name) if inject: lines = jedityper.inject_types(f.name, types) return types, lines class DeclarationsFinder(Visitor.VisitorTransform): directives = None visit_Node = Visitor.VisitorTransform.recurse_to_children def visit_CompilerDirectivesNode(self, node): if not self.directives: self.directives = [] self.directives.append(node) self.visitchildren(node) return node class TestJediTyper(TransformTest): def _test(self, code): return _test_typing(code)[0] def test_typing_global_int_loop(self): code = '''\ for i in range(10): a = i + 1 ''' types = self._test(code) self.assertIn((None, (1, 0)), types) variables = types.pop((None, (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) def test_typing_function_int_loop(self): code = '''\ def func(x): for i in range(x): a = i + 1 return a ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) def test_conflicting_types_in_function(self): code = '''\ def func(a, b): print(a) a = 
1 b += a a = 'abc' return a, str(b) print(func(1.5, 2)) ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['float', 'int', 'str']), 'b': set(['int'])}, variables) def _test_typing_function_char_loop(self): code = '''\ def func(x): l = [] for c in x: l.append(c) return l print(func('abcdefg')) ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) def test_typing_global_list(self): code = '''\ a = [x for x in range(10)] b = list(range(10)) c = a + b d = [0]*10 ''' types = self._test(code) self.assertIn((None, (1, 0)), types) variables = types.pop((None, (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'd': set(['list'])}, variables) def test_typing_function_list(self): code = '''\ def func(x): a = [[], []] b = [0]* 10 + a c = a[0] print(func([0]*100)) ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'x': set(['list'])}, variables) def test_typing_global_dict(self): code = '''\ a = dict() b = {i: i**2 for i in range(10)} c = a ''' types = self._test(code) self.assertIn((None, (1, 0)), types) variables = types.pop((None, (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict'])}, variables) def test_typing_function_dict(self): code = '''\ def func(x): a = dict() b = {i: i**2 for i in range(10)} c = x print(func({1:2, 'x':7})) ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict']), 'x': set(['dict'])}, variables) def test_typing_global_set(self): code = '''\ a = set() # b = {i for i in range(10)} # jedi does not support set comprehension yet c = a d = {1,2,3} e = a | b ''' types = self._test(code) self.assertIn((None, (1, 0)), types) variables = types.pop((None, (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'e': set(['set'])}, variables) def test_typing_function_set(self): code = '''\ def func(x): a = set() # b = {i for i in range(10)} # jedi does not support set comprehension yet c = a d = a | b print(func({1,2,3})) ''' types = self._test(code) self.assertIn(('func', (1, 0)), types) variables = types.pop(('func', (1, 0))) self.assertFalse(types) self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'x': set(['set'])}, variables) class TestTypeInjection(TestJediTyper): """ Subtype of TestJediTyper that additionally tests type injection and compilation. 
""" def setUp(self): super(TestTypeInjection, self).setUp() compilation_options = Main.CompilationOptions(Main.default_options) ctx = compilation_options.create_context() transform = InterpretCompilerDirectives(ctx, ctx.compiler_directives) transform.module_scope = Symtab.ModuleScope('__main__', None, ctx) self.declarations_finder = DeclarationsFinder() self.pipeline = [NormalizeTree(None), transform, self.declarations_finder] def _test(self, code): types, lines = _test_typing(code, inject=True) tree = self.run_pipeline(self.pipeline, ''.join(lines)) directives = self.declarations_finder.directives # TODO: validate directives return types Cython-0.26.1/Cython/Tests/TestStringIOTree.py0000664000175000017500000000363212574327400021736 0ustar stefanstefan00000000000000import unittest from Cython import StringIOTree as stringtree code = """ cdef int spam # line 1 cdef ham(): a = 1 b = 2 c = 3 d = 4 def eggs(): pass cpdef bacon(): print spam print 'scotch' print 'tea?' print 'or coffee?' # line 16 """ linemap = dict(enumerate(code.splitlines())) class TestStringIOTree(unittest.TestCase): def setUp(self): self.tree = stringtree.StringIOTree() def test_markers(self): assert not self.tree.allmarkers() def test_insertion(self): self.write_lines((1, 2, 3)) line_4_to_6_insertion_point = self.tree.insertion_point() self.write_lines((7, 8)) line_9_to_13_insertion_point = self.tree.insertion_point() self.write_lines((14, 15, 16)) line_4_insertion_point = line_4_to_6_insertion_point.insertion_point() self.write_lines((5, 6), tree=line_4_to_6_insertion_point) line_9_to_12_insertion_point = ( line_9_to_13_insertion_point.insertion_point()) self.write_line(13, tree=line_9_to_13_insertion_point) self.write_line(4, tree=line_4_insertion_point) self.write_line(9, tree=line_9_to_12_insertion_point) line_10_insertion_point = line_9_to_12_insertion_point.insertion_point() self.write_line(11, tree=line_9_to_12_insertion_point) self.write_line(10, tree=line_10_insertion_point) self.write_line(12, tree=line_9_to_12_insertion_point) self.assertEqual(self.tree.allmarkers(), list(range(1, 17))) self.assertEqual(code.strip(), self.tree.getvalue().strip()) def write_lines(self, linenos, tree=None): for lineno in linenos: self.write_line(lineno, tree=tree) def write_line(self, lineno, tree=None): if tree is None: tree = self.tree tree.markers.append(lineno) tree.write(linemap[lineno] + '\n') Cython-0.26.1/Cython/Tests/__init__.py0000664000175000017500000000001512542002467020305 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Tests/TestCodeWriter.py0000664000175000017500000000441412542002467021464 0ustar stefanstefan00000000000000from Cython.TestUtils import CythonTest class TestCodeWriter(CythonTest): # CythonTest uses the CodeWriter heavily, so do some checking by # roundtripping Cython code through the test framework. # Note that this test is dependant upon the normal Cython parser # to generate the input trees to the CodeWriter. This save *a lot* # of time; better to spend that time writing other tests than perfecting # this one... # Whitespace is very significant in this process: # - always newline on new block (!) 
# - indent 4 spaces # - 1 space around every operator def t(self, codestr): self.assertCode(codestr, self.fragment(codestr).root) def test_print(self): self.t(u""" print x, y print x + y ** 2 print x, y, z, """) def test_if(self): self.t(u"if x:\n pass") def test_ifelifelse(self): self.t(u""" if x: pass elif y: pass elif z + 34 ** 34 - 2: pass else: pass """) def test_def(self): self.t(u""" def f(x, y, z): pass def f(x = 34, y = 54, z): pass """) def test_longness_and_signedness(self): self.t(u"def f(unsigned long long long long long int y):\n pass") def test_signed_short(self): self.t(u"def f(signed short int y):\n pass") def test_typed_args(self): self.t(u"def f(int x, unsigned long int y):\n pass") def test_cdef_var(self): self.t(u""" cdef int hello cdef int hello = 4, x = 3, y, z """) def test_for_loop(self): self.t(u""" for x, y, z in f(g(h(34) * 2) + 23): print x, y, z else: print 43 """) def test_inplace_assignment(self): self.t(u"x += 43") def test_attribute(self): self.t(u"a.x") if __name__ == "__main__": import unittest unittest.main() Cython-0.26.1/Cython/Parser/0000775000175000017500000000000013151203436016326 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Parser/ConcreteSyntaxTree.pyx0000664000175000017500000000507612542002467022675 0ustar stefanstefan00000000000000cdef extern from "graminit.c": ctypedef struct grammar: pass cdef grammar _PyParser_Grammar cdef int Py_file_input cdef extern from "node.h": ctypedef struct node void PyNode_Free(node* n) int NCH(node* n) node* CHILD(node* n, int ix) node* RCHILD(node* n, int ix) short TYPE(node* n) char* STR(node* n) cdef extern from "parsetok.h": ctypedef struct perrdetail: pass cdef void PyParser_SetError(perrdetail *err) except * cdef node * PyParser_ParseStringFlagsFilenameEx( const char * s, const char * filename, grammar * g, int start, perrdetail * err_ret, int * flags) import distutils.sysconfig import os import re def extract_names(path): # All parse tree types are #defined in these files as ints. type_names = {} for line in open(path): if line.startswith('#define'): try: _, name, value = line.strip().split() type_names[int(value)] = name except: pass return type_names cdef dict type_names = {} cdef print_tree(node* n, indent=""): if not type_names: type_names.update(extract_names( os.path.join(distutils.sysconfig.get_python_inc(), 'token.h'))) type_names.update(extract_names( os.path.join(os.path.dirname(__file__), 'graminit.h'))) print indent, type_names.get(TYPE(n), 'unknown'), STR(n) if NCH(n) == 0 else NCH(n) indent += " " for i in range(NCH(n)): print_tree(CHILD(n, i), indent) def handle_includes(source, path): # TODO: Use include directory. def include_here(include_line): included = os.path.join(os.path.dirname(path), include_line.group(1)[1:-1]) if not os.path.exists(included): return include_line.group(0) + ' # no such path: ' + included return handle_includes(open(included).read(), path) # TODO: Proper string tokenizing. return re.sub(r'^include\s+([^\n]+[\'"])\s*(#.*)?$', include_here, source, flags=re.M) def p_module(path): cdef perrdetail err cdef int flags cdef node* n source = open(path).read() if '\ninclude ' in source: # TODO: Tokanizer needs to understand includes. 
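        # handle_includes() (defined above) splices included files in textually:
        # a line such as
        #     include "types.pxi"
        # is replaced by the contents of types.pxi resolved relative to 'path'
        # (illustrative file name; a missing file is annotated in place instead).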
source = handle_includes(source, path) path = "preparse(%s)" % path n = PyParser_ParseStringFlagsFilenameEx( source, path, &_PyParser_Grammar, Py_file_input, &err, &flags) if n: # print_tree(n) PyNode_Free(n) else: PyParser_SetError(&err) Cython-0.26.1/Cython/Parser/__init__.py0000664000175000017500000000000012542002467020431 0ustar stefanstefan00000000000000Cython-0.26.1/Cython/Parser/Grammar0000664000175000017500000002372413023021033017634 0ustar stefanstefan00000000000000# Grammar for Cython, based on the Grammar for Python 3 # Note: This grammar is not yet used by the Cython parser and is subject to change. # Start symbols for the grammar: # single_input is a single interactive statement; # file_input is a module or sequence of commands read from an input file; # eval_input is the input for the eval() functions. # NB: compound_stmt in single_input is followed by extra NEWLINE! single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE file_input: (NEWLINE | stmt)* ENDMARKER eval_input: testlist NEWLINE* ENDMARKER decorator: '@' dotted_PY_NAME [ '(' [arglist] ')' ] NEWLINE decorators: decorator+ decorated: decorators (classdef | funcdef | async_funcdef | cdef_stmt) async_funcdef: 'async' funcdef funcdef: 'def' PY_NAME parameters ['->' test] ':' suite parameters: '(' [typedargslist] ')' typedargslist: (tfpdef ['=' (test | '*')] (',' tfpdef ['=' (test | '*')])* [',' ['*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef]] | '*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) [',' ellipsis] tfpdef: maybe_typed_name [('not' | 'or') 'None'] [':' test] varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' ['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]] | '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) vfpdef: maybe_typed_name ['not' 'None'] stmt: simple_stmt | compound_stmt | cdef_stmt | ctypedef_stmt | DEF_stmt | IF_stmt simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt | print_stmt) expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | ('=' (yield_expr|testlist_star_expr))*) testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//=') print_stmt: 'print' ( [ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ] ) # For normal assignments, additional restrictions enforced by the interpreter del_stmt: 'del' exprlist pass_stmt: 'pass' flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt break_stmt: 'break' continue_stmt: 'continue' return_stmt: 'return' [testlist] yield_stmt: yield_expr raise_stmt: 'raise' [test ['from' test]] # raise_stmt: 'raise' [test [',' test [',' test]]] import_stmt: import_PY_NAME | import_from import_PY_NAME: ('import' | 'cimport') dotted_as_PY_NAMEs # note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS import_from: ('from' (('.' | '...')* dotted_PY_NAME | ('.' | '...')+) ('import' | 'cimport') ('*' | '(' import_as_PY_NAMEs ')' | import_as_PY_NAMEs)) import_as_PY_NAME: PY_NAME ['as' PY_NAME] dotted_as_PY_NAME: dotted_PY_NAME ['as' PY_NAME] import_as_PY_NAMEs: import_as_PY_NAME (',' import_as_PY_NAME)* [','] dotted_as_PY_NAMEs: dotted_as_PY_NAME (',' dotted_as_PY_NAME)* dotted_PY_NAME: PY_NAME ('.' 
PY_NAME)* global_stmt: 'global' PY_NAME (',' PY_NAME)* nonlocal_stmt: 'nonlocal' PY_NAME (',' PY_NAME)* exec_stmt: 'exec' expr ['in' test [',' test]] assert_stmt: 'assert' test [',' test] compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] while_stmt: 'while' test ':' suite ['else' ':' suite] for_stmt: 'for' exprlist ('in' testlist | for_from_clause)':' suite ['else' ':' suite] for_from_clause: 'from' expr comp_op PY_NAME comp_op expr ['by' expr] try_stmt: ('try' ':' suite ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite] | 'finally' ':' suite)) with_stmt: 'with' with_item (',' with_item)* ':' suite with_item: test ['as' expr] # NB compile.c makes sure that the default except clause is last except_clause: 'except' [test [('as' | ',') test]] suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT test: or_test ['if' or_test 'else' test] | lambdef test_nocond: or_test | lambdef_nocond lambdef: 'lambda' [varargslist] ':' test lambdef_nocond: 'lambda' [varargslist] ':' test_nocond or_test: and_test ('or' and_test)* and_test: not_test ('and' not_test)* not_test: 'not' not_test | comparison comparison: expr (comp_op expr)* # <> isn't actually a valid comparison operator in Python. It's here for the # sake of a __future__ import described in PEP 401 comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' star_expr: '*' expr expr: xor_expr ('|' xor_expr)* xor_expr: and_expr ('^' and_expr)* and_expr: shift_expr ('&' shift_expr)* shift_expr: arith_expr (('<<'|'>>') arith_expr)* arith_expr: term (('+'|'-') term)* term: factor (('*'|'/'|'%'|'//') factor)* factor: ('+'|'-'|'~') factor | power | address | size_of | cast power: atom_expr ['**' factor] atom_expr: ['await'] atom trailer* atom: ('(' [yield_expr|testlist_comp] ')' | '[' [testlist_comp] ']' | '{' [dictorsetmaker] '}' | new_expr | PY_NAME | NUMBER | STRING+ | ellipsis | 'None' | 'True' | 'False') testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' (PY_NAME | 'sizeof') subscriptlist: subscript (',' subscript)* [','] subscript: test | [test] ':' [test] [sliceop] sliceop: ':' [test] exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] testlist: test (',' test)* [','] dictorsetmaker: ( ((test ':' test | '**' expr) (comp_for | (',' (test ':' test | '**' expr))* [','])) | ((test | star_expr) (comp_for | (',' (test | star_expr))* [','])) ) classdef: 'class' PY_NAME ['(' [arglist] ')'] ':' suite arglist: argument (',' argument)* [','] # The reason that keywords are test nodes instead of NAME is that using NAME # results in an ambiguity. ast.c makes sure it's a NAME. # "test '=' test" is really "keyword '=' test", but we have no such token. # These need to be in a single rule to avoid grammar that is ambiguous # to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, # we explicitly match '*' here, too, to give it proper precedence. # Illegal combinations and orderings are blocked in ast.c: # multiple (test comp_for) arguements are blocked; keyword unpackings # that precede iterable unpackings are blocked; etc. 
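# For example (illustrative call): f(x, y=1, *args, **kw) -- here 'y=1' matches
# "test '=' test", '*args' matches star_expr, and '**kw' matches '**' expr.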
argument: ( test [comp_for] | test '=' test | '**' expr | star_expr ) comp_iter: comp_for | comp_if comp_for: 'for' exprlist ('in' or_test | for_from_clause) [comp_iter] comp_if: 'if' test_nocond [comp_iter] # not used in grammar, but may appear in "node" passed from Parser to Compiler encoding_decl: NAME yield_expr: 'yield' [yield_arg] yield_arg: 'from' test | testlist # Cython extensions # Accommodate to Py2 tokenizer. ellipsis: '...' | '.' '.' '.' signedness: 'unsigned' | 'signed' longness: 'char' | 'short' | 'long' | 'long' 'long' # TODO: [unsigned] double doesn't make sens, but we need long double int_type: signedness [longness] | longness | [signedness] [longness] ('int' | 'double') | 'complex' type: ['const'] (NAME ('.' PY_NAME)* | int_type | '(' type ')') ['complex'] [type_qualifiers] maybe_typed_name: ['const'] (NAME [('.' PY_NAME)* ['complex'] [type_qualifiers] NAME] | (int_type | '(' type ')') ['complex'] [type_qualifiers] NAME) teplate_params: '[' NAME (',' NAME)* ']' type_qualifiers: type_qualifier+ type_qualifier: '*' | '**' | '&' | type_index ('.' NAME [type_index])* # TODO: old buffer syntax type_index: '[' [(NUMBER | type (',' type)* | (memory_view_index (',' memory_view_index)*))] ']' memory_view_index: ':' [':'] [NUMBER] address: '&' factor cast: '<' type ['?'] '>' factor size_of: 'sizeof' '(' (type) ')' type_id: 'typeid' '(' (type) ')' new_expr: 'new' type '(' [arglist] ')' # TODO: Restrict cdef_stmt to "top-level" statements. cdef_stmt: ('cdef' | 'cpdef') (cvar_def | cdef_type_decl | extern_block) cdef_type_decl: ctype_decl | fused | cclass ctype_decl: struct | enum | cppclass # TODO: Does the cdef/ctypedef distinction even make sense for fused? ctypedef_stmt: 'ctypedef' (cvar_decl | struct | enum | fused) # Note: these two are similar but can't be used in an or clause # as it would cause ambiguity in the LL(1) parser. # Requires a type cvar_decl: [visibility] type cname (NEWLINE | cfunc) # Allows an assignment cvar_def: [visibility] maybe_typed_name (['=' test] (',' PY_NAME ['=' test])* NEWLINE | cfunc) visibility: 'public' | 'api' | 'readonly' # TODO: Standardize gil_spec first or last. cfunc: [teplate_params] parameters [gil_spec] [exception_value] [gil_spec] (':' suite | NEWLINE) exception_value: 'except' (['?'] expr | '*' | '+' [PY_NAME]) gil_spec: 'with' ('gil' | 'nogil') | 'nogil' cname: NAME [STRING] cclass: classdef fused: 'fused' PY_NAME ':' NEWLINE INDENT ( type NEWLINE)+ DEDENT enum: 'enum' [cname] (NEWLINE | ':' enum_suite) enum_suite: NEWLINE INDENT (cname ['=' NUMBER] NEWLINE | pass_stmt NEWLINE)+ DEDENT struct: ('struct' | 'union') cname (NEWLINE | (':' struct_suite)) struct_suite: NEWLINE INDENT (cvar_decl | pass_stmt NEWLINE)+ DEDENT cppclass: 'cppclass' cname [teplate_params] [cppclass_bases] (NEWLINE | ':' cppclass_suite) cppclass_bases: '(' dotted_PY_NAME (',' dotted_PY_NAME [teplate_params])*')' cppclass_suite: NEWLINE INDENT (cvar_decl | ctype_decl | pass_stmt NEWLINE)+ DEDENT # TODO: C++ constructors, operators extern_block: 'extern' (cvar_decl | 'from' ('*' | STRING) ['namespace' STRING] [gil_spec] ':' (pass_stmt | extern_suite)) extern_suite: NEWLINE INDENT (['cdef' | 'cpdef'] (cvar_decl | cdef_type_decl) | ctypedef_stmt)+ DEDENT cy_type_kwd: 'struct' | 'union' | 'fused' | 'cppclass' | 'int' | 'double' | 'complex' cy_kwd: cy_type_kwd | signedness | longness | visibility | 'gil' | 'nogil' | 'namespace' | 'const' | 'by' | 'extern' PY_NAME: NAME | cy_kwd # TODO: Do we really want these? Don't play well with include... 
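# Illustrative compile-time statements these rules are meant to cover:
#   DEF LIMIT = 10
#   IF UNAME_SYSNAME == "Windows": ... ELSE: ...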
DEF_stmt: 'DEF' NAME '=' testlist IF_stmt: 'IF' test ':' suite ('ELIF' test ':' suite)* ['ELSE' ':' suite] Cython-0.26.1/Cython/__init__.py0000664000175000017500000000054613023021033017175 0ustar stefanstefan00000000000000from __future__ import absolute_import from .Shadow import __version__ # Void cython.* directives (for case insensitive operating systems). from .Shadow import * def load_ipython_extension(ip): """Load the extension in IPython.""" from .Build.IpythonMagic import CythonMagics # pylint: disable=cyclic-import ip.register_magics(CythonMagics) Cython-0.26.1/Cython/Debugger/0000775000175000017500000000000013151203436016616 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Debugger/libcython.py0000664000175000017500000012762413143605603021202 0ustar stefanstefan00000000000000""" GDB extension that adds Cython support. """ from __future__ import print_function try: input = raw_input except NameError: pass import sys import textwrap import traceback import functools import itertools import collections import gdb try: # python 2 UNICODE = unicode BYTES = str except NameError: # python 3 UNICODE = str BYTES = bytes try: from lxml import etree have_lxml = True except ImportError: have_lxml = False try: # Python 2.5 from xml.etree import cElementTree as etree except ImportError: try: # Python 2.5 from xml.etree import ElementTree as etree except ImportError: try: # normal cElementTree install import cElementTree as etree except ImportError: # normal ElementTree install import elementtree.ElementTree as etree try: import pygments.lexers import pygments.formatters except ImportError: pygments = None sys.stderr.write("Install pygments for colorized source code.\n") if hasattr(gdb, 'string_to_argv'): from gdb import string_to_argv else: from shlex import split as string_to_argv from Cython.Debugger import libpython # C or Python type CObject = 'CObject' PythonObject = 'PythonObject' _data_types = dict(CObject=CObject, PythonObject=PythonObject) _filesystemencoding = sys.getfilesystemencoding() or 'UTF-8' # decorators def dont_suppress_errors(function): "*sigh*, readline" @functools.wraps(function) def wrapper(*args, **kwargs): try: return function(*args, **kwargs) except Exception: traceback.print_exc() raise return wrapper def default_selected_gdb_frame(err=True): def decorator(function): @functools.wraps(function) def wrapper(self, frame=None, *args, **kwargs): try: frame = frame or gdb.selected_frame() except RuntimeError: raise gdb.GdbError("No frame is currently selected.") if err and frame.name() is None: raise NoFunctionNameInFrameError() return function(self, frame, *args, **kwargs) return wrapper return decorator def require_cython_frame(function): @functools.wraps(function) @require_running_program def wrapper(self, *args, **kwargs): frame = kwargs.get('frame') or gdb.selected_frame() if not self.is_cython_function(frame): raise gdb.GdbError('Selected frame does not correspond with a ' 'Cython function we know about.') return function(self, *args, **kwargs) return wrapper def dispatch_on_frame(c_command, python_command=None): def decorator(function): @functools.wraps(function) def wrapper(self, *args, **kwargs): is_cy = self.is_cython_function() is_py = self.is_python_function() if is_cy or (is_py and not python_command): function(self, *args, **kwargs) elif is_py: gdb.execute(python_command) elif self.is_relevant_function(): gdb.execute(c_command) else: raise gdb.GdbError("Not a function cygdb knows about. 
" "Use the normal GDB commands instead.") return wrapper return decorator def require_running_program(function): @functools.wraps(function) def wrapper(*args, **kwargs): try: gdb.selected_frame() except RuntimeError: raise gdb.GdbError("No frame is currently selected.") return function(*args, **kwargs) return wrapper def gdb_function_value_to_unicode(function): @functools.wraps(function) def wrapper(self, string, *args, **kwargs): if isinstance(string, gdb.Value): string = string.string() return function(self, string, *args, **kwargs) return wrapper # Classes that represent the debug information # Don't rename the parameters of these classes, they come directly from the XML class CythonModule(object): def __init__(self, module_name, filename, c_filename): self.name = module_name self.filename = filename self.c_filename = c_filename self.globals = {} # {cython_lineno: min(c_linenos)} self.lineno_cy2c = {} # {c_lineno: cython_lineno} self.lineno_c2cy = {} self.functions = {} class CythonVariable(object): def __init__(self, name, cname, qualified_name, type, lineno): self.name = name self.cname = cname self.qualified_name = qualified_name self.type = type self.lineno = int(lineno) class CythonFunction(CythonVariable): def __init__(self, module, name, cname, pf_cname, qualified_name, lineno, type=CObject, is_initmodule_function="False"): super(CythonFunction, self).__init__(name, cname, qualified_name, type, lineno) self.module = module self.pf_cname = pf_cname self.is_initmodule_function = is_initmodule_function == "True" self.locals = {} self.arguments = [] self.step_into_functions = set() # General purpose classes class CythonBase(object): @default_selected_gdb_frame(err=False) def is_cython_function(self, frame): return frame.name() in self.cy.functions_by_cname @default_selected_gdb_frame(err=False) def is_python_function(self, frame): """ Tells if a frame is associated with a Python function. If we can't read the Python frame information, don't regard it as such. """ if frame.name() == 'PyEval_EvalFrameEx': pyframe = libpython.Frame(frame).get_pyop() return pyframe and not pyframe.is_optimized_out() return False @default_selected_gdb_frame() def get_c_function_name(self, frame): return frame.name() @default_selected_gdb_frame() def get_c_lineno(self, frame): return frame.find_sal().line @default_selected_gdb_frame() def get_cython_function(self, frame): result = self.cy.functions_by_cname.get(frame.name()) if result is None: raise NoCythonFunctionInFrameError() return result @default_selected_gdb_frame() def get_cython_lineno(self, frame): """ Get the current Cython line number. Returns 0 if there is no correspondence between the C and Cython code. 
""" cyfunc = self.get_cython_function(frame) return cyfunc.module.lineno_c2cy.get(self.get_c_lineno(frame), 0) @default_selected_gdb_frame() def get_source_desc(self, frame): filename = lineno = lexer = None if self.is_cython_function(frame): filename = self.get_cython_function(frame).module.filename lineno = self.get_cython_lineno(frame) if pygments: lexer = pygments.lexers.CythonLexer(stripall=False) elif self.is_python_function(frame): pyframeobject = libpython.Frame(frame).get_pyop() if not pyframeobject: raise gdb.GdbError( 'Unable to read information on python frame') filename = pyframeobject.filename() lineno = pyframeobject.current_line_num() if pygments: lexer = pygments.lexers.PythonLexer(stripall=False) else: symbol_and_line_obj = frame.find_sal() if not symbol_and_line_obj or not symbol_and_line_obj.symtab: filename = None lineno = 0 else: filename = symbol_and_line_obj.symtab.fullname() lineno = symbol_and_line_obj.line if pygments: lexer = pygments.lexers.CLexer(stripall=False) return SourceFileDescriptor(filename, lexer), lineno @default_selected_gdb_frame() def get_source_line(self, frame): source_desc, lineno = self.get_source_desc() return source_desc.get_source(lineno) @default_selected_gdb_frame() def is_relevant_function(self, frame): """ returns whether we care about a frame on the user-level when debugging Cython code """ name = frame.name() older_frame = frame.older() if self.is_cython_function(frame) or self.is_python_function(frame): return True elif older_frame and self.is_cython_function(older_frame): # check for direct C function call from a Cython function cython_func = self.get_cython_function(older_frame) return name in cython_func.step_into_functions return False @default_selected_gdb_frame(err=False) def print_stackframe(self, frame, index, is_c=False): """ Print a C, Cython or Python stack frame and the line of source code if available. """ # do this to prevent the require_cython_frame decorator from # raising GdbError when calling self.cy.cy_cvalue.invoke() selected_frame = gdb.selected_frame() frame.select() try: source_desc, lineno = self.get_source_desc(frame) except NoFunctionNameInFrameError: print('#%-2d Unknown Frame (compile with -g)' % index) return if not is_c and self.is_python_function(frame): pyframe = libpython.Frame(frame).get_pyop() if pyframe is None or pyframe.is_optimized_out(): # print this python function as a C function return self.print_stackframe(frame, index, is_c=True) func_name = pyframe.co_name func_cname = 'PyEval_EvalFrameEx' func_args = [] elif self.is_cython_function(frame): cyfunc = self.get_cython_function(frame) f = lambda arg: self.cy.cy_cvalue.invoke(arg, frame=frame) func_name = cyfunc.name func_cname = cyfunc.cname func_args = [] # [(arg, f(arg)) for arg in cyfunc.arguments] else: source_desc, lineno = self.get_source_desc(frame) func_name = frame.name() func_cname = func_name func_args = [] try: gdb_value = gdb.parse_and_eval(func_cname) except RuntimeError: func_address = 0 else: func_address = gdb_value.address if not isinstance(func_address, int): # Seriously? Why is the address not an int? 
if not isinstance(func_address, (str, bytes)): func_address = str(func_address) func_address = int(func_address.split()[0], 0) a = ', '.join('%s=%s' % (name, val) for name, val in func_args) sys.stdout.write('#%-2d 0x%016x in %s(%s)' % (index, func_address, func_name, a)) if source_desc.filename is not None: sys.stdout.write(' at %s:%s' % (source_desc.filename, lineno)) sys.stdout.write('\n') try: sys.stdout.write(' ' + source_desc.get_source(lineno)) except gdb.GdbError: pass selected_frame.select() def get_remote_cython_globals_dict(self): m = gdb.parse_and_eval('__pyx_m') try: PyModuleObject = gdb.lookup_type('PyModuleObject') except RuntimeError: raise gdb.GdbError(textwrap.dedent("""\ Unable to lookup type PyModuleObject, did you compile python with debugging support (-g)?""")) m = m.cast(PyModuleObject.pointer()) return m['md_dict'] def get_cython_globals_dict(self): """ Get the Cython globals dict where the remote names are turned into local strings. """ remote_dict = self.get_remote_cython_globals_dict() pyobject_dict = libpython.PyObjectPtr.from_pyobject_ptr(remote_dict) result = {} seen = set() for k, v in pyobject_dict.items(): result[k.proxyval(seen)] = v return result def print_gdb_value(self, name, value, max_name_length=None, prefix=''): if libpython.pretty_printer_lookup(value): typename = '' else: typename = '(%s) ' % (value.type,) if max_name_length is None: print('%s%s = %s%s' % (prefix, name, typename, value)) else: print('%s%-*s = %s%s' % (prefix, max_name_length, name, typename, value)) def is_initialized(self, cython_func, local_name): cyvar = cython_func.locals[local_name] cur_lineno = self.get_cython_lineno() if '->' in cyvar.cname: # Closed over free variable if cur_lineno > cython_func.lineno: if cyvar.type == PythonObject: return int(gdb.parse_and_eval(cyvar.cname)) return True return False return cur_lineno > cyvar.lineno class SourceFileDescriptor(object): def __init__(self, filename, lexer, formatter=None): self.filename = filename self.lexer = lexer self.formatter = formatter def valid(self): return self.filename is not None def lex(self, code): if pygments and self.lexer and parameters.colorize_code: bg = parameters.terminal_background.value if self.formatter is None: formatter = pygments.formatters.TerminalFormatter(bg=bg) else: formatter = self.formatter return pygments.highlight(code, self.lexer, formatter) return code def _get_source(self, start, stop, lex_source, mark_line, lex_entire): with open(self.filename) as f: # to provide "correct" colouring, the entire code needs to be # lexed. However, this makes a lot of things terribly slow, so # we decide not to. Besides, it's unlikely to matter. 
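            # When lex_entire is requested (as 'cy list' does), the whole file
            # is highlighted in one pass here; otherwise each yielded line is
            # lexed individually further down.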
if lex_source and lex_entire: f = self.lex(f.read()).splitlines() slice = itertools.islice(f, start - 1, stop - 1) for idx, line in enumerate(slice): if start + idx == mark_line: prefix = '>' else: prefix = ' ' if lex_source and not lex_entire: line = self.lex(line) yield '%s %4d %s' % (prefix, start + idx, line.rstrip()) def get_source(self, start, stop=None, lex_source=True, mark_line=0, lex_entire=False): exc = gdb.GdbError('Unable to retrieve source code') if not self.filename: raise exc start = max(start, 1) if stop is None: stop = start + 1 try: return '\n'.join( self._get_source(start, stop, lex_source, mark_line, lex_entire)) except IOError: raise exc # Errors class CyGDBError(gdb.GdbError): """ Base class for Cython-command related erorrs """ def __init__(self, *args): args = args or (self.msg,) super(CyGDBError, self).__init__(*args) class NoCythonFunctionInFrameError(CyGDBError): """ raised when the user requests the current cython function, which is unavailable """ msg = "Current function is a function cygdb doesn't know about" class NoFunctionNameInFrameError(NoCythonFunctionInFrameError): """ raised when the name of the C function could not be determined in the current C stack frame """ msg = ('C function name could not be determined in the current C stack ' 'frame') # Parameters class CythonParameter(gdb.Parameter): """ Base class for cython parameters """ def __init__(self, name, command_class, parameter_class, default=None): self.show_doc = self.set_doc = self.__class__.__doc__ super(CythonParameter, self).__init__(name, command_class, parameter_class) if default is not None: self.value = default def __bool__(self): return bool(self.value) __nonzero__ = __bool__ # Python 2 class CompleteUnqualifiedFunctionNames(CythonParameter): """ Have 'cy break' complete unqualified function or method names. """ class ColorizeSourceCode(CythonParameter): """ Tell cygdb whether to colorize source code. """ class TerminalBackground(CythonParameter): """ Tell cygdb about the user's terminal background (light or dark). """ class CythonParameters(object): """ Simple container class that might get more functionality in the distant future (mostly to remind us that we're dealing with parameters). """ def __init__(self): self.complete_unqualified = CompleteUnqualifiedFunctionNames( 'cy_complete_unqualified', gdb.COMMAND_BREAKPOINTS, gdb.PARAM_BOOLEAN, True) self.colorize_code = ColorizeSourceCode( 'cy_colorize_code', gdb.COMMAND_FILES, gdb.PARAM_BOOLEAN, True) self.terminal_background = TerminalBackground( 'cy_terminal_background_color', gdb.COMMAND_FILES, gdb.PARAM_STRING, "dark") parameters = CythonParameters() # Commands class CythonCommand(gdb.Command, CythonBase): """ Base class for Cython commands """ command_class = gdb.COMMAND_NONE @classmethod def _register(cls, clsname, args, kwargs): if not hasattr(cls, 'completer_class'): return cls(clsname, cls.command_class, *args, **kwargs) else: return cls(clsname, cls.command_class, cls.completer_class, *args, **kwargs) @classmethod def register(cls, *args, **kwargs): alias = getattr(cls, 'alias', None) if alias: cls._register(cls.alias, args, kwargs) return cls._register(cls.name, args, kwargs) class CyCy(CythonCommand): """ Invoke a Cython command. 
Available commands are: cy import cy break cy step cy next cy run cy cont cy finish cy up cy down cy select cy bt / cy backtrace cy list cy print cy set cy locals cy globals cy exec """ name = 'cy' command_class = gdb.COMMAND_NONE completer_class = gdb.COMPLETE_COMMAND def __init__(self, name, command_class, completer_class): # keep the signature 2.5 compatible (i.e. do not use f(*a, k=v) super(CythonCommand, self).__init__(name, command_class, completer_class, prefix=True) commands = dict( # GDB commands import_ = CyImport.register(), break_ = CyBreak.register(), step = CyStep.register(), next = CyNext.register(), run = CyRun.register(), cont = CyCont.register(), finish = CyFinish.register(), up = CyUp.register(), down = CyDown.register(), select = CySelect.register(), bt = CyBacktrace.register(), list = CyList.register(), print_ = CyPrint.register(), locals = CyLocals.register(), globals = CyGlobals.register(), exec_ = libpython.FixGdbCommand('cy exec', '-cy-exec'), _exec = CyExec.register(), set = CySet.register(), # GDB functions cy_cname = CyCName('cy_cname'), cy_cvalue = CyCValue('cy_cvalue'), cy_lineno = CyLine('cy_lineno'), cy_eval = CyEval('cy_eval'), ) for command_name, command in commands.items(): command.cy = self setattr(self, command_name, command) self.cy = self # Cython module namespace self.cython_namespace = {} # maps (unique) qualified function names (e.g. # cythonmodule.ClassName.method_name) to the CythonFunction object self.functions_by_qualified_name = {} # unique cnames of Cython functions self.functions_by_cname = {} # map function names like method_name to a list of all such # CythonFunction objects self.functions_by_name = collections.defaultdict(list) class CyImport(CythonCommand): """ Import debug information outputted by the Cython compiler Example: cy import FILE... 
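    Illustrative invocation (file names depend on your build; the compiler's
    debug output typically lands in a cython_debug directory):

        cy import cython_debug/cython_debug_info_*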
""" name = 'cy import' command_class = gdb.COMMAND_STATUS completer_class = gdb.COMPLETE_FILENAME def invoke(self, args, from_tty): if isinstance(args, BYTES): args = args.decode(_filesystemencoding) for arg in string_to_argv(args): try: f = open(arg) except OSError as e: raise gdb.GdbError('Unable to open file %r: %s' % (args, e.args[1])) t = etree.parse(f) for module in t.getroot(): cython_module = CythonModule(**module.attrib) self.cy.cython_namespace[cython_module.name] = cython_module for variable in module.find('Globals'): d = variable.attrib cython_module.globals[d['name']] = CythonVariable(**d) for function in module.find('Functions'): cython_function = CythonFunction(module=cython_module, **function.attrib) # update the global function mappings name = cython_function.name qname = cython_function.qualified_name self.cy.functions_by_name[name].append(cython_function) self.cy.functions_by_qualified_name[ cython_function.qualified_name] = cython_function self.cy.functions_by_cname[ cython_function.cname] = cython_function d = cython_module.functions[qname] = cython_function for local in function.find('Locals'): d = local.attrib cython_function.locals[d['name']] = CythonVariable(**d) for step_into_func in function.find('StepIntoFunctions'): d = step_into_func.attrib cython_function.step_into_functions.add(d['name']) cython_function.arguments.extend( funcarg.tag for funcarg in function.find('Arguments')) for marker in module.find('LineNumberMapping'): cython_lineno = int(marker.attrib['cython_lineno']) c_linenos = list(map(int, marker.attrib['c_linenos'].split())) cython_module.lineno_cy2c[cython_lineno] = min(c_linenos) for c_lineno in c_linenos: cython_module.lineno_c2cy[c_lineno] = cython_lineno class CyBreak(CythonCommand): """ Set a breakpoint for Cython code using Cython qualified name notation, e.g.: cy break cython_modulename.ClassName.method_name... or normal notation: cy break function_or_method_name... or for a line number: cy break cython_module:lineno... Set a Python breakpoint: Break on any function or method named 'func' in module 'modname' cy break -p modname.func... Break on any function or method named 'func' cy break -p func... """ name = 'cy break' command_class = gdb.COMMAND_BREAKPOINTS def _break_pyx(self, name): modulename, _, lineno = name.partition(':') lineno = int(lineno) if modulename: cython_module = self.cy.cython_namespace[modulename] else: cython_module = self.get_cython_function().module if lineno in cython_module.lineno_cy2c: c_lineno = cython_module.lineno_cy2c[lineno] breakpoint = '%s:%s' % (cython_module.c_filename, c_lineno) gdb.execute('break ' + breakpoint) else: raise gdb.GdbError("Not a valid line number. 
" "Does it contain actual code?") def _break_funcname(self, funcname): func = self.cy.functions_by_qualified_name.get(funcname) if func and func.is_initmodule_function: func = None break_funcs = [func] if not func: funcs = self.cy.functions_by_name.get(funcname) or [] funcs = [f for f in funcs if not f.is_initmodule_function] if not funcs: gdb.execute('break ' + funcname) return if len(funcs) > 1: # multiple functions, let the user pick one print('There are multiple such functions:') for idx, func in enumerate(funcs): print('%3d) %s' % (idx, func.qualified_name)) while True: try: result = input( "Select a function, press 'a' for all " "functions or press 'q' or '^D' to quit: ") except EOFError: return else: if result.lower() == 'q': return elif result.lower() == 'a': break_funcs = funcs break elif (result.isdigit() and 0 <= int(result) < len(funcs)): break_funcs = [funcs[int(result)]] break else: print('Not understood...') else: break_funcs = [funcs[0]] for func in break_funcs: gdb.execute('break %s' % func.cname) if func.pf_cname: gdb.execute('break %s' % func.pf_cname) def invoke(self, function_names, from_tty): if isinstance(function_names, BYTES): function_names = function_names.decode(_filesystemencoding) argv = string_to_argv(function_names) if function_names.startswith('-p'): argv = argv[1:] python_breakpoints = True else: python_breakpoints = False for funcname in argv: if python_breakpoints: gdb.execute('py-break %s' % funcname) elif ':' in funcname: self._break_pyx(funcname) else: self._break_funcname(funcname) @dont_suppress_errors def complete(self, text, word): # Filter init-module functions (breakpoints can be set using # modulename:linenumber). names = [n for n, L in self.cy.functions_by_name.items() if any(not f.is_initmodule_function for f in L)] qnames = [n for n, f in self.cy.functions_by_qualified_name.items() if not f.is_initmodule_function] if parameters.complete_unqualified: all_names = itertools.chain(qnames, names) else: all_names = qnames words = text.strip().split() if not words or '.' not in words[-1]: # complete unqualified seen = set(text[:-len(word)].split()) return [n for n in all_names if n.startswith(word) and n not in seen] # complete qualified name lastword = words[-1] compl = [n for n in qnames if n.startswith(lastword)] if len(lastword) > len(word): # readline sees something (e.g. a '.') as a word boundary, so don't # "recomplete" this prefix strip_prefix_length = len(lastword) - len(word) compl = [n[strip_prefix_length:] for n in compl] return compl class CythonInfo(CythonBase, libpython.PythonInfo): """ Implementation of the interface dictated by libpython.LanguageInfo. """ def lineno(self, frame): # Take care of the Python and Cython levels. We need to care for both # as we can't simply dispath to 'py-step', since that would work for # stepping through Python code, but it would not step back into Cython- # related code. The C level should be dispatched to the 'step' command. 
if self.is_cython_function(frame): return self.get_cython_lineno(frame) return super(CythonInfo, self).lineno(frame) def get_source_line(self, frame): try: line = super(CythonInfo, self).get_source_line(frame) except gdb.GdbError: return None else: return line.strip() or None def exc_info(self, frame): if self.is_python_function: return super(CythonInfo, self).exc_info(frame) def runtime_break_functions(self): if self.is_cython_function(): return self.get_cython_function().step_into_functions return () def static_break_functions(self): result = ['PyEval_EvalFrameEx'] result.extend(self.cy.functions_by_cname) return result class CythonExecutionControlCommand(CythonCommand, libpython.ExecutionControlCommandBase): @classmethod def register(cls): return cls(cls.name, cython_info) class CyStep(CythonExecutionControlCommand, libpython.PythonStepperMixin): "Step through Cython, Python or C code." name = 'cy -step' stepinto = True def invoke(self, args, from_tty): if self.is_python_function(): self.python_step(self.stepinto) elif not self.is_cython_function(): if self.stepinto: command = 'step' else: command = 'next' self.finish_executing(gdb.execute(command, to_string=True)) else: self.step(stepinto=self.stepinto) class CyNext(CyStep): "Step-over Cython, Python or C code." name = 'cy -next' stepinto = False class CyRun(CythonExecutionControlCommand): """ Run a Cython program. This is like the 'run' command, except that it displays Cython or Python source lines as well """ name = 'cy run' invoke = CythonExecutionControlCommand.run class CyCont(CythonExecutionControlCommand): """ Continue a Cython program. This is like the 'run' command, except that it displays Cython or Python source lines as well. """ name = 'cy cont' invoke = CythonExecutionControlCommand.cont class CyFinish(CythonExecutionControlCommand): """ Execute until the function returns. """ name = 'cy finish' invoke = CythonExecutionControlCommand.finish class CyUp(CythonCommand): """ Go up a Cython, Python or relevant C frame. """ name = 'cy up' _command = 'up' def invoke(self, *args): try: gdb.execute(self._command, to_string=True) while not self.is_relevant_function(gdb.selected_frame()): gdb.execute(self._command, to_string=True) except RuntimeError as e: raise gdb.GdbError(*e.args) frame = gdb.selected_frame() index = 0 while frame: frame = frame.older() index += 1 self.print_stackframe(index=index - 1) class CyDown(CyUp): """ Go down a Cython, Python or relevant C frame. """ name = 'cy down' _command = 'down' class CySelect(CythonCommand): """ Select a frame. Use frame numbers as listed in `cy backtrace`. This command is useful because `cy backtrace` prints a reversed backtrace. 
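    Example (illustrative; 2 stands for whatever frame number `cy bt` printed):

        cy select 2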
""" name = 'cy select' def invoke(self, stackno, from_tty): try: stackno = int(stackno) except ValueError: raise gdb.GdbError("Not a valid number: %r" % (stackno,)) frame = gdb.selected_frame() while frame.newer(): frame = frame.newer() stackdepth = libpython.stackdepth(frame) try: gdb.execute('select %d' % (stackdepth - stackno - 1,)) except RuntimeError as e: raise gdb.GdbError(*e.args) class CyBacktrace(CythonCommand): 'Print the Cython stack' name = 'cy bt' alias = 'cy backtrace' command_class = gdb.COMMAND_STACK completer_class = gdb.COMPLETE_NONE @require_running_program def invoke(self, args, from_tty): # get the first frame frame = gdb.selected_frame() while frame.older(): frame = frame.older() print_all = args == '-a' index = 0 while frame: try: is_relevant = self.is_relevant_function(frame) except CyGDBError: is_relevant = False if print_all or is_relevant: self.print_stackframe(frame, index) index += 1 frame = frame.newer() class CyList(CythonCommand): """ List Cython source code. To disable to customize colouring see the cy_* parameters. """ name = 'cy list' command_class = gdb.COMMAND_FILES completer_class = gdb.COMPLETE_NONE # @dispatch_on_frame(c_command='list') def invoke(self, _, from_tty): sd, lineno = self.get_source_desc() source = sd.get_source(lineno - 5, lineno + 5, mark_line=lineno, lex_entire=True) print(source) class CyPrint(CythonCommand): """ Print a Cython variable using 'cy-print x' or 'cy-print module.function.x' """ name = 'cy print' command_class = gdb.COMMAND_DATA def invoke(self, name, from_tty, max_name_length=None): if self.is_python_function(): return gdb.execute('py-print ' + name) elif self.is_cython_function(): value = self.cy.cy_cvalue.invoke(name.lstrip('*')) for c in name: if c == '*': value = value.dereference() else: break self.print_gdb_value(name, value, max_name_length) else: gdb.execute('print ' + name) def complete(self): if self.is_cython_function(): f = self.get_cython_function() return list(itertools.chain(f.locals, f.globals)) else: return [] sortkey = lambda item: item[0].lower() class CyLocals(CythonCommand): """ List the locals from the current Cython frame. """ name = 'cy locals' command_class = gdb.COMMAND_STACK completer_class = gdb.COMPLETE_NONE @dispatch_on_frame(c_command='info locals', python_command='py-locals') def invoke(self, args, from_tty): cython_function = self.get_cython_function() if cython_function.is_initmodule_function: self.cy.globals.invoke(args, from_tty) return local_cython_vars = cython_function.locals max_name_length = len(max(local_cython_vars, key=len)) for name, cyvar in sorted(local_cython_vars.items(), key=sortkey): if self.is_initialized(self.get_cython_function(), cyvar.name): value = gdb.parse_and_eval(cyvar.cname) if not value.is_optimized_out: self.print_gdb_value(cyvar.name, value, max_name_length, '') class CyGlobals(CyLocals): """ List the globals from the current Cython module. 
""" name = 'cy globals' command_class = gdb.COMMAND_STACK completer_class = gdb.COMPLETE_NONE @dispatch_on_frame(c_command='info variables', python_command='py-globals') def invoke(self, args, from_tty): global_python_dict = self.get_cython_globals_dict() module_globals = self.get_cython_function().module.globals max_globals_len = 0 max_globals_dict_len = 0 if module_globals: max_globals_len = len(max(module_globals, key=len)) if global_python_dict: max_globals_dict_len = len(max(global_python_dict)) max_name_length = max(max_globals_len, max_globals_dict_len) seen = set() print('Python globals:') for k, v in sorted(global_python_dict.items(), key=sortkey): v = v.get_truncated_repr(libpython.MAX_OUTPUT_LEN) seen.add(k) print(' %-*s = %s' % (max_name_length, k, v)) print('C globals:') for name, cyvar in sorted(module_globals.items(), key=sortkey): if name not in seen: try: value = gdb.parse_and_eval(cyvar.cname) except RuntimeError: pass else: if not value.is_optimized_out: self.print_gdb_value(cyvar.name, value, max_name_length, ' ') class EvaluateOrExecuteCodeMixin(object): """ Evaluate or execute Python code in a Cython or Python frame. The 'evalcode' method evaluations Python code, prints a traceback if an exception went uncaught, and returns any return value as a gdb.Value (NULL on exception). """ def _fill_locals_dict(self, executor, local_dict_pointer): "Fill a remotely allocated dict with values from the Cython C stack" cython_func = self.get_cython_function() for name, cyvar in cython_func.locals.items(): if cyvar.type == PythonObject and self.is_initialized(cython_func, name): try: val = gdb.parse_and_eval(cyvar.cname) except RuntimeError: continue else: if val.is_optimized_out: continue pystringp = executor.alloc_pystring(name) code = ''' (PyObject *) PyDict_SetItem( (PyObject *) %d, (PyObject *) %d, (PyObject *) %s) ''' % (local_dict_pointer, pystringp, cyvar.cname) try: if gdb.parse_and_eval(code) < 0: gdb.parse_and_eval('PyErr_Print()') raise gdb.GdbError("Unable to execute Python code.") finally: # PyDict_SetItem doesn't steal our reference executor.xdecref(pystringp) def _find_first_cython_or_python_frame(self): frame = gdb.selected_frame() while frame: if (self.is_cython_function(frame) or self.is_python_function(frame)): frame.select() return frame frame = frame.older() raise gdb.GdbError("There is no Cython or Python frame on the stack.") def _evalcode_cython(self, executor, code, input_type): with libpython.FetchAndRestoreError(): # get the dict of Cython globals and construct a dict in the # inferior with Cython locals global_dict = gdb.parse_and_eval( '(PyObject *) PyModule_GetDict(__pyx_m)') local_dict = gdb.parse_and_eval('(PyObject *) PyDict_New()') try: self._fill_locals_dict(executor, libpython.pointervalue(local_dict)) result = executor.evalcode(code, input_type, global_dict, local_dict) finally: executor.xdecref(libpython.pointervalue(local_dict)) return result def evalcode(self, code, input_type): """ Evaluate `code` in a Python or Cython stack frame using the given `input_type`. """ frame = self._find_first_cython_or_python_frame() executor = libpython.PythonCodeExecutor() if self.is_python_function(frame): return libpython._evalcode_python(executor, code, input_type) return self._evalcode_cython(executor, code, input_type) class CyExec(CythonCommand, libpython.PyExec, EvaluateOrExecuteCodeMixin): """ Execute Python code in the nearest Python or Cython frame. 
""" name = '-cy-exec' command_class = gdb.COMMAND_STACK completer_class = gdb.COMPLETE_NONE def invoke(self, expr, from_tty): expr, input_type = self.readcode(expr) executor = libpython.PythonCodeExecutor() executor.xdecref(self.evalcode(expr, executor.Py_single_input)) class CySet(CythonCommand): """ Set a Cython variable to a certain value cy set my_cython_c_variable = 10 cy set my_cython_py_variable = $cy_eval("{'doner': 'kebab'}") This is equivalent to set $cy_value("my_cython_variable") = 10 """ name = 'cy set' command_class = gdb.COMMAND_DATA completer_class = gdb.COMPLETE_NONE @require_cython_frame def invoke(self, expr, from_tty): name_and_expr = expr.split('=', 1) if len(name_and_expr) != 2: raise gdb.GdbError("Invalid expression. Use 'cy set var = expr'.") varname, expr = name_and_expr cname = self.cy.cy_cname.invoke(varname.strip()) gdb.execute("set %s = %s" % (cname, expr)) # Functions class CyCName(gdb.Function, CythonBase): """ Get the C name of a Cython variable in the current context. Examples: print $cy_cname("function") print $cy_cname("Class.method") print $cy_cname("module.function") """ @require_cython_frame @gdb_function_value_to_unicode def invoke(self, cyname, frame=None): frame = frame or gdb.selected_frame() cname = None if self.is_cython_function(frame): cython_function = self.get_cython_function(frame) if cyname in cython_function.locals: cname = cython_function.locals[cyname].cname elif cyname in cython_function.module.globals: cname = cython_function.module.globals[cyname].cname else: qname = '%s.%s' % (cython_function.module.name, cyname) if qname in cython_function.module.functions: cname = cython_function.module.functions[qname].cname if not cname: cname = self.cy.functions_by_qualified_name.get(cyname) if not cname: raise gdb.GdbError('No such Cython variable: %s' % cyname) return cname class CyCValue(CyCName): """ Get the value of a Cython variable. """ @require_cython_frame @gdb_function_value_to_unicode def invoke(self, cyname, frame=None): globals_dict = self.get_cython_globals_dict() cython_function = self.get_cython_function(frame) if self.is_initialized(cython_function, cyname): cname = super(CyCValue, self).invoke(cyname, frame=frame) return gdb.parse_and_eval(cname) elif cyname in globals_dict: return globals_dict[cyname]._gdbval else: raise gdb.GdbError("Variable %s is not initialized." % cyname) class CyLine(gdb.Function, CythonBase): """ Get the current Cython line. """ @require_cython_frame def invoke(self): return self.get_cython_lineno() class CyEval(gdb.Function, CythonBase, EvaluateOrExecuteCodeMixin): """ Evaluate Python code in the nearest Python or Cython frame and return """ @gdb_function_value_to_unicode def invoke(self, python_expression): input_type = libpython.PythonCodeExecutor.Py_eval_input return self.evalcode(python_expression, input_type) cython_info = CythonInfo() cy = CyCy.register() cython_info.cy = cy def register_defines(): libpython.source_gdb_script(textwrap.dedent("""\ define cy step cy -step end define cy next cy -next end document cy step %s end document cy next %s end """) % (CyStep.__doc__, CyNext.__doc__)) register_defines() Cython-0.26.1/Cython/Debugger/Cygdb.py0000664000175000017500000001316713143605603020233 0ustar stefanstefan00000000000000#!/usr/bin/env python """ The Cython debugger The current directory should contain a directory named 'cython_debug', or a path to the cython project directory should be given (the parent directory of cython_debug). 
Additional gdb args can be provided only if a path to the project directory is given. """ import os import sys import glob import tempfile import textwrap import subprocess import optparse import logging logger = logging.getLogger(__name__) def make_command_file(path_to_debug_info, prefix_code='', no_import=False): if not no_import: pattern = os.path.join(path_to_debug_info, 'cython_debug', 'cython_debug_info_*') debug_files = glob.glob(pattern) if not debug_files: sys.exit('%s.\nNo debug files were found in %s. Aborting.' % ( usage, os.path.abspath(path_to_debug_info))) fd, tempfilename = tempfile.mkstemp() f = os.fdopen(fd, 'w') try: f.write(prefix_code) f.write(textwrap.dedent('''\ # This is a gdb command file # See https://sourceware.org/gdb/onlinedocs/gdb/Command-Files.html set breakpoint pending on set print pretty on python # Activate virtualenv, if we were launched from one import os virtualenv = os.getenv('VIRTUAL_ENV') if virtualenv: path_to_activate_this_py = os.path.join(virtualenv, 'bin', 'activate_this.py') print("gdb command file: Activating virtualenv: %s; path_to_activate_this_py: %s" % ( virtualenv, path_to_activate_this_py)) with open(path_to_activate_this_py) as f: exec(f.read(), dict(__file__=path_to_activate_this_py)) from Cython.Debugger import libcython, libpython end ''')) if no_import: # don't do this, this overrides file command in .gdbinit # f.write("file %s\n" % sys.executable) pass else: path = os.path.join(path_to_debug_info, "cython_debug", "interpreter") interpreter_file = open(path) try: interpreter = interpreter_file.read() finally: interpreter_file.close() f.write("file %s\n" % interpreter) f.write('\n'.join('cy import %s\n' % fn for fn in debug_files)) f.write(textwrap.dedent('''\ python import sys try: gdb.lookup_type('PyModuleObject') except RuntimeError: sys.stderr.write( 'Python was not compiled with debug symbols (or it was ' 'stripped). Some functionality may not work (properly).\\n') end source .cygdbinit ''')) finally: f.close() return tempfilename usage = "Usage: cygdb [options] [PATH [-- GDB_ARGUMENTS]]" def main(path_to_debug_info=None, gdb_argv=None, no_import=False): """ Start the Cython debugger. This tells gdb to import the Cython and Python extensions (libcython.py and libpython.py) and it enables gdb's pending breakpoints. path_to_debug_info is the path to the Cython build directory gdb_argv is the list of options to gdb no_import tells cygdb whether it should import debug information """ parser = optparse.OptionParser(usage=usage) parser.add_option("--gdb-executable", dest="gdb", default='gdb', help="gdb executable to use [default: gdb]") parser.add_option("--verbose", "-v", dest="verbosity", action="count", default=0, help="Verbose mode. Multiple -v options increase the verbosity") (options, args) = parser.parse_args() if path_to_debug_info is None: if len(args) > 1: path_to_debug_info = args[0] else: path_to_debug_info = os.curdir if gdb_argv is None: gdb_argv = args[1:] if path_to_debug_info == '--': no_import = True logging_level = logging.WARN if options.verbosity == 1: logging_level = logging.INFO if options.verbosity >= 2: logging_level = logging.DEBUG logging.basicConfig(level=logging_level) logger.info("verbosity = %r", options.verbosity) logger.debug("options = %r; args = %r", options, args) logger.debug("Done parsing command-line options. 
path_to_debug_info = %r, gdb_argv = %r", path_to_debug_info, gdb_argv) tempfilename = make_command_file(path_to_debug_info, no_import=no_import) logger.info("Launching %s with command file: %s and gdb_argv: %s", options.gdb, tempfilename, gdb_argv) with open(tempfilename) as tempfile: logger.debug('Command file (%s) contains: """\n%s"""', tempfilename, tempfile.read()) logger.info("Spawning %s...", options.gdb) p = subprocess.Popen([options.gdb, '-command', tempfilename] + gdb_argv) logger.info("Spawned %s (pid %d)", options.gdb, p.pid) while True: try: logger.debug("Waiting for gdb (pid %d) to exit...", p.pid) ret = p.wait() logger.debug("Wait for gdb (pid %d) to exit is done. Returned: %r", p.pid, ret) except KeyboardInterrupt: pass else: break logger.debug("Closing temp command file with fd: %s", tempfile.fileno()) logger.debug("Removing temp command file: %s", tempfilename) os.remove(tempfilename) logger.debug("Removed temp command file: %s", tempfilename) Cython-0.26.1/Cython/Debugger/Tests/0000775000175000017500000000000013151203436017720 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Debugger/Tests/cfuncs.h0000664000175000017500000000003413143605603021352 0ustar stefanstefan00000000000000void some_c_function(void); Cython-0.26.1/Cython/Debugger/Tests/cfuncs.c0000664000175000017500000000010712542002467021347 0ustar stefanstefan00000000000000void some_c_function(void) { int a, b, c; a = 1; b = 2; } Cython-0.26.1/Cython/Debugger/Tests/test_libpython_in_gdb.py0000664000175000017500000000762312574327400024661 0ustar stefanstefan00000000000000# -*- coding: UTF-8 -*- """ Test libpython.py. This is already partly tested by test_libcython_in_gdb and Lib/test/test_gdb.py in the Python source. These tests are run in gdb and called from test_libcython_in_gdb.main() """ import os import sys import gdb from Cython.Debugger import libcython from Cython.Debugger import libpython from . import test_libcython_in_gdb from .test_libcython_in_gdb import _debug, inferior_python_version class TestPrettyPrinters(test_libcython_in_gdb.DebugTestCase): """ Test whether types of Python objects are correctly inferred and that the right libpython.PySomeTypeObjectPtr classes are instantiated. Also test whether values are appropriately formatted (don't be too laborious as Lib/test/test_gdb.py already covers this extensively). Don't take care of decreffing newly allocated objects as a new interpreter is started for every test anyway. 
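For example, a bytes object allocated in the inferior via PyBytes_FromStringAndSize() should come back wrapped in a PyBytesObjectPtr whose truncated repr is b'...' (see test_bytestring below).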
""" def setUp(self): super(TestPrettyPrinters, self).setUp() self.break_and_run('b = c = d = 0') def get_pyobject(self, code): value = gdb.parse_and_eval(code) assert libpython.pointervalue(value) != 0 return value def pyobject_fromcode(self, code, gdbvar=None): if gdbvar is not None: d = {'varname':gdbvar, 'code':code} gdb.execute('set $%(varname)s = %(code)s' % d) code = '$' + gdbvar return libpython.PyObjectPtr.from_pyobject_ptr(self.get_pyobject(code)) def get_repr(self, pyobject): return pyobject.get_truncated_repr(libpython.MAX_OUTPUT_LEN) def alloc_bytestring(self, string, gdbvar=None): if inferior_python_version < (3, 0): funcname = 'PyString_FromStringAndSize' else: funcname = 'PyBytes_FromStringAndSize' assert '"' not in string # ensure double quotes code = '(PyObject *) %s("%s", %d)' % (funcname, string, len(string)) return self.pyobject_fromcode(code, gdbvar=gdbvar) def alloc_unicodestring(self, string, gdbvar=None): self.alloc_bytestring(string.encode('UTF-8'), gdbvar='_temp') postfix = libpython.get_inferior_unicode_postfix() funcname = 'PyUnicode%s_FromEncodedObject' % (postfix,) return self.pyobject_fromcode( '(PyObject *) %s($_temp, "UTF-8", "strict")' % funcname, gdbvar=gdbvar) def test_bytestring(self): bytestring = self.alloc_bytestring("spam") if inferior_python_version < (3, 0): bytestring_class = libpython.PyStringObjectPtr expected = repr("spam") else: bytestring_class = libpython.PyBytesObjectPtr expected = "b'spam'" self.assertEqual(type(bytestring), bytestring_class) self.assertEqual(self.get_repr(bytestring), expected) def test_unicode(self): unicode_string = self.alloc_unicodestring(u"spam ἄλφα") expected = "'spam ἄλφα'" if inferior_python_version < (3, 0): expected = 'u' + expected self.assertEqual(type(unicode_string), libpython.PyUnicodeObjectPtr) self.assertEqual(self.get_repr(unicode_string), expected) def test_int(self): if inferior_python_version < (3, 0): intval = self.pyobject_fromcode('PyInt_FromLong(100)') self.assertEqual(type(intval), libpython.PyIntObjectPtr) self.assertEqual(self.get_repr(intval), '100') def test_long(self): longval = self.pyobject_fromcode('PyLong_FromLong(200)', gdbvar='longval') assert gdb.parse_and_eval('$longval->ob_type == &PyLong_Type') self.assertEqual(type(longval), libpython.PyLongObjectPtr) self.assertEqual(self.get_repr(longval), '200') def test_frame_type(self): frame = self.pyobject_fromcode('PyEval_GetFrame()') self.assertEqual(type(frame), libpython.PyFrameObjectPtr) Cython-0.26.1/Cython/Debugger/Tests/__init__.py0000664000175000017500000000001512542002467022031 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Debugger/Tests/test_libcython_in_gdb.py0000664000175000017500000003670213150045407024637 0ustar stefanstefan00000000000000""" Tests that run inside GDB. Note: debug information is already imported by the file generated by Cython.Debugger.Cygdb.make_command_file() """ from __future__ import absolute_import import os import re import sys import trace import inspect import warnings import unittest import textwrap import tempfile import functools import traceback import itertools #from test import test_support import gdb from .. import libcython from .. import libpython from . 
import TestLibCython as test_libcython from ...Utils import add_metaclass # for some reason sys.argv is missing in gdb sys.argv = ['gdb'] def print_on_call_decorator(func): @functools.wraps(func) def wrapper(self, *args, **kwargs): _debug(type(self).__name__, func.__name__) try: return func(self, *args, **kwargs) except Exception as e: _debug("An exception occurred:", traceback.format_exc(e)) raise return wrapper class TraceMethodCallMeta(type): def __init__(self, name, bases, dict): for func_name, func in dict.items(): if inspect.isfunction(func): setattr(self, func_name, print_on_call_decorator(func)) @add_metaclass(TraceMethodCallMeta) class DebugTestCase(unittest.TestCase): """ Base class for test cases. On teardown it kills the inferior and unsets all breakpoints. """ def __init__(self, name): super(DebugTestCase, self).__init__(name) self.cy = libcython.cy self.module = libcython.cy.cython_namespace['codefile'] self.spam_func, self.spam_meth = libcython.cy.functions_by_name['spam'] self.ham_func = libcython.cy.functions_by_qualified_name[ 'codefile.ham'] self.eggs_func = libcython.cy.functions_by_qualified_name[ 'codefile.eggs'] def read_var(self, varname, cast_to=None): result = gdb.parse_and_eval('$cy_cvalue("%s")' % varname) if cast_to: result = cast_to(result) return result def local_info(self): return gdb.execute('info locals', to_string=True) def lineno_equals(self, source_line=None, lineno=None): if source_line is not None: lineno = test_libcython.source_to_lineno[source_line] frame = gdb.selected_frame() self.assertEqual(libcython.cython_info.lineno(frame), lineno) def break_and_run(self, source_line): break_lineno = test_libcython.source_to_lineno[source_line] gdb.execute('cy break codefile:%d' % break_lineno, to_string=True) gdb.execute('run', to_string=True) def tearDown(self): gdb.execute('delete breakpoints', to_string=True) try: gdb.execute('kill inferior 1', to_string=True) except RuntimeError: pass gdb.execute('set args -c "import codefile"') class TestDebugInformationClasses(DebugTestCase): def test_CythonModule(self): "test that debug information was parsed properly into data structures" self.assertEqual(self.module.name, 'codefile') global_vars = ('c_var', 'python_var', '__name__', '__builtins__', '__doc__', '__file__') assert set(global_vars).issubset(self.module.globals) def test_CythonVariable(self): module_globals = self.module.globals c_var = module_globals['c_var'] python_var = module_globals['python_var'] self.assertEqual(c_var.type, libcython.CObject) self.assertEqual(python_var.type, libcython.PythonObject) self.assertEqual(c_var.qualified_name, 'codefile.c_var') def test_CythonFunction(self): self.assertEqual(self.spam_func.qualified_name, 'codefile.spam') self.assertEqual(self.spam_meth.qualified_name, 'codefile.SomeClass.spam') self.assertEqual(self.spam_func.module, self.module) assert self.eggs_func.pf_cname, (self.eggs_func, self.eggs_func.pf_cname) assert not self.ham_func.pf_cname assert not self.spam_func.pf_cname assert not self.spam_meth.pf_cname self.assertEqual(self.spam_func.type, libcython.CObject) self.assertEqual(self.ham_func.type, libcython.CObject) self.assertEqual(self.spam_func.arguments, ['a']) self.assertEqual(self.spam_func.step_into_functions, set(['puts', 'some_c_function'])) expected_lineno = test_libcython.source_to_lineno['def spam(a=0):'] self.assertEqual(self.spam_func.lineno, expected_lineno) self.assertEqual(sorted(self.spam_func.locals), list('abcd')) class TestParameters(unittest.TestCase): def test_parameters(self): 
gdb.execute('set cy_colorize_code on') assert libcython.parameters.colorize_code gdb.execute('set cy_colorize_code off') assert not libcython.parameters.colorize_code class TestBreak(DebugTestCase): def test_break(self): breakpoint_amount = len(gdb.breakpoints() or ()) gdb.execute('cy break codefile.spam') self.assertEqual(len(gdb.breakpoints()), breakpoint_amount + 1) bp = gdb.breakpoints()[-1] self.assertEqual(bp.type, gdb.BP_BREAKPOINT) assert self.spam_func.cname in bp.location assert bp.enabled def test_python_break(self): gdb.execute('cy break -p join') assert 'def join(' in gdb.execute('cy run', to_string=True) def test_break_lineno(self): beginline = 'import os' nextline = 'cdef int c_var = 12' self.break_and_run(beginline) self.lineno_equals(beginline) step_result = gdb.execute('cy step', to_string=True) self.lineno_equals(nextline) assert step_result.rstrip().endswith(nextline) class TestKilled(DebugTestCase): def test_abort(self): gdb.execute("set args -c 'import os; os.abort()'") output = gdb.execute('cy run', to_string=True) assert 'abort' in output.lower() class DebugStepperTestCase(DebugTestCase): def step(self, varnames_and_values, source_line=None, lineno=None): gdb.execute(self.command) for varname, value in varnames_and_values: self.assertEqual(self.read_var(varname), value, self.local_info()) self.lineno_equals(source_line, lineno) class TestStep(DebugStepperTestCase): """ Test stepping. Stepping happens in the code found in Cython/Debugger/Tests/codefile. """ def test_cython_step(self): gdb.execute('cy break codefile.spam') gdb.execute('run', to_string=True) self.lineno_equals('def spam(a=0):') gdb.execute('cy step', to_string=True) self.lineno_equals('b = c = d = 0') self.command = 'cy step' self.step([('b', 0)], source_line='b = 1') self.step([('b', 1), ('c', 0)], source_line='c = 2') self.step([('c', 2)], source_line='int(10)') self.step([], source_line='puts("spam")') gdb.execute('cont', to_string=True) self.assertEqual(len(gdb.inferiors()), 1) self.assertEqual(gdb.inferiors()[0].pid, 0) def test_c_step(self): self.break_and_run('some_c_function()') gdb.execute('cy step', to_string=True) self.assertEqual(gdb.selected_frame().name(), 'some_c_function') def test_python_step(self): self.break_and_run('os.path.join("foo", "bar")') result = gdb.execute('cy step', to_string=True) curframe = gdb.selected_frame() self.assertEqual(curframe.name(), 'PyEval_EvalFrameEx') pyframe = libpython.Frame(curframe).get_pyop() # With Python 3 inferiors, pyframe.co_name will return a PyUnicodePtr, # be compatible frame_name = pyframe.co_name.proxyval(set()) self.assertEqual(frame_name, 'join') assert re.match(r'\d+ def join\(', result), result class TestNext(DebugStepperTestCase): def test_cython_next(self): self.break_and_run('c = 2') lines = ( 'int(10)', 'puts("spam")', 'os.path.join("foo", "bar")', 'some_c_function()', ) for line in lines: gdb.execute('cy next') self.lineno_equals(line) class TestLocalsGlobals(DebugTestCase): def test_locals(self): self.break_and_run('int(10)') result = gdb.execute('cy locals', to_string=True) assert 'a = 0', repr(result) assert 'b = (int) 1', result assert 'c = (int) 2' in result, repr(result) def test_globals(self): self.break_and_run('int(10)') result = gdb.execute('cy globals', to_string=True) assert '__name__ ' in result, repr(result) assert '__doc__ ' in result, repr(result) assert 'os ' in result, repr(result) assert 'c_var ' in result, repr(result) assert 'python_var ' in result, repr(result) class TestBacktrace(DebugTestCase): def 
test_backtrace(self): libcython.parameters.colorize_code.value = False self.break_and_run('os.path.join("foo", "bar")') def match_backtrace_output(result): assert re.search(r'\#\d+ *0x.* in spam\(\) at .*codefile\.pyx:22', result), result assert 'os.path.join("foo", "bar")' in result, result result = gdb.execute('cy bt', to_string=True) match_backtrace_output(result) result = gdb.execute('cy bt -a', to_string=True) match_backtrace_output(result) # Apparently not everyone has main() # assert re.search(r'\#0 *0x.* in main\(\)', result), result class TestFunctions(DebugTestCase): def test_functions(self): self.break_and_run('c = 2') result = gdb.execute('print $cy_cname("b")', to_string=True) assert re.search('__pyx_.*b', result), result result = gdb.execute('print $cy_lineno()', to_string=True) supposed_lineno = test_libcython.source_to_lineno['c = 2'] assert str(supposed_lineno) in result, (supposed_lineno, result) result = gdb.execute('print $cy_cvalue("b")', to_string=True) assert '= 1' in result class TestPrint(DebugTestCase): def test_print(self): self.break_and_run('c = 2') result = gdb.execute('cy print b', to_string=True) self.assertEqual('b = (int) 1\n', result) class TestUpDown(DebugTestCase): def test_updown(self): self.break_and_run('os.path.join("foo", "bar")') gdb.execute('cy step') self.assertRaises(RuntimeError, gdb.execute, 'cy down') result = gdb.execute('cy up', to_string=True) assert 'spam()' in result assert 'os.path.join("foo", "bar")' in result class TestExec(DebugTestCase): def setUp(self): super(TestExec, self).setUp() self.fd, self.tmpfilename = tempfile.mkstemp() self.tmpfile = os.fdopen(self.fd, 'r+') def tearDown(self): super(TestExec, self).tearDown() try: self.tmpfile.close() finally: os.remove(self.tmpfilename) def eval_command(self, command): gdb.execute('cy exec open(%r, "w").write(str(%s))' % (self.tmpfilename, command)) return self.tmpfile.read().strip() def test_cython_exec(self): self.break_and_run('os.path.join("foo", "bar")') # test normal behaviour self.assertEqual("[0]", self.eval_command('[a]')) # test multiline code result = gdb.execute(textwrap.dedent('''\ cy exec pass "nothing" end ''')) result = self.tmpfile.read().rstrip() self.assertEqual('', result) def test_python_exec(self): self.break_and_run('os.path.join("foo", "bar")') gdb.execute('cy step') gdb.execute('cy exec some_random_var = 14') self.assertEqual('14', self.eval_command('some_random_var')) class CySet(DebugTestCase): def test_cyset(self): self.break_and_run('os.path.join("foo", "bar")') gdb.execute('cy set a = $cy_eval("{None: []}")') stringvalue = self.read_var("a", cast_to=str) self.assertEqual(stringvalue, "{None: []}") class TestCyEval(DebugTestCase): "Test the $cy_eval() gdb function." def test_cy_eval(self): # This function leaks a few objects in the GDB python process. 
This # is no biggie self.break_and_run('os.path.join("foo", "bar")') result = gdb.execute('print $cy_eval("None")', to_string=True) assert re.match(r'\$\d+ = None\n', result), result result = gdb.execute('print $cy_eval("[a]")', to_string=True) assert re.match(r'\$\d+ = \[0\]', result), result class TestClosure(DebugTestCase): def break_and_run_func(self, funcname): gdb.execute('cy break ' + funcname) gdb.execute('cy run') def test_inner(self): self.break_and_run_func('inner') self.assertEqual('', gdb.execute('cy locals', to_string=True)) # Allow the Cython-generated code to initialize the scope variable gdb.execute('cy step') self.assertEqual(str(self.read_var('a')), "'an object'") print_result = gdb.execute('cy print a', to_string=True).strip() self.assertEqual(print_result, "a = 'an object'") def test_outer(self): self.break_and_run_func('outer') self.assertEqual('', gdb.execute('cy locals', to_string=True)) # Initialize scope with 'a' uninitialized gdb.execute('cy step') self.assertEqual('', gdb.execute('cy locals', to_string=True)) # Initialize 'a' to 1 gdb.execute('cy step') print_result = gdb.execute('cy print a', to_string=True).strip() self.assertEqual(print_result, "a = 'an object'") _do_debug = os.environ.get('GDB_DEBUG') if _do_debug: _debug_file = open('/dev/tty', 'w') def _debug(*messages): if _do_debug: messages = itertools.chain([sys._getframe(1).f_code.co_name, ':'], messages) _debug_file.write(' '.join(str(msg) for msg in messages) + '\n') def run_unittest_in_module(modulename): try: gdb.lookup_type('PyModuleObject') except RuntimeError: msg = ("Unable to run tests, Python was not compiled with " "debugging information. Either compile python with " "-g or get a debug build (configure with --with-pydebug).") warnings.warn(msg) os._exit(1) else: m = __import__(modulename, fromlist=['']) tests = inspect.getmembers(m, inspect.isclass) # test_support.run_unittest(tests) test_loader = unittest.TestLoader() suite = unittest.TestSuite( [test_loader.loadTestsFromTestCase(cls) for name, cls in tests]) result = unittest.TextTestRunner(verbosity=1).run(suite) return result.wasSuccessful() def runtests(): """ Run the libcython and libpython tests. Ensure that an appropriate status is returned to the parent test process. """ from Cython.Debugger.Tests import test_libpython_in_gdb success_libcython = run_unittest_in_module(__name__) success_libpython = run_unittest_in_module(test_libpython_in_gdb.__name__) if not success_libcython or not success_libpython: sys.exit(2) def main(version, trace_code=False): global inferior_python_version inferior_python_version = version if trace_code: tracer = trace.Trace(count=False, trace=True, outfile=sys.stderr, ignoredirs=[sys.prefix, sys.exec_prefix]) tracer.runfunc(runtests) else: runtests() Cython-0.26.1/Cython/Debugger/Tests/codefile0000664000175000017500000000120113143605603021412 0ustar stefanstefan00000000000000cdef extern from "stdio.h": int puts(char *s) cdef extern from "cfuncs.h": void some_c_function() import os cdef int c_var = 12 python_var = 13 def spam(a=0): cdef: int b, c b = c = d = 0 b = 1 c = 2 int(10) puts("spam") os.path.join("foo", "bar") some_c_function() cpdef eggs(): pass cdef ham(): pass cdef class SomeClass(object): def spam(self): pass def outer(): cdef object a = "an object" def inner(): b = 2 # access closed over variables print a, b return inner outer()() spam() print "bye!" 
def use_ham(): ham() Cython-0.26.1/Cython/Debugger/Tests/TestLibCython.py0000664000175000017500000002020613143605603023030 0ustar stefanstefan00000000000000 import os import re import sys import shutil import warnings import textwrap import unittest import tempfile import subprocess #import distutils.core #from distutils import sysconfig from distutils import ccompiler import runtests import Cython.Distutils.extension import Cython.Distutils.old_build_ext as build_ext from Cython.Debugger import Cygdb as cygdb root = os.path.dirname(os.path.abspath(__file__)) codefile = os.path.join(root, 'codefile') cfuncs_file = os.path.join(root, 'cfuncs.c') with open(codefile) as f: source_to_lineno = dict((line.strip(), i + 1) for i, line in enumerate(f)) have_gdb = None def test_gdb(): global have_gdb if have_gdb is not None: return have_gdb have_gdb = False try: p = subprocess.Popen(['gdb', '-nx', '--version'], stdout=subprocess.PIPE) except OSError: # gdb not found gdb_version = None else: stdout, _ = p.communicate() # Based on Lib/test/test_gdb.py regex = "GNU gdb [^\d]*(\d+)\.(\d+)" gdb_version = re.match(regex, stdout.decode('ascii', 'ignore')) if gdb_version: gdb_version_number = list(map(int, gdb_version.groups())) if gdb_version_number >= [7, 2]: have_gdb = True with tempfile.NamedTemporaryFile(mode='w+') as python_version_script: python_version_script.write( 'python import sys; print("%s %s" % sys.version_info[:2])') python_version_script.flush() p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name], stdout=subprocess.PIPE) stdout, _ = p.communicate() try: internal_python_version = list(map(int, stdout.decode('ascii', 'ignore').split())) if internal_python_version < [2, 6]: have_gdb = False except ValueError: have_gdb = False if not have_gdb: warnings.warn('Skipping gdb tests, need gdb >= 7.2 with Python >= 2.6') return have_gdb class DebuggerTestCase(unittest.TestCase): def setUp(self): """ Run gdb and have cygdb import the debug information from the code defined in TestParseTreeTransforms's setUp method """ if not test_gdb(): return self.tempdir = tempfile.mkdtemp() self.destfile = os.path.join(self.tempdir, 'codefile.pyx') self.debug_dest = os.path.join(self.tempdir, 'cython_debug', 'cython_debug_info_codefile') self.cfuncs_destfile = os.path.join(self.tempdir, 'cfuncs') self.cwd = os.getcwd() try: os.chdir(self.tempdir) shutil.copy(codefile, self.destfile) shutil.copy(cfuncs_file, self.cfuncs_destfile + '.c') shutil.copy(cfuncs_file.replace('.c', '.h'), self.cfuncs_destfile + '.h') compiler = ccompiler.new_compiler() compiler.compile(['cfuncs.c'], debug=True, extra_postargs=['-fPIC']) opts = dict( test_directory=self.tempdir, module='codefile', ) optimization_disabler = build_ext.Optimization() cython_compile_testcase = runtests.CythonCompileTestCase( workdir=self.tempdir, # we clean up everything (not only compiled files) cleanup_workdir=False, tags=runtests.parse_tags(codefile), **opts ) new_stderr = open(os.devnull, 'w') stderr = sys.stderr sys.stderr = new_stderr optimization_disabler.disable_optimization() try: cython_compile_testcase.run_cython( targetdir=self.tempdir, incdir=None, annotate=False, extra_compile_options={ 'gdb_debug':True, 'output_dir':self.tempdir, }, **opts ) cython_compile_testcase.run_distutils( incdir=None, workdir=self.tempdir, extra_extension_args={'extra_objects':['cfuncs.o']}, **opts ) finally: optimization_disabler.restore_state() sys.stderr = stderr new_stderr.close() # ext = Cython.Distutils.extension.Extension( # 'codefile', # 
['codefile.pyx'], # cython_gdb=True, # extra_objects=['cfuncs.o']) # # distutils.core.setup( # script_args=['build_ext', '--inplace'], # ext_modules=[ext], # cmdclass=dict(build_ext=Cython.Distutils.build_ext) # ) except: os.chdir(self.cwd) raise def tearDown(self): if not test_gdb(): return os.chdir(self.cwd) shutil.rmtree(self.tempdir) class GdbDebuggerTestCase(DebuggerTestCase): def setUp(self): if not test_gdb(): return super(GdbDebuggerTestCase, self).setUp() prefix_code = textwrap.dedent('''\ python import os import sys import traceback def excepthook(type, value, tb): traceback.print_exception(type, value, tb) sys.stderr.flush() sys.stdout.flush() os._exit(1) sys.excepthook = excepthook # Have tracebacks end up on sys.stderr (gdb replaces sys.stderr # with an object that calls gdb.write()) sys.stderr = sys.__stderr__ end ''') code = textwrap.dedent('''\ python from Cython.Debugger.Tests import test_libcython_in_gdb test_libcython_in_gdb.main(version=%r) end ''' % (sys.version_info[:2],)) self.gdb_command_file = cygdb.make_command_file(self.tempdir, prefix_code) with open(self.gdb_command_file, 'a') as f: f.write(code) args = ['gdb', '-batch', '-x', self.gdb_command_file, '-n', '--args', sys.executable, '-c', 'import codefile'] paths = [] path = os.environ.get('PYTHONPATH') if path: paths.append(path) paths.append(os.path.dirname(os.path.dirname( os.path.abspath(Cython.__file__)))) env = dict(os.environ, PYTHONPATH=os.pathsep.join(paths)) self.p = subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) def tearDown(self): if not test_gdb(): return try: super(GdbDebuggerTestCase, self).tearDown() if self.p: try: self.p.stdout.close() except: pass try: self.p.stderr.close() except: pass self.p.wait() finally: os.remove(self.gdb_command_file) class TestAll(GdbDebuggerTestCase): def test_all(self): if not test_gdb(): return out, err = self.p.communicate() out = out.decode('UTF-8') err = err.decode('UTF-8') exit_status = self.p.returncode if exit_status == 1: sys.stderr.write(out) sys.stderr.write(err) elif exit_status >= 2: border = u'*' * 30 start = u'%s v INSIDE GDB v %s' % (border, border) stderr = u'%s v STDERR v %s' % (border, border) end = u'%s ^ INSIDE GDB ^ %s' % (border, border) errmsg = u'\n%s\n%s%s\n%s%s' % (start, out, stderr, err, end) sys.stderr.write(errmsg) # FIXME: re-enable this to make the test fail on internal failures #self.assertEqual(exit_status, 0) if __name__ == '__main__': unittest.main() Cython-0.26.1/Cython/Debugger/__init__.py0000664000175000017500000000001512542002467020727 0ustar stefanstefan00000000000000# empty file Cython-0.26.1/Cython/Debugger/libpython.py0000664000175000017500000024444413143605603021217 0ustar stefanstefan00000000000000#!/usr/bin/python # NOTE: this file is taken from the Python source distribution # It can be found under Tools/gdb/libpython.py. It is shipped with Cython # because it's not installed as a python module, and because changes are only # merged into new python versions (v3.2+). ''' From gdb 7 onwards, gdb's build can be configured --with-python, allowing gdb to be extended with Python code e.g. for library-specific data visualizations, such as for the C++ STL types. Documentation on this API can be seen at: http://sourceware.org/gdb/current/onlinedocs/gdb/Python-API.html This python module deals with the case when the process being debugged (the "inferior process" in gdb parlance) is itself python, or more specifically, linked against libpython. 
In this situation, almost every item of data is a (PyObject*), and having the debugger merely print their addresses is not very enlightening. This module embeds knowledge about the implementation details of libpython so that we can emit useful visualizations e.g. a string, a list, a dict, a frame giving file/line information and the state of local variables In particular, given a gdb.Value corresponding to a PyObject* in the inferior process, we can generate a "proxy value" within the gdb process. For example, given a PyObject* in the inferior process that is in fact a PyListObject* holding three PyObject* that turn out to be PyStringObject* instances, we can generate a proxy value within the gdb process that is a list of strings: ["foo", "bar", "baz"] Doing so can be expensive for complicated graphs of objects, and could take some time, so we also have a "write_repr" method that writes a representation of the data to a file-like object. This allows us to stop the traversal by having the file-like object raise an exception if it gets too much data. With both "proxyval" and "write_repr" we keep track of the set of all addresses visited so far in the traversal, to avoid infinite recursion due to cycles in the graph of object references. We try to defer gdb.lookup_type() invocations for python types until as late as possible: for a dynamically linked python binary, when the process starts in the debugger, the libpython.so hasn't been dynamically loaded yet, so none of the type names are known to the debugger The module also extends gdb with some python-specific commands. ''' try: input = raw_input except NameError: pass import os import re import sys import struct import locale import atexit import warnings import tempfile import textwrap import itertools import gdb try: xrange except NameError: xrange = range if sys.version_info[0] < 3: # I think this is the only way to fix this bug :'( # http://sourceware.org/bugzilla/show_bug.cgi?id=12285 out, err = sys.stdout, sys.stderr reload(sys).setdefaultencoding('UTF-8') sys.stdout = out sys.stderr = err # Look up the gdb.Type for some standard types: _type_char_ptr = gdb.lookup_type('char').pointer() # char* _type_unsigned_char_ptr = gdb.lookup_type('unsigned char').pointer() _type_void_ptr = gdb.lookup_type('void').pointer() # void* SIZEOF_VOID_P = _type_void_ptr.sizeof Py_TPFLAGS_HEAPTYPE = (1 << 9) Py_TPFLAGS_INT_SUBCLASS = (1 << 23) Py_TPFLAGS_LONG_SUBCLASS = (1 << 24) Py_TPFLAGS_LIST_SUBCLASS = (1 << 25) Py_TPFLAGS_TUPLE_SUBCLASS = (1 << 26) Py_TPFLAGS_STRING_SUBCLASS = (1 << 27) Py_TPFLAGS_BYTES_SUBCLASS = (1 << 27) Py_TPFLAGS_UNICODE_SUBCLASS = (1 << 28) Py_TPFLAGS_DICT_SUBCLASS = (1 << 29) Py_TPFLAGS_BASE_EXC_SUBCLASS = (1 << 30) Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31) MAX_OUTPUT_LEN = 1024 hexdigits = "0123456789abcdef" ENCODING = locale.getpreferredencoding() class NullPyObjectPtr(RuntimeError): pass def safety_limit(val): # Given a integer value from the process being debugged, limit it to some # safety threshold so that arbitrary breakage within said process doesn't # break the gdb process too much (e.g. sizes of iterations, sizes of lists) return min(val, 1000) def safe_range(val): # As per range, but don't trust the value too much: cap it to a safety # threshold in case the data was corrupted return range(safety_limit(val)) def write_unicode(file, text): # Write a byte or unicode string to file. Unicode strings are encoded to # ENCODING encoding with 'backslashreplace' error handler to avoid # UnicodeEncodeError. 
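    # For example (illustrative, Python 2 semantics):
    #     write_unicode(sys.stderr, u'caf\xe9')
    # writes the ENCODING-encoded bytes, replacing any character that cannot
    # be encoded with a backslash escape.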
if not isinstance(text, str): text = text.encode(ENCODING, 'backslashreplace') file.write(text) def os_fsencode(filename): if isinstance(filename, str): # only encode in Py2 return filename encoding = sys.getfilesystemencoding() if encoding == 'mbcs': # mbcs doesn't support surrogateescape return filename.encode(encoding) encoded = [] for char in filename: # surrogateescape error handler if 0xDC80 <= ord(char) <= 0xDCFF: byte = chr(ord(char) - 0xDC00) else: byte = char.encode(encoding) encoded.append(byte) return ''.join(encoded) class StringTruncated(RuntimeError): pass class TruncatedStringIO(object): '''Similar to cStringIO, but can truncate the output by raising a StringTruncated exception''' def __init__(self, maxlen=None): self._val = '' self.maxlen = maxlen def write(self, data): if self.maxlen: if len(data) + len(self._val) > self.maxlen: # Truncation: self._val += data[0:self.maxlen - len(self._val)] raise StringTruncated() self._val += data def getvalue(self): return self._val # pretty printer lookup all_pretty_typenames = set() class PrettyPrinterTrackerMeta(type): def __init__(self, name, bases, dict): super(PrettyPrinterTrackerMeta, self).__init__(name, bases, dict) all_pretty_typenames.add(self._typename) # Class decorator that adds a metaclass and recreates the class with it. # Copied from 'six'. See Cython/Utils.py. def _add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper @_add_metaclass(PrettyPrinterTrackerMeta) class PyObjectPtr(object): """ Class wrapping a gdb.Value that's a either a (PyObject*) within the inferior process, or some subclass pointer e.g. (PyStringObject*) There will be a subclass for every refined PyObject type that we care about. Note that at every stage the underlying pointer could be NULL, point to corrupt data, etc; this is the debugger, after all. """ _typename = 'PyObject' def __init__(self, gdbval, cast_to=None): if cast_to: self._gdbval = gdbval.cast(cast_to) else: self._gdbval = gdbval def field(self, name): ''' Get the gdb.Value for the given field within the PyObject, coping with some python 2 versus python 3 differences. Various libpython types are defined using the "PyObject_HEAD" and "PyObject_VAR_HEAD" macros. In Python 2, this these are defined so that "ob_type" and (for a var object) "ob_size" are fields of the type in question. In Python 3, this is defined as an embedded PyVarObject type thus: PyVarObject ob_base; so that the "ob_size" field is located insize the "ob_base" field, and the "ob_type" is most easily accessed by casting back to a (PyObject*). ''' if self.is_null(): raise NullPyObjectPtr(self) if name == 'ob_type': pyo_ptr = self._gdbval.cast(PyObjectPtr.get_gdb_type()) return pyo_ptr.dereference()[name] if name == 'ob_size': pyo_ptr = self._gdbval.cast(PyVarObjectPtr.get_gdb_type()) return pyo_ptr.dereference()[name] # General case: look it up inside the object: return self._gdbval.dereference()[name] def pyop_field(self, name): ''' Get a PyObjectPtr for the given PyObject* field within this PyObject, coping with some python 2 versus python 3 differences. 
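For example (the receiver is illustrative; 'args' is a real field of exception objects):

    pyop_args = some_exception_ptr.pyop_field('args')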
''' return PyObjectPtr.from_pyobject_ptr(self.field(name)) def write_field_repr(self, name, out, visited): ''' Extract the PyObject* field named "name", and write its representation to file-like object "out" ''' field_obj = self.pyop_field(name) field_obj.write_repr(out, visited) def get_truncated_repr(self, maxlen): ''' Get a repr-like string for the data, but truncate it at "maxlen" bytes (ending the object graph traversal as soon as you do) ''' out = TruncatedStringIO(maxlen) try: self.write_repr(out, set()) except StringTruncated: # Truncation occurred: return out.getvalue() + '...(truncated)' # No truncation occurred: return out.getvalue() def type(self): return PyTypeObjectPtr(self.field('ob_type')) def is_null(self): return not self._gdbval def is_optimized_out(self): ''' Is the value of the underlying PyObject* visible to the debugger? This can vary with the precise version of the compiler used to build Python, and the precise version of gdb. See e.g. https://bugzilla.redhat.com/show_bug.cgi?id=556975 with PyEval_EvalFrameEx's "f" ''' return self._gdbval.is_optimized_out def safe_tp_name(self): try: return self.type().field('tp_name').string() except NullPyObjectPtr: # NULL tp_name? return 'unknown' except RuntimeError: # Can't even read the object at all? return 'unknown' def proxyval(self, visited): ''' Scrape a value from the inferior process, and try to represent it within the gdb process, whilst (hopefully) avoiding crashes when the remote data is corrupt. Derived classes will override this. For example, a PyIntObject* with ob_ival 42 in the inferior process should result in an int(42) in this process. visited: a set of all gdb.Value pyobject pointers already visited whilst generating this value (to guard against infinite recursion when visiting object graphs with loops). Analogous to Py_ReprEnter and Py_ReprLeave ''' class FakeRepr(object): """ Class representing a non-descript PyObject* value in the inferior process for when we don't have a custom scraper, intended to have a sane repr(). """ def __init__(self, tp_name, address): self.tp_name = tp_name self.address = address def __repr__(self): # For the NULL pointer, we have no way of knowing a type, so # special-case it as per # http://bugs.python.org/issue8032#msg100882 if self.address == 0: return '0x0' return '<%s at remote 0x%x>' % (self.tp_name, self.address) return FakeRepr(self.safe_tp_name(), int(self._gdbval)) def write_repr(self, out, visited): ''' Write a string representation of the value scraped from the inferior process to "out", a file-like object. ''' # Default implementation: generate a proxy value and write its repr # However, this could involve a lot of work for complicated objects, # so for derived classes we specialize this return out.write(repr(self.proxyval(visited))) @classmethod def subclass_from_type(cls, t): ''' Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a (PyTypeObject*), determine the corresponding subclass of PyObjectPtr to use Ideally, we would look up the symbols for the global types, but that isn't working yet: (gdb) python print gdb.lookup_symbol('PyList_Type')[0].value Traceback (most recent call last): File "", line 1, in NotImplementedError: Symbol type not yet supported in Python scripts. Error while executing Python code. 
For now, we use tp_flags, after doing some string comparisons on the tp_name for some special-cases that don't seem to be visible through flags ''' try: tp_name = t.field('tp_name').string() tp_flags = int(t.field('tp_flags')) except RuntimeError: # Handle any kind of error e.g. NULL ptrs by simply using the base # class return cls #print 'tp_flags = 0x%08x' % tp_flags #print 'tp_name = %r' % tp_name name_map = {'bool': PyBoolObjectPtr, 'classobj': PyClassObjectPtr, 'instance': PyInstanceObjectPtr, 'NoneType': PyNoneStructPtr, 'frame': PyFrameObjectPtr, 'set' : PySetObjectPtr, 'frozenset' : PySetObjectPtr, 'builtin_function_or_method' : PyCFunctionObjectPtr, } if tp_name in name_map: return name_map[tp_name] if tp_flags & (Py_TPFLAGS_HEAPTYPE|Py_TPFLAGS_TYPE_SUBCLASS): return PyTypeObjectPtr if tp_flags & Py_TPFLAGS_INT_SUBCLASS: return PyIntObjectPtr if tp_flags & Py_TPFLAGS_LONG_SUBCLASS: return PyLongObjectPtr if tp_flags & Py_TPFLAGS_LIST_SUBCLASS: return PyListObjectPtr if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS: return PyTupleObjectPtr if tp_flags & Py_TPFLAGS_STRING_SUBCLASS: try: gdb.lookup_type('PyBytesObject') return PyBytesObjectPtr except RuntimeError: return PyStringObjectPtr if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS: return PyUnicodeObjectPtr if tp_flags & Py_TPFLAGS_DICT_SUBCLASS: return PyDictObjectPtr if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS: return PyBaseExceptionObjectPtr # Use the base class: return cls @classmethod def from_pyobject_ptr(cls, gdbval): ''' Try to locate the appropriate derived class dynamically, and cast the pointer accordingly. ''' try: p = PyObjectPtr(gdbval) cls = cls.subclass_from_type(p.type()) return cls(gdbval, cast_to=cls.get_gdb_type()) except RuntimeError as exc: # Handle any kind of error e.g. NULL ptrs by simply using the base # class pass return cls(gdbval) @classmethod def get_gdb_type(cls): return gdb.lookup_type(cls._typename).pointer() def as_address(self): return int(self._gdbval) class PyVarObjectPtr(PyObjectPtr): _typename = 'PyVarObject' class ProxyAlreadyVisited(object): ''' Placeholder proxy to use when protecting against infinite recursion due to loops in the object graph. 
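For example, proxying a list that directly or indirectly contains itself yields the placeholder [...] instead of recursing forever.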
Analogous to the values emitted by the users of Py_ReprEnter and Py_ReprLeave ''' def __init__(self, rep): self._rep = rep def __repr__(self): return self._rep def _write_instance_repr(out, visited, name, pyop_attrdict, address): '''Shared code for use by old-style and new-style classes: write a representation to file-like object "out"''' out.write('<') out.write(name) # Write dictionary of instance attributes: if isinstance(pyop_attrdict, PyDictObjectPtr): out.write('(') first = True for pyop_arg, pyop_val in pyop_attrdict.items(): if not first: out.write(', ') first = False out.write(pyop_arg.proxyval(visited)) out.write('=') pyop_val.write_repr(out, visited) out.write(')') out.write(' at remote 0x%x>' % address) class InstanceProxy(object): def __init__(self, cl_name, attrdict, address): self.cl_name = cl_name self.attrdict = attrdict self.address = address def __repr__(self): if isinstance(self.attrdict, dict): kwargs = ', '.join("%s=%r" % (arg, val) for arg, val in self.attrdict.items()) return '<%s(%s) at remote 0x%x>' % ( self.cl_name, kwargs, self.address) else: return '<%s at remote 0x%x>' % ( self.cl_name, self.address) def _PyObject_VAR_SIZE(typeobj, nitems): return ( ( typeobj.field('tp_basicsize') + nitems * typeobj.field('tp_itemsize') + (SIZEOF_VOID_P - 1) ) & ~(SIZEOF_VOID_P - 1) ).cast(gdb.lookup_type('size_t')) class PyTypeObjectPtr(PyObjectPtr): _typename = 'PyTypeObject' def get_attr_dict(self): ''' Get the PyDictObject ptr representing the attribute dictionary (or None if there's a problem) ''' try: typeobj = self.type() dictoffset = int_from_int(typeobj.field('tp_dictoffset')) if dictoffset != 0: if dictoffset < 0: type_PyVarObject_ptr = gdb.lookup_type('PyVarObject').pointer() tsize = int_from_int(self._gdbval.cast(type_PyVarObject_ptr)['ob_size']) if tsize < 0: tsize = -tsize size = _PyObject_VAR_SIZE(typeobj, tsize) dictoffset += size assert dictoffset > 0 assert dictoffset % SIZEOF_VOID_P == 0 dictptr = self._gdbval.cast(_type_char_ptr) + dictoffset PyObjectPtrPtr = PyObjectPtr.get_gdb_type().pointer() dictptr = dictptr.cast(PyObjectPtrPtr) return PyObjectPtr.from_pyobject_ptr(dictptr.dereference()) except RuntimeError: # Corrupt data somewhere; fail safe pass # Not found, or some kind of error: return None def proxyval(self, visited): ''' Support for new-style classes. Currently we just locate the dictionary using a transliteration to python of _PyObject_GetDictPtr, ignoring descriptors ''' # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('<...>') visited.add(self.as_address()) pyop_attr_dict = self.get_attr_dict() if pyop_attr_dict: attr_dict = pyop_attr_dict.proxyval(visited) else: attr_dict = {} tp_name = self.safe_tp_name() # New-style class: return InstanceProxy(tp_name, attr_dict, int(self._gdbval)) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('<...>') return visited.add(self.as_address()) try: tp_name = self.field('tp_name').string() except RuntimeError: tp_name = 'unknown' out.write('' % (tp_name, self.as_address())) # pyop_attrdict = self.get_attr_dict() # _write_instance_repr(out, visited, # self.safe_tp_name(), pyop_attrdict, self.as_address()) class ProxyException(Exception): def __init__(self, tp_name, args): self.tp_name = tp_name self.args = args def __repr__(self): return '%s%r' % (self.tp_name, self.args) class PyBaseExceptionObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyBaseExceptionObject* i.e. 
an exception within the process being debugged. """ _typename = 'PyBaseExceptionObject' def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('(...)') visited.add(self.as_address()) arg_proxy = self.pyop_field('args').proxyval(visited) return ProxyException(self.safe_tp_name(), arg_proxy) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) out.write(self.safe_tp_name()) self.write_field_repr('args', out, visited) class PyClassObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyClassObject* i.e. a instance within the process being debugged. """ _typename = 'PyClassObject' class BuiltInFunctionProxy(object): def __init__(self, ml_name): self.ml_name = ml_name def __repr__(self): return "" % self.ml_name class BuiltInMethodProxy(object): def __init__(self, ml_name, pyop_m_self): self.ml_name = ml_name self.pyop_m_self = pyop_m_self def __repr__(self): return '' % ( self.ml_name, self.pyop_m_self.safe_tp_name(), self.pyop_m_self.as_address()) class PyCFunctionObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyCFunctionObject* (see Include/methodobject.h and Objects/methodobject.c) """ _typename = 'PyCFunctionObject' def proxyval(self, visited): m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*) ml_name = m_ml['ml_name'].string() pyop_m_self = self.pyop_field('m_self') if pyop_m_self.is_null(): return BuiltInFunctionProxy(ml_name) else: return BuiltInMethodProxy(ml_name, pyop_m_self) class PyCodeObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyCodeObject* i.e. a instance within the process being debugged. """ _typename = 'PyCodeObject' def addr2line(self, addrq): ''' Get the line number for a given bytecode offset Analogous to PyCode_Addr2Line; translated from pseudocode in Objects/lnotab_notes.txt ''' co_lnotab = self.pyop_field('co_lnotab').proxyval(set()) # Initialize lineno to co_firstlineno as per PyCode_Addr2Line # not 0, as lnotab_notes.txt has it: lineno = int_from_int(self.field('co_firstlineno')) addr = 0 for addr_incr, line_incr in zip(co_lnotab[::2], co_lnotab[1::2]): addr += ord(addr_incr) if addr > addrq: return lineno lineno += ord(line_incr) return lineno class PyDictObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyDictObject* i.e. a dict instance within the process being debugged. 
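A minimal usage sketch (the gdb.Value is illustrative):

    pyop_dict = PyObjectPtr.from_pyobject_ptr(some_gdb_value)
    for pyop_key, pyop_value in pyop_dict.iteritems():
        # proxyval() turns inferior objects into host-process values
        key, value = pyop_key.proxyval(set()), pyop_value.proxyval(set())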
""" _typename = 'PyDictObject' def iteritems(self): ''' Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs, analagous to dict.items() ''' for i in safe_range(self.field('ma_mask') + 1): ep = self.field('ma_table') + i pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value']) if not pyop_value.is_null(): pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key']) yield (pyop_key, pyop_value) items = iteritems def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('{...}') visited.add(self.as_address()) result = {} for pyop_key, pyop_value in self.items(): proxy_key = pyop_key.proxyval(visited) proxy_value = pyop_value.proxyval(visited) result[proxy_key] = proxy_value return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('{...}') return visited.add(self.as_address()) out.write('{') first = True for pyop_key, pyop_value in self.items(): if not first: out.write(', ') first = False pyop_key.write_repr(out, visited) out.write(': ') pyop_value.write_repr(out, visited) out.write('}') class PyInstanceObjectPtr(PyObjectPtr): _typename = 'PyInstanceObject' def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('<...>') visited.add(self.as_address()) # Get name of class: in_class = self.pyop_field('in_class') cl_name = in_class.pyop_field('cl_name').proxyval(visited) # Get dictionary of instance attributes: in_dict = self.pyop_field('in_dict').proxyval(visited) # Old-style class: return InstanceProxy(cl_name, in_dict, int(self._gdbval)) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('<...>') return visited.add(self.as_address()) # Old-style class: # Get name of class: in_class = self.pyop_field('in_class') cl_name = in_class.pyop_field('cl_name').proxyval(visited) # Get dictionary of instance attributes: pyop_in_dict = self.pyop_field('in_dict') _write_instance_repr(out, visited, cl_name, pyop_in_dict, self.as_address()) class PyIntObjectPtr(PyObjectPtr): _typename = 'PyIntObject' def proxyval(self, visited): result = int_from_int(self.field('ob_ival')) return result class PyListObjectPtr(PyObjectPtr): _typename = 'PyListObject' def __getitem__(self, i): # Get the gdb.Value for the (PyObject*) with the given index: field_ob_item = self.field('ob_item') return field_ob_item[i] def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('[...]') visited.add(self.as_address()) result = [PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) for i in safe_range(int_from_int(self.field('ob_size')))] return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('[...]') return visited.add(self.as_address()) out.write('[') for i in safe_range(int_from_int(self.field('ob_size'))): if i > 0: out.write(', ') element = PyObjectPtr.from_pyobject_ptr(self[i]) element.write_repr(out, visited) out.write(']') class PyLongObjectPtr(PyObjectPtr): _typename = 'PyLongObject' def proxyval(self, visited): ''' Python's Include/longobjrep.h has this declaration: struct _longobject { PyObject_VAR_HEAD digit ob_digit[1]; }; with this description: The absolute value of a number is equal to SUM(for i=0 through abs(ob_size)-1) ob_digit[i] * 2**(SHIFT*i) Negative numbers are represented with ob_size < 0; zero is represented by ob_size 
== 0. where SHIFT can be either: #define PyLong_SHIFT 30 #define PyLong_SHIFT 15 ''' ob_size = int(self.field('ob_size')) if ob_size == 0: return int(0) ob_digit = self.field('ob_digit') if gdb.lookup_type('digit').sizeof == 2: SHIFT = 15 else: SHIFT = 30 digits = [ob_digit[i] * (1 << (SHIFT*i)) for i in safe_range(abs(ob_size))] result = sum(digits) if ob_size < 0: result = -result return result def write_repr(self, out, visited): # Write this out as a Python 3 int literal, i.e. without the "L" suffix proxy = self.proxyval(visited) out.write("%s" % proxy) class PyBoolObjectPtr(PyLongObjectPtr): """ Class wrapping a gdb.Value that's a PyBoolObject* i.e. one of the two instances (Py_True/Py_False) within the process being debugged. """ _typename = 'PyBoolObject' def proxyval(self, visited): castto = gdb.lookup_type('PyLongObject').pointer() self._gdbval = self._gdbval.cast(castto) return bool(PyLongObjectPtr(self._gdbval).proxyval(visited)) class PyNoneStructPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyObject* pointing to the singleton (we hope) _Py_NoneStruct with ob_type PyNone_Type """ _typename = 'PyObject' def proxyval(self, visited): return None class PyFrameObjectPtr(PyObjectPtr): _typename = 'PyFrameObject' def __init__(self, gdbval, cast_to=None): PyObjectPtr.__init__(self, gdbval, cast_to) if not self.is_optimized_out(): self.co = PyCodeObjectPtr.from_pyobject_ptr(self.field('f_code')) self.co_name = self.co.pyop_field('co_name') self.co_filename = self.co.pyop_field('co_filename') self.f_lineno = int_from_int(self.field('f_lineno')) self.f_lasti = int_from_int(self.field('f_lasti')) self.co_nlocals = int_from_int(self.co.field('co_nlocals')) self.co_varnames = PyTupleObjectPtr.from_pyobject_ptr(self.co.field('co_varnames')) def iter_locals(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the local variables of this frame ''' if self.is_optimized_out(): return f_localsplus = self.field('f_localsplus') for i in safe_range(self.co_nlocals): pyop_value = PyObjectPtr.from_pyobject_ptr(f_localsplus[i]) if not pyop_value.is_null(): pyop_name = PyObjectPtr.from_pyobject_ptr(self.co_varnames[i]) yield (pyop_name, pyop_value) def iter_globals(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the global variables of this frame ''' if self.is_optimized_out(): return pyop_globals = self.pyop_field('f_globals') return iter(pyop_globals.items()) def iter_builtins(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the builtin variables ''' if self.is_optimized_out(): return pyop_builtins = self.pyop_field('f_builtins') return iter(pyop_builtins.items()) def get_var_by_name(self, name): ''' Look for the named local variable, returning a (PyObjectPtr, scope) pair where scope is a string 'local', 'global', 'builtin' If not found, return (None, None) ''' for pyop_name, pyop_value in self.iter_locals(): if name == pyop_name.proxyval(set()): return pyop_value, 'local' for pyop_name, pyop_value in self.iter_globals(): if name == pyop_name.proxyval(set()): return pyop_value, 'global' for pyop_name, pyop_value in self.iter_builtins(): if name == pyop_name.proxyval(set()): return pyop_value, 'builtin' return None, None def filename(self): '''Get the path of the current Python source file, as a string''' if self.is_optimized_out(): return '(frame information optimized out)' return self.co_filename.proxyval(set()) def current_line_num(self): '''Get current line number as an integer (1-based) Translated 
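# --- illustrative sketch, not part of the original file -------------------
# PyLongObjectPtr.proxyval() above rebuilds an arbitrary-precision int from
# its ob_digit array: value = sum(ob_digit[i] * 2**(SHIFT*i)), negated when
# ob_size < 0.  The same arithmetic on a plain Python list, with SHIFT
# assumed to be 30 (the usual value for builds with 4-byte digits):
def pylong_from_digits(digits, ob_size, shift=30):
    value = sum(d << (shift * i) for i, d in enumerate(digits))
    return -value if ob_size < 0 else value

# 5 + 7*2**30 stored as two digits, with the sign carried by ob_size:
assert pylong_from_digits([5, 7], -2) == -(5 + 7 * 2**30)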
from PyFrame_GetLineNumber and PyCode_Addr2Line See Objects/lnotab_notes.txt ''' if self.is_optimized_out(): return None f_trace = self.field('f_trace') if f_trace: # we have a non-NULL f_trace: return self.f_lineno else: #try: return self.co.addr2line(self.f_lasti) #except ValueError: # return self.f_lineno def current_line(self): '''Get the text of the current source line as a string, with a trailing newline character''' if self.is_optimized_out(): return '(frame information optimized out)' filename = self.filename() with open(os_fsencode(filename), 'r') as f: all_lines = f.readlines() # Convert from 1-based current_line_num to 0-based list offset: return all_lines[self.current_line_num()-1] def write_repr(self, out, visited): if self.is_optimized_out(): out.write('(frame information optimized out)') return out.write('Frame 0x%x, for file %s, line %i, in %s (' % (self.as_address(), self.co_filename.proxyval(visited), self.current_line_num(), self.co_name.proxyval(visited))) first = True for pyop_name, pyop_value in self.iter_locals(): if not first: out.write(', ') first = False out.write(pyop_name.proxyval(visited)) out.write('=') pyop_value.write_repr(out, visited) out.write(')') class PySetObjectPtr(PyObjectPtr): _typename = 'PySetObject' def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('%s(...)' % self.safe_tp_name()) visited.add(self.as_address()) members = [] table = self.field('table') for i in safe_range(self.field('mask')+1): setentry = table[i] key = setentry['key'] if key != 0: key_proxy = PyObjectPtr.from_pyobject_ptr(key).proxyval(visited) if key_proxy != '': members.append(key_proxy) if self.safe_tp_name() == 'frozenset': return frozenset(members) else: return set(members) def write_repr(self, out, visited): # Emulate Python 3's set_repr tp_name = self.safe_tp_name() # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) # Python 3's set_repr special-cases the empty set: if not self.field('used'): out.write(tp_name) out.write('()') return # Python 3 uses {} for set literals: if tp_name != 'set': out.write(tp_name) out.write('(') out.write('{') first = True table = self.field('table') for i in safe_range(self.field('mask')+1): setentry = table[i] key = setentry['key'] if key != 0: pyop_key = PyObjectPtr.from_pyobject_ptr(key) key_proxy = pyop_key.proxyval(visited) # FIXME! if key_proxy != '': if not first: out.write(', ') first = False pyop_key.write_repr(out, visited) out.write('}') if tp_name != 'set': out.write(')') class PyBytesObjectPtr(PyObjectPtr): _typename = 'PyBytesObject' def __str__(self): field_ob_size = self.field('ob_size') field_ob_sval = self.field('ob_sval') return ''.join(struct.pack('b', field_ob_sval[i]) for i in safe_range(field_ob_size)) def proxyval(self, visited): return str(self) def write_repr(self, out, visited, py3=True): # Write this out as a Python 3 bytes literal, i.e. 
with a "b" prefix # Get a PyStringObject* within the Python 2 gdb process: proxy = self.proxyval(visited) # Transliteration of Python 3's Objects/bytesobject.c:PyBytes_Repr # to Python 2 code: quote = "'" if "'" in proxy and not '"' in proxy: quote = '"' if py3: out.write('b') out.write(quote) for byte in proxy: if byte == quote or byte == '\\': out.write('\\') out.write(byte) elif byte == '\t': out.write('\\t') elif byte == '\n': out.write('\\n') elif byte == '\r': out.write('\\r') elif byte < ' ' or ord(byte) >= 0x7f: out.write('\\x') out.write(hexdigits[(ord(byte) & 0xf0) >> 4]) out.write(hexdigits[ord(byte) & 0xf]) else: out.write(byte) out.write(quote) class PyStringObjectPtr(PyBytesObjectPtr): _typename = 'PyStringObject' def write_repr(self, out, visited): return super(PyStringObjectPtr, self).write_repr(out, visited, py3=False) class PyTupleObjectPtr(PyObjectPtr): _typename = 'PyTupleObject' def __getitem__(self, i): # Get the gdb.Value for the (PyObject*) with the given index: field_ob_item = self.field('ob_item') return field_ob_item[i] def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('(...)') visited.add(self.as_address()) result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) for i in safe_range(int_from_int(self.field('ob_size')))]) return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) out.write('(') for i in safe_range(int_from_int(self.field('ob_size'))): if i > 0: out.write(', ') element = PyObjectPtr.from_pyobject_ptr(self[i]) element.write_repr(out, visited) if self.field('ob_size') == 1: out.write(',)') else: out.write(')') def _unichr_is_printable(char): # Logic adapted from Python 3's Tools/unicode/makeunicodedata.py if char == u" ": return True import unicodedata return unicodedata.category(char) not in ("C", "Z") if sys.maxunicode >= 0x10000: try: _unichr = unichr except NameError: _unichr = chr else: # Needed for proper surrogate support if sizeof(Py_UNICODE) is 2 in gdb def _unichr(x): if x < 0x10000: return unichr(x) x -= 0x10000 ch1 = 0xD800 | (x >> 10) ch2 = 0xDC00 | (x & 0x3FF) return unichr(ch1) + unichr(ch2) class PyUnicodeObjectPtr(PyObjectPtr): _typename = 'PyUnicodeObject' def char_width(self): _type_Py_UNICODE = gdb.lookup_type('Py_UNICODE') return _type_Py_UNICODE.sizeof def proxyval(self, visited): # From unicodeobject.h: # Py_ssize_t length; /* Length of raw Unicode data in buffer */ # Py_UNICODE *str; /* Raw Unicode buffer */ field_length = int(self.field('length')) field_str = self.field('str') # Gather a list of ints from the Py_UNICODE array; these are either # UCS-2 or UCS-4 code points: if self.char_width() > 2: Py_UNICODEs = [int(field_str[i]) for i in safe_range(field_length)] else: # A more elaborate routine if sizeof(Py_UNICODE) is 2 in the # inferior process: we must join surrogate pairs. Py_UNICODEs = [] i = 0 limit = safety_limit(field_length) while i < limit: ucs = int(field_str[i]) i += 1 if ucs < 0xD800 or ucs >= 0xDC00 or i == field_length: Py_UNICODEs.append(ucs) continue # This could be a surrogate pair. ucs2 = int(field_str[i]) if ucs2 < 0xDC00 or ucs2 > 0xDFFF: continue code = (ucs & 0x03FF) << 10 code |= ucs2 & 0x03FF code += 0x00010000 Py_UNICODEs.append(code) i += 1 # Convert the int code points to unicode characters, and generate a # local unicode instance. 
# This splits surrogate pairs if sizeof(Py_UNICODE) is 2 here (in gdb). result = u''.join([_unichr(ucs) for ucs in Py_UNICODEs]) return result def write_repr(self, out, visited): # Get a PyUnicodeObject* within the Python 2 gdb process: proxy = self.proxyval(visited) # Transliteration of Python 3's Object/unicodeobject.c:unicode_repr # to Python 2: try: gdb.parse_and_eval('PyString_Type') except RuntimeError: # Python 3, don't write 'u' as prefix pass else: # Python 2, write the 'u' out.write('u') if "'" in proxy and '"' not in proxy: quote = '"' else: quote = "'" out.write(quote) i = 0 while i < len(proxy): ch = proxy[i] i += 1 # Escape quotes and backslashes if ch == quote or ch == '\\': out.write('\\') out.write(ch) # Map special whitespace to '\t', \n', '\r' elif ch == '\t': out.write('\\t') elif ch == '\n': out.write('\\n') elif ch == '\r': out.write('\\r') # Map non-printable US ASCII to '\xhh' */ elif ch < ' ' or ch == 0x7F: out.write('\\x') out.write(hexdigits[(ord(ch) >> 4) & 0x000F]) out.write(hexdigits[ord(ch) & 0x000F]) # Copy ASCII characters as-is elif ord(ch) < 0x7F: out.write(ch) # Non-ASCII characters else: ucs = ch ch2 = None if sys.maxunicode < 0x10000: # If sizeof(Py_UNICODE) is 2 here (in gdb), join # surrogate pairs before calling _unichr_is_printable. if (i < len(proxy) and 0xD800 <= ord(ch) < 0xDC00 \ and 0xDC00 <= ord(proxy[i]) <= 0xDFFF): ch2 = proxy[i] ucs = ch + ch2 i += 1 # Unfortuately, Python 2's unicode type doesn't seem # to expose the "isprintable" method printable = _unichr_is_printable(ucs) if printable: try: ucs.encode(ENCODING) except UnicodeEncodeError: printable = False # Map Unicode whitespace and control characters # (categories Z* and C* except ASCII space) if not printable: if ch2 is not None: # Match Python 3's representation of non-printable # wide characters. code = (ord(ch) & 0x03FF) << 10 code |= ord(ch2) & 0x03FF code += 0x00010000 else: code = ord(ucs) # Map 8-bit characters to '\\xhh' if code <= 0xff: out.write('\\x') out.write(hexdigits[(code >> 4) & 0x000F]) out.write(hexdigits[code & 0x000F]) # Map 21-bit characters to '\U00xxxxxx' elif code >= 0x10000: out.write('\\U') out.write(hexdigits[(code >> 28) & 0x0000000F]) out.write(hexdigits[(code >> 24) & 0x0000000F]) out.write(hexdigits[(code >> 20) & 0x0000000F]) out.write(hexdigits[(code >> 16) & 0x0000000F]) out.write(hexdigits[(code >> 12) & 0x0000000F]) out.write(hexdigits[(code >> 8) & 0x0000000F]) out.write(hexdigits[(code >> 4) & 0x0000000F]) out.write(hexdigits[code & 0x0000000F]) # Map 16-bit characters to '\uxxxx' else: out.write('\\u') out.write(hexdigits[(code >> 12) & 0x000F]) out.write(hexdigits[(code >> 8) & 0x000F]) out.write(hexdigits[(code >> 4) & 0x000F]) out.write(hexdigits[code & 0x000F]) else: # Copy characters as-is out.write(ch) if ch2 is not None: out.write(ch2) out.write(quote) def __unicode__(self): return self.proxyval(set()) def __str__(self): # In Python 3, everything is unicode (including attributes of e.g. # code objects, such as function names). The Python 2 debugger code # uses PyUnicodePtr objects to format strings etc, whereas with a # Python 2 debuggee we'd get PyStringObjectPtr instances with __str__. # Be compatible with that. 
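# --- illustrative sketch, not part of the original file -------------------
# write_repr() below escapes non-printable characters the way Python 3's
# repr() does: \xhh for 8-bit codes, \uxxxx for 16-bit ones and \U00xxxxxx
# beyond the BMP.  A compact version of just that escaping rule:
def escape_codepoint(code):
    if code <= 0xFF:
        return '\\x%02x' % code
    elif code <= 0xFFFF:
        return '\\u%04x' % code
    else:
        return '\\U%08x' % code

assert escape_codepoint(0x07) == '\\x07'
assert escape_codepoint(0x2603) == '\\u2603'
assert escape_codepoint(0x1F600) == '\\U0001f600'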
return unicode(self).encode('UTF-8') def int_from_int(gdbval): return int(str(gdbval)) def stringify(val): # TODO: repr() puts everything on one line; pformat can be nicer, but # can lead to v.long results; this function isolates the choice if True: return repr(val) else: from pprint import pformat return pformat(val) class PyObjectPtrPrinter: "Prints a (PyObject*)" def __init__ (self, gdbval): self.gdbval = gdbval def to_string (self): pyop = PyObjectPtr.from_pyobject_ptr(self.gdbval) if True: return pyop.get_truncated_repr(MAX_OUTPUT_LEN) else: # Generate full proxy value then stringify it. # Doing so could be expensive proxyval = pyop.proxyval(set()) return stringify(proxyval) def pretty_printer_lookup(gdbval): type = gdbval.type.unqualified() if type.code == gdb.TYPE_CODE_PTR: type = type.target().unqualified() if str(type) in all_pretty_typenames: return PyObjectPtrPrinter(gdbval) """ During development, I've been manually invoking the code in this way: (gdb) python import sys sys.path.append('/home/david/coding/python-gdb') import libpython end then reloading it after each edit like this: (gdb) python reload(libpython) The following code should ensure that the prettyprinter is registered if the code is autoloaded by gdb when visiting libpython.so, provided that this python file is installed to the same path as the library (or its .debug file) plus a "-gdb.py" suffix, e.g: /usr/lib/libpython2.6.so.1.0-gdb.py /usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py """ def register(obj): if obj is None: obj = gdb # Wire up the pretty-printer obj.pretty_printers.append(pretty_printer_lookup) register(gdb.current_objfile()) # Unfortunately, the exact API exposed by the gdb module varies somewhat # from build to build # See http://bugs.python.org/issue8279?#msg102276 class Frame(object): ''' Wrapper for gdb.Frame, adding various methods ''' def __init__(self, gdbframe): self._gdbframe = gdbframe def older(self): older = self._gdbframe.older() if older: return Frame(older) else: return None def newer(self): newer = self._gdbframe.newer() if newer: return Frame(newer) else: return None def select(self): '''If supported, select this frame and return True; return False if unsupported Not all builds have a gdb.Frame.select method; seems to be present on Fedora 12 onwards, but absent on Ubuntu buildbot''' if not hasattr(self._gdbframe, 'select'): print ('Unable to select frame: ' 'this build of gdb does not expose a gdb.Frame.select method') return False self._gdbframe.select() return True def get_index(self): '''Calculate index of frame, starting at 0 for the newest frame within this thread''' index = 0 # Go down until you reach the newest frame: iter_frame = self while iter_frame.newer(): index += 1 iter_frame = iter_frame.newer() return index def is_evalframeex(self): '''Is this a PyEval_EvalFrameEx frame?''' if self._gdbframe.name() == 'PyEval_EvalFrameEx': ''' I believe we also need to filter on the inline struct frame_id.inline_depth, only regarding frames with an inline depth of 0 as actually being this function So we reject those with type gdb.INLINE_FRAME ''' if self._gdbframe.type() == gdb.NORMAL_FRAME: # We have a PyEval_EvalFrameEx frame: return True return False def read_var(self, varname): """ read_var with respect to code blocks (gdbframe.read_var works with respect to the most recent block) Apparently this function doesn't work, though, as it seems to read variables in other frames also sometimes. 
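# --- illustrative sketch, not part of the original file -------------------
# The pretty_printer_lookup/register pattern above is the generic shape of
# a gdb Python pretty-printer: a lookup function inspects a gdb.Value's
# type and, when it matches, returns an object with a to_string() method.
# A minimal, hypothetical printer for a C type "my_string_t*" (the type and
# field names are invented for illustration; this only runs inside gdb):
class MyStringPrinter(object):
    def __init__(self, val):
        self.val = val

    def to_string(self):
        # Dereference the pointer and read a hypothetical 'data' field.
        return self.val.dereference()['data'].string()

def my_lookup(val):
    type = val.type.unqualified()
    if type.code == gdb.TYPE_CODE_PTR and str(type.target().unqualified()) == 'my_string_t':
        return MyStringPrinter(val)
    return None

# gdb.pretty_printers is the global registry; objfile-specific registries
# (gdb.current_objfile().pretty_printers) work the same way:
# gdb.pretty_printers.append(my_lookup)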
""" block = self._gdbframe.block() var = None while block and var is None: try: var = self._gdbframe.read_var(varname, block) except ValueError: pass block = block.superblock return var def get_pyop(self): try: # self.read_var does not always work properly, so select our frame # and restore the previously selected frame selected_frame = gdb.selected_frame() self._gdbframe.select() f = gdb.parse_and_eval('f') selected_frame.select() except RuntimeError: return None else: return PyFrameObjectPtr.from_pyobject_ptr(f) @classmethod def get_selected_frame(cls): _gdbframe = gdb.selected_frame() if _gdbframe: return Frame(_gdbframe) return None @classmethod def get_selected_python_frame(cls): '''Try to obtain the Frame for the python code in the selected frame, or None''' frame = cls.get_selected_frame() while frame: if frame.is_evalframeex(): return frame frame = frame.older() # Not found: return None def print_summary(self): if self.is_evalframeex(): pyop = self.get_pyop() if pyop: line = pyop.get_truncated_repr(MAX_OUTPUT_LEN) write_unicode(sys.stdout, '#%i %s\n' % (self.get_index(), line)) sys.stdout.write(pyop.current_line()) else: sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) else: sys.stdout.write('#%i\n' % self.get_index()) class PyList(gdb.Command): '''List the current Python source code, if any Use py-list START to list at a different line number within the python source. Use py-list START, END to list a specific range of lines within the python source. ''' def __init__(self): gdb.Command.__init__ (self, "py-list", gdb.COMMAND_FILES, gdb.COMPLETE_NONE) def invoke(self, args, from_tty): import re start = None end = None m = re.match(r'\s*(\d+)\s*', args) if m: start = int(m.group(0)) end = start + 10 m = re.match(r'\s*(\d+)\s*,\s*(\d+)\s*', args) if m: start, end = map(int, m.groups()) frame = Frame.get_selected_python_frame() if not frame: print('Unable to locate python frame') return pyop = frame.get_pyop() if not pyop: print('Unable to read information on python frame') return filename = pyop.filename() lineno = pyop.current_line_num() if start is None: start = lineno - 5 end = lineno + 5 if start<1: start = 1 with open(os_fsencode(filename), 'r') as f: all_lines = f.readlines() # start and end are 1-based, all_lines is 0-based; # so [start-1:end] as a python slice gives us [start, end] as a # closed interval for i, line in enumerate(all_lines[start-1:end]): linestr = str(i+start) # Highlight current line: if i + start == lineno: linestr = '>' + linestr sys.stdout.write('%4s %s' % (linestr, line)) # ...and register the command: PyList() def move_in_stack(move_up): '''Move up or down the stack (for the py-up/py-down command)''' frame = Frame.get_selected_python_frame() while frame: if move_up: iter_frame = frame.older() else: iter_frame = frame.newer() if not iter_frame: break if iter_frame.is_evalframeex(): # Result: if iter_frame.select(): iter_frame.print_summary() return frame = iter_frame if move_up: print('Unable to find an older python frame') else: print('Unable to find a newer python frame') class PyUp(gdb.Command): 'Select and print the python stack frame that called this one (if any)' def __init__(self): gdb.Command.__init__ (self, "py-up", gdb.COMMAND_STACK, gdb.COMPLETE_NONE) def invoke(self, args, from_tty): move_in_stack(move_up=True) class PyDown(gdb.Command): 'Select and print the python stack frame called by this one (if any)' def __init__(self): gdb.Command.__init__ (self, "py-down", gdb.COMMAND_STACK, gdb.COMPLETE_NONE) def 
invoke(self, args, from_tty): move_in_stack(move_up=False) # Not all builds of gdb have gdb.Frame.select if hasattr(gdb.Frame, 'select'): PyUp() PyDown() class PyBacktrace(gdb.Command): 'Display the current python frame and all the frames within its call stack (if any)' def __init__(self): gdb.Command.__init__ (self, "py-bt", gdb.COMMAND_STACK, gdb.COMPLETE_NONE) def invoke(self, args, from_tty): frame = Frame.get_selected_python_frame() while frame: if frame.is_evalframeex(): frame.print_summary() frame = frame.older() PyBacktrace() class PyPrint(gdb.Command): 'Look up the given python variable name, and print it' def __init__(self): gdb.Command.__init__ (self, "py-print", gdb.COMMAND_DATA, gdb.COMPLETE_NONE) def invoke(self, args, from_tty): name = str(args) frame = Frame.get_selected_python_frame() if not frame: print('Unable to locate python frame') return pyop_frame = frame.get_pyop() if not pyop_frame: print('Unable to read information on python frame') return pyop_var, scope = pyop_frame.get_var_by_name(name) if pyop_var: print('%s %r = %s' % ( scope, name, pyop_var.get_truncated_repr(MAX_OUTPUT_LEN))) else: print('%r not found' % name) PyPrint() class PyLocals(gdb.Command): 'Look up the given python variable name, and print it' def invoke(self, args, from_tty): name = str(args) frame = Frame.get_selected_python_frame() if not frame: print('Unable to locate python frame') return pyop_frame = frame.get_pyop() if not pyop_frame: print('Unable to read information on python frame') return namespace = self.get_namespace(pyop_frame) namespace = [(name.proxyval(set()), val) for name, val in namespace] if namespace: name, val = max(namespace, key=lambda item: len(item[0])) max_name_length = len(name) for name, pyop_value in namespace: value = pyop_value.get_truncated_repr(MAX_OUTPUT_LEN) print('%-*s = %s' % (max_name_length, name, value)) def get_namespace(self, pyop_frame): return pyop_frame.iter_locals() class PyGlobals(PyLocals): 'List all the globals in the currently select Python frame' def get_namespace(self, pyop_frame): return pyop_frame.iter_globals() PyLocals("py-locals", gdb.COMMAND_DATA, gdb.COMPLETE_NONE) PyGlobals("py-globals", gdb.COMMAND_DATA, gdb.COMPLETE_NONE) class PyNameEquals(gdb.Function): def _get_pycurframe_attr(self, attr): frame = Frame(gdb.selected_frame()) if frame.is_evalframeex(): pyframe = frame.get_pyop() if pyframe is None: warnings.warn("Use a Python debug build, Python breakpoints " "won't work otherwise.") return None return getattr(pyframe, attr).proxyval(set()) return None def invoke(self, funcname): attr = self._get_pycurframe_attr('co_name') return attr is not None and attr == funcname.string() PyNameEquals("pyname_equals") class PyModEquals(PyNameEquals): def invoke(self, modname): attr = self._get_pycurframe_attr('co_filename') if attr is not None: filename, ext = os.path.splitext(os.path.basename(attr)) return filename == modname.string() return False PyModEquals("pymod_equals") class PyBreak(gdb.Command): """ Set a Python breakpoint. Examples: Break on any function or method named 'func' in module 'modname' py-break modname.func Break on any function or method named 'func' py-break func """ def invoke(self, funcname, from_tty): if '.' 
in funcname: modname, dot, funcname = funcname.rpartition('.') cond = '$pyname_equals("%s") && $pymod_equals("%s")' % (funcname, modname) else: cond = '$pyname_equals("%s")' % funcname gdb.execute('break PyEval_EvalFrameEx if ' + cond) PyBreak("py-break", gdb.COMMAND_RUNNING, gdb.COMPLETE_NONE) class _LoggingState(object): """ State that helps to provide a reentrant gdb.execute() function. """ def __init__(self): self.fd, self.filename = tempfile.mkstemp() self.file = os.fdopen(self.fd, 'r+') _execute("set logging file %s" % self.filename) self.file_position_stack = [] atexit.register(os.close, self.fd) atexit.register(os.remove, self.filename) def __enter__(self): if not self.file_position_stack: _execute("set logging redirect on") _execute("set logging on") _execute("set pagination off") self.file_position_stack.append(os.fstat(self.fd).st_size) return self def getoutput(self): gdb.flush() self.file.seek(self.file_position_stack[-1]) result = self.file.read() return result def __exit__(self, exc_type, exc_val, tb): startpos = self.file_position_stack.pop() self.file.seek(startpos) self.file.truncate() if not self.file_position_stack: _execute("set logging off") _execute("set logging redirect off") _execute("set pagination on") def execute(command, from_tty=False, to_string=False): """ Replace gdb.execute() with this function and have it accept a 'to_string' argument (new in 7.2). Have it properly capture stderr also. Ensure reentrancy. """ if to_string: with _logging_state as state: _execute(command, from_tty) return state.getoutput() else: _execute(command, from_tty) _execute = gdb.execute gdb.execute = execute _logging_state = _LoggingState() def get_selected_inferior(): """ Return the selected inferior in gdb. """ # Woooh, another bug in gdb! Is there an end in sight? # http://sourceware.org/bugzilla/show_bug.cgi?id=12212 return gdb.inferiors()[0] selected_thread = gdb.selected_thread() for inferior in gdb.inferiors(): for thread in inferior.threads(): if thread == selected_thread: return inferior def source_gdb_script(script_contents, to_string=False): """ Source a gdb script with script_contents passed as a string. This is useful to provide defines for py-step and py-next to make them repeatable (this is not possible with gdb.execute()). See http://sourceware.org/bugzilla/show_bug.cgi?id=12216 """ fd, filename = tempfile.mkstemp() f = os.fdopen(fd, 'w') f.write(script_contents) f.close() gdb.execute("source %s" % filename, to_string=to_string) os.remove(filename) def register_defines(): source_gdb_script(textwrap.dedent("""\ define py-step -py-step end define py-next -py-next end document py-step %s end document py-next %s end """) % (PyStep.__doc__, PyNext.__doc__)) def stackdepth(frame): "Tells the stackdepth of a gdb frame." depth = 0 while frame: frame = frame.older() depth += 1 return depth class ExecutionControlCommandBase(gdb.Command): """ Superclass for language specific execution control. Language specific features should be implemented by lang_info using the LanguageInfo interface. 'name' is the name of the command. 
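# --- illustrative sketch, not part of the original file -------------------
# _LoggingState above makes gdb.execute(..., to_string=True) reentrant by
# logging all output to one temp file and remembering, on a stack, the file
# size at each nesting level; getoutput() then reads only what was appended
# since the matching __enter__, and __exit__ truncates it away again.  The
# same bookkeeping with a plain file, so it can be exercised outside gdb:
import os, tempfile

class CaptureRegion(object):
    def __init__(self):
        self.fd, self.name = tempfile.mkstemp()
        self.file = os.fdopen(self.fd, 'r+')
        self.positions = []

    def __enter__(self):
        self.positions.append(os.fstat(self.fd).st_size)
        return self

    def getoutput(self):
        self.file.seek(self.positions[-1])
        return self.file.read()

    def __exit__(self, *exc):
        self.file.seek(self.positions.pop())
        self.file.truncate()

region = CaptureRegion()
with region:
    region.file.write('outer ')
    region.file.flush()
    with region:
        region.file.write('inner')
        region.file.flush()
        assert region.getoutput() == 'inner'
    # the nested region was truncated away on exit:
    assert region.getoutput() == 'outer '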
""" def __init__(self, name, lang_info): super(ExecutionControlCommandBase, self).__init__( name, gdb.COMMAND_RUNNING, gdb.COMPLETE_NONE) self.lang_info = lang_info def install_breakpoints(self): all_locations = itertools.chain( self.lang_info.static_break_functions(), self.lang_info.runtime_break_functions()) for location in all_locations: result = gdb.execute('break %s' % location, to_string=True) yield re.search(r'Breakpoint (\d+)', result).group(1) def delete_breakpoints(self, breakpoint_list): for bp in breakpoint_list: gdb.execute("delete %s" % bp) def filter_output(self, result): reflags = re.MULTILINE output_on_halt = [ (r'^Program received signal .*', reflags|re.DOTALL), (r'.*[Ww]arning.*', 0), (r'^Program exited .*', reflags), ] output_always = [ # output when halting on a watchpoint (r'^(Old|New) value = .*', reflags), # output from the 'display' command (r'^\d+: \w+ = .*', reflags), ] def filter_output(regexes): output = [] for regex, flags in regexes: for match in re.finditer(regex, result, flags): output.append(match.group(0)) return '\n'.join(output) # Filter the return value output of the 'finish' command match_finish = re.search(r'^Value returned is \$\d+ = (.*)', result, re.MULTILINE) if match_finish: finish_output = 'Value returned: %s\n' % match_finish.group(1) else: finish_output = '' return (filter_output(output_on_halt), finish_output + filter_output(output_always)) def stopped(self): return get_selected_inferior().pid == 0 def finish_executing(self, result): """ After doing some kind of code running in the inferior, print the line of source code or the result of the last executed gdb command (passed in as the `result` argument). """ output_on_halt, output_always = self.filter_output(result) if self.stopped(): print(output_always) print(output_on_halt) else: frame = gdb.selected_frame() source_line = self.lang_info.get_source_line(frame) if self.lang_info.is_relevant_function(frame): raised_exception = self.lang_info.exc_info(frame) if raised_exception: print(raised_exception) if source_line: if output_always.rstrip(): print(output_always.rstrip()) print(source_line) else: print(result) def _finish(self): """ Execute until the function returns (or until something else makes it stop) """ if gdb.selected_frame().older() is not None: return gdb.execute('finish', to_string=True) else: # outermost frame, continue return gdb.execute('cont', to_string=True) def _finish_frame(self): """ Execute until the function returns to a relevant caller. """ while True: result = self._finish() try: frame = gdb.selected_frame() except RuntimeError: break hitbp = re.search(r'Breakpoint (\d+)', result) is_relevant = self.lang_info.is_relevant_function(frame) if hitbp or is_relevant or self.stopped(): break return result def finish(self, *args): "Implements the finish command." result = self._finish_frame() self.finish_executing(result) def step(self, stepinto, stepover_command='next'): """ Do a single step or step-over. Returns the result of the last gdb command that made execution stop. This implementation, for stepping, sets (conditional) breakpoints for all functions that are deemed relevant. It then does a step over until either something halts execution, or until the next line is reached. If, however, stepover_command is given, it should be a string gdb command that continues execution in some way. The idea is that the caller has set a (conditional) breakpoint or watchpoint that can work more efficiently than the step-over loop. 
For Python this means setting a watchpoint for f->f_lasti, which means we can then subsequently "finish" frames. We want f->f_lasti instead of f->f_lineno, because the latter only works properly with local trace functions, see PyFrameObjectPtr.current_line_num and PyFrameObjectPtr.addr2line. """ if stepinto: breakpoint_list = list(self.install_breakpoints()) beginframe = gdb.selected_frame() if self.lang_info.is_relevant_function(beginframe): # If we start in a relevant frame, initialize stuff properly. If # we don't start in a relevant frame, the loop will halt # immediately. So don't call self.lang_info.lineno() as it may # raise for irrelevant frames. beginline = self.lang_info.lineno(beginframe) if not stepinto: depth = stackdepth(beginframe) newframe = beginframe while True: if self.lang_info.is_relevant_function(newframe): result = gdb.execute(stepover_command, to_string=True) else: result = self._finish_frame() if self.stopped(): break newframe = gdb.selected_frame() is_relevant_function = self.lang_info.is_relevant_function(newframe) try: framename = newframe.name() except RuntimeError: framename = None m = re.search(r'Breakpoint (\d+)', result) if m: if is_relevant_function and m.group(1) in breakpoint_list: # although we hit a breakpoint, we still need to check # that the function, in case hit by a runtime breakpoint, # is in the right context break if newframe != beginframe: # new function if not stepinto: # see if we returned to the caller newdepth = stackdepth(newframe) is_relevant_function = (newdepth < depth and is_relevant_function) if is_relevant_function: break else: # newframe equals beginframe, check for a difference in the # line number lineno = self.lang_info.lineno(newframe) if lineno and lineno != beginline: break if stepinto: self.delete_breakpoints(breakpoint_list) self.finish_executing(result) def run(self, args, from_tty): self.finish_executing(gdb.execute('run ' + args, to_string=True)) def cont(self, *args): self.finish_executing(gdb.execute('cont', to_string=True)) class LanguageInfo(object): """ This class defines the interface that ExecutionControlCommandBase needs to provide language-specific execution control. Classes that implement this interface should implement: lineno(frame) Tells the current line number (only called for a relevant frame). If lineno is a false value it is not checked for a difference. is_relevant_function(frame) tells whether we care about frame 'frame' get_source_line(frame) get the line of source code for the current line (only called for a relevant frame). If the source code cannot be retrieved this function should return None exc_info(frame) -- optional tells whether an exception was raised, if so, it should return a string representation of the exception value, None otherwise. static_break_functions() returns an iterable of function names that are considered relevant and should halt step-into execution. This is needed to provide a performing step-into runtime_break_functions() -- optional list of functions that we should break into depending on the context """ def exc_info(self, frame): "See this class' docstring." def runtime_break_functions(self): """ Implement this if the list of step-into functions depends on the context. 
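# --- illustrative sketch, not part of the original file -------------------
# A hypothetical LanguageInfo implementation, showing the minimum a new
# language plugin has to provide for ExecutionControlCommandBase (the names
# 'my_eval_frame' and MyFrame are invented for illustration):
class MyLanguageInfo(LanguageInfo):
    def lineno(self, frame):
        # Only called for frames where is_relevant_function() is true.
        return MyFrame(frame).current_line_num()

    def is_relevant_function(self, frame):
        return frame.name() == 'my_eval_frame'

    def get_source_line(self, frame):
        try:
            return MyFrame(frame).current_line().rstrip()
        except IOError:
            return None

    def static_break_functions(self):
        # Step-into sets breakpoints on these before stepping.
        yield 'my_eval_frame'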
""" return () class PythonInfo(LanguageInfo): def pyframe(self, frame): pyframe = Frame(frame).get_pyop() if pyframe: return pyframe else: raise gdb.RuntimeError( "Unable to find the Python frame, run your code with a debug " "build (configure with --with-pydebug or compile with -g).") def lineno(self, frame): return self.pyframe(frame).current_line_num() def is_relevant_function(self, frame): return Frame(frame).is_evalframeex() def get_source_line(self, frame): try: pyframe = self.pyframe(frame) return '%4d %s' % (pyframe.current_line_num(), pyframe.current_line().rstrip()) except IOError: return None def exc_info(self, frame): try: tstate = frame.read_var('tstate').dereference() if gdb.parse_and_eval('tstate->frame == f'): # tstate local variable initialized, check for an exception inf_type = tstate['curexc_type'] inf_value = tstate['curexc_value'] if inf_type: return 'An exception was raised: %s' % (inf_value,) except (ValueError, RuntimeError): # Could not read the variable tstate or it's memory, it's ok pass def static_break_functions(self): yield 'PyEval_EvalFrameEx' class PythonStepperMixin(object): """ Make this a mixin so CyStep can also inherit from this and use a CythonCodeStepper at the same time. """ def python_step(self, stepinto): """ Set a watchpoint on the Python bytecode instruction pointer and try to finish the frame """ output = gdb.execute('watch f->f_lasti', to_string=True) watchpoint = int(re.search(r'[Ww]atchpoint (\d+):', output).group(1)) self.step(stepinto=stepinto, stepover_command='finish') gdb.execute('delete %s' % watchpoint) class PyStep(ExecutionControlCommandBase, PythonStepperMixin): "Step through Python code." stepinto = True def invoke(self, args, from_tty): self.python_step(stepinto=self.stepinto) class PyNext(PyStep): "Step-over Python code." stepinto = False class PyFinish(ExecutionControlCommandBase): "Execute until function returns to a caller." invoke = ExecutionControlCommandBase.finish class PyRun(ExecutionControlCommandBase): "Run the program." invoke = ExecutionControlCommandBase.run class PyCont(ExecutionControlCommandBase): invoke = ExecutionControlCommandBase.cont def _pointervalue(gdbval): """ Return the value of the pionter as a Python int. 
gdbval.type must be a pointer type """ # don't convert with int() as it will raise a RuntimeError if gdbval.address is not None: return int(gdbval.address) else: # the address attribute is None sometimes, in which case we can # still convert the pointer to an int return int(gdbval) def pointervalue(gdbval): pointer = _pointervalue(gdbval) try: if pointer < 0: raise gdb.GdbError("Negative pointer value, presumably a bug " "in gdb, aborting.") except RuntimeError: # work around yet another bug in gdb where you get random behaviour # and tracebacks pass return pointer def get_inferior_unicode_postfix(): try: gdb.parse_and_eval('PyUnicode_FromEncodedObject') except RuntimeError: try: gdb.parse_and_eval('PyUnicodeUCS2_FromEncodedObject') except RuntimeError: return 'UCS4' else: return 'UCS2' else: return '' class PythonCodeExecutor(object): Py_single_input = 256 Py_file_input = 257 Py_eval_input = 258 def malloc(self, size): chunk = (gdb.parse_and_eval("(void *) malloc((size_t) %d)" % size)) pointer = pointervalue(chunk) if pointer == 0: raise gdb.GdbError("No memory could be allocated in the inferior.") return pointer def alloc_string(self, string): pointer = self.malloc(len(string)) get_selected_inferior().write_memory(pointer, string) return pointer def alloc_pystring(self, string): stringp = self.alloc_string(string) PyString_FromStringAndSize = 'PyString_FromStringAndSize' try: gdb.parse_and_eval(PyString_FromStringAndSize) except RuntimeError: # Python 3 PyString_FromStringAndSize = ('PyUnicode%s_FromStringAndSize' % (get_inferior_unicode_postfix(),)) try: result = gdb.parse_and_eval( '(PyObject *) %s((char *) %d, (size_t) %d)' % ( PyString_FromStringAndSize, stringp, len(string))) finally: self.free(stringp) pointer = pointervalue(result) if pointer == 0: raise gdb.GdbError("Unable to allocate Python string in " "the inferior.") return pointer def free(self, pointer): gdb.parse_and_eval("free((void *) %d)" % pointer) def incref(self, pointer): "Increment the reference count of a Python object in the inferior." gdb.parse_and_eval('Py_IncRef((PyObject *) %d)' % pointer) def xdecref(self, pointer): "Decrement the reference count of a Python object in the inferior." # Py_DecRef is like Py_XDECREF, but a function. So we don't have # to check for NULL. This should also decref all our allocated # Python strings. gdb.parse_and_eval('Py_DecRef((PyObject *) %d)' % pointer) def evalcode(self, code, input_type, global_dict=None, local_dict=None): """ Evaluate python code `code` given as a string in the inferior and return the result as a gdb.Value. Returns a new reference in the inferior. Of course, executing any code in the inferior may be dangerous and may leave the debuggee in an unsafe state or terminate it alltogether. """ if '\0' in code: raise gdb.GdbError("String contains NUL byte.") code += '\0' pointer = self.alloc_string(code) globalsp = pointervalue(global_dict) localsp = pointervalue(local_dict) if globalsp == 0 or localsp == 0: raise gdb.GdbError("Unable to obtain or create locals or globals.") code = """ PyRun_String( (char *) %(code)d, (int) %(start)d, (PyObject *) %(globals)s, (PyObject *) %(locals)d) """ % dict(code=pointer, start=input_type, globals=globalsp, locals=localsp) with FetchAndRestoreError(): try: pyobject_return_value = gdb.parse_and_eval(code) finally: self.free(pointer) return pyobject_return_value class FetchAndRestoreError(PythonCodeExecutor): """ Context manager that fetches the error indicator in the inferior and restores it on exit. 
""" def __init__(self): self.sizeof_PyObjectPtr = gdb.lookup_type('PyObject').pointer().sizeof self.pointer = self.malloc(self.sizeof_PyObjectPtr * 3) type = self.pointer value = self.pointer + self.sizeof_PyObjectPtr traceback = self.pointer + self.sizeof_PyObjectPtr * 2 self.errstate = type, value, traceback def __enter__(self): gdb.parse_and_eval("PyErr_Fetch(%d, %d, %d)" % self.errstate) def __exit__(self, *args): if gdb.parse_and_eval("(int) PyErr_Occurred()"): gdb.parse_and_eval("PyErr_Print()") pyerr_restore = ("PyErr_Restore(" "(PyObject *) *%d," "(PyObject *) *%d," "(PyObject *) *%d)") try: gdb.parse_and_eval(pyerr_restore % self.errstate) finally: self.free(self.pointer) class FixGdbCommand(gdb.Command): def __init__(self, command, actual_command): super(FixGdbCommand, self).__init__(command, gdb.COMMAND_DATA, gdb.COMPLETE_NONE) self.actual_command = actual_command def fix_gdb(self): """ It seems that invoking either 'cy exec' and 'py-exec' work perfectly fine, but after this gdb's python API is entirely broken. Maybe some uncleared exception value is still set? sys.exc_clear() didn't help. A demonstration: (gdb) cy exec 'hello' 'hello' (gdb) python gdb.execute('cont') RuntimeError: Cannot convert value to int. Error while executing Python code. (gdb) python gdb.execute('cont') [15148 refs] Program exited normally. """ warnings.filterwarnings('ignore', r'.*', RuntimeWarning, re.escape(__name__)) try: int(gdb.parse_and_eval("(void *) 0")) == 0 except RuntimeError: pass # warnings.resetwarnings() def invoke(self, args, from_tty): self.fix_gdb() try: gdb.execute('%s %s' % (self.actual_command, args)) except RuntimeError as e: raise gdb.GdbError(str(e)) self.fix_gdb() def _evalcode_python(executor, code, input_type): """ Execute Python code in the most recent stack frame. """ global_dict = gdb.parse_and_eval('PyEval_GetGlobals()') local_dict = gdb.parse_and_eval('PyEval_GetLocals()') if (pointervalue(global_dict) == 0 or pointervalue(local_dict) == 0): raise gdb.GdbError("Unable to find the locals or globals of the " "most recent Python function (relative to the " "selected frame).") return executor.evalcode(code, input_type, global_dict, local_dict) class PyExec(gdb.Command): def readcode(self, expr): if expr: return expr, PythonCodeExecutor.Py_single_input else: lines = [] while True: try: line = input('>') except EOFError: break else: if line.rstrip() == 'end': break lines.append(line) return '\n'.join(lines), PythonCodeExecutor.Py_file_input def invoke(self, expr, from_tty): expr, input_type = self.readcode(expr) executor = PythonCodeExecutor() executor.xdecref(_evalcode_python(executor, input_type, global_dict, local_dict)) gdb.execute('set breakpoint pending on') if hasattr(gdb, 'GdbError'): # Wrap py-step and py-next in gdb defines to make them repeatable. 
py_step = PyStep('-py-step', PythonInfo()) py_next = PyNext('-py-next', PythonInfo()) register_defines() py_finish = PyFinish('py-finish', PythonInfo()) py_run = PyRun('py-run', PythonInfo()) py_cont = PyCont('py-cont', PythonInfo()) py_exec = FixGdbCommand('py-exec', '-py-exec') _py_exec = PyExec("-py-exec", gdb.COMMAND_DATA, gdb.COMPLETE_NONE) else: warnings.warn("Use gdb 7.2 or higher to use the py-exec command.") Cython-0.26.1/Cython/Debugger/DebugWriter.py0000664000175000017500000000346313023021033021406 0ustar stefanstefan00000000000000from __future__ import absolute_import import os import sys import errno try: from lxml import etree have_lxml = True except ImportError: have_lxml = False try: from xml.etree import cElementTree as etree except ImportError: try: from xml.etree import ElementTree as etree except ImportError: etree = None from ..Compiler import Errors class CythonDebugWriter(object): """ Class to output debugging information for cygdb It writes debug information to cython_debug/cython_debug_info_ in the build directory. """ def __init__(self, output_dir): if etree is None: raise Errors.NoElementTreeInstalledException() self.output_dir = os.path.join(output_dir or os.curdir, 'cython_debug') self.tb = etree.TreeBuilder() # set by Cython.Compiler.ParseTreeTransforms.DebugTransform self.module_name = None self.start('cython_debug', attrs=dict(version='1.0')) def start(self, name, attrs=None): self.tb.start(name, attrs or {}) def end(self, name): self.tb.end(name) def serialize(self): self.tb.end('Module') self.tb.end('cython_debug') xml_root_element = self.tb.close() try: os.makedirs(self.output_dir) except OSError as e: if e.errno != errno.EEXIST: raise et = etree.ElementTree(xml_root_element) kw = {} if have_lxml: kw['pretty_print'] = True fn = "cython_debug_info_" + self.module_name et.write(os.path.join(self.output_dir, fn), encoding="UTF-8", **kw) interpreter_path = os.path.join(self.output_dir, 'interpreter') with open(interpreter_path, 'w') as f: f.write(sys.executable) Cython-0.26.1/Cython/StringIOTree.py0000664000175000017500000000621712574327400017776 0ustar stefanstefan00000000000000try: from cStringIO import StringIO except ImportError: from io import StringIO # does not support writing 'str' in Py2 class StringIOTree(object): """ See module docs. """ def __init__(self, stream=None): self.prepended_children = [] if stream is None: stream = StringIO() self.stream = stream self.write = stream.write self.markers = [] def getvalue(self): content = [x.getvalue() for x in self.prepended_children] content.append(self.stream.getvalue()) return "".join(content) def copyto(self, target): """Potentially cheaper than getvalue as no string concatenation needs to happen.""" for child in self.prepended_children: child.copyto(target) stream_content = self.stream.getvalue() if stream_content: target.write(stream_content) def commit(self): # Save what we have written until now so that the buffer # itself is empty -- this makes it ready for insertion if self.stream.tell(): self.prepended_children.append(StringIOTree(self.stream)) self.prepended_children[-1].markers = self.markers self.markers = [] self.stream = StringIO() self.write = self.stream.write def insert(self, iotree): """ Insert a StringIOTree (and all of its contents) at this location. Further writing to self appears after what is inserted. 
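# --- illustrative sketch, not part of the original file -------------------
# CythonDebugWriter above builds its cython_debug_info_<module> XML with
# xml.etree's TreeBuilder: start()/end() push and pop elements and close()
# returns the root element.  The same API in isolation, with a hypothetical
# module entry:
from xml.etree.ElementTree import TreeBuilder, ElementTree

tb = TreeBuilder()
tb.start('cython_debug', {'version': '1.0'})
tb.start('Module', {'name': 'example'})
tb.end('Module')
tb.end('cython_debug')
root = tb.close()
assert root.tag == 'cython_debug' and root[0].tag == 'Module'
# ElementTree(root).write('cython_debug_info_example', encoding="UTF-8")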
""" self.commit() self.prepended_children.append(iotree) def insertion_point(self): """ Returns a new StringIOTree, which is left behind at the current position (it what is written to the result will appear right before whatever is next written to self). Calling getvalue() or copyto() on the result will only return the contents written to it. """ # Save what we have written until now # This is so that getvalue on the result doesn't include it. self.commit() # Construct the new forked object to return other = StringIOTree() self.prepended_children.append(other) return other def allmarkers(self): children = self.prepended_children return [m for c in children for m in c.allmarkers()] + self.markers __doc__ = r""" Implements a buffer with insertion points. When you know you need to "get back" to a place and write more later, simply call insertion_point() at that spot and get a new StringIOTree object that is "left behind". EXAMPLE: >>> a = StringIOTree() >>> _= a.write('first\n') >>> b = a.insertion_point() >>> _= a.write('third\n') >>> _= b.write('second\n') >>> a.getvalue().split() ['first', 'second', 'third'] >>> c = b.insertion_point() >>> d = c.insertion_point() >>> _= d.write('alpha\n') >>> _= b.write('gamma\n') >>> _= c.write('beta\n') >>> b.getvalue().split() ['second', 'alpha', 'beta', 'gamma'] >>> i = StringIOTree() >>> d.insert(i) >>> _= i.write('inserted\n') >>> out = StringIO() >>> a.copyto(out) >>> out.getvalue().split() ['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third'] """ Cython-0.26.1/Cython/Debugging.py0000664000175000017500000000105012542002467017337 0ustar stefanstefan00000000000000############################################### # # Odds and ends for debugging # ############################################### def print_call_chain(*args): import sys print(" ".join(map(str, args))) f = sys._getframe(1) while f: name = f.f_code.co_name s = f.f_locals.get('self', None) if s: c = getattr(s, "__class__", None) if c: name = "%s.%s" % (c.__name__, name) print("Called from: %s %s" % (name, f.f_lineno)) f = f.f_back print("-" * 70) Cython-0.26.1/Cython/Tempita/0000775000175000017500000000000013151203436016475 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Tempita/_looper.py0000664000175000017500000001011012542002467020503 0ustar stefanstefan00000000000000""" Helper for looping over sequences, particular in templates. Often in a loop in a template it's handy to know what's next up, previously up, if this is the first or last item in the sequence, etc. These can be awkward to manage in a normal Python loop, but using the looper you can get a better sense of the context. Use like:: >>> for loop, item in looper(['a', 'b', 'c']): ... print loop.number, item ... if not loop.last: ... print '---' 1 a --- 2 b --- 3 c """ import sys from Cython.Tempita.compat3 import basestring_ __all__ = ['looper'] class looper(object): """ Helper for looping (particularly in templates) Use this like:: for loop, item in looper(seq): if loop.first: ... 
""" def __init__(self, seq): self.seq = seq def __iter__(self): return looper_iter(self.seq) def __repr__(self): return '<%s for %r>' % ( self.__class__.__name__, self.seq) class looper_iter(object): def __init__(self, seq): self.seq = list(seq) self.pos = 0 def __iter__(self): return self def __next__(self): if self.pos >= len(self.seq): raise StopIteration result = loop_pos(self.seq, self.pos), self.seq[self.pos] self.pos += 1 return result if sys.version < "3": next = __next__ class loop_pos(object): def __init__(self, seq, pos): self.seq = seq self.pos = pos def __repr__(self): return '' % ( self.seq[self.pos], self.pos) def index(self): return self.pos index = property(index) def number(self): return self.pos + 1 number = property(number) def item(self): return self.seq[self.pos] item = property(item) def __next__(self): try: return self.seq[self.pos + 1] except IndexError: return None __next__ = property(__next__) if sys.version < "3": next = __next__ def previous(self): if self.pos == 0: return None return self.seq[self.pos - 1] previous = property(previous) def odd(self): return not self.pos % 2 odd = property(odd) def even(self): return self.pos % 2 even = property(even) def first(self): return self.pos == 0 first = property(first) def last(self): return self.pos == len(self.seq) - 1 last = property(last) def length(self): return len(self.seq) length = property(length) def first_group(self, getter=None): """ Returns true if this item is the start of a new group, where groups mean that some attribute has changed. The getter can be None (the item itself changes), an attribute name like ``'.attr'``, a function, or a dict key or list index. """ if self.first: return True return self._compare_group(self.item, self.previous, getter) def last_group(self, getter=None): """ Returns true if this item is the end of a new group, where groups mean that some attribute has changed. The getter can be None (the item itself changes), an attribute name like ``'.attr'``, a function, or a dict key or list index. 
""" if self.last: return True return self._compare_group(self.item, self.__next__, getter) def _compare_group(self, item, other, getter): if getter is None: return item != other elif (isinstance(getter, basestring_) and getter.startswith('.')): getter = getter[1:] if getter.endswith('()'): getter = getter[:-2] return getattr(item, getter)() != getattr(other, getter)() else: return getattr(item, getter) != getattr(other, getter) elif hasattr(getter, '__call__'): return getter(item) != getter(other) else: return item[getter] != other[getter] Cython-0.26.1/Cython/Tempita/compat3.py0000664000175000017500000000160712574327400020427 0ustar stefanstefan00000000000000import sys __all__ = ['b', 'basestring_', 'bytes', 'unicode_', 'next', 'is_unicode'] if sys.version < "3": b = bytes = str basestring_ = basestring unicode_ = unicode else: def b(s): if isinstance(s, str): return s.encode('latin1') return bytes(s) basestring_ = (bytes, str) bytes = bytes unicode_ = str text = str if sys.version < "3": def next(obj): return obj.next() else: next = next if sys.version < "3": def is_unicode(obj): return isinstance(obj, unicode) else: def is_unicode(obj): return isinstance(obj, str) def coerce_text(v): if not isinstance(v, basestring_): if sys.version < "3": attr = '__unicode__' else: attr = '__str__' if hasattr(v, attr): return unicode(v) else: return bytes(v) return v Cython-0.26.1/Cython/Tempita/__init__.py0000664000175000017500000000023012574327400020607 0ustar stefanstefan00000000000000# The original Tempita implements all of its templating code here. # Moved it to _tempita.py to make the compilation portable. from ._tempita import * Cython-0.26.1/Cython/Tempita/_tempita.py0000664000175000017500000011524413143605603020663 0ustar stefanstefan00000000000000""" A small templating language This implements a small templating language. This language implements if/elif/else, for/continue/break, expressions, and blocks of Python code. The syntax is:: {{any expression (function calls etc)}} {{any expression | filter}} {{for x in y}}...{{endfor}} {{if x}}x{{elif y}}y{{else}}z{{endif}} {{py:x=1}} {{py: def foo(bar): return 'baz' }} {{default var = default_value}} {{# comment}} You use this with the ``Template`` class or the ``sub`` shortcut. The ``Template`` class takes the template string and the name of the template (for errors) and a default namespace. Then (like ``string.Template``) you can call the ``tmpl.substitute(**kw)`` method to make a substitution (or ``tmpl.substitute(a_dict)``). ``sub(content, **kw)`` substitutes the template immediately. You can use ``__name='tmpl.html'`` to set the name of the template. If there are syntax errors ``TemplateError`` will be raised. 
""" from __future__ import absolute_import import re import sys import cgi try: from urllib import quote as url_quote except ImportError: # Py3 from urllib.parse import quote as url_quote import os import tokenize from io import StringIO from ._looper import looper from .compat3 import bytes, unicode_, basestring_, next, is_unicode, coerce_text __all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate', 'sub_html', 'html', 'bunch'] in_re = re.compile(r'\s+in\s+') var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I) class TemplateError(Exception): """Exception raised while parsing a template """ def __init__(self, message, position, name=None): Exception.__init__(self, message) self.position = position self.name = name def __str__(self): msg = ' '.join(self.args) if self.position: msg = '%s at line %s column %s' % ( msg, self.position[0], self.position[1]) if self.name: msg += ' in %s' % self.name return msg class _TemplateContinue(Exception): pass class _TemplateBreak(Exception): pass def get_file_template(name, from_template): path = os.path.join(os.path.dirname(from_template.name), name) return from_template.__class__.from_filename( path, namespace=from_template.namespace, get_template=from_template.get_template) class Template(object): default_namespace = { 'start_braces': '{{', 'end_braces': '}}', 'looper': looper, } default_encoding = 'utf8' default_inherit = None def __init__(self, content, name=None, namespace=None, stacklevel=None, get_template=None, default_inherit=None, line_offset=0, delimeters=None): self.content = content # set delimeters if delimeters is None: delimeters = (self.default_namespace['start_braces'], self.default_namespace['end_braces']) else: #assert len(delimeters) == 2 and all([isinstance(delimeter, basestring) # for delimeter in delimeters]) self.default_namespace = self.__class__.default_namespace.copy() self.default_namespace['start_braces'] = delimeters[0] self.default_namespace['end_braces'] = delimeters[1] self.delimeters = delimeters self._unicode = is_unicode(content) if name is None and stacklevel is not None: try: caller = sys._getframe(stacklevel) except ValueError: pass else: globals = caller.f_globals lineno = caller.f_lineno if '__file__' in globals: name = globals['__file__'] if name.endswith('.pyc') or name.endswith('.pyo'): name = name[:-1] elif '__name__' in globals: name = globals['__name__'] else: name = '' if lineno: name += ':%s' % lineno self.name = name self._parsed = parse(content, name=name, line_offset=line_offset, delimeters=self.delimeters) if namespace is None: namespace = {} self.namespace = namespace self.get_template = get_template if default_inherit is not None: self.default_inherit = default_inherit def from_filename(cls, filename, namespace=None, encoding=None, default_inherit=None, get_template=get_file_template): f = open(filename, 'rb') c = f.read() f.close() if encoding: c = c.decode(encoding) return cls(content=c, name=filename, namespace=namespace, default_inherit=default_inherit, get_template=get_template) from_filename = classmethod(from_filename) def __repr__(self): return '<%s %s name=%r>' % ( self.__class__.__name__, hex(id(self))[2:], self.name) def substitute(self, *args, **kw): if args: if kw: raise TypeError( "You can only give positional *or* keyword arguments") if len(args) > 1: raise TypeError( "You can only give one positional argument") if not hasattr(args[0], 'items'): raise TypeError( "If you pass in a single argument, you must pass in a dictionary-like object (with a .items() method); you gave %r" % 
(args[0],)) kw = args[0] ns = kw ns['__template_name__'] = self.name if self.namespace: ns.update(self.namespace) result, defs, inherit = self._interpret(ns) if not inherit: inherit = self.default_inherit if inherit: result = self._interpret_inherit(result, defs, inherit, ns) return result def _interpret(self, ns): __traceback_hide__ = True parts = [] defs = {} self._interpret_codes(self._parsed, ns, out=parts, defs=defs) if '__inherit__' in defs: inherit = defs.pop('__inherit__') else: inherit = None return ''.join(parts), defs, inherit def _interpret_inherit(self, body, defs, inherit_template, ns): __traceback_hide__ = True if not self.get_template: raise TemplateError( 'You cannot use inheritance without passing in get_template', position=None, name=self.name) templ = self.get_template(inherit_template, self) self_ = TemplateObject(self.name) for name, value in defs.items(): setattr(self_, name, value) self_.body = body ns = ns.copy() ns['self'] = self_ return templ.substitute(ns) def _interpret_codes(self, codes, ns, out, defs): __traceback_hide__ = True for item in codes: if isinstance(item, basestring_): out.append(item) else: self._interpret_code(item, ns, out, defs) def _interpret_code(self, code, ns, out, defs): __traceback_hide__ = True name, pos = code[0], code[1] if name == 'py': self._exec(code[2], ns, pos) elif name == 'continue': raise _TemplateContinue() elif name == 'break': raise _TemplateBreak() elif name == 'for': vars, expr, content = code[2], code[3], code[4] expr = self._eval(expr, ns, pos) self._interpret_for(vars, expr, content, ns, out, defs) elif name == 'cond': parts = code[2:] self._interpret_if(parts, ns, out, defs) elif name == 'expr': parts = code[2].split('|') base = self._eval(parts[0], ns, pos) for part in parts[1:]: func = self._eval(part, ns, pos) base = func(base) out.append(self._repr(base, pos)) elif name == 'default': var, expr = code[2], code[3] if var not in ns: result = self._eval(expr, ns, pos) ns[var] = result elif name == 'inherit': expr = code[2] value = self._eval(expr, ns, pos) defs['__inherit__'] = value elif name == 'def': name = code[2] signature = code[3] parts = code[4] ns[name] = defs[name] = TemplateDef(self, name, signature, body=parts, ns=ns, pos=pos) elif name == 'comment': return else: assert 0, "Unknown code: %r" % name def _interpret_for(self, vars, expr, content, ns, out, defs): __traceback_hide__ = True for item in expr: if len(vars) == 1: ns[vars[0]] = item else: if len(vars) != len(item): raise ValueError( 'Need %i items to unpack (got %i items)' % (len(vars), len(item))) for name, value in zip(vars, item): ns[name] = value try: self._interpret_codes(content, ns, out, defs) except _TemplateContinue: continue except _TemplateBreak: break def _interpret_if(self, parts, ns, out, defs): __traceback_hide__ = True # @@: if/else/else gets through for part in parts: assert not isinstance(part, basestring_) name, pos = part[0], part[1] if name == 'else': result = True else: result = self._eval(part[2], ns, pos) if result: self._interpret_codes(part[3], ns, out, defs) break def _eval(self, code, ns, pos): __traceback_hide__ = True try: try: value = eval(code, self.default_namespace, ns) except SyntaxError as e: raise SyntaxError( 'invalid syntax in expression: %s' % code) return value except Exception as e: if getattr(e, 'args', None): arg0 = e.args[0] else: arg0 = coerce_text(e) e.args = (self._add_line_info(arg0, pos),) raise def _exec(self, code, ns, pos): __traceback_hide__ = True try: exec(code, self.default_namespace, ns) 
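# --- illustrative sketch, not part of the original file -------------------
# Putting the pieces above together: sub() builds a Template and substitutes
# in one call, while Template objects can be reused with different
# namespaces (passed either as keyword arguments or as a single dict).
# Both honour the {{expr}} and {{for}}/{{endfor}} forms described in the
# module docstring:
assert sub('Hello {{name}}!', name='world') == 'Hello world!'
assert sub('{{for x in nums}}[{{x}}]{{endfor}}', nums=[1, 2, 3]) == '[1][2][3]'

tmpl = Template('{{greeting}}, {{name}}!')
assert tmpl.substitute(greeting='Hi', name='Ada') == 'Hi, Ada!'
assert tmpl.substitute({'greeting': 'Hello', 'name': 'Grace'}) == 'Hello, Grace!'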
except Exception as e: if e.args: e.args = (self._add_line_info(e.args[0], pos),) else: e.args = (self._add_line_info(None, pos),) raise def _repr(self, value, pos): __traceback_hide__ = True try: if value is None: return '' if self._unicode: try: value = unicode_(value) except UnicodeDecodeError: value = bytes(value) else: if not isinstance(value, basestring_): value = coerce_text(value) if (is_unicode(value) and self.default_encoding): value = value.encode(self.default_encoding) except Exception as e: e.args = (self._add_line_info(e.args[0], pos),) raise else: if self._unicode and isinstance(value, bytes): if not self.default_encoding: raise UnicodeDecodeError( 'Cannot decode bytes value %r into unicode ' '(no default_encoding provided)' % value) try: value = value.decode(self.default_encoding) except UnicodeDecodeError as e: raise UnicodeDecodeError( e.encoding, e.object, e.start, e.end, e.reason + ' in string %r' % value) elif not self._unicode and is_unicode(value): if not self.default_encoding: raise UnicodeEncodeError( 'Cannot encode unicode value %r into bytes ' '(no default_encoding provided)' % value) value = value.encode(self.default_encoding) return value def _add_line_info(self, msg, pos): msg = "%s at line %s column %s" % ( msg, pos[0], pos[1]) if self.name: msg += " in file %s" % self.name return msg def sub(content, delimeters=None, **kw): name = kw.get('__name') tmpl = Template(content, name=name, delimeters=delimeters) return tmpl.substitute(kw) def paste_script_template_renderer(content, vars, filename=None): tmpl = Template(content, name=filename) return tmpl.substitute(vars) class bunch(dict): def __init__(self, **kw): for name, value in kw.items(): setattr(self, name, value) def __setattr__(self, name, value): self[name] = value def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) def __getitem__(self, key): if 'default' in self: try: return dict.__getitem__(self, key) except KeyError: return dict.__getitem__(self, 'default') else: return dict.__getitem__(self, key) def __repr__(self): return '<%s %s>' % ( self.__class__.__name__, ' '.join(['%s=%r' % (k, v) for k, v in sorted(self.items())])) ############################################################ ## HTML Templating ############################################################ class html(object): def __init__(self, value): self.value = value def __str__(self): return self.value def __html__(self): return self.value def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self.value) def html_quote(value, force=True): if not force and hasattr(value, '__html__'): return value.__html__() if value is None: return '' if not isinstance(value, basestring_): value = coerce_text(value) if sys.version >= "3" and isinstance(value, bytes): value = cgi.escape(value.decode('latin1'), 1) value = value.encode('latin1') else: value = cgi.escape(value, 1) if sys.version < "3": if is_unicode(value): value = value.encode('ascii', 'xmlcharrefreplace') return value def url(v): v = coerce_text(v) if is_unicode(v): v = v.encode('utf8') return url_quote(v) def attr(**kw): parts = [] for name, value in sorted(kw.items()): if value is None: continue if name.endswith('_'): name = name[:-1] parts.append('%s="%s"' % (html_quote(name), html_quote(value))) return html(' '.join(parts)) class HTMLTemplate(Template): default_namespace = Template.default_namespace.copy() default_namespace.update(dict( html=html, attr=attr, url=url, html_quote=html_quote, )) def _repr(self, value, pos): if 
hasattr(value, '__html__'): value = value.__html__() quote = False else: quote = True plain = Template._repr(self, value, pos) if quote: return html_quote(plain) else: return plain def sub_html(content, **kw): name = kw.get('__name') tmpl = HTMLTemplate(content, name=name) return tmpl.substitute(kw) class TemplateDef(object): def __init__(self, template, func_name, func_signature, body, ns, pos, bound_self=None): self._template = template self._func_name = func_name self._func_signature = func_signature self._body = body self._ns = ns self._pos = pos self._bound_self = bound_self def __repr__(self): return '' % ( self._func_name, self._func_signature, self._template.name, self._pos) def __str__(self): return self() def __call__(self, *args, **kw): values = self._parse_signature(args, kw) ns = self._ns.copy() ns.update(values) if self._bound_self is not None: ns['self'] = self._bound_self out = [] subdefs = {} self._template._interpret_codes(self._body, ns, out, subdefs) return ''.join(out) def __get__(self, obj, type=None): if obj is None: return self return self.__class__( self._template, self._func_name, self._func_signature, self._body, self._ns, self._pos, bound_self=obj) def _parse_signature(self, args, kw): values = {} sig_args, var_args, var_kw, defaults = self._func_signature extra_kw = {} for name, value in kw.items(): if not var_kw and name not in sig_args: raise TypeError( 'Unexpected argument %s' % name) if name in sig_args: values[sig_args] = value else: extra_kw[name] = value args = list(args) sig_args = list(sig_args) while args: while sig_args and sig_args[0] in values: sig_args.pop(0) if sig_args: name = sig_args.pop(0) values[name] = args.pop(0) elif var_args: values[var_args] = tuple(args) break else: raise TypeError( 'Extra position arguments: %s' % ', '.join([repr(v) for v in args])) for name, value_expr in defaults.items(): if name not in values: values[name] = self._template._eval( value_expr, self._ns, self._pos) for name in sig_args: if name not in values: raise TypeError( 'Missing argument: %s' % name) if var_kw: values[var_kw] = extra_kw return values class TemplateObject(object): def __init__(self, name): self.__name = name self.get = TemplateObjectGetter(self) def __repr__(self): return '<%s %s>' % (self.__class__.__name__, self.__name) class TemplateObjectGetter(object): def __init__(self, template_obj): self.__template_obj = template_obj def __getattr__(self, attr): return getattr(self.__template_obj, attr, Empty) def __repr__(self): return '<%s around %r>' % (self.__class__.__name__, self.__template_obj) class _Empty(object): def __call__(self, *args, **kw): return self def __str__(self): return '' def __repr__(self): return 'Empty' def __unicode__(self): return u'' def __iter__(self): return iter(()) def __bool__(self): return False if sys.version < "3": __nonzero__ = __bool__ Empty = _Empty() del _Empty ############################################################ ## Lexing and Parsing ############################################################ def lex(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None): """ Lex a string into chunks: >>> lex('hey') ['hey'] >>> lex('hey {{you}}') ['hey ', ('you', (1, 7))] >>> lex('hey {{') Traceback (most recent call last): ... TemplateError: No }} to finish last expression at line 1 column 7 >>> lex('hey }}') Traceback (most recent call last): ... TemplateError: }} outside expression at line 1 column 7 >>> lex('hey {{ {{') Traceback (most recent call last): ... 
TemplateError: {{ inside expression at line 1 column 10 """ if delimeters is None: delimeters = ( Template.default_namespace['start_braces'], Template.default_namespace['end_braces'] ) in_expr = False chunks = [] last = 0 last_pos = (line_offset + 1, 1) token_re = re.compile(r'%s|%s' % (re.escape(delimeters[0]), re.escape(delimeters[1]))) for match in token_re.finditer(s): expr = match.group(0) pos = find_position(s, match.end(), last, last_pos) if expr == delimeters[0] and in_expr: raise TemplateError('%s inside expression' % delimeters[0], position=pos, name=name) elif expr == delimeters[1] and not in_expr: raise TemplateError('%s outside expression' % delimeters[1], position=pos, name=name) if expr == delimeters[0]: part = s[last:match.start()] if part: chunks.append(part) in_expr = True else: chunks.append((s[last:match.start()], last_pos)) in_expr = False last = match.end() last_pos = pos if in_expr: raise TemplateError('No %s to finish last expression' % delimeters[1], name=name, position=last_pos) part = s[last:] if part: chunks.append(part) if trim_whitespace: chunks = trim_lex(chunks) return chunks statement_re = re.compile(r'^(?:if |elif |for |def |inherit |default |py:)') single_statements = ['else', 'endif', 'endfor', 'enddef', 'continue', 'break'] trail_whitespace_re = re.compile(r'\n\r?[\t ]*$') lead_whitespace_re = re.compile(r'^[\t ]*\n') def trim_lex(tokens): r""" Takes a lexed set of tokens, and removes whitespace when there is a directive on a line by itself: >>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False) >>> tokens [('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny'] >>> trim_lex(tokens) [('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y'] """ last_trim = None for i, current in enumerate(tokens): if isinstance(current, basestring_): # we don't trim this continue item = current[0] if not statement_re.search(item) and item not in single_statements: continue if not i: prev = '' else: prev = tokens[i - 1] if i + 1 >= len(tokens): next_chunk = '' else: next_chunk = tokens[i + 1] if (not isinstance(next_chunk, basestring_) or not isinstance(prev, basestring_)): continue prev_ok = not prev or trail_whitespace_re.search(prev) if i == 1 and not prev.strip(): prev_ok = True if last_trim is not None and last_trim + 2 == i and not prev.strip(): prev_ok = 'last' if (prev_ok and (not next_chunk or lead_whitespace_re.search(next_chunk) or (i == len(tokens) - 2 and not next_chunk.strip()))): if prev: if ((i == 1 and not prev.strip()) or prev_ok == 'last'): tokens[i - 1] = '' else: m = trail_whitespace_re.search(prev) # +1 to leave the leading \n on: prev = prev[:m.start() + 1] tokens[i - 1] = prev if next_chunk: last_trim = i if i == len(tokens) - 2 and not next_chunk.strip(): tokens[i + 1] = '' else: m = lead_whitespace_re.search(next_chunk) next_chunk = next_chunk[m.end():] tokens[i + 1] = next_chunk return tokens def find_position(string, index, last_index, last_pos): """Given a string and index, return (line, column)""" lines = string.count('\n', last_index, index) if lines > 0: column = index - string.rfind('\n', last_index, index) else: column = last_pos[1] + (index - last_index) return (last_pos[0] + lines, column) def parse(s, name=None, line_offset=0, delimeters=None): r""" Parses a string into a kind of AST >>> parse('{{x}}') [('expr', (1, 3), 'x')] >>> parse('foo') ['foo'] >>> parse('{{if x}}test{{endif}}') [('cond', (1, 3), ('if', (1, 3), 'x', ['test']))] >>> parse('series->{{for x in y}}x={{x}}{{endfor}}') ['series->', ('for', (1, 11), ('x',), 'y', 
['x=', ('expr', (1, 27), 'x')])] >>> parse('{{for x, y in z:}}{{continue}}{{endfor}}') [('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])] >>> parse('{{py:x=1}}') [('py', (1, 3), 'x=1')] >>> parse('{{if x}}a{{elif y}}b{{else}}c{{endif}}') [('cond', (1, 3), ('if', (1, 3), 'x', ['a']), ('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))] Some exceptions:: >>> parse('{{continue}}') Traceback (most recent call last): ... TemplateError: continue outside of for loop at line 1 column 3 >>> parse('{{if x}}foo') Traceback (most recent call last): ... TemplateError: No {{endif}} at line 1 column 3 >>> parse('{{else}}') Traceback (most recent call last): ... TemplateError: else outside of an if block at line 1 column 3 >>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}') Traceback (most recent call last): ... TemplateError: Unexpected endif at line 1 column 25 >>> parse('{{if}}{{endif}}') Traceback (most recent call last): ... TemplateError: if with no expression at line 1 column 3 >>> parse('{{for x y}}{{endfor}}') Traceback (most recent call last): ... TemplateError: Bad for (no "in") in 'x y' at line 1 column 3 >>> parse('{{py:x=1\ny=2}}') Traceback (most recent call last): ... TemplateError: Multi-line py blocks must start with a newline at line 1 column 3 """ if delimeters is None: delimeters = ( Template.default_namespace['start_braces'], Template.default_namespace['end_braces'] ) tokens = lex(s, name=name, line_offset=line_offset, delimeters=delimeters) result = [] while tokens: next_chunk, tokens = parse_expr(tokens, name) result.append(next_chunk) return result def parse_expr(tokens, name, context=()): if isinstance(tokens[0], basestring_): return tokens[0], tokens[1:] expr, pos = tokens[0] expr = expr.strip() if expr.startswith('py:'): expr = expr[3:].lstrip(' \t') if expr.startswith('\n') or expr.startswith('\r'): expr = expr.lstrip('\r\n') if '\r' in expr: expr = expr.replace('\r\n', '\n') expr = expr.replace('\r', '') expr += '\n' else: if '\n' in expr: raise TemplateError( 'Multi-line py blocks must start with a newline', position=pos, name=name) return ('py', pos, expr), tokens[1:] elif expr in ('continue', 'break'): if 'for' not in context: raise TemplateError( 'continue outside of for loop', position=pos, name=name) return (expr, pos), tokens[1:] elif expr.startswith('if '): return parse_cond(tokens, name, context) elif (expr.startswith('elif ') or expr == 'else'): raise TemplateError( '%s outside of an if block' % expr.split()[0], position=pos, name=name) elif expr in ('if', 'elif', 'for'): raise TemplateError( '%s with no expression' % expr, position=pos, name=name) elif expr in ('endif', 'endfor', 'enddef'): raise TemplateError( 'Unexpected %s' % expr, position=pos, name=name) elif expr.startswith('for '): return parse_for(tokens, name, context) elif expr.startswith('default '): return parse_default(tokens, name, context) elif expr.startswith('inherit '): return parse_inherit(tokens, name, context) elif expr.startswith('def '): return parse_def(tokens, name, context) elif expr.startswith('#'): return ('comment', pos, tokens[0][0]), tokens[1:] return ('expr', pos, tokens[0][0]), tokens[1:] def parse_cond(tokens, name, context): start = tokens[0][1] pieces = [] context = context + ('if',) while 1: if not tokens: raise TemplateError( 'Missing {{endif}}', position=start, name=name) if (isinstance(tokens[0], tuple) and tokens[0][0] == 'endif'): return ('cond', start) + tuple(pieces), tokens[1:] next_chunk, tokens = parse_one_cond(tokens, name, context) 
pieces.append(next_chunk) def parse_one_cond(tokens, name, context): (first, pos), tokens = tokens[0], tokens[1:] content = [] if first.endswith(':'): first = first[:-1] if first.startswith('if '): part = ('if', pos, first[3:].lstrip(), content) elif first.startswith('elif '): part = ('elif', pos, first[5:].lstrip(), content) elif first == 'else': part = ('else', pos, None, content) else: assert 0, "Unexpected token %r at %s" % (first, pos) while 1: if not tokens: raise TemplateError( 'No {{endif}}', position=pos, name=name) if (isinstance(tokens[0], tuple) and (tokens[0][0] == 'endif' or tokens[0][0].startswith('elif ') or tokens[0][0] == 'else')): return part, tokens next_chunk, tokens = parse_expr(tokens, name, context) content.append(next_chunk) def parse_for(tokens, name, context): first, pos = tokens[0] tokens = tokens[1:] context = ('for',) + context content = [] assert first.startswith('for ') if first.endswith(':'): first = first[:-1] first = first[3:].strip() match = in_re.search(first) if not match: raise TemplateError( 'Bad for (no "in") in %r' % first, position=pos, name=name) vars = first[:match.start()] if '(' in vars: raise TemplateError( 'You cannot have () in the variable section of a for loop (%r)' % vars, position=pos, name=name) vars = tuple([ v.strip() for v in first[:match.start()].split(',') if v.strip()]) expr = first[match.end():] while 1: if not tokens: raise TemplateError( 'No {{endfor}}', position=pos, name=name) if (isinstance(tokens[0], tuple) and tokens[0][0] == 'endfor'): return ('for', pos, vars, expr, content), tokens[1:] next_chunk, tokens = parse_expr(tokens, name, context) content.append(next_chunk) def parse_default(tokens, name, context): first, pos = tokens[0] assert first.startswith('default ') first = first.split(None, 1)[1] parts = first.split('=', 1) if len(parts) == 1: raise TemplateError( "Expression must be {{default var=value}}; no = found in %r" % first, position=pos, name=name) var = parts[0].strip() if ',' in var: raise TemplateError( "{{default x, y = ...}} is not supported", position=pos, name=name) if not var_re.search(var): raise TemplateError( "Not a valid variable name for {{default}}: %r" % var, position=pos, name=name) expr = parts[1].strip() return ('default', pos, var, expr), tokens[1:] def parse_inherit(tokens, name, context): first, pos = tokens[0] assert first.startswith('inherit ') expr = first.split(None, 1)[1] return ('inherit', pos, expr), tokens[1:] def parse_def(tokens, name, context): first, start = tokens[0] tokens = tokens[1:] assert first.startswith('def ') first = first.split(None, 1)[1] if first.endswith(':'): first = first[:-1] if '(' not in first: func_name = first sig = ((), None, None, {}) elif not first.endswith(')'): raise TemplateError("Function definition doesn't end with ): %s" % first, position=start, name=name) else: first = first[:-1] func_name, sig_text = first.split('(', 1) sig = parse_signature(sig_text, name, start) context = context + ('def',) content = [] while 1: if not tokens: raise TemplateError( 'Missing {{enddef}}', position=start, name=name) if (isinstance(tokens[0], tuple) and tokens[0][0] == 'enddef'): return ('def', start, func_name, sig, content), tokens[1:] next_chunk, tokens = parse_expr(tokens, name, context) content.append(next_chunk) def parse_signature(sig_text, name, pos): tokens = tokenize.generate_tokens(StringIO(sig_text).readline) sig_args = [] var_arg = None var_kw = None defaults = {} def get_token(pos=False): try: tok_type, tok_string, (srow, scol), (erow, ecol), line = 
next(tokens) except StopIteration: return tokenize.ENDMARKER, '' if pos: return tok_type, tok_string, (srow, scol), (erow, ecol) else: return tok_type, tok_string while 1: var_arg_type = None tok_type, tok_string = get_token() if tok_type == tokenize.ENDMARKER: break if tok_type == tokenize.OP and (tok_string == '*' or tok_string == '**'): var_arg_type = tok_string tok_type, tok_string = get_token() if tok_type != tokenize.NAME: raise TemplateError('Invalid signature: (%s)' % sig_text, position=pos, name=name) var_name = tok_string tok_type, tok_string = get_token() if tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','): if var_arg_type == '*': var_arg = var_name elif var_arg_type == '**': var_kw = var_name else: sig_args.append(var_name) if tok_type == tokenize.ENDMARKER: break continue if var_arg_type is not None: raise TemplateError('Invalid signature: (%s)' % sig_text, position=pos, name=name) if tok_type == tokenize.OP and tok_string == '=': nest_type = None unnest_type = None nest_count = 0 start_pos = end_pos = None parts = [] while 1: tok_type, tok_string, s, e = get_token(True) if start_pos is None: start_pos = s end_pos = e if tok_type == tokenize.ENDMARKER and nest_count: raise TemplateError('Invalid signature: (%s)' % sig_text, position=pos, name=name) if (not nest_count and (tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))): default_expr = isolate_expression(sig_text, start_pos, end_pos) defaults[var_name] = default_expr sig_args.append(var_name) break parts.append((tok_type, tok_string)) if nest_count and tok_type == tokenize.OP and tok_string == nest_type: nest_count += 1 elif nest_count and tok_type == tokenize.OP and tok_string == unnest_type: nest_count -= 1 if not nest_count: nest_type = unnest_type = None elif not nest_count and tok_type == tokenize.OP and tok_string in ('(', '[', '{'): nest_type = tok_string nest_count = 1 unnest_type = {'(': ')', '[': ']', '{': '}'}[nest_type] return sig_args, var_arg, var_kw, defaults def isolate_expression(string, start_pos, end_pos): srow, scol = start_pos srow -= 1 erow, ecol = end_pos erow -= 1 lines = string.splitlines(True) if srow == erow: return lines[srow][scol:ecol] parts = [lines[srow][scol:]] parts.extend(lines[srow+1:erow]) if erow < len(lines): # It'll sometimes give (end_row_past_finish, 0) parts.append(lines[erow][:ecol]) return ''.join(parts) _fill_command_usage = """\ %prog [OPTIONS] TEMPLATE arg=value Use py:arg=value to set a Python value; otherwise all values are strings. 
""" def fill_command(args=None): import sys import optparse import pkg_resources import os if args is None: args = sys.argv[1:] dist = pkg_resources.get_distribution('Paste') parser = optparse.OptionParser( version=coerce_text(dist), usage=_fill_command_usage) parser.add_option( '-o', '--output', dest='output', metavar="FILENAME", help="File to write output to (default stdout)") parser.add_option( '--html', dest='use_html', action='store_true', help="Use HTML style filling (including automatic HTML quoting)") parser.add_option( '--env', dest='use_env', action='store_true', help="Put the environment in as top-level variables") options, args = parser.parse_args(args) if len(args) < 1: print('You must give a template filename') sys.exit(2) template_name = args[0] args = args[1:] vars = {} if options.use_env: vars.update(os.environ) for value in args: if '=' not in value: print('Bad argument: %r' % value) sys.exit(2) name, value = value.split('=', 1) if name.startswith('py:'): name = name[:3] value = eval(value) vars[name] = value if template_name == '-': template_content = sys.stdin.read() template_name = '' else: f = open(template_name, 'rb') template_content = f.read() f.close() if options.use_html: TemplateClass = HTMLTemplate else: TemplateClass = Template template = TemplateClass(template_content, name=template_name) result = template.substitute(vars) if options.output: f = open(options.output, 'wb') f.write(result) f.close() else: sys.stdout.write(result) if __name__ == '__main__': fill_command() Cython-0.26.1/Cython/CodeWriter.py0000664000175000017500000003561613143605603017531 0ustar stefanstefan00000000000000""" Serializes a Cython code tree to Cython code. This is primarily useful for debugging and testing purposes. The output is in a strict format, no whitespace or comments from the input is preserved (and it could not be as it is not present in the code tree). 
""" from __future__ import absolute_import, print_function from .Compiler.Visitor import TreeVisitor from .Compiler.ExprNodes import * class LinesResult(object): def __init__(self): self.lines = [] self.s = u"" def put(self, s): self.s += s def newline(self): self.lines.append(self.s) self.s = u"" def putline(self, s): self.put(s) self.newline() class DeclarationWriter(TreeVisitor): indent_string = u" " def __init__(self, result=None): super(DeclarationWriter, self).__init__() if result is None: result = LinesResult() self.result = result self.numindents = 0 self.tempnames = {} self.tempblockindex = 0 def write(self, tree): self.visit(tree) return self.result def indent(self): self.numindents += 1 def dedent(self): self.numindents -= 1 def startline(self, s=u""): self.result.put(self.indent_string * self.numindents + s) def put(self, s): self.result.put(s) def putline(self, s): self.result.putline(self.indent_string * self.numindents + s) def endline(self, s=u""): self.result.putline(s) def line(self, s): self.startline(s) self.endline() def comma_separated_list(self, items, output_rhs=False): if len(items) > 0: for item in items[:-1]: self.visit(item) if output_rhs and item.default is not None: self.put(u" = ") self.visit(item.default) self.put(u", ") self.visit(items[-1]) def visit_Node(self, node): raise AssertionError("Node not handled by serializer: %r" % node) def visit_ModuleNode(self, node): self.visitchildren(node) def visit_StatListNode(self, node): self.visitchildren(node) def visit_CDefExternNode(self, node): if node.include_file is None: file = u'*' else: file = u'"%s"' % node.include_file self.putline(u"cdef extern from %s:" % file) self.indent() self.visit(node.body) self.dedent() def visit_CPtrDeclaratorNode(self, node): self.put('*') self.visit(node.base) def visit_CReferenceDeclaratorNode(self, node): self.put('&') self.visit(node.base) def visit_CArrayDeclaratorNode(self, node): self.visit(node.base) self.put(u'[') if node.dimension is not None: self.visit(node.dimension) self.put(u']') def visit_CArrayDeclaratorNode(self, node): self.visit(node.base) self.put(u'[') if node.dimension is not None: self.visit(node.dimension) self.put(u']') def visit_CFuncDeclaratorNode(self, node): # TODO: except, gil, etc. 
self.visit(node.base) self.put(u'(') self.comma_separated_list(node.args) self.endline(u')') def visit_CNameDeclaratorNode(self, node): self.put(node.name) def visit_CSimpleBaseTypeNode(self, node): # See Parsing.p_sign_and_longness if node.is_basic_c_type: self.put(("unsigned ", "", "signed ")[node.signed]) if node.longness < 0: self.put("short " * -node.longness) elif node.longness > 0: self.put("long " * node.longness) self.put(node.name) def visit_CComplexBaseTypeNode(self, node): self.put(u'(') self.visit(node.base_type) self.visit(node.declarator) self.put(u')') def visit_CNestedBaseTypeNode(self, node): self.visit(node.base_type) self.put(u'.') self.put(node.name) def visit_TemplatedTypeNode(self, node): self.visit(node.base_type_node) self.put(u'[') self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs) self.put(u']') def visit_CVarDefNode(self, node): self.startline(u"cdef ") self.visit(node.base_type) self.put(u" ") self.comma_separated_list(node.declarators, output_rhs=True) self.endline() def visit_container_node(self, node, decl, extras, attributes): # TODO: visibility self.startline(decl) if node.name: self.put(u' ') self.put(node.name) if node.cname is not None: self.put(u' "%s"' % node.cname) if extras: self.put(extras) self.endline(':') self.indent() if not attributes: self.putline('pass') else: for attribute in attributes: self.visit(attribute) self.dedent() def visit_CStructOrUnionDefNode(self, node): if node.typedef_flag: decl = u'ctypedef ' else: decl = u'cdef ' if node.visibility == 'public': decl += u'public ' if node.packed: decl += u'packed ' decl += node.kind self.visit_container_node(node, decl, None, node.attributes) def visit_CppClassNode(self, node): extras = "" if node.templates: extras = u"[%s]" % ", ".join(node.templates) if node.base_classes: extras += "(%s)" % ", ".join(node.base_classes) self.visit_container_node(node, u"cdef cppclass", extras, node.attributes) def visit_CEnumDefNode(self, node): self.visit_container_node(node, u"cdef enum", None, node.items) def visit_CEnumDefItemNode(self, node): self.startline(node.name) if node.cname: self.put(u' "%s"' % node.cname) if node.value: self.put(u" = ") self.visit(node.value) self.endline() def visit_CClassDefNode(self, node): assert not node.module_name if node.decorators: for decorator in node.decorators: self.visit(decorator) self.startline(u"cdef class ") self.put(node.class_name) if node.base_class_name: self.put(u"(") if node.base_class_module: self.put(node.base_class_module) self.put(u".") self.put(node.base_class_name) self.put(u")") self.endline(u":") self.indent() self.visit(node.body) self.dedent() def visit_CTypeDefNode(self, node): self.startline(u"ctypedef ") self.visit(node.base_type) self.put(u" ") self.visit(node.declarator) self.endline() def visit_FuncDefNode(self, node): self.startline(u"def %s(" % node.name) self.comma_separated_list(node.args) self.endline(u"):") self.indent() self.visit(node.body) self.dedent() def visit_CArgDeclNode(self, node): if node.base_type.name is not None: self.visit(node.base_type) self.put(u" ") self.visit(node.declarator) if node.default is not None: self.put(u" = ") self.visit(node.default) def visit_CImportStatNode(self, node): self.startline(u"cimport ") self.put(node.module_name) if node.as_name: self.put(u" as ") self.put(node.as_name) self.endline() def visit_FromCImportStatNode(self, node): self.startline(u"from ") self.put(node.module_name) self.put(u" cimport ") first = True for pos, name, as_name, kind in 
node.imported_names: assert kind is None if first: first = False else: self.put(u", ") self.put(name) if as_name: self.put(u" as ") self.put(as_name) self.endline() def visit_NameNode(self, node): self.put(node.name) def visit_IntNode(self, node): self.put(node.value) def visit_NoneNode(self, node): self.put(u"None") def visit_NotNode(self, node): self.put(u"(not ") self.visit(node.operand) self.put(u")") def visit_DecoratorNode(self, node): self.startline("@") self.visit(node.decorator) self.endline() def visit_BinopNode(self, node): self.visit(node.operand1) self.put(u" %s " % node.operator) self.visit(node.operand2) def visit_AttributeNode(self, node): self.visit(node.obj) self.put(u".%s" % node.attribute) def visit_BoolNode(self, node): self.put(str(node.value)) # FIXME: represent string nodes correctly def visit_StringNode(self, node): value = node.value if value.encoding is not None: value = value.encode(value.encoding) self.put(repr(value)) def visit_PassStatNode(self, node): self.startline(u"pass") self.endline() class CodeWriter(DeclarationWriter): def visit_SingleAssignmentNode(self, node): self.startline() self.visit(node.lhs) self.put(u" = ") self.visit(node.rhs) self.endline() def visit_CascadedAssignmentNode(self, node): self.startline() for lhs in node.lhs_list: self.visit(lhs) self.put(u" = ") self.visit(node.rhs) self.endline() def visit_PrintStatNode(self, node): self.startline(u"print ") self.comma_separated_list(node.arg_tuple.args) if not node.append_newline: self.put(u",") self.endline() def visit_ForInStatNode(self, node): self.startline(u"for ") self.visit(node.target) self.put(u" in ") self.visit(node.iterator.sequence) self.endline(u":") self.indent() self.visit(node.body) self.dedent() if node.else_clause is not None: self.line(u"else:") self.indent() self.visit(node.else_clause) self.dedent() def visit_IfStatNode(self, node): # The IfClauseNode is handled directly without a seperate match # for clariy. self.startline(u"if ") self.visit(node.if_clauses[0].condition) self.endline(":") self.indent() self.visit(node.if_clauses[0].body) self.dedent() for clause in node.if_clauses[1:]: self.startline("elif ") self.visit(clause.condition) self.endline(":") self.indent() self.visit(clause.body) self.dedent() if node.else_clause is not None: self.line("else:") self.indent() self.visit(node.else_clause) self.dedent() def visit_SequenceNode(self, node): self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm... 
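    # Illustrative addition (not part of the original file): statement nodes
    # without children serialize with the same startline/endline helpers used
    # by the surrounding methods; BreakStatNode/ContinueStatNode are assumed
    # here to carry no extra state that needs writing.
    def visit_BreakStatNode(self, node):
        self.startline(u"break")
        self.endline()

    def visit_ContinueStatNode(self, node):
        self.startline(u"continue")
        self.endline()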
def visit_SimpleCallNode(self, node): self.visit(node.function) self.put(u"(") self.comma_separated_list(node.args) self.put(")") def visit_GeneralCallNode(self, node): self.visit(node.function) self.put(u"(") posarg = node.positional_args if isinstance(posarg, AsTupleNode): self.visit(posarg.arg) else: self.comma_separated_list(posarg.args) # TupleNode.args if node.keyword_args: if isinstance(node.keyword_args, DictNode): for i, (name, value) in enumerate(node.keyword_args.key_value_pairs): if i > 0: self.put(', ') self.visit(name) self.put('=') self.visit(value) else: raise Exception("Not implemented yet") self.put(u")") def visit_ExprStatNode(self, node): self.startline() self.visit(node.expr) self.endline() def visit_InPlaceAssignmentNode(self, node): self.startline() self.visit(node.lhs) self.put(u" %s= " % node.operator) self.visit(node.rhs) self.endline() def visit_WithStatNode(self, node): self.startline() self.put(u"with ") self.visit(node.manager) if node.target is not None: self.put(u" as ") self.visit(node.target) self.endline(u":") self.indent() self.visit(node.body) self.dedent() def visit_TryFinallyStatNode(self, node): self.line(u"try:") self.indent() self.visit(node.body) self.dedent() self.line(u"finally:") self.indent() self.visit(node.finally_clause) self.dedent() def visit_TryExceptStatNode(self, node): self.line(u"try:") self.indent() self.visit(node.body) self.dedent() for x in node.except_clauses: self.visit(x) if node.else_clause is not None: self.visit(node.else_clause) def visit_ExceptClauseNode(self, node): self.startline(u"except") if node.pattern is not None: self.put(u" ") self.visit(node.pattern) if node.target is not None: self.put(u", ") self.visit(node.target) self.endline(":") self.indent() self.visit(node.body) self.dedent() def visit_ReturnStatNode(self, node): self.startline("return ") self.visit(node.value) self.endline() def visit_ReraiseStatNode(self, node): self.line("raise") def visit_ImportNode(self, node): self.put(u"(import %s)" % node.module_name.value) def visit_TempsBlockNode(self, node): """ Temporaries are output like $1_1', where the first number is an index of the TempsBlockNode and the second number is an index of the temporary which that block allocates. 
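For example, given the zero-based counters used below, the first temporary of
the first block visited is written as $0_0, its second temporary as $0_1, and
the first temporary of the next block as $1_0.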
""" idx = 0 for handle in node.temps: self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx) idx += 1 self.tempblockindex += 1 self.visit(node.body) def visit_TempRefNode(self, node): self.put(self.tempnames[node.handle]) class PxdWriter(DeclarationWriter): def __call__(self, node): print(u'\n'.join(self.write(node).lines)) return node def visit_CFuncDefNode(self, node): if 'inline' in node.modifiers: return if node.overridable: self.startline(u'cpdef ') else: self.startline(u'cdef ') if node.visibility != 'private': self.put(node.visibility) self.put(u' ') if node.api: self.put(u'api ') self.visit(node.declarator) def visit_StatNode(self, node): pass Cython-0.26.1/Cython/Utils.py0000664000175000017500000003411513143605603016553 0ustar stefanstefan00000000000000# # Cython -- Things that don't belong # anywhere else in particular # from __future__ import absolute_import try: from __builtin__ import basestring except ImportError: basestring = str import os import sys import re import io import codecs import shutil from contextlib import contextmanager modification_time = os.path.getmtime def cached_function(f): cache = {} uncomputed = object() def wrapper(*args): res = cache.get(args, uncomputed) if res is uncomputed: res = cache[args] = f(*args) return res wrapper.uncached = f return wrapper def cached_method(f): cache_name = '__%s_cache' % f.__name__ def wrapper(self, *args): cache = getattr(self, cache_name, None) if cache is None: cache = {} setattr(self, cache_name, cache) if args in cache: return cache[args] res = cache[args] = f(self, *args) return res return wrapper def replace_suffix(path, newsuf): base, _ = os.path.splitext(path) return base + newsuf def open_new_file(path): if os.path.exists(path): # Make sure to create a new file here so we can # safely hard link the output files. os.unlink(path) # we use the ISO-8859-1 encoding here because we only write pure # ASCII strings or (e.g. for file names) byte encoded strings as # Unicode, so we need a direct mapping from the first 256 Unicode # characters to a byte sequence, which ISO-8859-1 provides # note: can't use io.open() in Py2 as we may be writing str objects return codecs.open(path, "w", encoding="ISO-8859-1") def castrate_file(path, st): # Remove junk contents from an output file after a # failed compilation. # Also sets access and modification times back to # those specified by st (a stat struct). try: f = open_new_file(path) except EnvironmentError: pass else: f.write( "#error Do not use this file, it is the result of a failed Cython compilation.\n") f.close() if st: os.utime(path, (st.st_atime, st.st_mtime-1)) def file_newer_than(path, time): ftime = modification_time(path) return ftime > time def safe_makedirs(path): try: os.makedirs(path) except OSError: if not os.path.isdir(path): raise def copy_file_to_dir_if_newer(sourcefile, destdir): """ Copy file sourcefile to directory destdir (creating it if needed), preserving metadata. If the destination file exists and is not older than the source file, the copying is skipped. 
""" destfile = os.path.join(destdir, os.path.basename(sourcefile)) try: desttime = modification_time(destfile) except OSError: # New file does not exist, destdir may or may not exist safe_makedirs(destdir) else: # New file already exists if not file_newer_than(sourcefile, desttime): return shutil.copy2(sourcefile, destfile) @cached_function def search_include_directories(dirs, qualified_name, suffix, pos, include=False, sys_path=False): # Search the list of include directories for the given # file name. If a source file position is given, first # searches the directory containing that file. Returns # None if not found, but does not report an error. # The 'include' option will disable package dereferencing. # If 'sys_path' is True, also search sys.path. if sys_path: dirs = dirs + tuple(sys.path) if pos: file_desc = pos[0] from Cython.Compiler.Scanning import FileSourceDescriptor if not isinstance(file_desc, FileSourceDescriptor): raise RuntimeError("Only file sources for code supported") if include: dirs = (os.path.dirname(file_desc.filename),) + dirs else: dirs = (find_root_package_dir(file_desc.filename),) + dirs dotted_filename = qualified_name if suffix: dotted_filename += suffix if not include: names = qualified_name.split('.') package_names = tuple(names[:-1]) module_name = names[-1] module_filename = module_name + suffix package_filename = "__init__" + suffix for dir in dirs: path = os.path.join(dir, dotted_filename) if path_exists(path): return path if not include: package_dir = check_package_dir(dir, package_names) if package_dir is not None: path = os.path.join(package_dir, module_filename) if path_exists(path): return path path = os.path.join(dir, package_dir, module_name, package_filename) if path_exists(path): return path return None @cached_function def find_root_package_dir(file_path): dir = os.path.dirname(file_path) if file_path == dir: return dir elif is_package_dir(dir): return find_root_package_dir(dir) else: return dir @cached_function def check_package_dir(dir, package_names): for dirname in package_names: dir = os.path.join(dir, dirname) if not is_package_dir(dir): return None return dir @cached_function def is_package_dir(dir_path): for filename in ("__init__.py", "__init__.pyc", "__init__.pyx", "__init__.pxd"): path = os.path.join(dir_path, filename) if path_exists(path): return 1 @cached_function def path_exists(path): # try on the filesystem first if os.path.exists(path): return True # figure out if a PEP 302 loader is around try: loader = __loader__ # XXX the code below assumes a 'zipimport.zipimporter' instance # XXX should be easy to generalize, but too lazy right now to write it archive_path = getattr(loader, 'archive', None) if archive_path: normpath = os.path.normpath(path) if normpath.startswith(archive_path): arcname = normpath[len(archive_path)+1:] try: loader.get_data(arcname) return True except IOError: return False except NameError: pass return False # file name encodings def decode_filename(filename): if isinstance(filename, bytes): try: filename_encoding = sys.getfilesystemencoding() if filename_encoding is None: filename_encoding = sys.getdefaultencoding() filename = filename.decode(filename_encoding) except UnicodeDecodeError: pass return filename # support for source file encoding detection _match_file_encoding = re.compile(u"coding[:=]\s*([-\w.]+)").search def detect_file_encoding(source_filename): f = open_source_file(source_filename, encoding="UTF-8", error_handling='ignore') try: return detect_opened_file_encoding(f) finally: f.close() def 
detect_opened_file_encoding(f): # PEPs 263 and 3120 # Most of the time the first two lines fall in the first 250 chars, # and this bulk read/split is much faster. lines = f.read(250).split(u"\n") if len(lines) > 1: m = _match_file_encoding(lines[0]) if m: return m.group(1) elif len(lines) > 2: m = _match_file_encoding(lines[1]) if m: return m.group(1) else: return "UTF-8" # Fallback to one-char-at-a-time detection. f.seek(0) chars = [] for i in range(2): c = f.read(1) while c and c != u'\n': chars.append(c) c = f.read(1) encoding = _match_file_encoding(u''.join(chars)) if encoding: return encoding.group(1) return "UTF-8" def skip_bom(f): """ Read past a BOM at the beginning of a source file. This could be added to the scanner, but it's *substantially* easier to keep it at this level. """ if f.read(1) != u'\uFEFF': f.seek(0) def open_source_file(source_filename, mode="r", encoding=None, error_handling=None): if encoding is None: # Most of the time the coding is unspecified, so be optimistic that # it's UTF-8. f = open_source_file(source_filename, encoding="UTF-8", mode=mode, error_handling='ignore') encoding = detect_opened_file_encoding(f) if encoding == "UTF-8" and error_handling == 'ignore': f.seek(0) skip_bom(f) return f else: f.close() if not os.path.exists(source_filename): try: loader = __loader__ if source_filename.startswith(loader.archive): return open_source_from_loader( loader, source_filename, encoding, error_handling) except (NameError, AttributeError): pass stream = io.open(source_filename, mode=mode, encoding=encoding, errors=error_handling) skip_bom(stream) return stream def open_source_from_loader(loader, source_filename, encoding=None, error_handling=None): nrmpath = os.path.normpath(source_filename) arcname = nrmpath[len(loader.archive)+1:] data = loader.get_data(arcname) return io.TextIOWrapper(io.BytesIO(data), encoding=encoding, errors=error_handling) def str_to_number(value): # note: this expects a string as input that was accepted by the # parser already, with an optional "-" sign in front is_neg = False if value[:1] == '-': is_neg = True value = value[1:] if len(value) < 2: value = int(value, 0) elif value[0] == '0': literal_type = value[1] # 0'o' - 0'b' - 0'x' if literal_type in 'xX': # hex notation ('0x1AF') value = int(value[2:], 16) elif literal_type in 'oO': # Py3 octal notation ('0o136') value = int(value[2:], 8) elif literal_type in 'bB': # Py3 binary notation ('0b101') value = int(value[2:], 2) else: # Py2 octal notation ('0136') value = int(value, 8) else: value = int(value, 0) return -value if is_neg else value def long_literal(value): if isinstance(value, basestring): value = str_to_number(value) return not -2**31 <= value < 2**31 @cached_function def get_cython_cache_dir(): """get the cython cache dir Priority: 1. CYTHON_CACHE_DIR 2. (OS X): ~/Library/Caches/Cython (posix not OS X): XDG_CACHE_HOME/cython if XDG_CACHE_HOME defined 3. 
~/.cython """ if 'CYTHON_CACHE_DIR' in os.environ: return os.environ['CYTHON_CACHE_DIR'] parent = None if os.name == 'posix': if sys.platform == 'darwin': parent = os.path.expanduser('~/Library/Caches') else: # this could fallback on ~/.cache parent = os.environ.get('XDG_CACHE_HOME') if parent and os.path.isdir(parent): return os.path.join(parent, 'cython') # last fallback: ~/.cython return os.path.expanduser(os.path.join('~', '.cython')) @contextmanager def captured_fd(stream=2, encoding=None): pipe_in = t = None orig_stream = os.dup(stream) # keep copy of original stream try: pipe_in, pipe_out = os.pipe() os.dup2(pipe_out, stream) # replace stream by copy of pipe try: os.close(pipe_out) # close original pipe-out stream data = [] def copy(): try: while True: d = os.read(pipe_in, 1000) if d: data.append(d) else: break finally: os.close(pipe_in) def get_output(): output = b''.join(data) if encoding: output = output.decode(encoding) return output from threading import Thread t = Thread(target=copy) t.daemon = True # just in case t.start() yield get_output finally: os.dup2(orig_stream, stream) # restore original stream if t is not None: t.join() finally: os.close(orig_stream) def print_bytes(s, end=b'\n', file=sys.stdout, flush=True): file.flush() try: out = file.buffer # Py3 except AttributeError: out = file # Py2 out.write(s) if end: out.write(end) if flush: out.flush() class LazyStr: def __init__(self, callback): self.callback = callback def __str__(self): return self.callback() def __repr__(self): return self.callback() def __add__(self, right): return self.callback() + right def __radd__(self, left): return left + self.callback() class OrderedSet(object): def __init__(self, elements=()): self._list = [] self._set = set() self.update(elements) def __iter__(self): return iter(self._list) def update(self, elements): for e in elements: self.add(e) def add(self, e): if e not in self._set: self._list.append(e) self._set.add(e) # Class decorator that adds a metaclass and recreates the class with it. # Copied from 'six'. def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper Cython-0.26.1/Cython/TestUtils.py0000664000175000017500000001745313023021033017403 0ustar stefanstefan00000000000000from __future__ import absolute_import import os import unittest import tempfile from .Compiler import Errors from .CodeWriter import CodeWriter from .Compiler.TreeFragment import TreeFragment, strip_common_indent from .Compiler.Visitor import TreeVisitor, VisitorTransform from .Compiler import TreePath class NodeTypeWriter(TreeVisitor): def __init__(self): super(NodeTypeWriter, self).__init__() self._indents = 0 self.result = [] def visit_Node(self, node): if not self.access_path: name = u"(root)" else: tip = self.access_path[-1] if tip[2] is not None: name = u"%s[%d]" % tip[1:3] else: name = tip[1] self.result.append(u" " * self._indents + u"%s: %s" % (name, node.__class__.__name__)) self._indents += 1 self.visitchildren(node) self._indents -= 1 def treetypes(root): """Returns a string representing the tree by class names. 
There's a leading and trailing whitespace so that it can be compared by simple string comparison while still making test cases look ok.""" w = NodeTypeWriter() w.visit(root) return u"\n".join([u""] + w.result + [u""]) class CythonTest(unittest.TestCase): def setUp(self): self.listing_file = Errors.listing_file self.echo_file = Errors.echo_file Errors.listing_file = Errors.echo_file = None def tearDown(self): Errors.listing_file = self.listing_file Errors.echo_file = self.echo_file def assertLines(self, expected, result): "Checks that the given strings or lists of strings are equal line by line" if not isinstance(expected, list): expected = expected.split(u"\n") if not isinstance(result, list): result = result.split(u"\n") for idx, (expected_line, result_line) in enumerate(zip(expected, result)): self.assertEqual(expected_line, result_line, "Line %d:\nExp: %s\nGot: %s" % (idx, expected_line, result_line)) self.assertEqual(len(expected), len(result), "Unmatched lines. Got:\n%s\nExpected:\n%s" % ("\n".join(expected), u"\n".join(result))) def codeToLines(self, tree): writer = CodeWriter() writer.write(tree) return writer.result.lines def codeToString(self, tree): return "\n".join(self.codeToLines(tree)) def assertCode(self, expected, result_tree): result_lines = self.codeToLines(result_tree) expected_lines = strip_common_indent(expected.split("\n")) for idx, (line, expected_line) in enumerate(zip(result_lines, expected_lines)): self.assertEqual(expected_line, line, "Line %d:\nGot: %s\nExp: %s" % (idx, line, expected_line)) self.assertEqual(len(result_lines), len(expected_lines), "Unmatched lines. Got:\n%s\nExpected:\n%s" % ("\n".join(result_lines), expected)) def assertNodeExists(self, path, result_tree): self.assertNotEqual(TreePath.find_first(result_tree, path), None, "Path '%s' not found in result tree" % path) def fragment(self, code, pxds=None, pipeline=None): "Simply create a tree fragment using the name of the test-case in parse errors." if pxds is None: pxds = {} if pipeline is None: pipeline = [] name = self.id() if name.startswith("__main__."): name = name[len("__main__."):] name = name.replace(".", "_") return TreeFragment(code, name, pxds, pipeline=pipeline) def treetypes(self, root): return treetypes(root) def should_fail(self, func, exc_type=Exception): """Calls "func" and fails if it doesn't raise the right exception (any exception by default). Also returns the exception in question. """ try: func() self.fail("Expected an exception of type %r" % exc_type) except exc_type as e: self.assertTrue(isinstance(e, exc_type)) return e def should_not_fail(self, func): """Calls func and succeeds if and only if no exception is raised (i.e. converts exception raising into a failed testcase). Returns the return value of func.""" try: return func() except Exception as exc: self.fail(str(exc)) class TransformTest(CythonTest): """ Utility base class for transform unit tests. It is based around constructing test trees (either explicitly or by parsing a Cython code string); running the transform, serialize it using a customized Cython serializer (with special markup for nodes that cannot be represented in Cython), and do a string-comparison line-by-line of the result. To create a test case: - Call run_pipeline. The pipeline should at least contain the transform you are testing; pyx should be either a string (passed to the parser to create a post-parse tree) or a node representing input to pipeline. The result will be a transformed result. - Check that the tree is correct. 
If wanted, assertCode can be used, which takes a code string as expected, and a ModuleNode in result_tree (it serializes the ModuleNode to a string and compares line-by-line). All code strings are first stripped for whitespace lines and then common indentation. Plans: One could have a pxd dictionary parameter to run_pipeline. """ def run_pipeline(self, pipeline, pyx, pxds=None): if pxds is None: pxds = {} tree = self.fragment(pyx, pxds).root # Run pipeline for T in pipeline: tree = T(tree) return tree class TreeAssertVisitor(VisitorTransform): # actually, a TreeVisitor would be enough, but this needs to run # as part of the compiler pipeline def visit_CompilerDirectivesNode(self, node): directives = node.directives if 'test_assert_path_exists' in directives: for path in directives['test_assert_path_exists']: if TreePath.find_first(node, path) is None: Errors.error( node.pos, "Expected path '%s' not found in result tree" % path) if 'test_fail_if_path_exists' in directives: for path in directives['test_fail_if_path_exists']: if TreePath.find_first(node, path) is not None: Errors.error( node.pos, "Unexpected path '%s' found in result tree" % path) self.visitchildren(node) return node visit_Node = VisitorTransform.recurse_to_children def unpack_source_tree(tree_file, dir=None): if dir is None: dir = tempfile.mkdtemp() header = [] cur_file = None f = open(tree_file) try: lines = f.readlines() finally: f.close() del f try: for line in lines: if line[:5] == '#####': filename = line.strip().strip('#').strip().replace('/', os.path.sep) path = os.path.join(dir, filename) if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) if cur_file is not None: f, cur_file = cur_file, None f.close() cur_file = open(path, 'w') elif cur_file is not None: cur_file.write(line) elif line.strip() and not line.lstrip().startswith('#'): if line.strip() not in ('"""', "'''"): header.append(line) finally: if cur_file is not None: cur_file.close() return dir, ''.join(header) Cython-0.26.1/Cython/Distutils/0000775000175000017500000000000013151203436017056 5ustar stefanstefan00000000000000Cython-0.26.1/Cython/Distutils/extension.py0000664000175000017500000001114212574327400021451 0ustar stefanstefan00000000000000"""Pyrex.Distutils.extension Provides a modified Extension class, that understands how to describe Pyrex extension modules in setup scripts.""" __revision__ = "$Id:$" import sys import distutils.extension as _Extension try: import warnings except ImportError: warnings = None class Extension(_Extension.Extension): # When adding arguments to this constructor, be sure to update # user_options.extend in build_ext.py. def __init__(self, name, sources, include_dirs=None, define_macros=None, undef_macros=None, library_dirs=None, libraries=None, runtime_library_dirs=None, extra_objects=None, extra_compile_args=None, extra_link_args=None, export_symbols=None, #swig_opts=None, depends=None, language=None, cython_include_dirs=None, cython_directives=None, cython_create_listing=False, cython_line_directives=False, cython_cplus=False, cython_c_in_temp=False, cython_gen_pxi=False, cython_gdb=False, no_c_in_traceback=False, cython_compile_time_env=None, **kw): # Translate pyrex_X to cython_X for backwards compatibility. 
had_pyrex_options = False for key in list(kw): if key.startswith('pyrex_'): had_pyrex_options = True kw['cython' + key[5:]] = kw.pop(key) if had_pyrex_options: Extension.__init__( self, name, sources, include_dirs=include_dirs, define_macros=define_macros, undef_macros=undef_macros, library_dirs=library_dirs, libraries=libraries, runtime_library_dirs=runtime_library_dirs, extra_objects=extra_objects, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, export_symbols=export_symbols, #swig_opts=swig_opts, depends=depends, language=language, no_c_in_traceback=no_c_in_traceback, **kw) return _Extension.Extension.__init__( self, name, sources, include_dirs=include_dirs, define_macros=define_macros, undef_macros=undef_macros, library_dirs=library_dirs, libraries=libraries, runtime_library_dirs=runtime_library_dirs, extra_objects=extra_objects, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, export_symbols=export_symbols, #swig_opts=swig_opts, depends=depends, language=language, **kw) self.cython_include_dirs = cython_include_dirs or [] self.cython_directives = cython_directives or {} self.cython_create_listing = cython_create_listing self.cython_line_directives = cython_line_directives self.cython_cplus = cython_cplus self.cython_c_in_temp = cython_c_in_temp self.cython_gen_pxi = cython_gen_pxi self.cython_gdb = cython_gdb self.no_c_in_traceback = no_c_in_traceback self.cython_compile_time_env = cython_compile_time_env # class Extension read_setup_file = _Extension.read_setup_file # reuse and extend original docstring from base class (if we can) if sys.version_info[0] < 3 and _Extension.Extension.__doc__: # -OO discards docstrings Extension.__doc__ = _Extension.Extension.__doc__ + """\ cython_include_dirs : [string] list of directories to search for Pyrex header files (.pxd) (in Unix form for portability) cython_directives : {string:value} dict of compiler directives cython_create_listing_file : boolean write pyrex error messages to a listing (.lis) file. cython_line_directives : boolean emit pyx line numbers for debugging/profiling cython_cplus : boolean use the C++ compiler for compiling and linking. cython_c_in_temp : boolean put generated C files in temp directory. cython_gen_pxi : boolean generate .pxi file for public declarations cython_gdb : boolean generate Cython debug information for this extension for cygdb no_c_in_traceback : boolean emit the c file and line number from the traceback for exceptions """ Cython-0.26.1/Cython/Distutils/build_ext.py0000664000175000017500000000150113143605603021407 0ustar stefanstefan00000000000000import sys if 'setuptools' in sys.modules: try: from setuptools.command.build_ext import build_ext as _build_ext except ImportError: # We may be in the process of importing setuptools, which tries # to import this. from distutils.command.build_ext import build_ext as _build_ext else: from distutils.command.build_ext import build_ext as _build_ext class new_build_ext(_build_ext, object): def finalize_options(self): if self.distribution.ext_modules: from Cython.Build.Dependencies import cythonize self.distribution.ext_modules[:] = cythonize( self.distribution.ext_modules) super(new_build_ext, self).finalize_options() # This will become new_build_ext in the future. 
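# Illustrative sketch (added for exposition, not part of this module): a
# minimal setup.py wiring the command and Extension class exported by
# Cython.Distutils; the project and file names below are assumptions.
#
#     from distutils.core import setup
#     from Cython.Distutils import build_ext, Extension
#
#     setup(
#         name="demo",
#         cmdclass={"build_ext": build_ext},
#         ext_modules=[Extension("demo.fast", ["demo/fast.pyx"],
#                                cython_directives={"boundscheck": False})],
#     )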
from .old_build_ext import old_build_ext as build_ext Cython-0.26.1/Cython/Distutils/__init__.py0000664000175000017500000000014212542002467021170 0ustar stefanstefan00000000000000from Cython.Distutils.build_ext import build_ext from Cython.Distutils.extension import Extension Cython-0.26.1/Cython/Distutils/old_build_ext.py0000664000175000017500000003237413023021033022243 0ustar stefanstefan00000000000000"""Cython.Distutils.old_build_ext Implements a version of the Distutils 'build_ext' command, for building Cython extension modules.""" # This module should be kept compatible with Python 2.3. __revision__ = "$Id:$" import inspect import sys import os from distutils.errors import DistutilsPlatformError from distutils.dep_util import newer, newer_group from distutils import log from distutils.command import build_ext as _build_ext from distutils import sysconfig import warnings try: from __builtin__ import basestring except ImportError: basestring = str def _check_stack(path): try: for frame in inspect.getouterframes(inspect.currentframe(), 0): if path in frame[1].replace(os.sep, '/'): return True except Exception: pass return False if (not _check_stack('setuptools/extensions.py') and not _check_stack('pyximport/pyxbuild.py') and not _check_stack('Cython/Distutils/build_ext.py')): warnings.warn( "Cython.Distutils.old_build_ext does not properly handle dependencies " "and is deprecated.") extension_name_re = _build_ext.extension_name_re show_compilers = _build_ext.show_compilers class Optimization(object): def __init__(self): self.flags = ( 'OPT', 'CFLAGS', 'CPPFLAGS', 'EXTRA_CFLAGS', 'BASECFLAGS', 'PY_CFLAGS', ) self.state = sysconfig.get_config_vars(*self.flags) self.config_vars = sysconfig.get_config_vars() def disable_optimization(self): "disable optimization for the C or C++ compiler" badoptions = ('-O1', '-O2', '-O3') for flag, option in zip(self.flags, self.state): if option is not None: L = [opt for opt in option.split() if opt not in badoptions] self.config_vars[flag] = ' '.join(L) def restore_state(self): "restore the original state" for flag, option in zip(self.flags, self.state): if option is not None: self.config_vars[flag] = option optimization = Optimization() class old_build_ext(_build_ext.build_ext): description = "build C/C++ and Cython extensions (compile/link to build directory)" sep_by = _build_ext.build_ext.sep_by user_options = _build_ext.build_ext.user_options boolean_options = _build_ext.build_ext.boolean_options help_options = _build_ext.build_ext.help_options # Add the pyrex specific data. user_options.extend([ ('cython-cplus', None, "generate C++ source files"), ('cython-create-listing', None, "write errors to a listing file"), ('cython-line-directives', None, "emit source line directives"), ('cython-include-dirs=', None, "path to the Cython include files" + sep_by), ('cython-c-in-temp', None, "put generated C files in temp directory"), ('cython-gen-pxi', None, "generate .pxi file for public declarations"), ('cython-directives=', None, "compiler directive overrides"), ('cython-gdb', None, "generate debug information for cygdb"), ('cython-compile-time-env', None, "cython compile time environment"), # For backwards compatibility. 
('pyrex-cplus', None, "generate C++ source files"), ('pyrex-create-listing', None, "write errors to a listing file"), ('pyrex-line-directives', None, "emit source line directives"), ('pyrex-include-dirs=', None, "path to the Cython include files" + sep_by), ('pyrex-c-in-temp', None, "put generated C files in temp directory"), ('pyrex-gen-pxi', None, "generate .pxi file for public declarations"), ('pyrex-directives=', None, "compiler directive overrides"), ('pyrex-gdb', None, "generate debug information for cygdb"), ]) boolean_options.extend([ 'cython-cplus', 'cython-create-listing', 'cython-line-directives', 'cython-c-in-temp', 'cython-gdb', # For backwards compatibility. 'pyrex-cplus', 'pyrex-create-listing', 'pyrex-line-directives', 'pyrex-c-in-temp', 'pyrex-gdb', ]) def initialize_options(self): _build_ext.build_ext.initialize_options(self) self.cython_cplus = 0 self.cython_create_listing = 0 self.cython_line_directives = 0 self.cython_include_dirs = None self.cython_directives = None self.cython_c_in_temp = 0 self.cython_gen_pxi = 0 self.cython_gdb = False self.no_c_in_traceback = 0 self.cython_compile_time_env = None def __getattr__(self, name): if name[:6] == 'pyrex_': return getattr(self, 'cython_' + name[6:]) else: return _build_ext.build_ext.__getattr__(self, name) def __setattr__(self, name, value): if name[:6] == 'pyrex_': return setattr(self, 'cython_' + name[6:], value) else: # _build_ext.build_ext.__setattr__(self, name, value) self.__dict__[name] = value def finalize_options (self): _build_ext.build_ext.finalize_options(self) if self.cython_include_dirs is None: self.cython_include_dirs = [] elif isinstance(self.cython_include_dirs, basestring): self.cython_include_dirs = \ self.cython_include_dirs.split(os.pathsep) if self.cython_directives is None: self.cython_directives = {} # finalize_options () def run(self): # We have one shot at this before build_ext initializes the compiler. # If --pyrex-gdb is in effect as a command line option or as option # of any Extension module, disable optimization for the C or C++ # compiler. if self.cython_gdb or [1 for ext in self.extensions if getattr(ext, 'cython_gdb', False)]: optimization.disable_optimization() _build_ext.build_ext.run(self) def build_extensions(self): # First, sanity-check the 'extensions' list self.check_extensions_list(self.extensions) for ext in self.extensions: ext.sources = self.cython_sources(ext.sources, ext) self.build_extension(ext) def cython_sources(self, sources, extension): """ Walk the list of source files in 'sources', looking for Cython source files (.pyx and .py). Run Cython on all that are found, and return a modified 'sources' list with Cython source files replaced by the generated C (or C++) files. """ try: from Cython.Compiler.Main \ import CompilationOptions, \ default_options as cython_default_options, \ compile as cython_compile from Cython.Compiler.Errors import PyrexError except ImportError: e = sys.exc_info()[1] print("failed to import Cython: %s" % e) raise DistutilsPlatformError("Cython does not appear to be installed") new_sources = [] cython_sources = [] cython_targets = {} # Setup create_list and cplus from the extension options if # Cython.Distutils.extension.Extension is used, otherwise just # use what was parsed from the command-line or the configuration file. # cplus will also be set to true is extension.language is equal to # 'C++' or 'c++'. 
#try: # create_listing = self.cython_create_listing or \ # extension.cython_create_listing # cplus = self.cython_cplus or \ # extension.cython_cplus or \ # (extension.language != None and \ # extension.language.lower() == 'c++') #except AttributeError: # create_listing = self.cython_create_listing # cplus = self.cython_cplus or \ # (extension.language != None and \ # extension.language.lower() == 'c++') create_listing = self.cython_create_listing or \ getattr(extension, 'cython_create_listing', 0) line_directives = self.cython_line_directives or \ getattr(extension, 'cython_line_directives', 0) no_c_in_traceback = self.no_c_in_traceback or \ getattr(extension, 'no_c_in_traceback', 0) cplus = self.cython_cplus or getattr(extension, 'cython_cplus', 0) or \ (extension.language and extension.language.lower() == 'c++') cython_gen_pxi = self.cython_gen_pxi or getattr(extension, 'cython_gen_pxi', 0) cython_gdb = self.cython_gdb or getattr(extension, 'cython_gdb', False) cython_compile_time_env = self.cython_compile_time_env or \ getattr(extension, 'cython_compile_time_env', None) # Set up the include_path for the Cython compiler: # 1. Start with the command line option. # 2. Add in any (unique) paths from the extension # cython_include_dirs (if Cython.Distutils.extension is used). # 3. Add in any (unique) paths from the extension include_dirs includes = self.cython_include_dirs try: for i in extension.cython_include_dirs: if not i in includes: includes.append(i) except AttributeError: pass # In case extension.include_dirs is a generator, evaluate it and keep # result extension.include_dirs = list(extension.include_dirs) for i in extension.include_dirs: if not i in includes: includes.append(i) # Set up Cython compiler directives: # 1. Start with the command line option. # 2. Add in any (unique) entries from the extension # cython_directives (if Cython.Distutils.extension is used). directives = self.cython_directives if hasattr(extension, "cython_directives"): directives.update(extension.cython_directives) # Set the target_ext to '.c'. Cython will change this to '.cpp' if # needed. if cplus: target_ext = '.cpp' else: target_ext = '.c' # Decide whether to drop the generated C files into the temp dir # or the source tree. 
if not self.inplace and (self.cython_c_in_temp or getattr(extension, 'cython_c_in_temp', 0)): target_dir = os.path.join(self.build_temp, "pyrex") for package_name in extension.name.split('.')[:-1]: target_dir = os.path.join(target_dir, package_name) else: target_dir = None newest_dependency = None for source in sources: (base, ext) = os.path.splitext(os.path.basename(source)) if ext == ".py": # FIXME: we might want to special case this some more ext = '.pyx' if ext == ".pyx": # Cython source file output_dir = target_dir or os.path.dirname(source) new_sources.append(os.path.join(output_dir, base + target_ext)) cython_sources.append(source) cython_targets[source] = new_sources[-1] elif ext == '.pxi' or ext == '.pxd': if newest_dependency is None \ or newer(source, newest_dependency): newest_dependency = source else: new_sources.append(source) if not cython_sources: return new_sources module_name = extension.name for source in cython_sources: target = cython_targets[source] depends = [source] + list(extension.depends or ()) if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")): depends += [source[:-3]+"pxd"] rebuild = self.force or newer_group(depends, target, 'newer') if not rebuild and newest_dependency is not None: rebuild = newer(newest_dependency, target) if rebuild: log.info("cythoning %s to %s", source, target) self.mkpath(os.path.dirname(target)) if self.inplace: output_dir = os.curdir else: output_dir = self.build_lib options = CompilationOptions(cython_default_options, use_listing_file = create_listing, include_path = includes, compiler_directives = directives, output_file = target, cplus = cplus, emit_linenums = line_directives, c_line_in_traceback = not no_c_in_traceback, generate_pxi = cython_gen_pxi, output_dir = output_dir, gdb_debug = cython_gdb, compile_time_env = cython_compile_time_env) result = cython_compile(source, options=options, full_module_name=module_name) else: log.info("skipping '%s' Cython extension (up-to-date)", target) return new_sources # cython_sources () # class build_ext Cython-0.26.1/CHANGES.rst0000664000175000017500000020527713151203171015441 0ustar stefanstefan00000000000000================ Cython Changelog ================ 0.26.1 (2017-08-29) =================== Features added -------------- Bugs fixed ---------- * ``cython.view.array`` was missing ``.__len__()``. * Extension types with a ``.pxd`` override for their ``__releasebuffer__`` slot (e.g. as provided by Cython for the Python ``array.array`` type) could leak a reference to the buffer owner on release, thus not freeing the memory. (Github issue #1638) * Auto-decoding failed in 0.26 for strings inside of C++ containers. (Github issue #1790) * Compile error when inheriting from C++ container types. (Github issue #1788) * Invalid C code in generators (declaration after code). (Github issue #1801) * Arithmetic operations on ``const`` integer variables could generate invalid code. (Github issue #1798) * Local variables with names of special Python methods failed to compile inside of closures. (Github issue #1797) * Problem with indirect Emacs buffers in cython-mode. Patch by Martin Albrecht (Github issue #1743). * Extension types named ``result`` or ``PickleError`` generated invalid unpickling code. Patch by Jason Madden (Github issue #1786). * Bazel integration failed to compile ``.py`` files. Patch by Guro Bokum (Github issue #1784). 
* Some include directories and dependencies were referenced with their absolute paths in the generated files despite lying within the project directory. 0.26 (2017-07-19) ================= Features added -------------- * Pythran can be used as a backend for evaluating NumPy array expressions. Patch by Adrien Guinet (Github issue #1607). * cdef classes now support pickling by default when possible. This can be disabled with the ``auto_pickle`` directive. * Speed up comparisons of strings if their hash value is available. Patch by Claudio Freire (Github issue #1571). * Support pyximport from zip files. Patch by Sergei Lebedev (Github issue #1485). * IPython magic now respects the ``__all__`` variable and ignores names with leading-underscore (like ``import *`` does). Patch by Syrtis Major (Github issue #1625). * ``abs()`` is optimised for C complex numbers. Patch by da-woods (Github issue #1648). * The display of C lines in Cython tracebacks can now be enabled at runtime via ``import cython_runtime; cython_runtime.cline_in_traceback=True``. The default has been changed to False. * The overhead of calling fused types generic functions was reduced. * "cdef extern" include files are now also searched relative to the current file. Patch by Jeroen Demeyer (Github issue #1654). * Optional optimization for re-aquiring the GIL, controlled by the `fast_gil` directive. Bugs fixed ---------- * Item lookup/assignment with a unicode character as index that is typed (explicitly or implicitly) as ``Py_UCS4`` or ``Py_UNICODE`` used the integer value instead of the Unicode string value. Code that relied on the previous behaviour now triggers a warning that can be disabled by applying an explicit cast. (Github issue #1602) * f-string processing was adapted to changes in PEP 498 and CPython 3.6. * Invalid C code when decoding from UTF-16(LE/BE) byte strings. (Github issue #1696) * Unicode escapes in 'ur' raw-unicode strings were not resolved in Py2 code. Original patch by Aaron Gallagher (Github issue #1594). * File paths of code objects are now relative. Original patch by Jelmer Vernooij (Github issue #1565). * Decorators of cdef class methods could be executed twice. Patch by Jeroen Demeyer (Github issue #1724). * Dict iteration using the Py2 ``iter*`` methods failed in PyPy3. Patch by Armin Rigo (Github issue #1631). * Several warnings in the generated code are now suppressed. Other changes ------------- * The ``unraisable_tracebacks`` option now defaults to ``True``. * Coercion of C++ containers to Python is no longer automatic on attribute access (Github issue #1521). * Access to Python attributes of cimported modules without the corresponding import is now a compile-time (rather than runtime) error. * Do not use special dll linkage for "cdef public" functions. Patch by Jeroen Demeyer (Github issue #1687). * cdef/cpdef methods must match their declarations. See Github Issue #1732. This is now a warning and will be an error in future releases. 0.25.2 (2016-12-08) =================== Bugs fixed ---------- * Fixes several issues with C++ template deduction. * Fixes a issue with bound method type inference (Github issue #551). * Fixes a bug with cascaded tuple assignment (Github issue #1523). * Fixed or silenced many Clang warnings. * Fixes bug with powers of pure real complex numbers (Github issue #1538). 0.25.1 (2016-10-26) =================== Bugs fixed ---------- * Fixes a bug with ``isinstance(o, Exception)`` (Github issue #1496). 
* Fixes bug with ``cython.view.array`` missing utility code in some cases (Github issue #1502). Other changes ------------- * The distutils extension ``Cython.Distutils.build_ext`` has been reverted, temporarily, to be ``old_build_ext`` to give projects time to migrate. The new build_ext is available as ``new_build_ext``. 0.25 (2016-10-25) ================= Features added -------------- * def/cpdef methods of cdef classes benefit from Cython's internal function implementation, which enables introspection and line profiling for them. Implementation sponsored by Turbostream (www.turbostream-cfd.com). * Calls to Python functions are faster, following the recent "FastCall" optimisations that Victor Stinner implemented for CPython 3.6. See https://bugs.python.org/issue27128 and related issues. * The new METH_FASTCALL calling convention for PyCFunctions is supported in CPython 3.6. See https://bugs.python.org/issue27810 * Initial support for using Cython modules in Pyston. Patch by Daetalus. * Dynamic Python attributes are allowed on cdef classes if an attribute ``cdef dict __dict__`` is declared in the class. Patch by empyrical. * Cython implemented C++ classes can make direct calls to base class methods. Patch by empyrical. * C++ classes can now have typedef members. STL containers updated with value_type. * New directive ``cython.no_gc`` to fully disable GC for a cdef class. Patch by Claudio Freire. * Buffer variables are no longer excluded from ``locals()``. Patch by da-woods. * Building f-strings is faster, especially when formatting C integers. * for-loop iteration over "std::string". * ``libc/math.pxd`` provides ``e`` and ``pi`` as alias constants to simplify usage as a drop-in replacement for Python's math module. * Speed up cython.inline(). * Binary lshift operations with small constant Python integers are faster. * Some integer operations on Python long objects are faster in Python 2.7. * Support for the C++ ``typeid`` operator. * Support for bazel using a the pyx_library rule in //Tools:rules.bzl. Significant Bugs fixed ---------------------- * Division of complex numbers avoids overflow by using Smith's method. * Some function signatures in ``libc.math`` and ``numpy.pxd`` were incorrect. Patch by Michael Seifert. Other changes ------------- * The "%%cython" IPython/jupyter magic now defaults to the language level of the current jupyter kernel. The language level can be set explicitly with "%%cython -2" or "%%cython -3". * The distutils extension ``Cython.Distutils.build_ext`` has now been updated to use cythonize which properly handles dependencies. The old extension can still be found in ``Cython.Distutils.old_build_ext`` and is now deprecated. * ``directive_defaults`` is no longer available in ``Cython.Compiler.Options``, use ``get_directive_defaults()`` instead. 0.24.1 (2016-07-15) =================== Bugs fixed ---------- * IPython cell magic was lacking a good way to enable Python 3 code semantics. It can now be used as "%%cython -3". * Follow a recent change in `PEP 492 `_ and CPython 3.5.2 that now requires the ``__aiter__()`` method of asynchronous iterators to be a simple ``def`` method instead of an ``async def`` method. * Coroutines and generators were lacking the ``__module__`` special attribute. * C++ ``std::complex`` values failed to auto-convert from and to Python complex objects. * Namespaced C++ types could not be used as memory view types due to lack of name mangling. Patch by Ivan Smirnov. 
* Assignments between identical C++ types that were declared with differently typedefed template types could fail. * Rebuilds could fail to evaluate dependency timestamps in C++ mode. Patch by Ian Henriksen. * Macros defined in the ``distutils`` compiler option do not require values anymore. Patch by Ian Henriksen. * Minor fixes for MSVC, Cygwin and PyPy. 0.24 (2016-04-04) ================= Features added -------------- * PEP 498: Literal String Formatting (f-strings). Original patch by Jelle Zijlstra. https://www.python.org/dev/peps/pep-0498/ * PEP 515: Underscores as visual separators in number literals. https://www.python.org/dev/peps/pep-0515/ * Parser was adapted to some minor syntax changes in Py3.6, e.g. https://bugs.python.org/issue9232 * The embedded C code comments that show the original source code can be discarded with the new directive ``emit_code_comments=False``. * Cpdef enums are now first-class iterable, callable types in Python. * Ctuples can now be declared in pure Python code. * Posix declarations for DLL loading and stdio extensions were added. Patch by Lars Buitinck. * The Py2-only builtins ``unicode()``, ``xrange()``, ``reduce()`` and ``long`` are now also available in compile time ``DEF`` expressions when compiling with Py3. * Exception type tests have slightly lower overhead. This fixes ticket 868. * @property syntax fully supported in cdef classes, old syntax deprecated. * C++ classes can now be declared with default template parameters. Bugs fixed ---------- * C++ exceptions raised by overloaded C++ operators were not always handled. Patch by Ian Henriksen. * C string literals were previously always stored as non-const global variables in the module. They are now stored as global constants when possible, and otherwise as non-const C string literals in the generated code that uses them. This improves compatibility with strict C compiler options and prevents non-const strings literals with the same content from being incorrectly merged. * Compile time evaluated ``str`` expressions (``DEF``) now behave in a more useful way by turning into Unicode strings when compiling under Python 3. This allows using them as intermediate values in expressions. Previously, they always evaluated to bytes objects. * ``isinf()`` declarations in ``libc/math.pxd`` and ``numpy/math.pxd`` now reflect the actual tristate ``int`` return value instead of using ``bint``. * Literal assignments to ctuples avoid Python tuple round-trips in some more corner cases. * Iteration over ``dict(...).items()`` failed to get optimised when dict arguments included keyword arguments. * cProfile now correctly profiles cpdef functions and methods. 0.23.5 (2016-03-26) =================== * Compile errors and warnings in integer type conversion code. This fixes ticket 877. Patches by Christian Neukirchen, Nikolaus Rath, Ian Henriksen. * Reference leak when "*args" argument was reassigned in closures. * Truth-testing Unicode strings could waste time and memory in Py3.3+. * Return values of async functions could be ignored and replaced by ``None``. * Compiler crash in CPython 3.6. * Fix prange() to behave identically to range(). The end condition was miscalculated when the range was not exactly divisible by the step. * Optimised ``all(genexpr)``/``any(genexpr)`` calls could warn about unused code. This fixes ticket 876. 0.23.4 (2015-10-10) =================== Bugs fixed ---------- * Memory leak when calling Python functions in PyPy. * Compilation problem with MSVC in C99-ish mode. 
* Warning about unused values in a helper macro. 0.23.3 (2015-09-29) =================== Bugs fixed ---------- * Invalid C code for some builtin methods. This fixes ticket 856 again. * Incorrect C code in helper functions for PyLong conversion and string decoding. This fixes ticket 863, ticket 864 and ticket 865. Original patch by Nikolaus Rath. * Large folded or inserted integer constants could use too small C integer types and thus trigger a value wrap-around. Other changes ------------- * The coroutine and generator types of Cython now also register directly with the ``Coroutine`` and ``Generator`` ABCs in the ``backports_abc`` module if it can be imported. This fixes ticket 870. 0.23.2 (2015-09-11) =================== Bugs fixed ---------- * Compiler crash when analysing some optimised expressions. * Coverage plugin was adapted to coverage.py 4.0 beta 2. * C++ destructor calls could fail when '&' operator is overwritten. * Incorrect C literal generation for large integers in compile-time evaluated DEF expressions and constant folded expressions. * Byte string constants could end up as Unicode strings when originating from compile-time evaluated DEF expressions. * Invalid C code when caching known builtin methods. This fixes ticket 860. * ``ino_t`` in ``posix.types`` was not declared as ``unsigned``. * Declarations in ``libcpp/memory.pxd`` were missing ``operator!()``. Patch by Leo Razoumov. * Static cdef methods can now be declared in .pxd files. 0.23.1 (2015-08-22) =================== Bugs fixed ---------- * Invalid C code for generators. This fixes ticket 858. * Invalid C code for some builtin methods. This fixes ticket 856. * Invalid C code for unused local buffer variables. This fixes ticket 154. * Test failures on 32bit systems. This fixes ticket 857. * Code that uses ``from xyz import *`` and global C struct/union/array variables could fail to compile due to missing helper functions. This fixes ticket 851. * Misnamed PEP 492 coroutine property ``cr_yieldfrom`` renamed to ``cr_await`` to match CPython. * Missing deallocation code for C++ object attributes in certain extension class hierarchies. * Crash when async coroutine was not awaited. * Compiler crash on ``yield`` in signature annotations and default argument values. Both are forbidden now. * Compiler crash on certain constructs in ``finally`` clauses. * Cython failed to build when CPython's pgen is installed. 0.23 (2015-08-08) ================= Features added -------------- * PEP 492 (async/await) was implemented. See https://www.python.org/dev/peps/pep-0492/ * PEP 448 (Additional Unpacking Generalizations) was implemented. See https://www.python.org/dev/peps/pep-0448/ * Support for coverage.py 4.0+ can be enabled by adding the plugin "Cython.Coverage" to the ".coveragerc" config file. * Annotated HTML source pages can integrate (XML) coverage reports. * Tracing is supported in ``nogil`` functions/sections and module init code. * When generators are used in a Cython module and the module imports the modules "inspect" and/or "asyncio", Cython enables interoperability by patching these modules during the import to recognise Cython's internal generator and coroutine types. 
This can be disabled by C compiling the module with "-D CYTHON_PATCH_ASYNCIO=0" or "-D CYTHON_PATCH_INSPECT=0" * When generators or coroutines are used in a Cython module, their types are registered with the ``Generator`` and ``Coroutine`` ABCs in the ``collections`` or ``collections.abc`` stdlib module at import time to enable interoperability with code that needs to detect and process Python generators/coroutines. These ABCs were added in CPython 3.5 and are available for older Python versions through the ``backports_abc`` module on PyPI. See https://bugs.python.org/issue24018 * Adding/subtracting/dividing/modulus and equality comparisons with constant Python floats and small integers are faster. * Binary and/or/xor/rshift operations with small constant Python integers are faster. * When called on generator expressions, the builtins ``all()``, ``any()``, ``dict()``, ``list()``, ``set()``, ``sorted()`` and ``unicode.join()`` avoid the generator iteration overhead by inlining a part of their functionality into the for-loop. * Keyword argument dicts are no longer copied on function entry when they are not being used or only passed through to other function calls (e.g. in wrapper functions). * The ``PyTypeObject`` declaration in ``cpython.object`` was extended. * The builtin ``type`` type is now declared as PyTypeObject in source, allowing for extern functions taking type parameters to have the correct C signatures. Note that this might break code that uses ``type`` just for passing around Python types in typed variables. Removing the type declaration provides a backwards compatible fix. * ``wraparound()`` and ``boundscheck()`` are available as no-ops in pure Python mode. * Const iterators were added to the provided C++ STL declarations. * Smart pointers were added to the provided C++ STL declarations. Patch by Daniel Filonik. * ``NULL`` is allowed as default argument when embedding signatures. This fixes ticket 843. * When compiling with ``--embed``, the internal module name is changed to ``__main__`` to allow arbitrary program names, including those that would be invalid for modules. Note that this prevents reuse of the generated C code as an importable module. * External C++ classes that overload the assignment operator can be used. Patch by Ian Henriksen. * Support operator bool() for C++ classes so they can be used in if statements. Bugs fixed ---------- * Calling "yield from" from Python on a Cython generator that returned a value triggered a crash in CPython. This is now being worked around. See https://bugs.python.org/issue23996 * Language level 3 did not enable true division (a.k.a. float division) for integer operands. * Functions with fused argument types that included a generic 'object' fallback could end up using that fallback also for other explicitly listed object types. * Relative cimports could accidentally fall back to trying an absolute cimport on failure. * The result of calling a C struct constructor no longer requires an intermediate assignment when coercing to a Python dict. * C++ exception declarations with mapping functions could fail to compile when pre-declared in .pxd files. * ``cpdef void`` methods are now permitted. * ``abs(cint)`` could fail to compile in MSVC and used sub-optimal code in C++. Patch by David Vierra, original patch by Michael Enßlin. * Buffer index calculations using index variables with small C integer types could overflow for large buffer sizes. Original patch by David Vierra. * C unions use a saner way to coerce from and to Python dicts. 
* When compiling a module ``foo.pyx``, the directories in ``sys.path`` are no longer searched when looking for ``foo.pxd``. Patch by Jeroen Demeyer. * Memory leaks in the embedding main function were fixed. Original patch by Michael Enßlin. * Some complex Python expressions could fail to compile inside of finally clauses. * Unprefixed 'str' literals were not supported as C varargs arguments. * Fixed type errors in conversion enum types to/from Python. Note that this imposes stricter correctness requirements on enum declarations. Other changes ------------- * Changed mangling scheme in header files generated by ``cdef api`` declarations. * Installation under CPython 3.3+ no longer requires a pass of the 2to3 tool. This also makes it possible to run Cython in Python 3.3+ from a source checkout without installing it first. Patch by Petr Viktorin. * ``jedi-typer.py`` (in ``Tools/``) was extended and renamed to ``jedityper.py`` (to make it importable) and now works with and requires Jedi 0.9. Patch by Tzer-jen Wei. 0.22.1 (2015-06-20) =================== Bugs fixed ---------- * Crash when returning values on generator termination. * In some cases, exceptions raised during internal isinstance() checks were not propagated. * Runtime reported file paths of source files (e.g for profiling and tracing) are now relative to the build root directory instead of the main source file. * Tracing exception handling code could enter the trace function with an active exception set. * The internal generator function type was not shared across modules. * Comparisons of (inferred) ctuples failed to compile. * Closures inside of cdef functions returning ``void`` failed to compile. * Using ``const`` C++ references in intermediate parts of longer expressions could fail to compile. * C++ exception declarations with mapping functions could fail to compile when pre-declared in .pxd files. * C++ compilation could fail with an ambiguity error in recent MacOS-X Xcode versions. * C compilation could fail in pypy3. * Fixed a memory leak in the compiler when compiling multiple modules. * When compiling multiple modules, external library dependencies could leak into later compiler runs. Fix by Jeroen Demeyer. This fixes ticket 845. 0.22 (2015-02-11) ================= Features added -------------- * C functions can coerce to Python functions, which allows passing them around as callable objects. * C arrays can be assigned by value and auto-coerce from Python iterables and to Python lists (and tuples). * Extern C functions can now be declared as cpdef to export them to the module's Python namespace. Extern C functions in pxd files export their values to their own module, iff it exists. * Anonymous C tuple types can be declared as (ctype1, ctype2, ...). * PEP 479: turn accidental StopIteration exceptions that exit generators into a RuntimeError, activated with future import "generator_stop". See https://www.python.org/dev/peps/pep-0479/ * Looping over ``reversed(range())`` is optimised in the same way as ``range()``. Patch by Favian Contreras. Bugs fixed ---------- * Mismatching 'except' declarations on signatures in .pxd and .pyx files failed to produce a compile error. * Failure to find any files for the path pattern(s) passed into ``cythonize()`` is now an error to more easily detect accidental typos. * The ``logaddexp`` family of functions in ``numpy.math`` now has correct declarations. 
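  A minimal use of these declarations might look as follows (sketch only;
  the extension additionally has to be linked against NumPy's ``npymath``
  library for these functions to resolve)::

      from numpy.math cimport logaddexp

      def log_sum(double a, double b):
          return logaddexp(a, b)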
* In Py2.6/7 and Py3.2, simple Cython memory views could accidentally be interpreted as non-contiguous by CPython, which could trigger a CPython bug when copying data from them, thus leading to data corruption. See CPython issues 12834 and 23349. Other changes ------------- * Preliminary support for defining the Cython language with a formal grammar. To try parsing your files against this grammar, use the --formal_grammar directive. Experimental. * ``_`` is no longer considered a cacheable builtin as it could interfere with gettext. * Cythonize-computed metadata now cached in the generated C files. * Several corrections and extensions in numpy, cpython, and libcpp pxd files. 0.21.2 (2014-12-27) =================== Bugs fixed ---------- * Crash when assigning a C value to both a Python and C target at the same time. * Automatic coercion from C++ strings to ``str`` generated incomplete code that failed to compile. * Declaring a constructor in a C++ child class erroneously required a default constructor declaration in the super class. * ``resize_smart()`` in ``cpython.array`` was broken. * Functions in ``libcpp.cast`` are now declared as ``nogil``. * Some missing C-API declarations were added. * Py3 main code in embedding program code was lacking casts. * Exception related to distutils "Distribution" class type in pyximport under latest CPython 2.7 and 3.4 releases when setuptools is being imported later. 0.21.1 (2014-10-18) =================== Features added -------------- * New ``cythonize`` option ``-a`` to generate the annotated HTML source view. * Missing C-API declarations in ``cpython.unicode`` were added. * Passing ``language='c++'`` into cythonize() globally enables C++ mode for all modules that were not passed as Extension objects (i.e. only source files and file patterns). * ``Py_hash_t`` is a known type (used in CPython for hash values). * ``PySlice_*()`` C-API functions are available from the ``cpython.slice`` module. * Allow arrays of C++ classes. Bugs fixed ---------- * Reference leak for non-simple Python expressions in boolean and/or expressions. * To fix a name collision and to reflect availability on host platforms, standard C declarations [ clock(), time(), struct tm and tm* functions ] were moved from posix/time.pxd to a new libc/time.pxd. Patch by Charles Blake. * Rerunning unmodified modules in IPython's cython support failed. Patch by Matthias Bussonier. * Casting C++ ``std::string`` to Python byte strings failed when auto-decoding was enabled. * Fatal exceptions in global module init code could lead to crashes if the already created module was used later on (e.g. through a stale reference in sys.modules or elsewhere). * ``cythonize.py`` script was not installed on MS-Windows. Other changes ------------- * Compilation no longer fails hard when unknown compilation options are passed. Instead, it raises a warning and ignores them (as it did silently before 0.21). This will be changed back to an error in a future release. 0.21 (2014-09-10) ================= Features added -------------- * C (cdef) functions allow inner Python functions. * Enums can now be declared as cpdef to export their values to the module's Python namespace. Cpdef enums in pxd files export their values to their own module, iff it exists. * Allow @staticmethod decorator to declare static cdef methods. This is especially useful for declaring "constructors" for cdef classes that can take non-Python arguments. 
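  A minimal sketch of such a static cdef "constructor" (the type and
  attribute names are made up for illustration)::

      cdef class Vec:
          cdef double x, y

          @staticmethod
          cdef Vec from_xy(double x, double y):
              cdef Vec v = Vec.__new__(Vec)
              v.x = x
              v.y = y
              return v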
* Taking a ``char*`` from a temporary Python string object is safer in more cases and can be done inside of non-trivial expressions, including arguments of a function call. A compile time error is raised only when such a pointer is assigned to a variable and would thus exceed the lifetime of the string itself. * Generators have new properties ``__name__`` and ``__qualname__`` that provide the plain/qualified name of the generator function (following CPython 3.5). See http://bugs.python.org/issue21205 * The ``inline`` function modifier is available as a decorator ``@cython.inline`` in pure mode. * When cygdb is run in a virtualenv, it enables the same virtualenv inside of the debugger. Patch by Marc Abramowitz. * PEP 465: dedicated infix operator for matrix multiplication (A @ B). * HTML output of annotated code uses Pygments for code highlighting and generally received a major overhaul by Matthias Bussonier. * IPython magic support is now available directly from Cython with the command "%load_ext cython". Cython code can directly be executed in a cell when marked with "%%cython". Code analysis is available with "%%cython -a". Patch by Martín Gaitán. * Simple support for declaring Python object types in Python signature annotations. Currently requires setting the compiler directive ``annotation_typing=True``. * New directive ``use_switch`` (defaults to True) to optionally disable the optimization of chained if statement to C switch statements. * Defines dynamic_cast et al. in ``libcpp.cast`` and C++ heap data structure operations in ``libcpp.algorithm``. * Shipped header declarations in ``posix.*`` were extended to cover more of the POSIX API. Patches by Lars Buitinck and Mark Peek. Optimizations ------------- * Simple calls to C implemented Python functions/methods are faster. This also speeds up many operations on builtins that Cython cannot otherwise optimise. * The "and"/"or" operators try to avoid unnecessary coercions of their arguments. They now evaluate the truth value of each argument independently and only coerce the final result of the whole expression to the target type (e.g. the type on the left side of an assignment). This also avoids reference counting overhead for Python values during evaluation and generally improves the code flow in the generated C code. * The Python expression "2 ** N" is optimised into bit shifting. See http://bugs.python.org/issue21420 * Cascaded assignments (a = b = ...) try to minimise the number of type coercions. * Calls to ``slice()`` are translated to a straight C-API call. Bugs fixed ---------- * Crash when assigning memory views from ternary conditional expressions. * Nested C++ templates could lead to unseparated ">>" characters being generated into the C++ declarations, which older C++ compilers could not parse. * Sending SIGINT (Ctrl-C) during parallel cythonize() builds could hang the child processes. * No longer ignore local setup.cfg files for distutils in pyximport. Patch by Martin Teichmann. * Taking a ``char*`` from an indexed Python string generated unsafe reference counting code. * Set literals now create all of their items before trying to add them to the set, following the behaviour in CPython. This makes a difference in the rare case that the item creation has side effects and some items are not hashable (or if hashing them has side effects, too). * Cython no longer generates the cross product of C functions for code that uses memory views of fused types in function signatures (e.g. ``cdef func(floating[:] a, floating[:] b)``). 
This is considered the expected behaviour by most users and was previously inconsistent with other structured types like C arrays. Code that really wants all type combinations can create the same fused memoryview type under different names and use those in the signature to make it clear which types are independent. * Names that were unknown at compile time were looked up as builtins at runtime but not as global module names. Trying both lookups helps with globals() manipulation. * Fixed stl container conversion for typedef element types. * ``obj.pop(x)`` truncated large C integer values of x to ``Py_ssize_t``. * ``__init__.pyc`` is recognised as marking a package directory (in addition to .py, .pyx and .pxd). * Syntax highlighting in ``cython-mode.el`` for Emacs no longer incorrectly highlights keywords found as part of longer names. * Correctly handle ``from cython.submodule cimport name``. * Fix infinite recursion when using super with cpdef methods. * No-args ``dir()`` was not guaranteed to return a sorted list. Other changes ------------- * The header line in the generated C files no longer contains the timestamp but only the Cython version that wrote it. This was changed to make builds more reproducible. * Removed support for CPython 2.4, 2.5 and 3.1. * The licensing implications on the generated code were clarified to avoid legal constraints for users. 0.20.2 (2014-06-16) =================== Features added -------------- * Some optimisations for set/frozenset instantiation. * Support for C++ unordered_set and unordered_map. Bugs fixed ---------- * Access to attributes of optimised builtin methods (e.g. ``[].append.__name__``) could fail to compile. * Memory leak when extension subtypes add a memory view as attribute to those of the parent type without having Python object attributes or a user provided dealloc method. * Compiler crash on readonly properties in "binding" mode. * Auto-encoding with ``c_string_encoding=ascii`` failed in Py3.3. * Crash when subtyping freelist enabled Cython extension types with Python classes that use ``__slots__``. * Freelist usage is restricted to CPython to avoid problems with other Python implementations. * Memory leak in memory views when copying overlapping, contiguous slices. * Format checking when requesting non-contiguous buffers from ``cython.array`` objects was accidentally omitted in Py3. * C++ destructor calls in extension types could fail to compile in clang. * Buffer format validation failed for sequences of strings in structs. * Docstrings on extension type attributes in .pxd files were rejected. 0.20.1 (2014-02-11) =================== Bugs fixed ---------- * Build error under recent MacOS-X versions where ``isspace()`` could not be resolved by clang. * List/Tuple literals multiplied by more than one factor were only multiplied by the last factor instead of all. * Lookups of special methods (specifically for context managers) could fail in Python <= 2.6/3.1. * Local variables were erroneously appended to the signature introspection of Cython implemented functions with keyword-only arguments under Python 3. * In-place assignments to variables with inferred Python builtin/extension types could fail with type errors if the result value type was incompatible with the type of the previous value. * The C code generation order of cdef classes, closures, helper code, etc. was not deterministic, thus leading to high code churn. * Type inference could fail to deduce C enum types. 
* Type inference could deduce unsafe or inefficient types from integer assignments within a mix of inferred Python variables and integer variables. 0.20 (2014-01-18) ================= Features added -------------- * Support for CPython 3.4. * Support for calling C++ template functions. * ``yield`` is supported in ``finally`` clauses. * The C code generated for finally blocks is duplicated for each exit case to allow for better optimisations by the C compiler. * Cython tries to undo the Python optimisationism of assigning a bound method to a local variable when it can generate better code for the direct call. * Constant Python float values are cached. * String equality comparisons can use faster type specific code in more cases than before. * String/Unicode formatting using the '%' operator uses a faster C-API call. * ``bytearray`` has become a known type and supports coercion from and to C strings. Indexing, slicing and decoding is optimised. Note that this may have an impact on existing code due to type inference. * Using ``cdef basestring stringvar`` and function arguments typed as ``basestring`` is now meaningful and allows assigning exactly ``str`` and ``unicode`` objects, but no subtypes of these types. * Support for the ``__debug__`` builtin. * Assertions in Cython compiled modules are disabled if the running Python interpreter was started with the "-O" option. * Some types that Cython provides internally, such as functions and generators, are now shared across modules if more than one Cython implemented module is imported. * The type inference algorithm works more fine granular by taking the results of the control flow analysis into account. * A new script in ``bin/cythonize`` provides a command line frontend to the cythonize() compilation function (including distutils build). * The new extension type decorator ``@cython.no_gc_clear`` prevents objects from being cleared during cyclic garbage collection, thus making sure that object attributes are kept alive until deallocation. * During cyclic garbage collection, attributes of extension types that cannot create reference cycles due to their type (e.g. strings) are no longer considered for traversal or clearing. This can reduce the processing overhead when searching for or cleaning up reference cycles. * Package compilation (i.e. ``__init__.py`` files) now works, starting with Python 3.3. * The cython-mode.el script for Emacs was updated. Patch by Ivan Andrus. * An option common_utility_include_dir was added to cythonize() to save oft-used utility code once in a separate directory rather than as part of each generated file. * ``unraisable_tracebacks`` directive added to control printing of tracebacks of unraisable exceptions. Bugs fixed ---------- * Abstract Python classes that subtyped a Cython extension type failed to raise an exception on instantiation, and thus ended up being instantiated. * ``set.add(a_tuple)`` and ``set.discard(a_tuple)`` failed with a TypeError in Py2.4. * The PEP 3155 ``__qualname__`` was incorrect for nested classes and inner classes/functions declared as ``global``. * Several corner cases in the try-finally statement were fixed. * The metaclass of a Python class was not inherited from its parent class(es). It is now extracted from the list of base classes if not provided explicitly using the Py3 ``metaclass`` keyword argument. 
In Py2 compilation mode, a ``__metaclass__`` entry in the class dict will still take precedence if not using Py3 metaclass syntax, but only *after* creating the class dict (which may have been done by a metaclass of a base class, see PEP 3115). It is generally recommended to use the explicit Py3 syntax to define metaclasses for Python types at compile time. * The automatic C switch statement generation behaves more safely for heterogeneous value types (e.g. mixing enum and char), allowing for a slightly wider application and reducing corner cases. It now always generates a 'default' clause to avoid C compiler warnings about unmatched enum values. * Fixed a bug where class hierarchies declared out-of-order could result in broken generated code. * Fixed a bug which prevented overriding const methods of C++ classes. * Fixed a crash when converting Python objects to C++ strings fails. Other changes ------------- * In Py3 compilation mode, Python2-style metaclasses declared by a ``__metaclass__`` class dict entry are ignored. * In Py3.4+, the Cython generator type uses ``tp_finalize()`` for safer cleanup instead of ``tp_del()``. 0.19.2 (2013-10-13) =================== Features added -------------- Bugs fixed ---------- * Some standard declarations were fixed or updated, including the previously incorrect declaration of ``PyBuffer_FillInfo()`` and some missing bits in ``libc.math``. * Heap allocated subtypes of ``type`` used the wrong base type struct at the C level. * Calling the unbound method dict.keys/value/items() in dict subtypes could call the bound object method instead of the unbound supertype method. * "yield" wasn't supported in "return" value expressions. * Using the "bint" type in memory views lead to unexpected results. It is now an error. * Assignments to global/closure variables could catch them in an illegal state while deallocating the old value. Other changes ------------- 0.19.1 (2013-05-11) =================== Features added -------------- * Completely empty C-API structs for extension type slots (protocols like number/mapping/sequence) are no longer generated into the C code. * Docstrings that directly follow a public/readonly attribute declaration in a cdef class will be used as docstring of the auto-generated property. This fixes ticket 206. * The automatic signature documentation tries to preserve more semantics of default arguments and argument types. Specifically, ``bint`` arguments now appear as type ``bool``. * A warning is emitted when negative literal indices are found inside of a code section that disables ``wraparound`` handling. This helps with fixing invalid code that might fail in the face of future compiler optimisations. * Constant folding for boolean expressions (and/or) was improved. * Added a build_dir option to cythonize() which allows one to place the generated .c files outside the source tree. Bugs fixed ---------- * ``isinstance(X, type)`` failed to get optimised into a call to ``PyType_Check()``, as done for other builtin types. * A spurious ``from datetime cimport *`` was removed from the "cpython" declaration package. This means that the "datetime" declarations (added in 0.19) are no longer available directly from the "cpython" namespace, but only from "cpython.datetime". This is the correct way of doing it because the declarations refer to a standard library module, not the core CPython C-API itself. 
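  Code that relied on the removed star-import now cimports the declarations
  explicitly, e.g. (sketch; ``import_datetime()`` must be called once before
  the datetime C-API is used)::

      from cpython.datetime cimport datetime, import_datetime

      import_datetime()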
* The C code for extension types is now generated in topological order instead of source code order to avoid C compiler errors about missing declarations for subtypes that are defined before their parent. * The ``memoryview`` type name no longer shows up in the module dict of modules that use memory views. This fixes trac ticket 775. * Regression in 0.19 that rejected valid C expressions from being used in C array size declarations. * In C++ mode, the C99-only keyword ``restrict`` could accidentally be seen by the GNU C++ compiler. It is now specially handled for both GCC and MSVC. * Testing large (> int) C integer values for their truth value could fail due to integer wrap-around. Other changes ------------- 0.19 (2013-04-19) ================= Features added -------------- * New directives ``c_string_type`` and ``c_string_encoding`` to more easily and automatically convert between C strings and the different Python string types. * The extension type flag ``Py_TPFLAGS_HAVE_VERSION_TAG`` is enabled by default on extension types and can be disabled using the ``type_version_tag`` compiler directive. * EXPERIMENTAL support for simple Cython code level line tracing. Enabled by the "linetrace" compiler directive. * Cython implemented functions make their argument and return type annotations available through the ``__annotations__`` attribute (PEP 3107). * Access to non-cdef module globals and Python object attributes is faster. * ``Py_UNICODE*`` coerces from and to Python unicode strings. This is helpful when talking to Windows APIs, which use compatible wchar_t arrays for strings. Note that the ``Py_UNICODE`` type is otherwise deprecated as of CPython 3.3. * ``isinstance(obj, basestring)`` is optimised. In Python 3 it only tests for instances of ``str`` (i.e. Py2 ``unicode``). * The ``basestring`` builtin is mapped to ``str`` (i.e. Py2 ``unicode``) when compiling the generated C code under Python 3. * Closures use freelists, which can speed up their creation quite substantially. This is also visible for short running generator expressions, for example. * A new class decorator ``@cython.freelist(N)`` creates a static freelist of N instances for an extension type, thus avoiding the costly allocation step if possible. This can speed up object instantiation by 20-30% in suitable scenarios. Note that freelists are currently only supported for base types, not for types that inherit from others. * Fast extension type instantiation using the ``Type.__new__(Type)`` idiom has gained support for passing arguments. It is also a bit faster for types defined inside of the module. * The Python2-only dict methods ``.iter*()`` and ``.view*()`` (requires Python 2.7) are automatically mapped to the equivalent keys/values/items methods in Python 3 for typed dictionaries. * Slicing unicode strings, lists and tuples is faster. * list.append() is faster on average. * ``raise Exception() from None`` suppresses the exception context in Py3.3. * Py3 compatible ``exec(tuple)`` syntax is supported in Py2 code. * Keyword arguments are supported for cdef functions. * External C++ classes can be declared nogil. Patch by John Stumpo. This fixes trac ticket 805. Bugs fixed ---------- * 2-value slicing of unknown objects passes the correct slice when the ``getitem`` protocol is used instead of the ``getslice`` protocol (especially in Python 3), i.e. ``None`` values for missing bounds instead of ``[0,maxsize]``. It is also a bit faster in some cases, e.g. for constant bounds. This fixes trac ticket 636. 
* Cascaded assignments of None values to extension type variables failed with a ``TypeError`` at runtime. * The ``__defaults__`` attribute was not writable for Cython implemented functions. * Default values of keyword-only arguments showed up in ``__defaults__`` instead of ``__kwdefaults__`` (which was not implemented). Both are available for Cython implemented functions now, as specified in Python 3.x. * ``yield`` works inside of ``with gil`` sections. It previously lead to a crash. This fixes trac ticket 803. * Static methods without explicitly named positional arguments (e.g. having only ``*args``) crashed when being called. This fixes trac ticket 804. * ``dir()`` without arguments previously returned an unsorted list, which now gets sorted as expected. * ``dict.items()``, ``dict.keys()`` and ``dict.values()`` no longer return lists in Python 3. * Exiting from an ``except-as`` clause now deletes the exception in Python 3 mode. * The declarations of ``frexp()`` and ``ldexp()`` in ``math.pxd`` were incorrect. Other changes ------------- 0.18 (2013-01-28) ================= Features added -------------- * Named Unicode escapes ("\N{...}") are supported. * Python functions/classes provide the special attribute "__qualname__" as defined by PEP 3155. * Added a directive ``overflowcheck`` which raises an OverflowException when arithmetic with C ints overflow. This has a modest performance penalty, but is much faster than using Python ints. * Calls to nested Python functions are resolved at compile time. * Type inference works across nested functions. * ``py_bytes_string.decode(...)`` is optimised. * C ``const`` declarations are supported in the language. Bugs fixed ---------- * Automatic C++ exception mapping didn't work in nogil functions (only in "with nogil" blocks). Other changes ------------- 0.17.4 (2013-01-03) =================== Bugs fixed ---------- * Garbage collection triggered during deallocation of container classes could lead to a double-deallocation. 0.17.3 (2012-12-14) =================== Features added -------------- Bugs fixed ---------- * During final interpreter cleanup (with types cleanup enabled at compile time), extension types that inherit from base types over more than one level that were cimported from other modules could lead to a crash. * Weak-reference support in extension types (with a ``cdef __weakref__`` attribute) generated incorrect deallocation code. * In CPython 3.3, converting a Unicode character to the Py_UNICODE type could fail to raise an overflow for non-BMP characters that do not fit into a wchar_t on the current platform. * Negative C integer constants lost their longness suffix in the generated C code. Other changes ------------- 0.17.2 (2012-11-20) =================== Features added -------------- * ``cythonize()`` gained a best effort compile mode that can be used to simply ignore .py files that fail to compile. Bugs fixed ---------- * Replacing an object reference with the value of one of its cdef attributes could generate incorrect C code that accessed the object after deleting its last reference. * C-to-Python type coercions during cascaded comparisons could generate invalid C code, specifically when using the 'in' operator. * "obj[1,]" passed a single integer into the item getter instead of a tuple. * Cyclic imports at module init time did not work in Py3. * The names of C++ destructors for template classes were built incorrectly. * In pure mode, type casts in Cython syntax and the C ampersand operator are now rejected. 
Use the pure mode replacements instead. * In pure mode, C type names and the sizeof() function are no longer recognised as such and can be used as normal Python names. * The extended C level support for the CPython array type was declared too late to be used by user defined classes. * C++ class nesting was broken. * Better checking for required nullary constructors for stack-allocated C++ instances. * Remove module docstring in no-docstring mode. * Fix specialization for varargs function signatures. * Fix several compiler crashes. Other changes ------------- * An experimental distutils script for compiling the CPython standard library was added as Tools/cystdlib.py. 0.17.1 (2012-09-26) =================== Features added -------------- Bugs fixed ---------- * A reference leak was fixed in the new dict iteration code when the loop target was not a plain variable but an unpacked tuple. * Memory views did not handle the special case of a NULL buffer strides value, as allowed by PEP3118. Other changes ------------- 0.17 (2012-09-01) ================= Features added -------------- * Alpha quality support for compiling and running Cython generated extension modules in PyPy (through cpyext). Note that this requires at least PyPy 1.9 and in many cases also adaptations in user code, especially to avoid borrowed references when no owned reference is being held directly in C space (a reference in a Python list or dict is not enough, for example). See the documentation on porting Cython code to PyPy. * "yield from" is supported (PEP 380) and a couple of minor problems with generators were fixed. * C++ STL container classes automatically coerce from and to the equivalent Python container types on typed assignments and casts. Note that the data in the containers is copied during this conversion. * C++ iterators can now be iterated over using "for x in cpp_container" whenever cpp_container has begin() and end() methods returning objects satisfying the iterator pattern (that is, it can be incremented, dereferenced, and compared (for non-equality)). * cdef classes can now have C++ class members (provided a zero-argument constructor exists) * A new cpython.array standard cimport file allows to efficiently talk to the stdlib array.array data type in Python 2. Since CPython does not export an official C-API for this module, it receives special casing by the compiler in order to avoid setup overhead on user side. In Python 3, both buffers and memory views on the array type already worked out of the box with earlier versions of Cython due to the native support for the buffer interface in the Py3 array module. * Fast dict iteration is now enabled optimistically also for untyped variables when the common iteration methods are used. * The unicode string processing code was adapted for the upcoming CPython 3.3 (PEP 393, new Unicode buffer layout). * Buffer arguments and memory view arguments in Python functions can be declared "not None" to raise a TypeError on None input. * c(p)def functions in pure mode can specify their return type with "@cython.returns()". * Automatic dispatch for fused functions with memoryview arguments * Support newaxis indexing for memoryviews * Support decorators for fused functions Bugs fixed ---------- * Old-style Py2 imports did not work reliably in Python 3.x and were broken in Python 3.3. Regardless of this fix, it's generally best to be explicit about relative and global imports in Cython code because old-style imports have a higher overhead. 
To this end, "from __future__ import absolute_import" is supported in Python/Cython 2.x code now (previous versions of Cython already used it when compiling Python 3 code). * Stricter constraints on the "inline" and "final" modifiers. If your code does not compile due to this change, chances are these modifiers were previously being ignored by the compiler and can be removed without any performance regression. * Exceptions are always instantiated while raising them (as in Python), instead of risking to instantiate them in potentially unsafe situations when they need to be handled or otherwise processed. * locals() properly ignores names that do not have Python compatible types (including automatically inferred types). * Some garbage collection issues of memory views were fixed. * numpy.pxd compiles in Python 3 mode. * Several C compiler warnings were fixed. * Several bugs related to memoryviews and fused types were fixed. * Several bug-fixes and improvements related to cythonize(), including ccache-style caching. Other changes ------------- * libc.string provides a convenience declaration for const uchar in addition to const char. * User declared char* types are now recognised as such and auto-coerce to and from Python bytes strings. * callable() and next() compile to more efficient C code. * list.append() is faster on average. * Modules generated by @cython.inline() are written into the directory pointed to by the environment variable CYTHON_CACHE_DIR if set. 0.16 (2012-04-21) ================= Features added -------------- * Enhancements to Cython's function type (support for weak references, default arguments, code objects, dynamic attributes, classmethods, staticmethods, and more) * Fused Types - Template-like support for functions and methods CEP 522 (docs) * Typed views on memory - Support for efficient direct and indirect buffers (indexing, slicing, transposing, ...) CEP 517 (docs) * super() without arguments * Final cdef methods (which translate into direct calls on known instances) Bugs fixed ---------- * fix alignment handling for record types in buffer support Other changes ------------- * support default arguments for closures * search sys.path for pxd files * support C++ template casting * faster traceback building and faster generator termination * support inplace operators on indexed buffers * allow nested prange sections 0.15.1 (2011-09-19) =================== Features added -------------- Bugs fixed ---------- Other changes ------------- 0.15 (2011-08-05) ================= Features added -------------- * Generators (yield) - Cython has full support for generators, generator expressions and PEP 342 coroutines. * The nonlocal keyword is supported. * Re-acquiring the gil: with gil - works as expected within a nogil context. * OpenMP support: prange. * Control flow analysis prunes dead code and emits warnings and errors about uninitialised variables. * Debugger command cy set to assign values of expressions to Cython variables and cy exec counterpart $cy_eval(). * Exception chaining PEP 3134. * Relative imports PEP 328. * Improved pure syntax including cython.cclass, cython.cfunc, and cython.ccall. * The with statement has its own dedicated and faster C implementation. * Support for del. * Boundschecking directives implemented for builtin Python sequence types. * Several updates and additions to the shipped standard library .pxd files. * Forward declaration of types is no longer required for circular references. 
Bugs fixed ---------- Other changes ------------- * Uninitialized variables are no longer initialized to None and accessing them has the same semantics as standard Python. * globals() now returns a read-only dict of the Cython module's globals, rather than the globals of the first non-Cython module in the stack * Many C++ exceptions are now special cased to give closer Python counterparts. This means that except+ functions that formerly raised generic RuntimeErrors may raise something else such as ArithmeticError. * The inlined generator expressions (introduced in Cython 0.13) were disabled in favour of full generator expression support. This breaks code that previously used them inside of cdef functions (usage in def functions continues to work) and induces a performance regression for cases that continue to work but that were previously inlined. We hope to reinstate this feature in the near future. 0.14.1 (2011-02-04) =================== Features added -------------- * The gdb debugging support was extended to include all major Cython features, including closures. * raise MemoryError() is now safe to use as Cython replaces it with the correct C-API call. Bugs fixed ---------- Other changes ------------- * Decorators on special methods of cdef classes now raise a compile time error rather than being ignored. * In Python 3 language level mode (-3 option), the 'str' type is now mapped to 'unicode', so that cdef str s declares a Unicode string even when running in Python 2. 0.14 (2010-12-14) ================= Features added -------------- * Python classes can now be nested and receive a proper closure at definition time. * Redefinition is supported for Python functions, even within the same scope. * Lambda expressions are supported in class bodies and at the module level. * Metaclasses are supported for Python classes, both in Python 2 and Python 3 syntax. The Python 3 syntax (using a keyword argument in the type declaration) is preferred and optimised at compile time. * "final" extension classes prevent inheritance in Python space. This feature is available through the new "cython.final" decorator. In the future, these classes may receive further optimisations. * "internal" extension classes do not show up in the module dictionary. This feature is available through the new "cython.internal" decorator. * Extension type inheritance from builtin types, such as "cdef class MyUnicode(unicode)", now works without further external type redeclarations (which are also strongly discouraged now and continue to issue a warning). * GDB support. http://docs.cython.org/src/userguide/debugging.html * A new build system with support for inline distutils directives, correct dependency tracking, and parallel compilation. http://wiki.cython.org/enhancements/distutils_preprocessing * Support for dynamic compilation at runtime via the new cython.inline function and cython.compile decorator. http://wiki.cython.org/enhancements/inline * "nogil" blocks are supported when compiling pure Python code by writing "with cython.nogil". * Iterating over arbitrary pointer types is now supported, as is an optimized version of the in operator, e.g. x in ptr[a:b]. Bugs fixed ---------- * In parallel assignments, the right side was evaluated in reverse order in 0.13. This could result in errors if it had side effects (e.g. function calls). * In some cases, methods of builtin types would raise a SystemError instead of an AttributeError when called on None. 
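The parallel-assignment ordering fix described above can be illustrated
with a small sketch (the helper names are invented for this example)::

    calls = []

    def first():
        calls.append("first")
        return 1

    def second():
        calls.append("second")
        return 2

    a, b = first(), second()
    # evaluated left to right, matching Python (reversed in 0.13)
    assert calls == ["first", "second"]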
Other changes ------------- * Constant tuples are now cached over the lifetime of an extension module, just like CPython does. Constant argument tuples of Python function calls are also cached. * Closures have tightened to include exactly the names used in the inner functions and classes. Previously, they held the complete locals of the defining function. * The builtin "next()" function in Python 2.6 and later is now implemented internally and therefore available in all Python versions. This makes it the preferred and portable way of manually advancing an iterator. * In addition to the previously supported inlined generator expressions in 0.13, "sorted(genexpr)" can now be used as well. Typing issues were fixed in "sum(genexpr)" that could lead to invalid C code being generated. Other known issues with inlined generator expressions were also fixed that make upgrading to 0.14 a strong recommendation for code that uses them. Note that general generators and generator expressions continue to be not supported. * Inplace arithmetic operators now respect the cdivision directive and are supported for complex types. * Typing a variable as type "complex" previously gave it the Python object type. It now uses the appropriate C/C++ double complex type. A side-effect is that assignments and typed function parameters now accept anything that Python can coerce to a complex, including integers and floats, and not only complex instances. * Large integer literals pass through the compiler in a safer way. To prevent truncation in C code, non 32-bit literals are turned into Python objects if not used in a C context. This context can either be given by a clear C literal suffix such as "UL" or "LL" (or "L" in Python 3 code), or it can be an assignment to a typed variable or a typed function argument, in which case it is up to the user to take care of a sufficiently large value space of the target. * Python functions are declared in the order they appear in the file, rather than all being created at module creation time. This is consistent with Python and needed to support, for example, conditional or repeated declarations of functions. In the face of circular imports this may cause code to break, so a new --disable-function-redefinition flag was added to revert to the old behavior. This flag will be removed in a future release, so should only be used as a stopgap until old code can be fixed. 0.13 (2010-08-25) ================= Features added -------------- * Closures are fully supported for Python functions. Cython supports inner functions and lambda expressions. Generators and generator expressions are not supported in this release. * Proper C++ support. Cython knows about C++ classes, templates and overloaded function signatures, so that Cython code can interact with them in a straight forward way. * Type inference is enabled by default for safe C types (e.g. double, bint, C++ classes) and known extension types. This reduces the need for explicit type declarations and can improve the performance of untyped code in some cases. There is also a verbose compile mode for testing the impact on user code. * Cython's for-in-loop can iterate over C arrays and sliced pointers. The type of the loop variable will be inferred automatically in this case. * The Py_UNICODE integer type for Unicode code points is fully supported, including for-loops and 'in' tests on unicode strings. It coerces from and to single character unicode strings. 
Note that untyped for-loop variables will automatically be inferred as Py_UNICODE when iterating over a unicode string. In most cases, this will be much more efficient than yielding sliced string objects, but can also have a negative performance impact when the variable is used in a Python context multiple times, so that it needs to coerce to a unicode string object more than once. If this happens, typing the loop variable as unicode or object will help. * The built-in functions any(), all(), sum(), list(), set() and dict() are inlined as plain for loops when called on generator expressions. Note that generator expressions are not generally supported apart from this feature. Also, tuple(genexpr) is not currently supported - use tuple([listcomp]) instead. * More shipped standard library declarations. The python_* and stdlib/stdio .pxd files have been deprecated in favor of clib.* and cpython[.*] and may get removed in a future release. * Pure Python mode no longer disallows non-Python keywords like 'cdef', 'include' or 'cimport'. It also no longer recognises syntax extensions like the for-from loop. * Parsing has improved for Python 3 syntax in Python code, although not all features are correctly supported. The missing Python 3 features are being worked on for the next release. * from __future__ import print_function is supported in Python 2.6 and later. Note that there is currently no emulation for earlier Python versions, so code that uses print() with this future import will require at least Python 2.6. * New compiler directive language_level (valid values: 2 or 3) with corresponding command line options -2 and -3 requests source code compatibility with Python 2.x or Python 3.x respectively. Language level 3 currently enforces unicode literals for unprefixed string literals, enables the print function (requires Python 2.6 or later) and keeps loop variables in list comprehensions from leaking. * Loop variables in set/dict comprehensions no longer leak into the surrounding scope (following Python 2.7). List comprehensions are unchanged in language level 2. * print >> stream Bugs fixed ---------- Other changes ------------- * The availability of type inference by default means that Cython will also infer the type of pointers on assignments. Previously, code like this:: cdef char* s = ... untyped_variable = s would convert the char* to a Python bytes string and assign that. This is no longer the case and no coercion will happen in the example above. The correct way of doing this is through an explicit cast or by typing the target variable, i.e. :: cdef char* s = ... untyped_variable1 = s untyped_variable2 = s cdef object py_object = s cdef bytes bytes_string = s * bool is no longer a valid type name by default. The problem is that it's not clear whether bool should refer to the Python type or the C++ type, and expecting one and finding the other has already led to several hard-to-find bugs. Both types are available for importing: you can use from cpython cimport bool for the Python bool type, and from libcpp cimport bool for the C++ type. bool is still a valid object by default, so one can still write bool(x). * ``__getsegcount__`` is now correctly typed to take a ``Py_size_t*`` rather than an ``int*``. 0.12.1 (2010-02-02) =================== Features added -------------- * Type inference improvements. * There have been several bug fixes and improvements to the type inferencer. * Notably, there is now a "safe" mode enabled by setting the infer_types directive to None. 
(The None here refers to the "default" mode, which will be the default in 0.13.) This safe mode limits inference to Python object types and C doubles, which should speed up execution without affecting any semantics such as integer overflow behavior like infer_types=True might. There is also an infer_types.verbose option which allows one to see what types are inferred. * The boundscheck directive works for lists and tuples as well as buffers. * len(s) and s.decode("encoding") are efficiently supported for char* s. * Cython's INLINE macro has been renamed to CYTHON_INLINE to reduce conflict and has better support for the MSVC compiler on Windows. It is no longer clobbered if externally defined. * Revision history is now omitted from the source package, resulting in a 85% size reduction. Running make repo will download the history and turn the directory into a complete Mercurial working repository. * Cython modules don't need to be recompiled when the size of an external type grows. (A warning, rather than an error, is produced.) This should be helpful for binary distributions relying on NumPy. Bugs fixed ---------- * Several other bugs and minor improvements have been made. This release should be fully backwards compatible with 0.12. Other changes ------------- 0.12 (2009-11-23) ================= Features added -------------- * Type inference with the infer_types directive * Seamless C++ complex support * Fast extension type instantiation using the normal Python meme obj = MyType.__new__(MyType) * Improved support for Py3.1 * Cython now runs under Python 3.x using the 2to3 tool * unittest support for doctests in Cython modules * Optimised handling of C strings (char*): for c in cstring[2:50] and cstring.decode() * Looping over c pointers: for i in intptr[:50]. * pyximport improvements * cython_freeze improvements Bugs fixed ---------- * Many bug fixes Other changes ------------- * Many other optimisation, e.g. enumerate() loops, parallel swap assignments (a,b = b,a), and unicode.encode() * More complete numpy.pxd 0.11.2 (2009-05-20) =================== Features added -------------- * There's now native complex floating point support! C99 complex will be used if complex.h is included, otherwise explicit complex arithmetic working on all C compilers is used. [Robert Bradshaw] :: cdef double complex a = 1 + 0.3j cdef np.ndarray[np.complex128_t, ndim=2] arr = \ np.zeros(10, np.complex128) * Cython can now generate a main()-method for embedding of the Python interpreter into an executable (see #289) [Robert Bradshaw] * @wraparound directive (another way to disable arr[idx] for negative idx) [Dag Sverre Seljebotn] * Correct support for NumPy record dtypes with different alignments, and "cdef packed struct" support [Dag Sverre Seljebotn] * @callspec directive, allowing custom calling convention macros [Lisandro Dalcin] Bugs fixed ---------- Other changes ------------- * Bug fixes and smaller improvements. For the full list, see [1]. Cython-0.26.1/README.txt0000664000175000017500000000425112542002467015332 0ustar stefanstefan00000000000000Welcome to Cython! ================= Cython (http://cython.org) is a language that makes writing C extensions for the Python language as easy as Python itself. Cython is based on the well-known Pyrex, but supports more cutting edge functionality and optimizations. The Cython language is very close to the Python language, but Cython additionally supports calling C functions and declaring C types on variables and class attributes. 
This allows the compiler to generate very efficient C code from Cython code. This makes Cython the ideal language for wrapping external C libraries, and for fast C modules that speed up the execution of Python code. LICENSE: The original Pyrex program was licensed "free of restrictions" (see below). Cython itself is licensed under the permissive Apache License See LICENSE.txt. -------------------------- Note that Cython used to ship the full version control repository in its source distribution, but no longer does so due to space constraints. To get the full source history, make sure you have git installed, then step into the base directory of the Cython source distribution and type make repo Alternatively, check out the latest developer repository from https://github.com/cython/cython The following is from Pyrex: ------------------------------------------------------ This is a development version of Pyrex, a language for writing Python extension modules. For more info, see: Doc/About.html for a description of the language INSTALL.txt for installation instructions USAGE.txt for usage instructions Demos for usage examples Comments, suggestions, bug reports, etc. are welcome! Copyright stuff: Pyrex is free of restrictions. You may use, redistribute, modify and distribute modified versions. The latest version of Pyrex can be found here: http://www.cosc.canterbury.ac.nz/~greg/python/Pyrex/ Greg Ewing, Computer Science Dept, +--------------------------------------+ University of Canterbury, | A citizen of NewZealandCorp, a | Christchurch, New Zealand | wholly-owned subsidiary of USA Inc. | greg@cosc.canterbury.ac.nz +--------------------------------------+ Cython-0.26.1/cython.py0000775000175000017500000000101012542002467015503 0ustar stefanstefan00000000000000#!/usr/bin/env python # # Cython -- Main Program, generic # if __name__ == '__main__': import os import sys # Make sure we import the right Cython cythonpath, _ = os.path.split(os.path.realpath(__file__)) sys.path.insert(0, cythonpath) from Cython.Compiler.Main import main main(command_line = 1) else: # Void cython.* directives. from Cython.Shadow import * ## and bring in the __version__ from Cython import __version__ from Cython import load_ipython_extension Cython-0.26.1/ToDo.txt0000664000175000017500000001616312542002467015247 0ustar stefanstefan00000000000000See http://trac.cython.org/cython_trac and http://wiki.cython.org/enhancements -- The Original Pyrex Todo List -- DONE - Pointer-to-function types. DONE - Nested declarators. DONE - Varargs C func defs and calls. DONE - * and ** args in Python func defs. DONE - Default argument values. DONE - Tracebacks. DONE - Disallow creating char * from Python temporary anywhere (not just on assignment). DONE - Module and function and class doc strings. DONE - Predeclare C functions. DONE - Constant expressions. DONE - Forward C struct declarations. DONE - Prefix & operator. DONE - Get rid of auto string->char coercion and add a c'X' syntax for char literals. DONE - Cascaded assignments (a = b = c). DONE - 'include' statement for including other Pyrex files. DONE - Add command line option for specifying name of generated C file. DONE - Add 'else' clause to try-except. DONE - Allow extension types to be declared "public" so they can be accessed from another Pyrex module or a C file. DONE - Don't try to generate objstruct definition for external extension type declared without suite (treat as though declared with empty suite). DONE - Implement two-argument form of 'assert' statement. 
Const types. Tuple/list construction: Evaluate & store items one at a time? Varargs argument traversal. Use PyDict_SetItemString to build keyword arg dicts? (Or wait until names are interned.) Intern names. print >>file abs() and anything similar. Semicolon-separated statement lists. Optional semicolons after C declarations. Multiple C declarations on one line? Optimise return without value outside of try-finally. exec statement. from ... import statement. Use iterator protocol for unpacking. Save & restore exception being handled on function entry/exit. In-place operators (+=, etc). Constant declarations. Syntax? DONE - Some way for C functions to signal Python errors? Check for lack of return with value in non-void C functions? Allow 'pass' in struct/union/enum definition. Make C structs callable as constructors. DONE - Provide way of specifying C names. DONE - Public cdefs. When calling user __dealloc__ func, save & restore exception. DONE - Forward declaration of extension types. Complex number parsetuple format? DONE - long long type DONE - long double type? Windows __fooblarg function declaration things. Generate type, var and func declarations in the same order that they appear in the source file. Provide a way of declaring a C function as returning a borrowed Python reference. Provide a way of specifying whether a Python object obtained by casting a pointer should be treated as a new reference or not. Optimize integer for-loops. Make sizeof() take types as well as variables. Allow "unsigned" to be used alone as a type name. Allow duplicate declarations, at least in extern-from. Do something about installing proper version of pyrexc script according to platform in setup.py. DONE - Add "-o filename" command line option to unix/dos versions. Recognise #line directives? Catch floating point exceptions? Check that forward-declared non-external extension types are defined. Generate type test when casting from one Python type to another. Generate a Pyrex include file for public declarations as well as a C one. Syntax for defining indefinite-sized int & float types. Allow ranges of exception values. Support "complex double" and "complex float"? Allow module-level Python variables to be declared extern. Consider: >cdef extern from "foo.h": > int dosomething() except -1 raise MyException Properties for Python types. DONE - Properties for extension types. Find a way to make classmethod and staticmethod work better. DONE - Document workarounds for classmethod and staticmethod. Statically initialised C arrays & structs. Reduce generation of unused vars and unreachable code? Support for acquiring and releasing GIL. Make docstrings of extension type special methods work. Treat result of getting C attribute of extension type as non-ephemeral. Make None a reserved identifier. Teach it about builtin functions that correspond to Python/C API calls. Teach it about common builtin types. Option for generating a main() function? DONE - Allow an extension type to inherit from another type. Do something about external C functions declared as returning const * types? Use PyString_FromStringAndSize for string literals? DONE - C functions as methods of extension types. What to do about __name__ etc. attributes of a module (they are currently assumed to be built-in names). Use PyDict_GetItem etc. on module & builtins dicts for speed. Intern all string literals used as Python strings? [Koshy ] Make extension types weak-referenceable. 
[Matthias Baas ] Make 'pass' work in the body of an extern-from struct or union. Disallow a filename which results in an illegal identifier when used as a module name. Use ctypedef names. Provide an easy way of exposing a set of enum values as Python names. [John J Lee ] Prevent user from returning a value from special methods that return an error indicator only. Use PyObject_TypeCheck instead of PyObject_IsInstance? Allow * in cimport? [John J Lee ] FAQ: Q. Pyrex says my extension type object has no attribute 'rhubarb', but I know it does. A. Have you declared the type at the point where you're using it? Eliminate lvalue casts! (Illegal in C++, also disallowed by some C compilers) [Matthias Baas ] Make Python class construction work more like it does in Python. Give the right module name to Python classes. Command line switch for full pathnames in backtraces? Use PyString_FromStringAndSize on string literals containing nulls. Peephole optimisation? [Vladislav Bulatov ] Avoid PyArg_ParseTuple call when a function takes no positional args. Omit incref/decref of arguments that are not assigned to? Can a faster way of instantiating extension types be found? Disallow declaring a special method of an extension type with 'cdef' instead of 'def'. Use PySequence_GetItem instead of PyObject_GetItem when index is an integer. If a __getitem__ method is declared with an int index, use the sq_item slot instead of the mp_subscript slot. Provide some way of controlling the argument list passed to an extension type's base __new__ method? [Alain Pointdexter ] Rename __new__ in extension types to __alloc__. Implement a true __new__ for extension types. Way to provide constructors for extension types that are not available to Python and can accept C types directly? Support generators by turning them into extension types? List comprehensions. Variable declarations inside inner code blocks? Initial values when declaring variables? Do something about __stdcall. Support class methods in extension types using METH_CLASS flag. Disallow defaulting types to 'object' in C declarations? C globals with static initialisers. Find a way of providing C-only initialisers for extension types. Metaclasses for extension types? Make extension types use Py_TPFLAGS_HEAPTYPE so their __module__ will get set dynamically? 
Cython-0.26.1/MANIFEST.in0000664000175000017500000000176713023021033015364 0ustar stefanstefan00000000000000include MANIFEST.in README.txt INSTALL.txt ToDo.txt USAGE.txt CHANGES.rst include COPYING.txt LICENSE.txt 2to3-fixers.txt Makefile include .gitrev include pylintrc include setup.py include setupegg.py include bin/* include cython.py cythonize.py cygdb.py recursive-include Cython *.pyx *.pxd include Cython/Parser/Grammar Cython/Parser/__init__.py include Doc/* include Demos/*.pyx include Demos/*.py include Demos/callback/* include Demos/benchmarks/* include Demos/embed/* include Demos/freeze/* include Demos/libraries/* include Demos/Makefile* recursive-include Cython/Debugger/Tests *.pyx *.pxd *.c *.h recursive-include Cython/Utility *.pyx *.pxd *.c *.h *.cpp recursive-include Tools * recursive-include tests *.pyx *.pxd *.pxi *.py *.h *.hpp *.BROKEN bugs.txt recursive-include tests *_lib.cpp *.srctree recursive-include docs * include runtests.py include Cython/Debugger/Tests/cfuncs.c include Cython/Debugger/Tests/codefile recursive-include pyximport *.py include pyximport/PKG-INFO pyximport/README Cython-0.26.1/Demos/0000775000175000017500000000000013151203436014675 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/freeze/0000775000175000017500000000000013151203436016155 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/freeze/Makefile0000664000175000017500000000311713023021033017604 0ustar stefanstefan00000000000000CC = gcc CYTHON = ../../bin/cython CYTHON_FREEZE = ../../bin/cython_freeze PYTHON = python RST2HTML = rst2html PY_LDFLAGS = $(shell $(PYTHON) -c 'from distutils.sysconfig import get_config_var as g; import sys; sys.stdout.write(" ".join([g("LINKFORSHARED"), "-L"+g("LIBPL")]) + "\n")') PY_CPPFLAGS = $(shell $(PYTHON) -c 'from distutils.sysconfig import *; import sys; sys.stdout.write("-I"+get_python_inc() + "\n")') LIBDIR1 := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBDIR'))") LIBDIR2 := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))") PYLIB := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBRARY')[3:-2])") LIBS := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('LIBS'))") CFLAGS = -fPIC -fno-strict-aliasing -g -O2 -Wall -Wextra CPPFLAGS = $(PY_CPPFLAGS) LDFLAGS = $(PY_LDFLAGS) LDLIBS = -L$(LIBDIR1) -L$(LIBDIR2) -l$(PYLIB) # Name of executable TARGETS = nCr python # List of Cython source files, with main module first. 
CYTHON_SOURCE = combinatorics.pyx lcmath.pyx CYTHON_SECONDARY = $(CYTHON_SOURCE:.pyx=.c) $(TARGETS:=.c) all : $(TARGETS) html : README.html $(TARGETS) : % : %.o $(CYTHON_SOURCE:.pyx=.o) nCr.c : $(CYTHON_FREEZE) $(CYTHON_SOURCE:.pyx=) > $@ python.c : $(CYTHON_FREEZE) --pymain $(CYTHON_SOURCE:.pyx=) > $@ %.c : %.pyx $(CYTHON) $(CYTHONFLAGS) $^ %.html : %.txt $(RST2HTML) $^ $@ clean: $(RM) *.o $(CYTHON_SECONDARY) $(TARGETS) README.html .PHONY: clean .SECONDARY: $(CYTHON_SECONDARY) Cython-0.26.1/Demos/freeze/combinatorics.pyx0000664000175000017500000000073612574327400021567 0ustar stefanstefan00000000000000# cython: language_level=3 import lcmath def nCr(n, r): """Return the number of ways to choose r elements of a set of n.""" return lcmath.exp( lcmath.lfactorial(n) - lcmath.lfactorial(r) - lcmath.lfactorial(n-r) ) if __name__ == "__main__": import sys if len(sys.argv) != 3: sys.stderr.write("USAGE: %s n r\nPrints n-choose-r.\n" % sys.argv[0]) sys.exit(2) n, r = map(float, sys.argv[1:]) print(nCr(n, r)) Cython-0.26.1/Demos/freeze/README.txt0000664000175000017500000000677712542002467017700 0ustar stefanstefan00000000000000NAME ==== cython_freeze - create a C file for embedding Cython modules SYNOPSIS ======== cython_freeze [-o outfile] [-p] module [...] DESCRIPTION =========== **cython_freeze** generates a C source file to embed a Python interpreter with one or more Cython modules built in. This allows one to create a single executable from Cython code, without having to have separate shared objects for each Cython module. A major advantage of this approach is that it allows debuging with gprof(1), which does not work with shared objects. Unless ``-p`` is given, the first module's ``__name__`` is set to ``"__main__"`` and is imported on startup; if ``-p`` is given, a normal Python interpreter is built, with the given modules built into the binary. Note that this method differs from ``cython --embed``. The ``--embed`` options modifies the resulting C source file to include a ``main()`` function, so it can only be used on a single Cython module. The advantage ``--embed`` is simplicity. This module, on the other hand, can be used with multiple modules, but it requires another C source file to be created. OPTIONS ======= -o FILE, --outfile=FILE write output to FILE instead of standard output -p, --pymain do not automatically run the first module as __main__ EXAMPLE ======= In the Demos/freeze directory, there exist two Cython modules: lcmath.pyx A module that interfaces with the -lm library. combinatorics.pyx A module that implements n-choose-r using lcmath. Both modules have the Python idiom ``if __name__ == "__main__"``, which only execute if that module is the "main" module. If run as main, lcmath prints the factorial of the argument, while combinatorics prints n-choose-r. The provided Makefile creates an executable, *nCr*, using combinatorics as the "main" module. It basically performs the following (ignoring the compiler flags):: $ cython_freeze combinatorics lcmath > nCr.c $ cython combinatorics.pyx $ cython lcmath.pyx $ gcc -c nCr.c $ gcc -c combinatorics.c $ gcc -c lcmath.c $ gcc nCr.o combinatorics.o lcmath.o -o nCr Because the combinatorics module was listed first, its ``__name__`` is set to ``"__main__"``, while lcmath's is set to ``"lcmath"``. The executable now contains a Python interpreter and both Cython modules. :: $ ./nCr USAGE: ./nCr n r Prints n-choose-r. 
$ ./nCr 15812351235 12 5.10028093999e+113 You may wish to build a normal Python interpreter, rather than having one module as "main". This may happen if you want to use your module from an interactive shell or from another script, yet you still want it statically linked so you can profile it with gprof. To do this, add the ``--pymain`` flag to ``cython_freeze``. In the Makefile, the *python* executable is built like this. :: $ cython_freeze --pymain combinatorics lcmath -o python.c $ gcc -c python.c $ gcc python.o combinatorics.o lcmath.o -o python Now ``python`` is a normal Python interpreter, but the lcmath and combinatorics modules will be built into the executable. :: $ ./python Python 2.6.2 (release26-maint, Apr 19 2009, 01:58:18) [GCC 4.3.3] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import lcmath >>> lcmath.factorial(155) 4.7891429014634364e+273 PREREQUISITES ============= Cython 0.11.2 (or newer, assuming the API does not change) SEE ALSO ======== * `Python `_ * `Cython `_ * `freeze.py `_ Cython-0.26.1/Demos/freeze/lcmath.pyx0000664000175000017500000000113712574327400020177 0ustar stefanstefan00000000000000# cython: language_level=3 cdef extern from "math.h": double c_lgamma "lgamma" (double) double c_exp "exp" (double) def exp(n): """Return e**n.""" return c_exp(n) def lfactorial(n): """Return an estimate of the log factorial of n.""" return c_lgamma(n+1) def factorial(n): """Return an estimate of the factorial of n.""" return c_exp( c_lgamma(n+1) ) if __name__ == "__main__": import sys if len(sys.argv) != 2: sys.stderr.write("USAGE: %s n\nPrints n!.\n" % sys.argv[0]) sys.exit(2) n, = map(float, sys.argv[1:]) print(factorial(n)) Cython-0.26.1/Demos/primes.pyx0000664000175000017500000000062212574327400016744 0ustar stefanstefan00000000000000# cython: language_level=3 print("starting") def primes(int kmax): # cdef int n, k, i cdef int p[1000] result = [] if kmax > 1000: kmax = 1000 k = 0 n = 2 while k < kmax: i = 0 while i < k and n % p[i] != 0: i += 1 if i == k: p[k] = n k += 1 result.append(n) n += 1 return result Cython-0.26.1/Demos/setup.py0000664000175000017500000000122412542002467016412 0ustar stefanstefan00000000000000# Run as: # python setup.py build_ext --inplace import sys sys.path.insert(0, "..") from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize ext_modules = cythonize("**/*.pyx", exclude="numpy_*.pyx") # Only compile the following if numpy is installed. try: from numpy.distutils.misc_util import get_numpy_include_dirs numpy_demo = [Extension("*", ["numpy_*.pyx"], include_dirs=get_numpy_include_dirs())] ext_modules.extend(cythonize(numpy_demo)) except ImportError: pass setup( name = 'Demos', ext_modules = ext_modules, ) Cython-0.26.1/Demos/integrate1.pyx0000664000175000017500000000026112574327400017507 0ustar stefanstefan00000000000000# cython: language_level=3 def f(x): return x**2-x def integrate_f(a, b, N): s = 0.0 dx = (b-a)/N for i in range(N): s += f(a+i*dx) return s * dx Cython-0.26.1/Demos/embed/0000775000175000017500000000000013151203436015751 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/embed/embedded.output0000664000175000017500000000003312542002467020764 0ustar stefanstefan00000000000000__main__ Hi, I'm embedded. 
Cython-0.26.1/Demos/embed/Makefile0000664000175000017500000000341412542002467017417 0ustar stefanstefan00000000000000# Makefile for creating our standalone Cython program PYTHON := python PYVERSION := $(shell $(PYTHON) -c "import sys; print(sys.version[:3])") INCDIR := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_python_inc())") PLATINCDIR := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_python_inc(plat_specific=True))") LIBDIR1 := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBDIR'))") LIBDIR2 := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))") PYLIB := $(shell $(PYTHON) -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBRARY')[3:-2])") CC := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('CC'))") LINKCC := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('LINKCC'))") LINKFORSHARED := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('LINKFORSHARED'))") LIBS := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('LIBS'))") SYSLIBS := $(shell $(PYTHON) -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_var('SYSLIBS'))") embedded: embedded.o $(LINKCC) -o $@ $^ -L$(LIBDIR1) -L$(LIBDIR2) -l$(PYLIB) $(LIBS) $(SYSLIBS) $(LINKFORSHARED) embedded.o: embedded.c $(CC) -c $^ -I$(INCDIR) -I$(PLATINCDIR) CYTHON := ../../cython.py embedded.c: embedded.pyx @$(PYTHON) $(CYTHON) --embed embedded.pyx all: embedded clean: @echo Cleaning Demos/embed @rm -f *~ *.o *.so core core.* *.c embedded test.output test: clean all LD_LIBRARY_PATH=$(LIBDIR1):$$LD_LIBRARY_PATH ./embedded > test.output $(PYTHON) assert_equal.py embedded.output test.output Cython-0.26.1/Demos/embed/Makefile.unix0000664000175000017500000000063012542002467020376 0ustar stefanstefan00000000000000# Makefile for creating our standalone Cython program PYVERSION=2.3 PYPREFIX=/usr INCLUDES=-I$(PYPREFIX)/include/python$(PYVERSION) embedded: embedded.o gcc -o $@ $^ -lpython$(PYVERSION) embedded.o: embedded.c gcc -c $^ $(INCLUDES) embedded.c: embedded.pyx @python ../../cython.py --embed embedded.pyx all: embedded clean: @echo Cleaning Demos/embed @rm -f *~ *.o *.so core core.* *.c embedded Cython-0.26.1/Demos/embed/Makefile.msc.static0000664000175000017500000000076412542002467021473 0ustar stefanstefan00000000000000# Makefile for Microsoft compiler statically linking PYVERSION = 2.2 PYHOME = \Python$(PYVERSION:.=) PYINCLUDE = -I$(PYHOME)\include PYLIB = /LIBPATH:$(PYHOME)\libs python22.lib CFLAGS = $(PYINCLUDE) /Ox /W3 /GX -nologo .SUFFIXES: .exe .dll .obj .c .cpp .pyx .pyx.c: $(PYHOME)\Python.exe ../../cython.py $< all: main.exe clean: -del /Q/F *.obj embedded.h embedded.c main.exe main.exe: main.obj embedded.obj link /nologo $** $(PYLIB) /OUT:main.exe embedded.h: embedded.c main.obj: embedded.h Cython-0.26.1/Demos/embed/Makefile.msc0000664000175000017500000000175512542002467020206 0ustar stefanstefan00000000000000# Makefile for Microsoft C Compiler, building a DLL PYVERSION = 2.2 PYHOME = \Python$(PYVERSION:.=) PYINCLUDE = -I$(PYHOME)\include PYLIB = /LIBPATH:$(PYHOME)\libs CFLAGS = $(PYINCLUDE) /Ox /W3 /GX -nologo .SUFFIXES: .exe .dll .obj .c .cpp .pyx .pyx.c: $(PYHOME)\Python.exe ../../cython.py $< all: main.exe clean: del /Q/F *.obj embedded.h embedded.c main.exe embedded.dll embedded.lib 
embedded.exp # When linking the DLL we must explicitly list all of the exports # There doesn't seem to be an easy way to get DL_EXPORT to have the correct definition # to do the export for us without breaking the importing of symbols from the core # python library. embedded.dll: embedded.obj link /nologo /DLL /INCREMENTAL:NO $(PYLIB) $** /IMPLIB:$*.lib /DEF:<< /OUT:$*.dll EXPORTS initembedded EXPORTS spam << main.exe: main.obj embedded.lib link /nologo $** $(PYLIB) /OUT:main.exe embedded.h: embedded.c main.obj: embedded.h embedded.obj: embedded.c $(CC) /MD $(CFLAGS) -c $** embedded.lib: embedded.dll Cython-0.26.1/Demos/embed/README0000664000175000017500000000031412542002467016633 0ustar stefanstefan00000000000000This example demonstrates how Cython-generated code can be called directly from a main program written in C. The Windows makefiles were contributed by Duncan Booth . Cython-0.26.1/Demos/embed/assert_equal.py0000664000175000017500000000043212574327400021020 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import sys f1 = open(sys.argv[1]) f2 = open(sys.argv[2]) try: if f1.read() != f2.read(): print("Files differ") sys.exit(1) else: print("Files identical") finally: f1.close() f2.close() Cython-0.26.1/Demos/embed/embedded.pyx0000664000175000017500000000021612574327400020251 0ustar stefanstefan00000000000000# cython: language_level=3 print(__name__) if __name__ == "__main__": print("Hi, I'm embedded.") else: print("I'm being imported.") Cython-0.26.1/Demos/Makefile0000664000175000017500000000051613023021033016324 0ustar stefanstefan00000000000000all: python setup.py build_ext --inplace test: all python run_primes.py 20 python run_numeric_demo.py python run_spam.py python integrate_timing.py cd callback; $(MAKE) test cd embed; $(MAKE) test clean: @echo Cleaning Demos @rm -f *.c *.o *.so *~ core @rm -rf build @cd callback; $(MAKE) clean @cd embed; $(MAKE) clean Cython-0.26.1/Demos/benchmarks/0000775000175000017500000000000013151203436017012 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/benchmarks/nqueens.py0000664000175000017500000000457612542002467021062 0ustar stefanstefan00000000000000#!/usr/bin/env python """Simple, brute-force N-Queens solver.""" __author__ = "collinwinter@google.com (Collin Winter)" # Python imports import optparse import re import string from time import time # Local imports import util import cython try: from builtins import range as _xrange except ImportError: from __builtin__ import xrange as _xrange # Pure-Python implementation of itertools.permutations(). @cython.locals(n=int, i=int, j=int) def permutations(iterable): """permutations(range(3), 2) --> (0,1) (0,2) (1,0) (1,2) (2,0) (2,1)""" pool = tuple(iterable) n = len(pool) indices = list(range(n)) cycles = list(range(1, n+1))[::-1] yield [ pool[i] for i in indices ] while n: for i in reversed(range(n)): j = cycles[i] - 1 if j == 0: indices[i:] = indices[i+1:] + indices[i:i+1] cycles[i] = n - i else: cycles[i] = j indices[i], indices[-j] = indices[-j], indices[i] yield [ pool[i] for i in indices ] break else: return # From http://code.activestate.com/recipes/576647/ @cython.locals(queen_count=int, i=int, vec=list) def n_queens(queen_count): """N-Queens solver. Args: queen_count: the number of queens to solve for. This is also the board size. Yields: Solutions to the problem. Each yielded value is looks like (3, 8, 2, 1, 4, ..., 6) where each number is the column position for the queen, and the index into the tuple indicates the row. 
""" cols = list(range(queen_count)) for vec in permutations(cols): if (queen_count == len({ vec[i]+i for i in cols }) == len({ vec[i]-i for i in cols })): yield vec def test_n_queens(iterations): # Warm-up runs. list(n_queens(8)) list(n_queens(8)) times = [] for _ in _xrange(iterations): t0 = time() list(n_queens(8)) t1 = time() times.append(t1 - t0) return times main = test_n_queens if __name__ == "__main__": parser = optparse.OptionParser( usage="%prog [options]", description=("Test the performance of an N-Queens solvers.")) util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, test_n_queens) Cython-0.26.1/Demos/benchmarks/util.py0000664000175000017500000000351412542002467020350 0ustar stefanstefan00000000000000#!/usr/bin/env python """Utility code for benchmark scripts.""" __author__ = "collinwinter@google.com (Collin Winter)" import math import operator try: reduce except NameError: from functools import reduce def run_benchmark(options, num_runs, bench_func, *args): """Run the given benchmark, print results to stdout. Args: options: optparse.Values instance. num_runs: number of times to run the benchmark bench_func: benchmark function. `num_runs, *args` will be passed to this function. This should return a list of floats (benchmark execution times). """ if options.profile: import cProfile prof = cProfile.Profile() prof.runcall(bench_func, num_runs, *args) prof.print_stats(sort=options.profile_sort) else: data = bench_func(num_runs, *args) if options.take_geo_mean: product = reduce(operator.mul, data, 1) print(math.pow(product, 1.0 / len(data))) else: for x in data: print(x) def add_standard_options_to(parser): """Add a bunch of common command-line flags to an existing OptionParser. This function operates on `parser` in-place. Args: parser: optparse.OptionParser instance. 
""" parser.add_option("-n", action="store", type="int", default=100, dest="num_runs", help="Number of times to run the test.") parser.add_option("--profile", action="store_true", help="Run the benchmark through cProfile.") parser.add_option("--profile_sort", action="store", type="str", default="time", help="Column to sort cProfile output by.") parser.add_option("--take_geo_mean", action="store_true", help="Return the geo mean, rather than individual data.") Cython-0.26.1/Demos/benchmarks/chaos.pxd0000664000175000017500000000150013023021033020605 0ustar stefanstefan00000000000000 cimport cython cdef extern from "math.h": cpdef double sqrt(double x) @cython.final cdef class GVector: cdef public double x, y, z cpdef double Mag(self) cpdef double dist(self, GVector other) cpdef list GetKnots(list points, long degree) @cython.final cdef class Spline: cdef list knots cdef list points cdef long degree cpdef (long, long) GetDomain(self) cpdef long GetIndex(self, u) @cython.final cdef class Chaosgame: cdef list splines cdef double thickness cdef double minx, miny, maxx, maxy, height, width cdef list num_trafos cdef double num_total cpdef tuple get_random_trafo(self) cpdef GVector transform_point(self, GVector point, trafo=*) cpdef truncate(self, GVector point) cpdef create_image_chaos(self, timer, long w, long h, long n) Cython-0.26.1/Demos/benchmarks/setup.py0000664000175000017500000000051713023021033020514 0ustar stefanstefan00000000000000from distutils.core import setup from Cython.Build import cythonize directives = { 'optimize.inline_defnode_calls': True } setup( name = 'benchmarks', ext_modules = cythonize("*.py", language_level=3, annotate=True, compiler_directives=directives, exclude=["setup.py"]), ) Cython-0.26.1/Demos/benchmarks/meteor_contest.py0000664000175000017500000001044113143605603022421 0ustar stefanstefan00000000000000# The Computer Language Benchmarks Game # http://shootout.alioth.debian.org/ # # contributed by Daniel Nanz, 2008-08-21 import optparse import time from bisect import bisect import util w, h = 5, 10 dir_no = 6 S, E = w * h, 2 SE = S + (E / 2) SW = SE - E W, NW, NE = -E, -SE, -SW def rotate(ido, rd={E: NE, NE: NW, NW: W, W: SW, SW: SE, SE: E}): return [rd[o] for o in ido] def flip(ido, fd={E: E, NE: SE, NW: SW, W: W, SW: NW, SE: NE}): return [fd[o] for o in ido] def permute(ido, r_ido): ps = [ido] for r in range(dir_no - 1): ps.append(rotate(ps[-1])) if ido == r_ido: # C2-symmetry ps = ps[0:dir_no//2] for pp in ps[:]: ps.append(flip(pp)) return ps def convert(ido): '''incremental direction offsets -> "coordinate offsets" ''' out = [0] for o in ido: out.append(out[-1] + o) return list(set(out)) def get_footprints(board, cti, pieces): fps = [[[] for p in range(len(pieces))] for ci in range(len(board))] for c in board: for pi, p in enumerate(pieces): for pp in p: fp = frozenset([cti[c + o] for o in pp if (c + o) in cti]) if len(fp) == 5: fps[min(fp)][pi].append(fp) return fps def get_senh(board, cti): '''-> south-east neighborhood''' se_nh = [] nh = [E, SW, SE] for c in board: se_nh.append(frozenset([cti[c + o] for o in nh if (c + o) in cti])) return se_nh def get_puzzle(w=w, h=h): board = [E*x + S*y + (y%2) for y in range(h) for x in range(w)] cti = dict((board[i], i) for i in range(len(board))) idos = [[E, E, E, SE], # incremental direction offsets [SE, SW, W, SW], [W, W, SW, SE], [E, E, SW, SE], [NW, W, NW, SE, SW], [E, E, NE, W], [NW, NE, NE, W], [NE, SE, E, NE], [SE, SE, E, SE], [E, NW, NW, NW]] perms = (permute(p, idos[3]) for p in idos) # restrict piece 4 pieces 
= [[convert(pp) for pp in p] for p in perms] return (board, cti, pieces) def print_board(board, w=w, h=h): for y in range(h): for x in range(w): print(board[x + y * w]) print('') if y % 2 == 0: print('') print() board, cti, pieces = get_puzzle() fps = get_footprints(board, cti, pieces) se_nh = get_senh(board, cti) def solve(n, i_min, free, curr_board, pieces_left, solutions, fps=fps, se_nh=se_nh, bisect=bisect): fp_i_cands = fps[i_min] for p in pieces_left: fp_cands = fp_i_cands[p] for fp in fp_cands: if fp <= free: n_curr_board = curr_board[:] for ci in fp: n_curr_board[ci] = p if len(pieces_left) > 1: n_free = free - fp n_i_min = min(n_free) if len(n_free & se_nh[n_i_min]) > 0: n_pieces_left = pieces_left[:] n_pieces_left.remove(p) solve(n, n_i_min, n_free, n_curr_board, n_pieces_left, solutions) else: s = ''.join(map(str, n_curr_board)) solutions.insert(bisect(solutions, s), s) rs = s[::-1] solutions.insert(bisect(solutions, rs), rs) if len(solutions) >= n: return if len(solutions) >= n: return return SOLVE_ARG = 60 def main(n): times = [] for i in range(n): t0 = time.time() free = frozenset(range(len(board))) curr_board = [-1] * len(board) pieces_left = list(range(len(pieces))) solutions = [] solve(SOLVE_ARG, 0, free, curr_board, pieces_left, solutions) #print len(solutions), 'solutions found\n' #for i in (0, -1): print_board(solutions[i]) tk = time.time() times.append(tk - t0) return times if __name__ == "__main__": parser = optparse.OptionParser( usage="%prog [options]", description="Test the performance of the Float benchmark") util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, main) Cython-0.26.1/Demos/benchmarks/nbody.py0000664000175000017500000001071512542002467020507 0ustar stefanstefan00000000000000#!/usr/bin/env python """N-body benchmark from the Computer Language Benchmarks Game. This is intended to support Unladen Swallow's perf.py. Accordingly, it has been modified from the Shootout version: - Accept standard Unladen Swallow benchmark options. - Run report_energy()/advance() in a loop. - Reimplement itertools.combinations() to work with older Python versions. """ # Pulled from http://shootout.alioth.debian.org/u64q/benchmark.php?test=nbody&lang=python&id=4 # Contributed by Kevin Carson. # Modified by Tupteq, Fredrik Johansson, and Daniel Nanz. 
__contact__ = "collinwinter@google.com (Collin Winter)" # Python imports import optparse import sys from time import time # Local imports import util def combinations(l): """Pure-Python implementation of itertools.combinations(l, 2).""" result = [] for x in range(len(l) - 1): ls = l[x+1:] for y in ls: result.append((l[x],y)) return result PI = 3.14159265358979323 SOLAR_MASS = 4 * PI * PI DAYS_PER_YEAR = 365.24 BODIES = { 'sun': ([0.0, 0.0, 0.0], [0.0, 0.0, 0.0], SOLAR_MASS), 'jupiter': ([4.84143144246472090e+00, -1.16032004402742839e+00, -1.03622044471123109e-01], [1.66007664274403694e-03 * DAYS_PER_YEAR, 7.69901118419740425e-03 * DAYS_PER_YEAR, -6.90460016972063023e-05 * DAYS_PER_YEAR], 9.54791938424326609e-04 * SOLAR_MASS), 'saturn': ([8.34336671824457987e+00, 4.12479856412430479e+00, -4.03523417114321381e-01], [-2.76742510726862411e-03 * DAYS_PER_YEAR, 4.99852801234917238e-03 * DAYS_PER_YEAR, 2.30417297573763929e-05 * DAYS_PER_YEAR], 2.85885980666130812e-04 * SOLAR_MASS), 'uranus': ([1.28943695621391310e+01, -1.51111514016986312e+01, -2.23307578892655734e-01], [2.96460137564761618e-03 * DAYS_PER_YEAR, 2.37847173959480950e-03 * DAYS_PER_YEAR, -2.96589568540237556e-05 * DAYS_PER_YEAR], 4.36624404335156298e-05 * SOLAR_MASS), 'neptune': ([1.53796971148509165e+01, -2.59193146099879641e+01, 1.79258772950371181e-01], [2.68067772490389322e-03 * DAYS_PER_YEAR, 1.62824170038242295e-03 * DAYS_PER_YEAR, -9.51592254519715870e-05 * DAYS_PER_YEAR], 5.15138902046611451e-05 * SOLAR_MASS) } SYSTEM = list(BODIES.values()) PAIRS = combinations(SYSTEM) def advance(dt, n, bodies=SYSTEM, pairs=PAIRS): for i in range(n): for (([x1, y1, z1], v1, m1), ([x2, y2, z2], v2, m2)) in pairs: dx = x1 - x2 dy = y1 - y2 dz = z1 - z2 mag = dt * ((dx * dx + dy * dy + dz * dz) ** (-1.5)) b1m = m1 * mag b2m = m2 * mag v1[0] -= dx * b2m v1[1] -= dy * b2m v1[2] -= dz * b2m v2[0] += dx * b1m v2[1] += dy * b1m v2[2] += dz * b1m for (r, [vx, vy, vz], m) in bodies: r[0] += dt * vx r[1] += dt * vy r[2] += dt * vz def report_energy(bodies=SYSTEM, pairs=PAIRS, e=0.0): for (((x1, y1, z1), v1, m1), ((x2, y2, z2), v2, m2)) in pairs: dx = x1 - x2 dy = y1 - y2 dz = z1 - z2 e -= (m1 * m2) / ((dx * dx + dy * dy + dz * dz) ** 0.5) for (r, [vx, vy, vz], m) in bodies: e += m * (vx * vx + vy * vy + vz * vz) / 2. return e def offset_momentum(ref, bodies=SYSTEM, px=0.0, py=0.0, pz=0.0): for (r, [vx, vy, vz], m) in bodies: px -= vx * m py -= vy * m pz -= vz * m (r, v, m) = ref v[0] = px / m v[1] = py / m v[2] = pz / m def test_nbody(iterations): # Warm-up runs. report_energy() advance(0.01, 20000) report_energy() times = [] for _ in range(iterations): t0 = time() report_energy() advance(0.01, 20000) report_energy() t1 = time() times.append(t1 - t0) return times main = test_nbody if __name__ == '__main__': parser = optparse.OptionParser( usage="%prog [options]", description=("Run the n-body benchmark.")) util.add_standard_options_to(parser) options, args = parser.parse_args() offset_momentum(BODIES['sun']) # Set up global state util.run_benchmark(options, options.num_runs, test_nbody) Cython-0.26.1/Demos/benchmarks/hexiom2.py0000664000175000017500000004136013143605603020746 0ustar stefanstefan00000000000000"""Benchmark from Laurent Vaucher. Source: https://github.com/slowfrog/hexiom : hexiom2.py, level36.txt (Main function tweaked by Armin Rigo.) 
""" from __future__ import division, print_function import time from io import StringIO import cython ################################## class Dir(object): def __init__(self, x, y): self.x = x self.y = y DIRS = [ Dir(1, 0), Dir(-1, 0), Dir(0, 1), Dir(0, -1), Dir(1, 1), Dir(-1, -1) ] EMPTY = 7 ################################## class Done(object): MIN_CHOICE_STRATEGY = 0 MAX_CHOICE_STRATEGY = 1 HIGHEST_VALUE_STRATEGY = 2 FIRST_STRATEGY = 3 MAX_NEIGHBORS_STRATEGY = 4 MIN_NEIGHBORS_STRATEGY = 5 def __init__(self, count, empty=False): self.count = count self.cells = None if empty else [[0, 1, 2, 3, 4, 5, 6, EMPTY] for i in range(count)] def clone(self): ret = Done(self.count, True) ret.cells = [self.cells[i][:] for i in range(self.count)] return ret def __getitem__(self, i): return self.cells[i] def set_done(self, i, v): self.cells[i] = [v] def already_done(self, i): return len(self.cells[i]) == 1 def remove(self, i, v): if v in self.cells[i]: self.cells[i].remove(v) return True else: return False def remove_all(self, v): for i in range(self.count): self.remove(i, v) def remove_unfixed(self, v): changed = False for i in range(self.count): if not self.already_done(i): if self.remove(i, v): changed = True return changed def filter_tiles(self, tiles): for v in range(8): if tiles[v] == 0: self.remove_all(v) @cython.locals(i=cython.int) def next_cell_min_choice(self): minlen = 10 mini = -1 for i in range(self.count): if 1 < len(self.cells[i]) < minlen: minlen = len(self.cells[i]) mini = i return mini @cython.locals(i=cython.int) def next_cell_max_choice(self): maxlen = 1 maxi = -1 for i in range(self.count): if maxlen < len(self.cells[i]): maxlen = len(self.cells[i]) maxi = i return maxi @cython.locals(i=cython.int) def next_cell_highest_value(self): maxval = -1 maxi = -1 for i in range(self.count): if (not self.already_done(i)): maxvali = max([k for k in self.cells[i] if k != EMPTY]) if maxval < maxvali: maxval = maxvali maxi = i return maxi @cython.locals(i=cython.int) def next_cell_first(self): for i in range(self.count): if (not self.already_done(i)): return i return -1 @cython.locals(i=cython.int) def next_cell_max_neighbors(self, pos): maxn = -1 maxi = -1 for i in range(self.count): if not self.already_done(i): cells_around = pos.hex.get_by_id(i).links n = sum([1 if (self.already_done(nid) and (self[nid][0] != EMPTY)) else 0 for nid in cells_around]) if n > maxn: maxn = n maxi = i return maxi @cython.locals(i=cython.int) def next_cell_min_neighbors(self, pos): minn = 7 mini = -1 for i in range(self.count): if not self.already_done(i): cells_around = pos.hex.get_by_id(i).links n = sum([1 if (self.already_done(nid) and (self[nid][0] != EMPTY)) else 0 for nid in cells_around]) if n < minn: minn = n mini = i return mini def next_cell(self, pos, strategy=HIGHEST_VALUE_STRATEGY): if strategy == Done.HIGHEST_VALUE_STRATEGY: return self.next_cell_highest_value() elif strategy == Done.MIN_CHOICE_STRATEGY: return self.next_cell_min_choice() elif strategy == Done.MAX_CHOICE_STRATEGY: return self.next_cell_max_choice() elif strategy == Done.FIRST_STRATEGY: return self.next_cell_first() elif strategy == Done.MAX_NEIGHBORS_STRATEGY: return self.next_cell_max_neighbors(pos) elif strategy == Done.MIN_NEIGHBORS_STRATEGY: return self.next_cell_min_neighbors(pos) else: raise Exception("Wrong strategy: %d" % strategy) ################################## class Node(object): def __init__(self, pos, id, links): self.pos = pos self.id = id self.links = links ################################## class Hex(object): 
@cython.locals(size=cython.int, id=cython.int, x=cython.int, y=cython.int) def __init__(self, size): self.size = size self.count = 3 * size * (size - 1) + 1 self.nodes_by_id = self.count * [None] self.nodes_by_pos = {} id = 0 for y in range(size): for x in range(size + y): pos = (x, y) node = Node(pos, id, []) self.nodes_by_pos[pos] = node self.nodes_by_id[node.id] = node id += 1 for y in range(1, size): for x in range(y, size * 2 - 1): ry = size + y - 1 pos = (x, ry) node = Node(pos, id, []) self.nodes_by_pos[pos] = node self.nodes_by_id[node.id] = node id += 1 @cython.locals(dir=Dir, x=cython.int, y=cython.int, nx=cython.int, ny=cython.int, node=Node) def link_nodes(self): for node in self.nodes_by_id: (x, y) = node.pos for dir in DIRS: nx = x + dir.x ny = y + dir.y if self.contains_pos((nx, ny)): node.links.append(self.nodes_by_pos[(nx, ny)].id) def contains_pos(self, pos): return pos in self.nodes_by_pos def get_by_pos(self, pos): return self.nodes_by_pos[pos] def get_by_id(self, id): return self.nodes_by_id[id] ################################## class Pos(object): def __init__(self, hex, tiles, done = None): self.hex = hex self.tiles = tiles self.done = Done(hex.count) if done is None else done def clone(self): return Pos(self.hex, self.tiles, self.done.clone()) ################################## @cython.locals(pos=Pos, i=cython.long, v=cython.int, nid=cython.int, num=cython.int, empties=cython.int, filled=cython.int, vmax=cython.int, vmin=cython.int, cell=list, left=cython.int[8]) def constraint_pass(pos, last_move=None): changed = False left = pos.tiles[:] done = pos.done # Remove impossible values from free cells free_cells = (range(done.count) if last_move is None else pos.hex.get_by_id(last_move).links) for i in free_cells: if not done.already_done(i): vmax = 0 vmin = 0 cells_around = pos.hex.get_by_id(i).links for nid in cells_around: if done.already_done(nid): if done[nid][0] != EMPTY: vmin += 1 vmax += 1 else: vmax += 1 for num in range(7): if (num < vmin) or (num > vmax): if done.remove(i, num): changed = True # Computes how many of each value is still free for cell in done.cells: if len(cell) == 1: left[cell[0]] -= 1 for v in range(8): # If there is none, remove the possibility from all tiles if (pos.tiles[v] > 0) and (left[v] == 0): if done.remove_unfixed(v): changed = True else: possible = sum([(1 if v in cell else 0) for cell in done.cells]) # If the number of possible cells for a value is exactly the number of available tiles # put a tile in each cell if pos.tiles[v] == possible: for i in range(done.count): cell = done.cells[i] if (not done.already_done(i)) and (v in cell): done.set_done(i, v) changed = True # Force empty or non-empty around filled cells filled_cells = (range(done.count) if last_move is None else [last_move]) for i in filled_cells: if done.already_done(i): num = done[i][0] empties = 0 filled = 0 unknown = [] cells_around = pos.hex.get_by_id(i).links for nid in cells_around: if done.already_done(nid): if done[nid][0] == EMPTY: empties += 1 else: filled += 1 else: unknown.append(nid) if len(unknown) > 0: if num == filled: for u in unknown: if EMPTY in done[u]: done.set_done(u, EMPTY) changed = True #else: # raise Exception("Houston, we've got a problem") elif num == filled + len(unknown): for u in unknown: if done.remove(u, EMPTY): changed = True return changed ASCENDING = 1 DESCENDING = -1 def find_moves(pos, strategy, order): done = pos.done cell_id = done.next_cell(pos, strategy) if cell_id < 0: return [] if order == ASCENDING: return [(cell_id, v) for 
v in done[cell_id]] else: # Try higher values first and EMPTY last moves = list(reversed([(cell_id, v) for v in done[cell_id] if v != EMPTY])) if EMPTY in done[cell_id]: moves.append((cell_id, EMPTY)) return moves def play_move(pos, move): (cell_id, i) = move pos.done.set_done(cell_id, i) @cython.locals(x=cython.int, y=cython.int, ry=cython.int, id=cython.int) def print_pos(pos, output): hex = pos.hex done = pos.done size = hex.size for y in range(size): print(u" " * (size - y - 1), end=u"", file=output) for x in range(size + y): pos2 = (x, y) id = hex.get_by_pos(pos2).id if done.already_done(id): c = str(done[id][0]) if done[id][0] != EMPTY else u"." else: c = u"?" print(u"%s " % c, end=u"", file=output) print(end=u"\n", file=output) for y in range(1, size): print(u" " * y, end=u"", file=output) for x in range(y, size * 2 - 1): ry = size + y - 1 pos2 = (x, ry) id = hex.get_by_pos(pos2).id if done.already_done(id): c = str(done[id][0]) if done[id][0] != EMPTY else (u".") else: c = u"?" print(u"%s " % c, end=u"", file=output) print(end=u"\n", file=output) OPEN = 0 SOLVED = 1 IMPOSSIBLE = -1 @cython.locals(i=cython.int, num=cython.int, nid=cython.int, vmin=cython.int, vmax=cython.int, tiles=cython.int[8]) def solved(pos, output, verbose=False): hex = pos.hex tiles = pos.tiles[:] done = pos.done exact = True all_done = True for i in range(hex.count): if len(done[i]) == 0: return IMPOSSIBLE elif done.already_done(i): num = done[i][0] tiles[num] -= 1 if (tiles[num] < 0): return IMPOSSIBLE vmax = 0 vmin = 0 if num != EMPTY: cells_around = hex.get_by_id(i).links for nid in cells_around: if done.already_done(nid): if done[nid][0] != EMPTY: vmin += 1 vmax += 1 else: vmax += 1 if (num < vmin) or (num > vmax): return IMPOSSIBLE if num != vmin: exact = False else: all_done = False if (not all_done) or (not exact): return OPEN print_pos(pos, output) return SOLVED @cython.locals(move=tuple) def solve_step(prev, strategy, order, output, first=False): if first: pos = prev.clone() while constraint_pass(pos): pass else: pos = prev moves = find_moves(pos, strategy, order) if len(moves) == 0: return solved(pos, output) else: for move in moves: #print("Trying (%d, %d)" % (move[0], move[1])) ret = OPEN new_pos = pos.clone() play_move(new_pos, move) #print_pos(new_pos) while constraint_pass(new_pos, move[0]): pass cur_status = solved(new_pos, output) if cur_status != OPEN: ret = cur_status else: ret = solve_step(new_pos, strategy, order, output) if ret == SOLVED: return SOLVED return IMPOSSIBLE @cython.locals(tot=cython.int, tiles=cython.int[8]) def check_valid(pos): hex = pos.hex tiles = pos.tiles done = pos.done # fill missing entries in tiles tot = 0 for i in range(8): if tiles[i] > 0: tot += tiles[i] else: tiles[i] = 0 # check total if tot != hex.count: raise Exception("Invalid input. Expected %d tiles, got %d." 
% (hex.count, tot)) def solve(pos, strategy, order, output): check_valid(pos) return solve_step(pos, strategy, order, output, first=True) # TODO Write an 'iterator' to go over all x,y positions @cython.locals(x=cython.int, y=cython.int, p=cython.int, tiles=cython.int[8], size=cython.int, inctile=cython.int, linei=cython.int) def read_file(file): lines = [line.strip("\r\n") for line in file.splitlines()] size = int(lines[0]) hex = Hex(size) linei = 1 tiles = 8 * [0] done = Done(hex.count) for y in range(size): line = lines[linei][size - y - 1:] p = 0 for x in range(size + y): tile = line[p:p + 2] p += 2 if tile[1] == ".": inctile = EMPTY else: inctile = int(tile) tiles[inctile] += 1 # Look for locked tiles if tile[0] == "+": print("Adding locked tile: %d at pos %d, %d, id=%d" % (inctile, x, y, hex.get_by_pos((x, y)).id)) done.set_done(hex.get_by_pos((x, y)).id, inctile) linei += 1 for y in range(1, size): ry = size - 1 + y line = lines[linei][y:] p = 0 for x in range(y, size * 2 - 1): tile = line[p:p + 2] p += 2 if tile[1] == ".": inctile = EMPTY else: inctile = int(tile) tiles[inctile] += 1 # Look for locked tiles if tile[0] == "+": print("Adding locked tile: %d at pos %d, %d, id=%d" % (inctile, x, ry, hex.get_by_pos((x, ry)).id)) done.set_done(hex.get_by_pos((x, ry)).id, inctile) linei += 1 hex.link_nodes() done.filter_tiles(tiles) return Pos(hex, tiles, done) def solve_file(file, strategy, order, output): pos = read_file(file) solve(pos, strategy, order, output) def run_level36(): f = """\ 4 2 1 1 2 3 3 3 . . 2 3 3 . 4 . . 2 . 2 4 3 2 2 2 . . . 2 4 3 4 . . 3 2 3 3 """ order = DESCENDING strategy = Done.FIRST_STRATEGY output = StringIO() solve_file(f, strategy, order, output) expected = """\ 3 4 3 2 3 4 4 . 3 2 . . 3 4 3 2 . 1 . 3 . 2 3 3 . 2 . 2 3 . 2 . 2 2 2 . 1 """ if output.getvalue() != expected: raise AssertionError("got a wrong answer:\n%s" % output.getvalue()) def main(n): # only run 1/25th of the requested number of iterations. # with the default n=50 from runner.py, this means twice. 
l = [] for i in range(n): t0 = time.time() run_level36() time_elapsed = time.time() - t0 l.append(time_elapsed) return l if __name__ == "__main__": import util, optparse parser = optparse.OptionParser( usage="%prog [options]", description="Test the performance of the hexiom2 benchmark") util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, main) Cython-0.26.1/Demos/benchmarks/nbody.pxd0000664000175000017500000000126712542002467020654 0ustar stefanstefan00000000000000 cimport cython @cython.locals(x=Py_ssize_t) cdef combinations(list l) @cython.locals(x1=double, x2=double, y1=double, y2=double, z1=double, z2=double, m1=double, m2=double, vx=double, vy=double, vz=double, i=long) cdef advance(double dt, long n, list bodies=*, list pairs=*) @cython.locals(x1=double, x2=double, y1=double, y2=double, z1=double, z2=double, m=double, m1=double, m2=double, vx=double, vy=double, vz=double) cdef report_energy(list bodies=*, list pairs=*, double e=*) @cython.locals(vx=double, vy=double, vz=double, m=double) cdef offset_momentum(tuple ref, list bodies=*, double px=*, double py=*, double pz=*) cpdef test_nbody(long iterations) Cython-0.26.1/Demos/benchmarks/spectralnorm.py0000664000175000017500000000313113143605603022076 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- # The Computer Language Benchmarks Game # http://shootout.alioth.debian.org/ # Contributed by Sebastien Loisel # Fixed by Isaac Gouy # Sped up by Josh Goldfoot # Dirtily sped up by Simon Descarpentries # Concurrency by Jason Stitt from time import time import util import optparse def eval_A (i, j): return 1.0 / ((i + j) * (i + j + 1) / 2 + i + 1) def eval_A_times_u (u): return [ part_A_times_u(i,u) for i in range(len(u)) ] def eval_At_times_u (u): return [ part_At_times_u(i,u) for i in range(len(u)) ] def eval_AtA_times_u (u): return eval_At_times_u (eval_A_times_u (u)) def part_A_times_u(i, u): partial_sum = 0 for j, u_j in enumerate(u): partial_sum += eval_A (i, j) * u_j return partial_sum def part_At_times_u(i, u): partial_sum = 0 for j, u_j in enumerate(u): partial_sum += eval_A (j, i) * u_j return partial_sum DEFAULT_N = 130 def main(n): times = [] for i in range(n): t0 = time() u = [1] * DEFAULT_N for dummy in range (10): v = eval_AtA_times_u (u) u = eval_AtA_times_u (v) vBv = vv = 0 for ue, ve in zip (u, v): vBv += ue * ve vv += ve * ve tk = time() times.append(tk - t0) return times if __name__ == "__main__": parser = optparse.OptionParser( usage="%prog [options]", description="Test the performance of the spectralnorm benchmark") util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, main) Cython-0.26.1/Demos/benchmarks/generators.py0000664000175000017500000000404112542002467021540 0ustar stefanstefan00000000000000#!/usr/bin/python # micro benchmarks for generators COUNT = 10000 import cython @cython.locals(N=cython.Py_ssize_t) def count_to(N): for i in range(N): yield i @cython.locals(i=cython.Py_ssize_t) def round_robin(*_iterators): iterators = list(_iterators) to_drop = [] while iterators: for i, it in enumerate(iterators): try: value = next(it) except StopIteration: to_drop.append(i) else: yield value if to_drop: for i in reversed(to_drop): del iterators[i] del to_drop[:] def yield_from(*iterators): for it in iterators: yield from it def bm_plain(N): return count_to(COUNT * N) def bm_round_robin(N): return round_robin(*[ count_to(COUNT // i) for i in range(1,N+1) ]) def bm_yield_from(N): 
return yield_from(count_to(N), round_robin(*[ yield_from(count_to(COUNT // i)) for i in range(1,N+1) ]), count_to(N)) def bm_yield_from_nested(N): return yield_from(count_to(N), yield_from(count_to(N), round_robin(*[ yield_from(count_to(COUNT // i)) for i in range(1,N+1) ]), count_to(N)), count_to(N)) def time(fn, *args): from time import time begin = time() result = list(fn(*args)) end = time() return result, end-begin def benchmark(N): times = [] for _ in range(N): result, t = time(bm_yield_from_nested, 10) times.append(t) return times main = benchmark if __name__ == "__main__": import optparse parser = optparse.OptionParser( usage="%prog [options]", description=("Micro benchmarks for generators.")) import util util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, benchmark) Cython-0.26.1/Demos/benchmarks/bpnn3.py0000664000175000017500000001235413143605603020414 0ustar stefanstefan00000000000000#!/usr/bin/python # Back-Propagation Neural Networks # # Written in Python. See http://www.python.org/ # # Neil Schemenauer import math import random as random # Local imports import util random.seed(0) # calculate a random number where: a <= rand < b def rand(a, b, random=random.random): return (b-a)*random() + a # Make a matrix (we could use NumPy to speed this up) def makeMatrix(I, J, fill=0.0): m = [] for i in range(I): m.append([fill]*J) return m class NN(object): # print 'class NN' def __init__(self, ni, nh, no): # number of input, hidden, and output nodes self.ni = ni + 1 # +1 for bias node self.nh = nh self.no = no # activations for nodes self.ai = [1.0]*self.ni self.ah = [1.0]*self.nh self.ao = [1.0]*self.no # create weights self.wi = makeMatrix(self.ni, self.nh) self.wo = makeMatrix(self.nh, self.no) # set them to random vaules for i in range(self.ni): for j in range(self.nh): self.wi[i][j] = rand(-2.0, 2.0) for j in range(self.nh): for k in range(self.no): self.wo[j][k] = rand(-2.0, 2.0) # last change in weights for momentum self.ci = makeMatrix(self.ni, self.nh) self.co = makeMatrix(self.nh, self.no) def update(self, inputs): # print 'update', inputs if len(inputs) != self.ni-1: raise ValueError('wrong number of inputs') # input activations for i in range(self.ni-1): #self.ai[i] = 1.0/(1.0+math.exp(-inputs[i])) self.ai[i] = inputs[i] # hidden activations for j in range(self.nh): sum = 0.0 for i in range(self.ni): sum = sum + self.ai[i] * self.wi[i][j] self.ah[j] = 1.0/(1.0+math.exp(-sum)) # output activations for k in range(self.no): sum = 0.0 for j in range(self.nh): sum = sum + self.ah[j] * self.wo[j][k] self.ao[k] = 1.0/(1.0+math.exp(-sum)) return self.ao[:] def backPropagate(self, targets, N, M): # print N, M if len(targets) != self.no: raise ValueError('wrong number of target values') # calculate error terms for output output_deltas = [0.0] * self.no # print self.no for k in range(self.no): ao = self.ao[k] output_deltas[k] = ao*(1-ao)*(targets[k]-ao) # calculate error terms for hidden hidden_deltas = [0.0] * self.nh for j in range(self.nh): sum = 0.0 for k in range(self.no): sum = sum + output_deltas[k]*self.wo[j][k] hidden_deltas[j] = self.ah[j]*(1-self.ah[j])*sum # update output weights for j in range(self.nh): for k in range(self.no): change = output_deltas[k]*self.ah[j] self.wo[j][k] = self.wo[j][k] + N*change + M*self.co[j][k] self.co[j][k] = change # update input weights for i in range(self.ni): for j in range(self.nh): change = hidden_deltas[j]*self.ai[i] self.wi[i][j] = self.wi[i][j] + N*change + 
M*self.ci[i][j] self.ci[i][j] = change # calculate error error = 0.0 for k in range(len(targets)): error = error + 0.5*(targets[k]-self.ao[k])**2 return error def test(self, patterns): for p in patterns: print('%s -> %s' % (p[0], self.update(p[0]))) def weights(self): print('Input weights:') for i in range(self.ni): print(self.wi[i]) print('') print('Output weights:') for j in range(self.nh): print(self.wo[j]) def train(self, patterns, iterations=2000, N=0.5, M=0.1): # N: learning rate # M: momentum factor for i in range(iterations): error = 0.0 for p in patterns: inputs = p[0] targets = p[1] self.update(inputs) error = error + self.backPropagate(targets, N, M) #if i % 100 == 0: # print i, 'error %-14f' % error def demo(): # Teach network XOR function pat = [ [[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]] ] # create a network with two input, two hidden, and two output nodes n = NN(2, 3, 1) # train it with some patterns n.train(pat, 5000) # test it #n.test(pat) def time(fn, *args): import time, traceback begin = time.time() result = fn(*args) end = time.time() return result, end-begin def test_bpnn(iterations): times = [] for _ in range(iterations): result, t = time(demo) times.append(t) return times main = test_bpnn if __name__ == "__main__": import optparse parser = optparse.OptionParser( usage="%prog [options]", description=("Test the performance of a neural network.")) util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, test_bpnn) Cython-0.26.1/Demos/benchmarks/richards.pxd0000664000175000017500000000457112542002467021341 0ustar stefanstefan00000000000000cimport cython @cython.final cdef class Packet: cdef public object link cdef public object ident cdef public object kind cdef public Py_ssize_t datum cdef public list data cpdef append_to(self,lst) cdef class TaskRec: pass @cython.final cdef class DeviceTaskRec(TaskRec): cdef public object pending @cython.final cdef class IdleTaskRec(TaskRec): cdef public long control cdef public Py_ssize_t count @cython.final cdef class HandlerTaskRec(TaskRec): cdef public object work_in # = None cdef public object device_in # = None cpdef workInAdd(self, Packet p) cpdef deviceInAdd(self, Packet p) @cython.final cdef class WorkerTaskRec(TaskRec): cdef public object destination # = I_HANDLERA cdef public Py_ssize_t count cdef class TaskState: cdef public bint packet_pending # = True cdef public bint task_waiting # = False cdef public bint task_holding # = False cpdef packetPending(self) cpdef waiting(self) cpdef running(self) cpdef waitingWithPacket(self) cpdef bint isPacketPending(self) cpdef bint isTaskWaiting(self) cpdef bint isTaskHolding(self) cpdef bint isTaskHoldingOrWaiting(self) cpdef bint isWaitingWithPacket(self) cdef class TaskWorkArea: cdef public list taskTab # = [None] * TASKTABSIZE cdef public object taskList # = None cdef public Py_ssize_t holdCount # = 0 cdef public Py_ssize_t qpktCount # = 0 cdef class Task(TaskState): cdef public Task link # = taskWorkArea.taskList cdef public object ident # = i cdef public object priority # = p cdef public object input # = w cdef public object handle # = r cpdef addPacket(self,Packet p,Task old) cpdef runTask(self) cpdef waitTask(self) cpdef hold(self) cpdef release(self,i) cpdef qpkt(self,Packet pkt) cpdef findtcb(self,id) cdef class DeviceTask(Task): @cython.locals(d=DeviceTaskRec) cpdef fn(self,Packet pkt,DeviceTaskRec r) cdef class HandlerTask(Task): @cython.locals(h=HandlerTaskRec) cpdef fn(self,Packet pkt,HandlerTaskRec 
r) cdef class IdleTask(Task): @cython.locals(i=IdleTaskRec) cpdef fn(self,Packet pkt,IdleTaskRec r) cdef class WorkTask(Task): @cython.locals(w=WorkerTaskRec) cpdef fn(self,Packet pkt,WorkerTaskRec r) @cython.locals(t=Task) cpdef schedule() cdef class Richards: cpdef run(self, iterations) Cython-0.26.1/Demos/benchmarks/fstrings.py0000664000175000017500000001750613023021033021221 0ustar stefanstefan00000000000000# coding=utf-8 # NOTE: requires Python 3.6 or later if not compiled with Cython from time import time import cython @cython.locals(x=int, n=int) def run(): t0 = time() f = 1.0 x = 2 n = 5 i = 12345678 s = 'abc' u = u'üöä' # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... 
f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" # repeat without fast looping ... 
f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:.2}--{n:2}{n:5}oo{i}" f"{n}oo{n*10}{f:3.2}--{n:2}{n:5}oo{i}{s}" f"{n}oo{n*10}{f:5.2}--{n:2}{n:5}oo{i}{u}" f"{n}oo{n*10}{f:2.2}--{n:2}{n:5}oo{i}{s}xx{u}" tk = time() return tk - t0 def main(n): run() # warmup times = [] for i in range(n): times.append(run()) return times if __name__ == "__main__": import optparse import util parser = optparse.OptionParser( usage="%prog [options]", description="Test the performance of fstring literal formatting") util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, main) Cython-0.26.1/Demos/benchmarks/chaos.py0000664000175000017500000002326013023021033020451 0ustar stefanstefan00000000000000# Copyright (C) 2005 Carl Friedrich Bolz """create chaosgame-like fractals """ from __future__ import division, print_function import cython import time import operator import optparse import random random.seed(1234) from functools import reduce if not cython.compiled: from math import sqrt class GVector(object): def __init__(self, x = 0, y = 0, z = 0): self.x = x self.y = y self.z = z def Mag(self): return sqrt(self.x ** 2 + self.y ** 2 + self.z ** 2) def dist(self, other): return sqrt((self.x - other.x) ** 2 + (self.y - other.y) ** 2 + (self.z - other.z) ** 2) @cython.locals(self="GVector", other="GVector") def __add__(self, other): if not isinstance(other, GVector): raise ValueError("Can't add GVector to " + str(type(other))) v = GVector(self.x + other.x, self.y + other.y, self.z + other.z) return v @cython.locals(self="GVector", other="GVector") def __sub__(self, other): return self + other * -1 @cython.locals(self="GVector", other=cython.double) def __mul__(self, other): v = GVector(self.x * other, self.y * other, self.z * other) return v __rmul__ = __mul__ @cython.locals(other="GVector", l1=cython.double, l2_=cython.double) def linear_combination(self, other, l1, l2=None): l2_ = 1 - l1 if l2 is None else l2 v = GVector(self.x * l1 + other.x * l2_, self.y * l1 + other.y * l2_, self.z * l1 + other.z * l2_) return v def __str__(self): return "<%f, %f, %f>" % (self.x, self.y, self.z) def __repr__(self): return "GVector(%f, %f, %f)" % (self.x, self.y, self.z) def GetKnots(points, degree): knots = [0] * degree + range(1, len(points) - degree) knots += [len(points) - degree] * degree return knots class Spline(object): """Class for representing B-Splines and NURBS of arbitrary degree""" def __init__(self, points, degree = 3, knots = None): """Creates a Spline. 
points is a list of GVector, degree is the degree of the Spline.""" if knots is None: self.knots = GetKnots(points, degree) else: if len(points) > len(knots) - degree + 1: raise ValueError("too many control points") elif len(points) < len(knots) - degree + 1: raise ValueError("not enough control points") last = knots[0] for cur in knots[1:]: if cur < last: raise ValueError("knots not strictly increasing") last = cur self.knots = knots self.points = points self.degree = degree def GetDomain(self): """Returns the domain of the B-Spline""" return (self.knots[self.degree - 1], self.knots[len(self.knots) - self.degree]) @cython.locals(ik=cython.long, ii=cython.long, I=cython.long, ua=cython.long, ub=cython.long, u=cython.double, dom=(cython.long, cython.long)) def __call__(self, u): """Calculates a point of the B-Spline using de Boors Algorithm""" dom = self.GetDomain() if u < dom[0] or u > dom[1]: raise ValueError("Function value not in domain") if u == dom[0]: return self.points[0] if u == dom[1]: return self.points[-1] I = self.GetIndex(u) d = [self.points[I - self.degree + 1 + ii] for ii in range(self.degree + 1)] U = self.knots for ik in range(1, self.degree + 1): for ii in range(I - self.degree + ik + 1, I + 2): ua = U[ii + self.degree - ik] ub = U[ii - 1] co1 = (ua - u) / (ua - ub) co2 = (u - ub) / (ua - ub) index = ii - I + self.degree - ik - 1 d[index] = d[index].linear_combination(d[index + 1], co1, co2) return d[0] @cython.locals(ii=cython.long, I=cython.long, dom=(cython.long, cython.long)) def GetIndex(self, u): dom = self.GetDomain() for ii in range(self.degree - 1, len(self.knots) - self.degree): if self.knots[ii] <= u < self.knots[ii + 1]: I = ii break else: I = dom[1] - 1 return I def __len__(self): return len(self.points) def __repr__(self): return "Spline(%r, %r, %r)" % (self.points, self.degree, self.knots) class Chaosgame(object): @cython.locals(splines=list, thickness=cython.double, maxlength=cython.double, length=cython.double, curr=GVector, last=GVector, p=GVector, spl=Spline, t=cython.double, i=int) def __init__(self, splines, thickness=0.1): self.splines = splines self.thickness = thickness self.minx = min([p.x for spl in splines for p in spl.points]) self.miny = min([p.y for spl in splines for p in spl.points]) self.maxx = max([p.x for spl in splines for p in spl.points]) self.maxy = max([p.y for spl in splines for p in spl.points]) self.height = self.maxy - self.miny self.width = self.maxx - self.minx self.num_trafos = [] maxlength = thickness * self.width / self.height for spl in splines: length = 0 curr = spl(0) for i in range(1, 1000): last = curr t = 1 / 999 * i curr = spl(t) length += curr.dist(last) self.num_trafos.append(max(1, int(length / maxlength * 1.5))) self.num_total = reduce(operator.add, self.num_trafos, 0) def get_random_trafo(self): r = random.randrange(int(self.num_total) + 1) l = 0 for i in range(len(self.num_trafos)): if l <= r < l + self.num_trafos[i]: return i, random.randrange(self.num_trafos[i]) l += self.num_trafos[i] return len(self.num_trafos) - 1, random.randrange(self.num_trafos[-1]) @cython.locals(neighbour="GVector", basepoint="GVector", derivative="GVector", seg_length=cython.double, start=cython.double, end=cython.double, t=cython.double) def transform_point(self, point, trafo=None): x = (point.x - self.minx) / self.width y = (point.y - self.miny) / self.height if trafo is None: trafo = self.get_random_trafo() start, end = self.splines[trafo[0]].GetDomain() length = end - start seg_length = length / self.num_trafos[trafo[0]] t = 
start + seg_length * trafo[1] + seg_length * x basepoint = self.splines[trafo[0]](t) if t + 1/50000 > end: neighbour = self.splines[trafo[0]](t - 1/50000) derivative = neighbour - basepoint else: neighbour = self.splines[trafo[0]](t + 1/50000) derivative = basepoint - neighbour if derivative.Mag() != 0: basepoint.x += derivative.y / derivative.Mag() * (y - 0.5) * \ self.thickness basepoint.y += -derivative.x / derivative.Mag() * (y - 0.5) * \ self.thickness else: print("r", end='') self.truncate(basepoint) return basepoint def truncate(self, point): if point.x >= self.maxx: point.x = self.maxx if point.y >= self.maxy: point.y = self.maxy if point.x < self.minx: point.x = self.minx if point.y < self.miny: point.y = self.miny @cython.locals(x=cython.long, y=cython.long) def create_image_chaos(self, timer, w, h, n): im = [[1] * h for i in range(w)] point = GVector((self.maxx + self.minx) / 2, (self.maxy + self.miny) / 2, 0) times = [] for _ in range(n): t1 = timer() for i in range(5000): point = self.transform_point(point) x = int((point.x - self.minx) / self.width * w) y = int((point.y - self.miny) / self.height * h) if x == w: x -= 1 if y == h: y -= 1 im[x][h - y - 1] = 0 t2 = timer() times.append(t2 - t1) return times def main(n, timer=time.time): splines = [ Spline([ GVector(1.597350, 3.304460, 0.000000), GVector(1.575810, 4.123260, 0.000000), GVector(1.313210, 5.288350, 0.000000), GVector(1.618900, 5.329910, 0.000000), GVector(2.889940, 5.502700, 0.000000), GVector(2.373060, 4.381830, 0.000000), GVector(1.662000, 4.360280, 0.000000)], 3, [0, 0, 0, 1, 1, 1, 2, 2, 2]), Spline([ GVector(2.804500, 4.017350, 0.000000), GVector(2.550500, 3.525230, 0.000000), GVector(1.979010, 2.620360, 0.000000), GVector(1.979010, 2.620360, 0.000000)], 3, [0, 0, 0, 1, 1, 1]), Spline([ GVector(2.001670, 4.011320, 0.000000), GVector(2.335040, 3.312830, 0.000000), GVector(2.366800, 3.233460, 0.000000), GVector(2.366800, 3.233460, 0.000000)], 3, [0, 0, 0, 1, 1, 1]) ] c = Chaosgame(splines, 0.25) return c.create_image_chaos(timer, 1000, 1200, n) if __name__ == "__main__": import util parser = optparse.OptionParser( usage="%prog [options]", description="Test the performance of the Chaos benchmark") util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, main) Cython-0.26.1/Demos/benchmarks/spectralnorm.pxd0000664000175000017500000000064112542002467022245 0ustar stefanstefan00000000000000 cimport cython cdef inline double eval_A(double i, double j) @cython.locals(i=long) cdef list eval_A_times_u(list u) @cython.locals(i=long) cdef list eval_At_times_u(list u) cdef list eval_AtA_times_u(list u) @cython.locals(j=long, u_j=double, partial_sum=double) cdef double part_A_times_u(double i, list u) @cython.locals(j=long, u_j=double, partial_sum=double) cdef double part_At_times_u(double i, list u) Cython-0.26.1/Demos/benchmarks/meteor_contest.pxd0000664000175000017500000000045312542002467022567 0ustar stefanstefan00000000000000cimport cython cdef list rotate(list ido, dict rd=*) cdef list flip(list ido, dict fd=*) cdef list permute(list ido, list r_ido) @cython.locals(n_i_min=long) cpdef solve(long n, long i_min, free, list curr_board, list pieces_left, list solutions, list fps=*, list se_nh=*, bisect=*) Cython-0.26.1/Demos/benchmarks/richards.py0000664000175000017500000002434013143605603021171 0ustar stefanstefan00000000000000# based on a Java version: # Based on original version written in BCPL by Dr Martin Richards # in 1981 at Cambridge University Computer 
Laboratory, England # and a C++ version derived from a Smalltalk version written by # L Peter Deutsch. # Java version: Copyright (C) 1995 Sun Microsystems, Inc. # Translation from C++, Mario Wolczko # Outer loop added by Alex Jacoby # Task IDs I_IDLE = 1 I_WORK = 2 I_HANDLERA = 3 I_HANDLERB = 4 I_DEVA = 5 I_DEVB = 6 # Packet types K_DEV = 1000 K_WORK = 1001 # Packet BUFSIZE = 4 BUFSIZE_RANGE = range(BUFSIZE) class Packet(object): def __init__(self,l,i,k): self.link = l self.ident = i self.kind = k self.datum = 0 self.data = [0] * BUFSIZE def append_to(self,lst): self.link = None if lst is None: return self else: p = lst next = p.link while next is not None: p = next next = p.link p.link = self return lst # Task Records class TaskRec(object): pass class DeviceTaskRec(TaskRec): def __init__(self): self.pending = None class IdleTaskRec(TaskRec): def __init__(self): self.control = 1 self.count = 10000 class HandlerTaskRec(TaskRec): def __init__(self): self.work_in = None self.device_in = None def workInAdd(self,p): self.work_in = p.append_to(self.work_in) return self.work_in def deviceInAdd(self,p): self.device_in = p.append_to(self.device_in) return self.device_in class WorkerTaskRec(TaskRec): def __init__(self): self.destination = I_HANDLERA self.count = 0 # Task class TaskState(object): def __init__(self): self.packet_pending = True self.task_waiting = False self.task_holding = False def packetPending(self): self.packet_pending = True self.task_waiting = False self.task_holding = False return self def waiting(self): self.packet_pending = False self.task_waiting = True self.task_holding = False return self def running(self): self.packet_pending = False self.task_waiting = False self.task_holding = False return self def waitingWithPacket(self): self.packet_pending = True self.task_waiting = True self.task_holding = False return self def isPacketPending(self): return self.packet_pending def isTaskWaiting(self): return self.task_waiting def isTaskHolding(self): return self.task_holding def isTaskHoldingOrWaiting(self): return self.task_holding or (not self.packet_pending and self.task_waiting) def isWaitingWithPacket(self): return self.packet_pending and self.task_waiting and not self.task_holding tracing = False layout = 0 def trace(a): global layout layout -= 1 if layout <= 0: print() layout = 50 print(a, end='') TASKTABSIZE = 10 class TaskWorkArea(object): def __init__(self): self.taskTab = [None] * TASKTABSIZE self.taskList = None self.holdCount = 0 self.qpktCount = 0 taskWorkArea = TaskWorkArea() class Task(TaskState): def __init__(self,i,p,w,initialState,r): self.link = taskWorkArea.taskList self.ident = i self.priority = p self.input = w self.packet_pending = initialState.isPacketPending() self.task_waiting = initialState.isTaskWaiting() self.task_holding = initialState.isTaskHolding() self.handle = r taskWorkArea.taskList = self taskWorkArea.taskTab[i] = self def fn(self,pkt,r): raise NotImplementedError def addPacket(self,p,old): if self.input is None: self.input = p self.packet_pending = True if self.priority > old.priority: return self else: p.append_to(self.input) return old def runTask(self): if self.isWaitingWithPacket(): msg = self.input self.input = msg.link if self.input is None: self.running() else: self.packetPending() else: msg = None return self.fn(msg,self.handle) def waitTask(self): self.task_waiting = True return self def hold(self): taskWorkArea.holdCount += 1 self.task_holding = True return self.link def release(self,i): t = self.findtcb(i) t.task_holding = False if 
t.priority > self.priority: return t else: return self def qpkt(self,pkt): t = self.findtcb(pkt.ident) taskWorkArea.qpktCount += 1 pkt.link = None pkt.ident = self.ident return t.addPacket(pkt,self) def findtcb(self,id): t = taskWorkArea.taskTab[id] if t is None: raise Exception("Bad task id %d" % id) return t # DeviceTask class DeviceTask(Task): def __init__(self,i,p,w,s,r): Task.__init__(self,i,p,w,s,r) def fn(self,pkt,r): d = r assert isinstance(d, DeviceTaskRec) if pkt is None: pkt = d.pending if pkt is None: return self.waitTask() else: d.pending = None return self.qpkt(pkt) else: d.pending = pkt if tracing: trace(pkt.datum) return self.hold() class HandlerTask(Task): def __init__(self,i,p,w,s,r): Task.__init__(self,i,p,w,s,r) def fn(self,pkt,r): h = r assert isinstance(h, HandlerTaskRec) if pkt is not None: if pkt.kind == K_WORK: h.workInAdd(pkt) else: h.deviceInAdd(pkt) work = h.work_in if work is None: return self.waitTask() count = work.datum if count >= BUFSIZE: h.work_in = work.link return self.qpkt(work) dev = h.device_in if dev is None: return self.waitTask() h.device_in = dev.link dev.datum = work.data[count] work.datum = count + 1 return self.qpkt(dev) # IdleTask class IdleTask(Task): def __init__(self,i,p,w,s,r): Task.__init__(self,i,0,None,s,r) def fn(self,pkt,r): i = r assert isinstance(i, IdleTaskRec) i.count -= 1 if i.count == 0: return self.hold() elif i.control & 1 == 0: i.control //= 2 return self.release(I_DEVA) else: i.control = i.control//2 ^ 0xd008 return self.release(I_DEVB) # WorkTask A = ord('A') class WorkTask(Task): def __init__(self,i,p,w,s,r): Task.__init__(self,i,p,w,s,r) def fn(self,pkt,r): w = r assert isinstance(w, WorkerTaskRec) if pkt is None: return self.waitTask() if w.destination == I_HANDLERA: dest = I_HANDLERB else: dest = I_HANDLERA w.destination = dest pkt.ident = dest pkt.datum = 0 for i in BUFSIZE_RANGE: # range(BUFSIZE) w.count += 1 if w.count > 26: w.count = 1 pkt.data[i] = A + w.count - 1 return self.qpkt(pkt) import time def schedule(): t = taskWorkArea.taskList while t is not None: pkt = None if tracing: print("tcb =", t.ident) if t.isTaskHoldingOrWaiting(): t = t.link else: if tracing: trace(chr(ord("0")+t.ident)) t = t.runTask() class Richards(object): def run(self, iterations): for i in range(iterations): taskWorkArea.holdCount = 0 taskWorkArea.qpktCount = 0 IdleTask(I_IDLE, 1, 10000, TaskState().running(), IdleTaskRec()) wkq = Packet(None, 0, K_WORK) wkq = Packet(wkq , 0, K_WORK) WorkTask(I_WORK, 1000, wkq, TaskState().waitingWithPacket(), WorkerTaskRec()) wkq = Packet(None, I_DEVA, K_DEV) wkq = Packet(wkq , I_DEVA, K_DEV) wkq = Packet(wkq , I_DEVA, K_DEV) HandlerTask(I_HANDLERA, 2000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec()) wkq = Packet(None, I_DEVB, K_DEV) wkq = Packet(wkq , I_DEVB, K_DEV) wkq = Packet(wkq , I_DEVB, K_DEV) HandlerTask(I_HANDLERB, 3000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec()) wkq = None; DeviceTask(I_DEVA, 4000, wkq, TaskState().waiting(), DeviceTaskRec()); DeviceTask(I_DEVB, 5000, wkq, TaskState().waiting(), DeviceTaskRec()); schedule() if taskWorkArea.holdCount == 9297 and taskWorkArea.qpktCount == 23246: pass else: return False return True def entry_point(iterations): r = Richards() startTime = time.time() result = r.run(iterations) endTime = time.time() return result, startTime, endTime def main(iterations = 10, entry_point = entry_point): print("Richards benchmark (Python) starting... 
[%r]" % entry_point) result, startTime, endTime = entry_point(iterations) if not result: print("Incorrect results!") return -1 print("finished.") total_s = endTime - startTime print("Total time for %d iterations: %.2f secs" % (iterations, total_s)) print("Average time per iteration: %.2f ms" % (total_s*1000/iterations)) return 42 try: import sys if '-nojit' in sys.argv: sys.argv.remove('-nojit') raise ImportError import pypyjit except ImportError: pass else: import types for item in globals().values(): if isinstance(item, types.FunctionType): pypyjit.enable(item.func_code) elif isinstance(item, type): for it in item.__dict__.values(): if isinstance(it, types.FunctionType): pypyjit.enable(it.func_code) if __name__ == '__main__': import sys if len(sys.argv) >= 2: main(iterations = int(sys.argv[1])) else: main() Cython-0.26.1/Demos/benchmarks/coroutines.py0000664000175000017500000000311313023021033021541 0ustar stefanstefan00000000000000#!/usr/bin/python3 # cython: language_level=3 # micro benchmarks for coroutines COUNT = 100000 import cython async def done(n): return n @cython.locals(N=cython.Py_ssize_t) async def count_to(N): count = 0 for i in range(N): count += await done(i) return count async def await_all(*coroutines): count = 0 for coro in coroutines: count += await coro return count @cython.locals(N=cython.Py_ssize_t) def bm_await_nested(N): return await_all( count_to(N), await_all( count_to(N), await_all(*[count_to(COUNT // i) for i in range(1, N+1)]), count_to(N)), count_to(N)) def await_one(coro): a = coro.__await__() try: while True: await_one(next(a)) except StopIteration as exc: result = exc.args[0] if exc.args else None else: result = 0 return result def time(fn, *args): from time import time begin = time() result = await_one(fn(*args)) end = time() return result, end-begin def benchmark(N): times = [] for _ in range(N): result, t = time(bm_await_nested, 1000) times.append(t) assert result == 8221043302, result return times main = benchmark if __name__ == "__main__": import optparse parser = optparse.OptionParser( usage="%prog [options]", description="Micro benchmarks for generators.") import util util.add_standard_options_to(parser) options, args = parser.parse_args() util.run_benchmark(options, options.num_runs, benchmark) Cython-0.26.1/Demos/benchmarks/hexiom2.pxd0000664000175000017500000000353312542002467021112 0ustar stefanstefan00000000000000cimport cython cdef object EMPTY cdef int IMPOSSIBLE, SOLVED, OPEN cdef int ASCENDING, DESCENDING cdef class Dir: cdef public int x, y @cython.final cdef class Done: cdef public int count cdef public list cells cdef Done clone(self) cdef inline int set_done(self, int i, v) except -123 cdef inline bint already_done(self, int i) except -123 cdef inline bint remove(self, int i, v) except -123 cdef inline bint remove_unfixed(self, v) except -123 cdef int next_cell(self, Pos pos, int strategy=*) except -123 cdef int filter_tiles(self, int* tiles) except -123 cdef int next_cell_min_choice(self) except -123 cdef int next_cell_max_choice(self) except -123 cdef int next_cell_highest_value(self) except -123 cdef int next_cell_first(self) except -123 cdef int next_cell_max_neighbors(self, Pos pos) except -123 cdef int next_cell_min_neighbors(self, Pos pos) except -123 @cython.final cdef class Node: cdef public tuple pos cdef public int id cdef public list links @cython.final cdef class Hex: cdef public list nodes_by_id cdef public dict nodes_by_pos cdef public int size cdef public int count cdef int link_nodes(self) except -123 cdef bint 
contains_pos(self, tuple pos) cdef Node get_by_pos(self, tuple pos) cdef Node get_by_id(self, int id) @cython.final cdef class Pos: cdef public Hex hex cdef public Done done cdef public int[8] tiles cdef Pos clone(self) cdef bint constraint_pass(Pos pos, last_move=*) except -123 cdef list find_moves(Pos pos, int strategy, int order) cdef inline int play_move(Pos pos, tuple move) except -123 cdef print_pos(Pos pos, output) cdef int solved(Pos pos, output, bint verbose=*) except -123 cdef int solve_step(Pos prev, int strategy, order, output, bint first=*) except -123 cdef check_valid(Pos pos) Cython-0.26.1/Demos/benchmarks/bpnn3.pxd0000664000175000017500000000117612542002467020560 0ustar stefanstefan00000000000000cimport cython cdef double rand(double a, double b, random=*) @cython.locals(i=Py_ssize_t) cdef list makeMatrix(Py_ssize_t I, Py_ssize_t J, fill=*) cdef class NN: cdef Py_ssize_t ni, nh, no cdef list ai, ah, ao cdef list wi, wo cdef list ci, co @cython.locals(i=Py_ssize_t, j=Py_ssize_t, k=Py_ssize_t) cpdef update(self, list inputs) @cython.locals(i=Py_ssize_t, j=Py_ssize_t, k=Py_ssize_t, change=double) cpdef double backPropagate(self, list targets, double N, M) @cython.locals(i=Py_ssize_t, p=list, error=double) cpdef train(self, list patterns, Py_ssize_t iterations=*, double N=*, M=*) Cython-0.26.1/Demos/spam.pyx0000664000175000017500000000062612574327400016411 0ustar stefanstefan00000000000000# cython: language_level=3 # # Example of an extension type. # cdef class Spam: cdef public int amount def __cinit__(self): self.amount = 0 def __dealloc__(self): print(self.amount, "tons of spam is history.") def get_amount(self): return self.amount def set_amount(self, new_amount): self.amount = new_amount def describe(self): print(self.amount, "tons of spam!") Cython-0.26.1/Demos/overflow_perf.pyx0000664000175000017500000001041312574327400020323 0ustar stefanstefan00000000000000# cython: language_level=3 # distutils: extra_compile_args = -O3 cimport cython ctypedef fused INT: int long long unsigned int unsigned long long object ctypedef fused C_INT: int long long unsigned int unsigned long long @cython.overflowcheck(False) def fib(INT n): """ >>> [fib(k) for k in range(10)] [1, 1, 2, 3, 5, 8, 13, 21, 34, 55] """ cdef INT a, b, k a, b = 0, 1 for k in range(n): a, b = b, a + b return int(b) @cython.overflowcheck(True) def fib_overflow(INT n): """ >>> [fib_overflow(k) for k in range(10)] [1, 1, 2, 3, 5, 8, 13, 21, 34, 55] """ cdef INT a, b, k a, b = 0, 1 for k in range(n): a, b = b, a + b return int(b) @cython.overflowcheck(False) def collatz(INT n): """ >>> collatz(1) 0 >>> collatz(5) 5 >>> collatz(10) 6 """ cdef INT k = 0 while n != 1: if n % 2 == 0: n //= 2 else: n = 3*n + 1 k += 1 return int(k) @cython.overflowcheck(True) @cython.overflowcheck.fold(False) def collatz_overflow(INT n): """ >>> collatz_overflow(1) 0 >>> collatz_overflow(5) 5 >>> collatz_overflow(10) 6 """ cdef INT k = 0 while n != 1: if n % 2 == 0: n //= 2 else: n = 3*n + 1 k += 1 return int(k) @cython.overflowcheck(True) @cython.overflowcheck.fold(True) def collatz_overflow_fold(INT n): """ >>> collatz_overflow_fold(1) 0 >>> collatz_overflow_fold(5) 5 >>> collatz_overflow_fold(10) 6 """ cdef INT k = 0 while n != 1: if n % 2 == 0: n //= 2 else: n = 3*n + 1 k += 1 return int(k) @cython.overflowcheck(False) def factorial(INT n): """ >>> factorial(2) 2 >>> factorial(5) 120 """ cdef INT k, res = 1 for k in range(2, n+1): res = res * k return int(res) @cython.overflowcheck(True) def factorial_overflow(INT n): """ >>> 
factorial_overflow(2) 2 >>> factorial_overflow(5) 120 """ cdef INT k, res = 1 for k in range(2, n+1): res = res * k return int(res) @cython.overflowcheck(False) def most_orthogonal(C_INT[:,::1] vectors): cdef C_INT n = vectors.shape[0] cdef C_INT* a cdef C_INT* b cdef double min_dot = 2 # actual max is 1 for i in range(n): for j in range(i): a = &vectors[i, 0] b = &vectors[j, 0] # A highly nested arithmetic expression... normalized_dot = (1.0 * (a[0]*b[0] + a[1]*b[1] + a[2]*b[2]) / ((a[0]*a[0] + a[1]*a[1] + a[2]*a[2]) * (b[0]*b[0] + b[1]*b[1]+b[2]*b[2]))) if normalized_dot < min_dot: min_dot = normalized_dot min_pair = i, j return vectors[i], vectors[j] @cython.overflowcheck(True) @cython.overflowcheck.fold(False) def most_orthogonal_overflow(C_INT[:,::1] vectors): cdef C_INT n = vectors.shape[0] cdef C_INT* a cdef C_INT* b cdef double min_dot = 2 # actual max is 1 for i in range(n): for j in range(i): a = &vectors[i, 0] b = &vectors[j, 0] # A highly nested arithmetic expression... normalized_dot = ((a[0]*b[0] + a[1]*b[1] + a[2]*b[2]) / (1.0 * (a[0]*a[0] + a[1]*a[1] + a[2]*a[2]) * (b[0]*b[0] + b[1]*b[1]+b[2]*b[2]))) if normalized_dot < min_dot: min_dot = normalized_dot min_pair = i, j return vectors[i], vectors[j] @cython.overflowcheck(True) @cython.overflowcheck.fold(True) def most_orthogonal_overflow_fold(C_INT[:,::1] vectors): cdef C_INT n = vectors.shape[0] cdef C_INT* a cdef C_INT* b cdef double min_dot = 2 # actual max is 1 for i in range(n): for j in range(i): a = &vectors[i, 0] b = &vectors[j, 0] # A highly nested arithmetic expression... normalized_dot = ((a[0]*b[0] + a[1]*b[1] + a[2]*b[2]) / (1.0 * (a[0]*a[0] + a[1]*a[1] + a[2]*a[2]) * (b[0]*b[0] + b[1]*b[1]+b[2]*b[2]))) if normalized_dot < min_dot: min_dot = normalized_dot min_pair = i, j return vectors[i], vectors[j] Cython-0.26.1/Demos/libraries/0000775000175000017500000000000013151203436016651 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/libraries/setup.py0000664000175000017500000000165513143605603020375 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import os import sys from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize # For demo purposes, we build our own tiny library. try: print("building libmymath.a") assert os.system("gcc -shared -fPIC -c mymath.c -o mymath.o") == 0 assert os.system("ar rcs libmymath.a mymath.o") == 0 except: if not os.path.exists("libmymath.a"): print("Error building external library, please create libmymath.a manually.") sys.exit(1) # Here is how to use the library built above. ext_modules = cythonize([ Extension("call_mymath", sources=["call_mymath.pyx"], include_dirs=[os.getcwd()], # path to .h file(s) library_dirs=[os.getcwd()], # path to .a or .so file(s) libraries=['mymath']) ]) setup( name='Demos', ext_modules=ext_modules, ) Cython-0.26.1/Demos/libraries/mymath.c0000664000175000017500000000011712542002467020317 0ustar stefanstefan00000000000000#include "math.h" double sinc(double x) { return x == 0 ? 
1 : sin(x)/x; } Cython-0.26.1/Demos/libraries/mymath.h0000664000175000017500000000002512542002467020322 0ustar stefanstefan00000000000000double sinc(double); Cython-0.26.1/Demos/libraries/call_mymath.pyx0000664000175000017500000000013312542002467021706 0ustar stefanstefan00000000000000cdef extern from "mymath.h": double sinc(double) def call_sinc(x): return sinc(x) Cython-0.26.1/Demos/pyprimes.py0000664000175000017500000000036012542002467017122 0ustar stefanstefan00000000000000def primes(kmax): p = [] k = 0 n = 2 while k < kmax: i = 0 while i < k and n % p[i] != 0: i = i + 1 if i == k: p.append(n) k = k + 1 n = n + 1 return p Cython-0.26.1/Demos/run_primes.py0000664000175000017500000000026312574327400017441 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import sys from primes import primes if len(sys.argv) >= 2: n = int(sys.argv[1]) else: n = 1000 print(primes(n)) Cython-0.26.1/Demos/run_spam.py0000664000175000017500000000027012574327400017100 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function from spam import Spam s = Spam() print("Created:", s) s.set_amount(42) print("Amount =", s.get_amount()) s.describe() s = None Cython-0.26.1/Demos/integrate2.pyx0000664000175000017500000000035412574327400017513 0ustar stefanstefan00000000000000# cython: language_level=3 cdef double f(double x) except? -2: return x**2-x def integrate_f(double a, double b, int N): cdef int i s = 0.0 dx = (b-a)/N for i in range(N): s += f(a+i*dx) return s * dx Cython-0.26.1/Demos/numpy_demo.pyx0000664000175000017500000000027712574327400017627 0ustar stefanstefan00000000000000cimport numpy as cnp def sum_of_squares(cnp.ndarray[double, ndim=1] arr): cdef long N = arr.shape[0] cdef double ss = 0 for i in range(N): ss += arr[i]**2 return ss Cython-0.26.1/Demos/integrate0.py0000664000175000017500000000022412542002467017313 0ustar stefanstefan00000000000000def f(x): return x**2-x def integrate_f(a, b, N): s = 0.0 dx = (b-a)/N for i in range(N): s += f(a+i*dx) return s * dx Cython-0.26.1/Demos/Makefile.nodistutils0000664000175000017500000000060612542002467020723 0ustar stefanstefan00000000000000PYHOME = $(HOME)/pkg/python/version PYINCLUDE = \ -I$(PYHOME)/include/python2.2 \ -I$(PYHOME)/$(ARCH)/include/python2.2 %.c: %.pyx ../bin/cython $< %.o: %.c gcc -c -fPIC $(PYINCLUDE) $< %.so: %.o gcc -shared $< -lm -o $@ all: primes.so spam.so numeric_demo.so clean: @echo Cleaning Demos @rm -f *.c *.o *.so *~ core core.* @cd callback; $(MAKE) clean @cd embed; $(MAKE) clean Cython-0.26.1/Demos/overflow_perf_run.py0000664000175000017500000000370312574327400021023 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function from overflow_perf import * import sys import timeit try: import numpy as np except ImportError: np = None def run_tests(N): global f for func in most_orthogonal, fib, collatz, factorial: print(func.__name__) for type in ['int', 'unsigned int', 'long long', 'unsigned long long', 'object']: if func == most_orthogonal: if type == 'object' or np == None: continue type_map = {'int': 'int32', 'unsigned int': 'uint32', 'long long': 'int64', 'unsigned long long': 'uint64'} shape = N, 3 arg = np.ndarray(shape, dtype=type_map[type]) arg[:] = 1000 * np.random.random(shape) else: arg = N try: print("%s[%s](%s)" % (func.__name__, type, N)) with_overflow = my_timeit(globals()[func.__name__ + "_overflow"][type], arg) no_overflow = my_timeit(func[type], arg) print("\t%0.04e\t%0.04e\t%0.04f" % (no_overflow, with_overflow, 
with_overflow / no_overflow)) if func.__name__ + "_overflow_fold" in globals(): with_overflow = my_timeit(globals()[func.__name__ + "_overflow_fold"][type], arg) print("\t%0.04e\t%0.04e\t%0.04f (folded)" % ( no_overflow, with_overflow, with_overflow / no_overflow)) except OverflowError: print(" ", "Overflow") def my_timeit(func, N): global f, arg f = func arg = N for exponent in range(10, 30): times = 2 ** exponent res = min(timeit.repeat("f(arg)", setup="from __main__ import f, arg", repeat=5, number=times)) if res > .25: break return res / times params = sys.argv[1:] if not params: params = [129, 9, 97] for arg in params: print() print("N", arg) run_tests(int(arg)) Cython-0.26.1/Demos/integrate_timing.py0000664000175000017500000000065313023021033020571 0ustar stefanstefan00000000000000from __future__ import absolute_import, print_function import timeit import integrate0, integrate1, integrate2 number = 10 py_time = None for m in ('integrate0', 'integrate1', 'integrate2'): print(m) t = min(timeit.repeat("integrate_f(0.0, 10.0, 100000)", "from %s import integrate_f" % m, number=number)) if py_time is None: py_time = t print(" ", t / number, "s") print(" ", py_time / t) Cython-0.26.1/Demos/callback/0000775000175000017500000000000013151203436016431 5ustar stefanstefan00000000000000Cython-0.26.1/Demos/callback/Setup.py0000664000175000017500000000035212542002467020107 0ustar stefanstefan00000000000000from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize setup( name = 'callback', ext_modules=cythonize([ Extension("cheese", ["cheese.pyx", "cheesefinder.c"]), ]), ) Cython-0.26.1/Demos/callback/cheesefinder.c0000664000175000017500000000050312542002467021223 0ustar stefanstefan00000000000000/* * An example of a C API that provides a callback mechanism. */ #include "cheesefinder.h" static char *cheeses[] = { "cheddar", "camembert", "that runny one", 0 }; void find_cheeses(cheesefunc user_func, void *user_data) { char **p = cheeses; while (*p) { user_func(*p, user_data); ++p; } } Cython-0.26.1/Demos/callback/run_cheese.py0000664000175000017500000000015113023021033021105 0ustar stefanstefan00000000000000import cheese def report_cheese(name): print("Found cheese: " + name) cheese.find(report_cheese) Cython-0.26.1/Demos/callback/Makefile0000664000175000017500000000024312542002467020074 0ustar stefanstefan00000000000000all: python Setup.py build_ext --inplace test: all python run_cheese.py clean: @echo Cleaning Demos/callback @rm -f cheese.c *.o *.so *~ core @rm -rf build Cython-0.26.1/Demos/callback/README.txt0000664000175000017500000000052713023021033020120 0ustar stefanstefan00000000000000This example demonstrates how you can wrap a C API that has a callback interface, so that you can pass Python functions to it as callbacks. The files cheesefinder.h and cheesefinder.c represent the C library to be wrapped. The file cheese.pyx is the Pyrex module which wraps it. The file run_cheese.py demonstrates how to call the wrapper. 
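A minimal usage sketch (not part of the distribution), assuming cheese.pyx has been compiled into a module named cheese: because find() simply forwards an arbitrary Python callable through the C library's void* user_data slot, any callable, including a stateful one such as a bound method, can act as the callback.

import cheese

found = []                  # collect the cheese names instead of printing them
cheese.find(found.append)   # list.append works fine as the callback
assert "cheddar" in found   # names come from the cheeses[] array in cheesefinder.c
print(found)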
Cython-0.26.1/Demos/callback/Makefile.nodistutils0000664000175000017500000000050312542002467022453 0ustar stefanstefan00000000000000PYHOME = $(HOME)/pkg/python/version PYINCLUDE = \ -I$(PYHOME)/include/python2.2 \ -I$(PYHOME)/$(ARCH)/include/python2.2 %.c: %.pyx ../../bin/cython $< %.o: %.c gcc -c -fPIC $(PYINCLUDE) $< %.so: %.o gcc -shared $< -lm -o $@ all: cheese.so clean: @echo Cleaning Demos/callback @rm -f *.c *.o *.so *~ core core.*
Cython-0.26.1/Demos/callback/cheese.pyx0000664000175000017500000000052213143605603020431 0ustar stefanstefan00000000000000# # Cython wrapper for the cheesefinder API # cdef extern from "cheesefinder.h": ctypedef void (*cheesefunc)(char *name, void *user_data) void find_cheeses(cheesefunc user_func, void *user_data) def find(f): find_cheeses(callback, f) cdef void callback(char *name, void *f): (<object>f)(name.decode('utf-8'))
Cython-0.26.1/Demos/callback/cheesefinder.h0000664000175000017500000000016313023021033021213 0ustar stefanstefan00000000000000typedef void (*cheesefunc)(char *name, void *user_data); void find_cheeses(cheesefunc user_func, void *user_data);
Cython-0.26.1/Demos/run_numeric_demo.py0000664000175000017500000000016213023021033020575 0ustar stefanstefan00000000000000import numpy import numpy_demo a = numpy.array([1.0, 3.5, 8.4, 2.3, 6.6, 4.1], "d") numpy_demo.sum_of_squares(a)
Cython-0.26.1/docs/0000775000175000017500000000000013151203436014556 5ustar stefanstefan00000000000000Cython-0.26.1/docs/_static/0000775000175000017500000000000013151203436016204 5ustar stefanstefan00000000000000Cython-0.26.1/docs/_static/cythonlogo.png0000664000175000017500000001014112542002467021100 0ustar stefanstefan00000000000000[binary PNG image data omitted]
Cython-0.26.1/docs/_static/cython-logo-light.png0000664000175000017500000001011712542002467022265 0ustar stefanstefan00000000000000[binary PNG image data omitted]
Cython-0.26.1/docs/_static/favicon.ico0000664000175000017500000000217612542002467020337 0ustar stefanstefan00000000000000[binary ICO image data omitted]
Cython-0.26.1/docs/.hgignore0000664000175000017500000000007712542002467016371 0ustar stefanstefan00000000000000syntax: glob *.pyc *~ .*.swp syntax: regexp
^build/ ^_build/ Cython-0.26.1/docs/Makefile0000664000175000017500000001307712542002467016232 0ustar stefanstefan00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = a4 BUILDDIR = build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean pdf html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* pdf: $(SPHINXBUILD) -b pdf $(ALLSPHINXOPTS) $(BUILDDIR)/pdf @echo @echo "Build finished. The PDF is in $(BUILDDIR)/pdf." html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Cython.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Cython.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." 
@echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Cython" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Cython" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." Cython-0.26.1/docs/TODO0000664000175000017500000000206113143605603015250 0ustar stefanstefan00000000000000Background ---------- [brain dump] The "Old Cython Users Guide" is a derivative of the old Pyrex documentation. It underwent substantial editing by Peter Alexandar to become the Reference Guide, which is oriented around bullet points and lists rather than prose. This transition was incomplete. At nearly the same time, Robert, Dag, and Stefan wrote a tutorial as part of the SciPy proceedings. It was felt that the content there was cleaner and more up to date than anything else, and this became the basis for the "Getting Started" and "Tutorials" sections. However, it simply doesn't have as much content as the old documentation used to. Eventually, it seems all of the old users manual could be whittled down into independent tutorial topics. Much discussion of what we'd like to see is at http://www.mail-archive.com/cython-dev@codespeak.net/msg06945.html There is currently a huge amount of redundancy, but no one section has it all. Also, we should go through the wiki enhancement proposal list and make sure to transfer the (done) ones into the user manual. 
Cython-0.26.1/docs/sphinxext/0000775000175000017500000000000013151203436016610 5ustar stefanstefan00000000000000Cython-0.26.1/docs/sphinxext/ipython_console_highlighting.py0000664000175000017500000000532412542002467025133 0ustar stefanstefan00000000000000from pygments.lexer import Lexer, do_insertions from pygments.lexers.agile import PythonConsoleLexer, PythonLexer, \ PythonTracebackLexer from pygments.token import Comment, Generic from sphinx import highlighting import re line_re = re.compile('.*?\n') class IPythonConsoleLexer(Lexer): """ For IPython console output or doctests, such as: Tracebacks are not currently supported. .. sourcecode:: ipython In [1]: a = 'foo' In [2]: a Out[2]: 'foo' In [3]: print a foo In [4]: 1 / 0 """ name = 'IPython console session' aliases = ['ipython'] mimetypes = ['text/x-ipython-console'] input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)") output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)") continue_prompt = re.compile(" \.\.\.+:") tb_start = re.compile("\-+") def get_tokens_unprocessed(self, text): pylexer = PythonLexer(**self.options) tblexer = PythonTracebackLexer(**self.options) curcode = '' insertions = [] for match in line_re.finditer(text): line = match.group() input_prompt = self.input_prompt.match(line) continue_prompt = self.continue_prompt.match(line.rstrip()) output_prompt = self.output_prompt.match(line) if line.startswith("#"): insertions.append((len(curcode), [(0, Comment, line)])) elif input_prompt is not None: insertions.append((len(curcode), [(0, Generic.Prompt, input_prompt.group())])) curcode += line[input_prompt.end():] elif continue_prompt is not None: insertions.append((len(curcode), [(0, Generic.Prompt, continue_prompt.group())])) curcode += line[continue_prompt.end():] elif output_prompt is not None: insertions.append((len(curcode), [(0, Generic.Output, output_prompt.group())])) curcode += line[output_prompt.end():] else: if curcode: for item in do_insertions(insertions, pylexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] yield match.start(), Generic.Output, line if curcode: for item in do_insertions(insertions, pylexer.get_tokens_unprocessed(curcode)): yield item def setup(app): app.add_lexer('ipython', IPythonConsoleLexer()) Cython-0.26.1/docs/sphinxext/cython_highlighting.py0000664000175000017500000001725612542002467023232 0ustar stefanstefan00000000000000import re from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \ LexerContext, include, combined, do_insertions, bygroups, using from pygments.token import Error, Text, \ Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation from pygments.util import get_bool_opt, get_list_opt, shebang_matches from pygments import unistring as uni from sphinx import highlighting line_re = re.compile('.*?\n') class CythonLexer(RegexLexer): """ For `Cython `_ source code. 
""" name = 'Cython' aliases = ['cython', 'pyx'] filenames = ['*.pyx', '*.pxd', '*.pxi'] mimetypes = ['text/x-cython', 'application/x-cython'] tokens = { 'root': [ (r'\n', Text), (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)), (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)), (r'[^\S\n]+', Text), (r'#.*$', Comment), (r'[]{}:(),;[]', Punctuation), (r'\\\n', Text), (r'\\', Text), (r'(in|is|and|or|not)\b', Operator.Word), (r'(<)([a-zA-Z0-9.?]+)(>)', bygroups(Punctuation, Keyword.Type, Punctuation)), (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator), (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)', bygroups(Keyword, Number.Integer, Operator, Name, Operator, Name, Punctuation)), include('keywords'), (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'), (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'), (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'), (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'), (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'), include('builtins'), include('backtick'), ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), ('[uU]?"""', String, combined('stringescape', 'tdqs')), ("[uU]?'''", String, combined('stringescape', 'tsqs')), ('[uU]?"', String, combined('stringescape', 'dqs')), ("[uU]?'", String, combined('stringescape', 'sqs')), include('name'), include('numbers'), ], 'keywords': [ (r'(assert|break|by|continue|ctypedef|del|elif|else|except\??|exec|' r'finally|for|gil|global|if|include|lambda|nogil|pass|print|raise|' r'return|try|while|yield|as|with)\b', Keyword), (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc), ], 'builtins': [ (r'(? 1000: kmax = 1000 k = 0 n = 2 while k < kmax: i = 0 while i < k and n % p[i] != 0: i = i + 1 if i == k: p[k] = n k = k + 1 result.append(n) n = n + 1 return result Cython-0.26.1/docs/examples/tutorial/primes/setup.py0000664000175000017500000000016712542002467023260 0ustar stefanstefan00000000000000from distutils.core import setup from Cython.Build import cythonize setup( ext_modules=cythonize("primes.pyx"), ) Cython-0.26.1/docs/examples/tutorial/primes/primes.py0000664000175000017500000000051012542002467023407 0ustar stefanstefan00000000000000 def primes(kmax): result = [] if kmax > 1000: kmax = 1000 p = [0] * 1000 k = 0 n = 2 while k < kmax: i = 0 while i < k and n % p[i] != 0: i += 1 if i == k: p[k] = n k += 1 result.append(n) n += 1 return result Cython-0.26.1/docs/examples/tutorial/great_circle/0000775000175000017500000000000013151203436022662 5ustar stefanstefan00000000000000Cython-0.26.1/docs/examples/tutorial/great_circle/p1.py0000664000175000017500000000043612542002467023563 0ustar stefanstefan00000000000000import math def great_circle(lon1, lat1, lon2, lat2): radius = 3956 # miles x = math.pi/180.0 a = (90.0 - lat1)*x b = (90.0 - lat2)*x theta = (lon2 - lon1)*x c = math.acos(math.cos(a)*math.cos(b) + math.sin(a)*math.sin(b)*math.cos(theta)) return radius*c Cython-0.26.1/docs/examples/tutorial/great_circle/c1.pyx0000664000175000017500000000043612542002467023736 0ustar stefanstefan00000000000000import math def great_circle(lon1, lat1, lon2, lat2): radius = 3956 # miles x = math.pi/180.0 a = (90.0 - lat1)*x b = (90.0 - lat2)*x theta = (lon2 - lon1)*x c = math.acos(math.cos(a)*math.cos(b) + math.sin(a)*math.sin(b)*math.cos(theta)) return radius*c Cython-0.26.1/docs/examples/tutorial/great_circle/c2.pyx0000664000175000017500000000056112542002467023736 
0ustar stefanstefan00000000000000import math def great_circle(double lon1, double lat1, double lon2, double lat2): cdef double radius = 3956 # miles cdef double x = math.pi/180.0 cdef double a, b, theta, c a = (90.0 - lat1)*x b = (90.0 - lat2)*x theta = (lon2 - lon1)*x c = math.acos(math.cos(a)*math.cos(b) + math.sin(a)*math.sin(b)*math.cos(theta)) return radius*c Cython-0.26.1/docs/examples/Cython Magics.ipynb0000664000175000017500000002301412542002467022073 0ustar stefanstefan00000000000000{ "metadata": { "name": "Cython Magics", "signature": "sha256:c357b93e9480d6347c6677862bf43750745cef4b30129c5bc53cb879a19d4074" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "heading", "level": 1, "metadata": {}, "source": [ "Cython Magic Functions" ] }, { "cell_type": "heading", "level": 2, "metadata": {}, "source": [ "Loading the extension" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Cython has an IPython extension that contains a number of magic functions for working with Cython code. This extension can be loaded using the `%load_ext` magic as follows:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "%load_ext cython" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 1 }, { "cell_type": "heading", "level": 2, "metadata": {}, "source": [ "The %cython_inline magic" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The `%%cython_inline` magic uses `Cython.inline` to compile a Cython expression. This allows you to enter and run a function body with Cython code. Use a bare `return` statement to return values. " ] }, { "cell_type": "code", "collapsed": false, "input": [ "a = 10\n", "b = 20" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 2 }, { "cell_type": "code", "collapsed": false, "input": [ "%%cython_inline\n", "return a+b" ], "language": "python", "metadata": {}, "outputs": [ { "metadata": {}, "output_type": "pyout", "prompt_number": 3, "text": [ "30" ] } ], "prompt_number": 3 }, { "cell_type": "heading", "level": 2, "metadata": {}, "source": [ "The %cython_pyximport magic" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The `%%cython_pyximport` magic allows you to enter arbitrary Cython code into a cell. That Cython code is written as a `.pyx` file in the current working directory and then imported using `pyximport`. You have the specify the name of the module that the Code will appear in. All symbols from the module are imported automatically by the magic function." ] }, { "cell_type": "code", "collapsed": false, "input": [ "%%cython_pyximport foo\n", "def f(x):\n", " return 4.0*x" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 4 }, { "cell_type": "code", "collapsed": false, "input": [ "f(10)" ], "language": "python", "metadata": {}, "outputs": [ { "metadata": {}, "output_type": "pyout", "prompt_number": 5, "text": [ "40.0" ] } ], "prompt_number": 5 }, { "cell_type": "heading", "level": 2, "metadata": {}, "source": [ "The %cython magic" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Probably the most important magic is the `%cython` magic. This is similar to the `%%cython_pyximport` magic, but doesn't require you to specify a module name. Instead, the `%%cython` magic uses manages everything using temporary files in the `~/.cython/magic` directory. 
All of the symbols in the Cython module are imported automatically by the magic.\n", "\n", "Here is a simple example of a Black-Scholes options pricing algorithm written in Cython. Please note that this example might not compile on non-POSIX systems (e.g., Windows) because of a missing `erf` symbol." ] }, { "cell_type": "code", "collapsed": false, "input": [ "%%cython\n", "cimport cython\n", "from libc.math cimport exp, sqrt, pow, log, erf\n", "\n", "@cython.cdivision(True)\n", "cdef double std_norm_cdf_cy(double x) nogil:\n", " return 0.5*(1+erf(x/sqrt(2.0)))\n", "\n", "@cython.cdivision(True)\n", "def black_scholes_cy(double s, double k, double t, double v,\n", " double rf, double div, double cp):\n", " \"\"\"Price an option using the Black-Scholes model.\n", " \n", " s : initial stock price\n", " k : strike price\n", " t : expiration time\n", " v : volatility\n", " rf : risk-free rate\n", " div : dividend\n", " cp : +1/-1 for call/put\n", " \"\"\"\n", " cdef double d1, d2, optprice\n", " with nogil:\n", " d1 = (log(s/k)+(rf-div+0.5*pow(v,2))*t)/(v*sqrt(t))\n", " d2 = d1 - v*sqrt(t)\n", " optprice = cp*s*exp(-div*t)*std_norm_cdf_cy(cp*d1) - \\\n", " cp*k*exp(-rf*t)*std_norm_cdf_cy(cp*d2)\n", " return optprice" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 6 }, { "cell_type": "code", "collapsed": false, "input": [ "black_scholes_cy(100.0, 100.0, 1.0, 0.3, 0.03, 0.0, -1)" ], "language": "python", "metadata": {}, "outputs": [ { "metadata": {}, "output_type": "pyout", "prompt_number": 7, "text": [ "10.327861752731728" ] } ], "prompt_number": 7 }, { "cell_type": "markdown", "metadata": {}, "source": [ "For comparison, the same code is implemented here in pure python." ] }, { "cell_type": "code", "collapsed": false, "input": [ "from math import exp, sqrt, pow, log, erf\n", "\n", "def std_norm_cdf_py(x):\n", " return 0.5*(1+erf(x/sqrt(2.0)))\n", "\n", "def black_scholes_py(s, k, t, v, rf, div, cp):\n", " \"\"\"Price an option using the Black-Scholes model.\n", " \n", " s : initial stock price\n", " k : strike price\n", " t : expiration time\n", " v : volatility\n", " rf : risk-free rate\n", " div : dividend\n", " cp : +1/-1 for call/put\n", " \"\"\"\n", " d1 = (log(s/k)+(rf-div+0.5*pow(v,2))*t)/(v*sqrt(t))\n", " d2 = d1 - v*sqrt(t)\n", " optprice = cp*s*exp(-div*t)*std_norm_cdf_py(cp*d1) - \\\n", " cp*k*exp(-rf*t)*std_norm_cdf_py(cp*d2)\n", " return optprice" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 8 }, { "cell_type": "code", "collapsed": false, "input": [ "black_scholes_py(100.0, 100.0, 1.0, 0.3, 0.03, 0.0, -1)" ], "language": "python", "metadata": {}, "outputs": [ { "metadata": {}, "output_type": "pyout", "prompt_number": 9, "text": [ "10.327861752731728" ] } ], "prompt_number": 9 }, { "cell_type": "markdown", "metadata": {}, "source": [ "Below we see the runtime of the two functions: the Cython version is nearly a factor of 10 faster." 
] }, { "cell_type": "code", "collapsed": false, "input": [ "%timeit black_scholes_cy(100.0, 100.0, 1.0, 0.3, 0.03, 0.0, -1)" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "1000000 loops, best of 3: 319 ns per loop\n" ] } ], "prompt_number": 10 }, { "cell_type": "code", "collapsed": false, "input": [ "%timeit black_scholes_py(100.0, 100.0, 1.0, 0.3, 0.03, 0.0, -1)" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "100000 loops, best of 3: 2.28 \u00b5s per loop\n" ] } ], "prompt_number": 11 }, { "cell_type": "heading", "level": 2, "metadata": {}, "source": [ "External libraries" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Cython allows you to specify additional libraries to be linked with your extension, you can do so with the `-l` flag (also spelled `--lib`). Note that this flag can be passed more than once to specify multiple libraries, such as `-lm -llib2 --lib lib3`. Here's a simple example of how to access the system math library:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "%%cython -lm\n", "from libc.math cimport sin\n", "print 'sin(1)=', sin(1)" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "sin(1)= 0.841470984808\n" ] } ], "prompt_number": 12 }, { "cell_type": "markdown", "metadata": {}, "source": [ "You can similarly use the `-I/--include` flag to add include directories to the search path, and `-c/--compile-args` to add extra flags that are passed to Cython via the `extra_compile_args` of the distutils `Extension` class. Please see [the Cython docs on C library usage](http://docs.cython.org/src/tutorial/clibraries.html) for more details on the use of these flags." ] } ], "metadata": {} } ] } Cython-0.26.1/docs/README0000664000175000017500000000070413143605603015442 0ustar stefanstefan00000000000000Cython's entire documentation suite is currently being overhauled. For the time being, I'll use this page to post notes. The previous Cython documentation files are hosted at https://cython.readthedocs.io/en/latest/ Notes ======= 1) Some css work should definitely be done. 2) Use local 'top-of-page' contents rather than the sidebar, imo. 3) Provide a link from each (sub)section to the TOC of the page. 4) Fix cython highlighter for cdef blocks Cython-0.26.1/docs/conf.py0000664000175000017500000003357613023021033016060 0ustar stefanstefan00000000000000# -*- coding: utf-8 -*- # # Cython documentation build configuration file, created by # sphinx-quickstart on Sun Jun 29 13:36:38 2014. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os, os.path, re import itertools import datetime YEAR = datetime.date.today().strftime('%Y') # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
#sys.path.insert(0, os.path.abspath('.')) sys.path.append(os.path.abspath('sphinxext')) # Import support for ipython console session syntax highlighting (lives # in the sphinxext directory defined above) import ipython_console_highlighting # -- General configuration ----------------------------------------------------- # Use cython as the default syntax highlighting language, as python is a subset # this does the right thing highlight_language = 'cython' # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'ipython_console_highlighting', 'cython_highlighting', 'sphinx.ext.pngmath', 'sphinx.ext.todo', 'sphinx.ext.intersphinx' ] try: import rst2pdf except ImportError: pass else: extensions.append('rst2pdf.pdfbuilder') # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Cython' authors = 'Stefan Behnel, Robert Bradshaw, Dag Sverre Seljebotn, Greg Ewing, William Stein, Gabriel Gellner, et al.' copyright = '%s, %s' % (YEAR, authors) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '0.15' try: _match_version = re.compile(r'^\s*_*version\s*_*\s*=\s*["\']([^"\']+)["\'].*').match with open(os.path.join(os.path.dirname(__file__), '..', 'Cython', 'Shadow.py')) as _f: for line in itertools.islice(_f, 5): # assume version comes early enough _m = _match_version(line) if _m: release = _m.group(1) break else: print("FAILED TO PARSE PROJECT VERSION !") except: pass # The short X.Y version. version = re.sub('^([0-9]+[.][0-9]+).*', '\g<1>', release) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['py*', 'build', 'BUILD', 'TEST_TMP'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # todo todo_include_todos = True # intersphinx for standard :keyword:s (def, for, etc.) intersphinx_mapping = {'python': ('http://docs.python.org/3/', None)} # If true, keep warnings as "system message" paragraphs in the built documents. 
#keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' try: import sphinx if os.path.isdir(os.path.join(os.path.dirname(sphinx.__file__), 'themes', 'nature')): html_theme = 'nature' except (ImportError, AttributeError): pass # use default theme # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = "_static/cythonlogo.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = "_static/favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. html_domain_indices = False # If false, no index is generated. html_use_index = False # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Cythondoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
_stdauthor = r'Stefan Behnel, Robert Bradshaw, William Stein\\ Gary Furnish, Dag Seljebotn, Greg Ewing\\ Gabriel Gellner, editor' latex_documents = [ ('src/reference/index', 'reference.tex', 'Cython Reference Guide', _stdauthor, 'manual'), ('src/tutorial/index', 'tutorial.tex', 'Cython Tutorial', _stdauthor, 'manual') ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'cython', u'Cython Documentation', [authors], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Cython', u'Cython Documentation', authors, 'Cython', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'Cython' epub_author = authors epub_publisher = u'' epub_copyright = copyright # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Fix unsupported image types using the PIL. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # If 'no', URL addresses will not be shown. #epub_show_urls = 'inline' # If false, no index is generated. 
#epub_use_index = True # -- Options for PDF output -------------------------------------------------- # Grouping the document tree into PDF files. List of tuples # (source start file, target name, title, author, options). # # If there is more than one author, separate them with \\. # For example: r'Guido van Rossum\\Fred L. Drake, Jr., editor' # # The options element is a dictionary that lets you override # this config per-document. # For example, # ('index', u'MyProject', u'My Project', u'Author Name', # dict(pdf_compressed = True)) # would mean that specific document would be compressed # regardless of the global pdf_compressed setting. pdf_documents = [ ('index', project, project, authors.replace(', ', '\\\\')), ] # A comma-separated list of custom stylesheets. Example: pdf_stylesheets = ['sphinx','kerning','a4'] # A list of folders to search for stylesheets. Example: pdf_style_path = ['.', '_styles'] # Create a compressed PDF # Use True/False or 1/0 # Example: compressed=True pdf_compressed = True # A colon-separated list of folders to search for fonts. Example: # pdf_font_path = ['/usr/share/fonts', '/usr/share/texmf-dist/fonts/'] # Language to be used for hyphenation support #pdf_language = "en_US" # Mode for literal blocks wider than the frame. Can be # overflow, shrink or truncate pdf_fit_mode = "shrink" # Section level that forces a break page. # For example: 1 means top-level sections start in a new page # 0 means disabled #pdf_break_level = 0 # When a section starts in a new page, force it to be 'even', 'odd', # or just use 'any' #pdf_breakside = 'any' # Insert footnotes where they are defined instead of # at the end. #pdf_inline_footnotes = True # verbosity level. 0 1 or 2 #pdf_verbosity = 0 # If false, no index is generated. pdf_use_index = False # If false, no modindex is generated. pdf_use_modindex = False # If false, no coverpage is generated. #pdf_use_coverpage = True # Name of the cover page template to use #pdf_cover_template = 'sphinxcover.tmpl' # Documents to append as an appendix to all manuals. #pdf_appendices = [] # Enable experimental feature to split table cells. Use it # if you get "DelayedTable too big" errors #pdf_splittables = False # Set the default DPI for images #pdf_default_dpi = 72 # Enable rst2pdf extension modules (default is only vectorpdf) # you need vectorpdf if you want to use sphinx's graphviz support #pdf_extensions = ['vectorpdf'] # Page template name for "regular" pages #pdf_page_template = 'cutePage' # Show Table Of Contents at the beginning? pdf_use_toc = False # How many levels deep should the table of contents be? 
pdf_toc_depth = 9999 # Add section number to section references pdf_use_numbered_links = False # Background images fitting mode pdf_fit_background_mode = 'scale' Cython-0.26.1/docs/_templates/0000775000175000017500000000000013151203436016713 5ustar stefanstefan00000000000000Cython-0.26.1/docs/_templates/layout.html0000664000175000017500000000074412542002467021127 0ustar stefanstefan00000000000000{% extends "!layout.html" %} {% block footer %} {{ super() }} {% endblock %} Cython-0.26.1/docs/src/0000775000175000017500000000000013151203436015345 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/quickstart/0000775000175000017500000000000013151203436017537 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/quickstart/cython_in_jupyter.ipynb0000664000175000017500000007567613143605603024406 0ustar stefanstefan00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Installation\n", "\n", "```pip install cython```\n", "\n", "# Using inside Jupyter notebook\n", "\n", "Load th cythonmagic extension." ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [], "source": [ "%load_ext cython" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Then, simply use the magic function to start writing cython code." ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "45\n" ] } ], "source": [ "%%cython\n", "\n", "cdef int a = 0\n", "for i in range(10):\n", " a += i\n", "print(a)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Add `--annotate` or `-a` for showing a code analysis of the compiled code" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "45\n" ] }, { "data": { "text/html": [ "\n", "\n", "\n", "\n", " \n", " Cython: _cython_magic_6ba45f17d130910db2606828f4326b2d.pyx\n", " \n", " \n", "\n", "\n", "

Generated by Cython 0.25.2

\n", "

\n", " Yellow lines hint at Python interaction.
\n", " Click on a line that starts with a \"+\" to see the C code that Cython generated for it.\n", "

\n", "
 1: 
\n", "
+2: cdef int a = 0
\n", "
  __pyx_v_46_cython_magic_6ba45f17d130910db2606828f4326b2d_a = 0;\n",
       "
+3: for i in range(10):
\n", "
  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_range, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "  __Pyx_GOTREF(__pyx_t_1);\n",
       "  if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) {\n",
       "    __pyx_t_2 = __pyx_t_1; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0;\n",
       "    __pyx_t_4 = NULL;\n",
       "  } else {\n",
       "    __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "    __Pyx_GOTREF(__pyx_t_2);\n",
       "    __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "  }\n",
       "  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n",
       "  for (;;) {\n",
       "    if (likely(!__pyx_t_4)) {\n",
       "      if (likely(PyList_CheckExact(__pyx_t_2))) {\n",
       "        if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break;\n",
       "        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n",
       "        __pyx_t_1 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "        #else\n",
       "        __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "        __Pyx_GOTREF(__pyx_t_1);\n",
       "        #endif\n",
       "      } else {\n",
       "        if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_2)) break;\n",
       "        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n",
       "        __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "        #else\n",
       "        __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "        __Pyx_GOTREF(__pyx_t_1);\n",
       "        #endif\n",
       "      }\n",
       "    } else {\n",
       "      __pyx_t_1 = __pyx_t_4(__pyx_t_2);\n",
       "      if (unlikely(!__pyx_t_1)) {\n",
       "        PyObject* exc_type = PyErr_Occurred();\n",
       "        if (exc_type) {\n",
       "          if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();\n",
       "          else __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "        }\n",
       "        break;\n",
       "      }\n",
       "      __Pyx_GOTREF(__pyx_t_1);\n",
       "    }\n",
       "    if (PyDict_SetItem(__pyx_d, __pyx_n_s_i, __pyx_t_1) < 0) __PYX_ERR(0, 3, __pyx_L1_error)\n",
       "    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n",
       "/* … */\n",
       "  }\n",
       "  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n",
       "
+4:     a += i
\n", "
    __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_46_cython_magic_6ba45f17d130910db2606828f4326b2d_a); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error)\n",
       "    __Pyx_GOTREF(__pyx_t_1);\n",
       "    __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_i); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4, __pyx_L1_error)\n",
       "    __Pyx_GOTREF(__pyx_t_5);\n",
       "    __pyx_t_6 = PyNumber_InPlaceAdd(__pyx_t_1, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 4, __pyx_L1_error)\n",
       "    __Pyx_GOTREF(__pyx_t_6);\n",
       "    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n",
       "    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n",
       "    __pyx_t_7 = __Pyx_PyInt_As_int(__pyx_t_6); if (unlikely((__pyx_t_7 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 4, __pyx_L1_error)\n",
       "    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n",
       "    __pyx_v_46_cython_magic_6ba45f17d130910db2606828f4326b2d_a = __pyx_t_7;\n",
       "
+5: print(a)
\n", "
  __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_46_cython_magic_6ba45f17d130910db2606828f4326b2d_a); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error)\n",
       "  __Pyx_GOTREF(__pyx_t_2);\n",
       "  __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5, __pyx_L1_error)\n",
       "  __Pyx_GOTREF(__pyx_t_6);\n",
       "  __Pyx_GIVEREF(__pyx_t_2);\n",
       "  PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_2);\n",
       "  __pyx_t_2 = 0;\n",
       "  __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_print, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error)\n",
       "  __Pyx_GOTREF(__pyx_t_2);\n",
       "  __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n",
       "  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n",
       "
" ], "text/plain": [ "" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%%cython --annotate\n", "\n", "cdef int a = 0\n", "for i in range(10):\n", " a += i\n", "print(a)" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python [conda env:py3]", "language": "python", "name": "conda-env-py3-py" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.2" } }, "nbformat": 4, "nbformat_minor": 1 } Cython-0.26.1/docs/src/quickstart/overview.rst0000664000175000017500000000606413143605603022150 0ustar stefanstefan00000000000000Cython - an overview ==================== [Cython] is a programming language that makes writing C extensions for the Python language as easy as Python itself. It aims to become a superset of the [Python]_ language which gives it high-level, object-oriented, functional, and dynamic programming. Its main feature on top of these is support for optional static type declarations as part of the language. The source code gets translated into optimized C/C++ code and compiled as Python extension modules. This allows for both very fast program execution and tight integration with external C libraries, while keeping up the high programmer productivity for which the Python language is well known. The primary Python execution environment is commonly referred to as CPython, as it is written in C. Other major implementations use Java (Jython [Jython]_), C# (IronPython [IronPython]_) and Python itself (PyPy [PyPy]_). Written in C, CPython has been conducive to wrapping many external libraries that interface through the C language. It has, however, remained non trivial to write the necessary glue code in C, especially for programmers who are more fluent in a high-level language like Python than in a close-to-the-metal language like C. Originally based on the well-known Pyrex [Pyrex]_, the Cython project has approached this problem by means of a source code compiler that translates Python code to equivalent C code. This code is executed within the CPython runtime environment, but at the speed of compiled C and with the ability to call directly into C libraries. At the same time, it keeps the original interface of the Python source code, which makes it directly usable from Python code. These two-fold characteristics enable Cython's two major use cases: extending the CPython interpreter with fast binary modules, and interfacing Python code with external C libraries. While Cython can compile (most) regular Python code, the generated C code usually gains major (and sometime impressive) speed improvements from optional static type declarations for both Python and C types. These allow Cython to assign C semantics to parts of the code, and to translate them into very efficient C code. Type declarations can therefore be used for two purposes: for moving code sections from dynamic Python semantics into static-and-fast C semantics, but also for directly manipulating types defined in external libraries. Cython thus merges the two worlds into a very broadly applicable programming language. .. [Cython] G. Ewing, R. W. Bradshaw, S. Behnel, D. S. Seljebotn et al., The Cython compiler, http://cython.org. .. [IronPython] Jim Hugunin et al., http://www.codeplex.com/IronPython. .. [Jython] J. Huginin, B. Warsaw, F. 
Bock, et al., Jython: Python for the Java platform, http://www.jython.org. .. [PyPy] The PyPy Group, PyPy: a Python implementation written in Python, http://pypy.org. .. [Pyrex] G. Ewing, Pyrex: C-Extensions for Python, http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ .. [Python] G. van Rossum et al., The Python programming language, http://python.org.
Cython-0.26.1/docs/src/quickstart/htmlreport.png0000664000175000017500000010655012542002467022450 0ustar stefanstefan00000000000000[binary PNG image data omitted]
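The speed gain that the overview attributes to optional static type declarations is what the Demos/integrate0.py and Demos/integrate2.pyx files earlier in this archive demonstrate. A minimal sketch of that progression (illustrative only, mirroring those demos rather than adding anything new):

# Plain Python: every value stays a dynamically typed Python object.
def integrate_f_py(a, b, N):
    s = 0.0
    dx = (b - a) / N
    for i in range(N):
        x = a + i * dx
        s += x ** 2 - x
    return s * dx

# Cython: the same function with C type declarations, so the loop body
# compiles down to plain C arithmetic instead of Python object operations.
def integrate_f(double a, double b, int N):
    cdef int i
    cdef double x, s = 0.0, dx = (b - a) / N
    for i in range(N):
        x = a + i * dx
        s += x ** 2 - x
    return s * dx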
f0÷ž£úþÝ´‡nge<Èy”ÉÜ5xö,ïå˼‹KYpèЛüü|CÃ7OŸXýο<çqVV惇î ¢D ê%‡õ[¢†ÖI0I>~ü˜q?©Dƒ¢޾y½Æ8k™\Ë|ôß Ž‹¾OûO¶i=`Gγ|¢5Öž==neQRZºH×gŸS¬]ÀÝ#—o:—²úȵ¥úOŸçÚ;Ø ÝM‘˜8ì¸r%ÈÐ^OÇ~=úßQ—ƒ/_¾4wþ=â¼?##cüß½ùï–ƒì¶\™~Á?tí7B£‘c…ö¿T]¿Z´vý¯ ÇûËëîÖ;~ÂÎꔃÛY—Kç._ºàqñÜÙÓN[÷þVÑPÓ1v½EÔ*óˆuQÚg’@ <=Îó§öööV«æ‡‡‡™"gkk NPPP`khPÁ 䆢ÆÀ =ztê”#˜Á®vmƒƒƒ-gOHHXþy[Þ´ âBvÖÃ'9Ù0öúåééùÖÖ¯ÁNzËœ_?V‡…Eù³§oé¢ búûH„?-ò—m>MLLüüóÏ…Ü ™HH«º‰'¶­¸d÷ÕÔÔ:vìØ©S§íÛ·ãR6ÿÚ«"i[ª¾Ì<›‡çÏŸ+**¢ž»víª©©Y|JÛIØôù4“TÛ˜³³³1ßÞ¿?99™Ý.++Ã%<ÓÓÓÁðW=Ò¿ë¹]åÎjùr9Ï‹òò kü&B^^ž›››¶¶6êyÏž=VVVìVJ£§Aþé¦gãõÏ܈¾ý<üæS—°‡ *¶±-"ŒÎÄfegz\r¯.-еÙ|Eß~kNNξ+ÐϘßý'Vdeeíµ[Ž_-Ûå™™™Z6Ëà{Àú599†g£Ê­ê‚”Ø­­H¡2&MV «Q×~X°‚Úײ1ƒSŽ'U¹©hÁPE#u}o/oÐà€¨È«ñ±7â£ðy=Ô××s¸’¡òÁËG¯9ù·e”¦s"h¨Zëo¡ðûµk׌G~6NÀ2–ànÔ¶…ÂoiµrøÏQýàÞ­Œ‡we§?}ò–þĉҔä8\\Ê ò Ž©Ø?ˆ/ŽŒ(ywü°ôÁýÆýôÉcD̨<^PÇ´ÀØØxÍš5Bî÷” :&êE%444~øáÆÿرc#FŒÈ¬Ç—Í¿öªHÚ–ª/ZÀæaîܹÊÊÊÏ*±hÑ"™ó)m'aÓçÓLü6€)±°¥¥¥Bw Àà‰¿ŠŠŠ ü@-X³zç³ìð¯Z°õŸÇ†«²D‚´@ª, MAXO‚`í.ˆzÀ´ ’$XÛ›89;¼Znà•þ¤àVVÞ…ˆ MçZΉúçST,®æååZY[T‹ì³\¬{r3ú–†º×.ë>Üi5ÿÁƒ;ŽÏÃïv˹`¦êsïÝ»·éÈTüûÛšî<íÄËËKÃrñ­[·ÔŽÍA:Û-*RÛ~|neúó‘4V赩øÝbÕãÇgnr5,DÚçýÞÎÎV˜œ:Úþâùþ´`»KXX(@ltxÒ˜´”x2’’·FëÄèå–;NÄîtˆgd‡C†¥¿s;?4ôí‚ÀÀÒ¼¼Š½/ Nœx»s §÷´ ''DAÄŠçÄЂ£GöëׯE‹°|±±±Œ§à«3Dß²‡žž^÷îݱ¤[ºt©àijÁ™÷Ï?ÿ<{ö¬› €IpÅŠŸTbåÊ•¸dü1EnÙ²+E¬Õ Ï;wîL›6íã?nÕªÕäÉ“Ù=XÁ,‰D‚OOÏo¾ù…™†D‹)˜~ëÖ­Ùb"‘6mÚ0î‘#Gúûû3n8ääädóª"©Ê‚ðæææýû÷oÙ²% bkk;`À&:zk~° ëÖ­2Ó‹Uš¨jqõŒ²/Y²ÍÝ£GC‡ V;ÕU6“` F‚ñ3=z´hÎN"®€¢´@´õ…ÒçÓLüI«)>!Q^¬²$ÐLŒÓÿ’2Àð7ÁJE ÞüûïwŠ~=F›KE ¤Ê’ÐäêêÊÖ³(ÀŸNŸ>½»gΜaé”8ô ggç;wîÝ»700e¬£¼¼M¯¥¥…0œÔ%Ì™-Ñ÷?x×#*Ë=2ãŸË·œN:|馵ß]Ëëy¯òŒM d¤h€ÿ(‡††hX-º{÷î–£âWåèt rói·oßÞ|¤ò÷ù›7on<ü'Ù¢C?ÄÄĨ›/v·|bžÎ×qqq«ŒGj›Ì¦‚%0éo92­BãÑ¿ð»õX…ö­òà³v~fÀŸ|°FÝÜüË ,,,0ì_å½”Š[€€ @R“ãá|³aBB쀿ÉÙ‡ü}/ó?røv·@~Thh¨ÅÔ‘ø5ö5Ë À 0)hvëvùòeæ_æWêÝ‚ô{ßÑ‚ôôW‡½aOܼY”—›Ÿÿ*$àèÑòôôÂìì‚—/^i•#Êc‰´(V3ùùùKÆ ã³[€Ë©S§>®fI555Ñ%%%°^X[ ¹Ù[·nE\¤ÎôÇlÛ¶ñLj4iº ÂoÞ¼™ñüúë¯1}`E…åŦM›”””Dg|΂ˆÓ‹‚ óíòåËÅ­&YŸÙ³g#YhG®455qÉø£\¹¹¹ìÒVJ6¡*’ª,?sæLrô·ƒ¶oß^QQ‘½Äb— ¶oß¾‰'¢ba/aã×®]+ªZ\=c2-Õ  ‚ÕÎGu•Í$˜‡víÚ ÒÎ*âì$â (J 8[_0}>ÍİFÈJ ‡äMTÂH¾‰€õÌÈÃY 7|jä&BIÙ›AнÿG*Z U–„¦ ýû÷£zÅi¹xñ¢µµõ«J€½yxxTéogg—_ ÐSQZàëëkii hz77·ú¥.§0K¯û'øÈå[ã‹iÚg“ÿñ¼åpå¾[xº†]þµ±;.#-HJŒ ˜¯ù ÖîËMFÆÇÇ/6zãÆeF#“’’VŽILL\ª_á¿@û»¨¨¨ÙûFFFÎÚûlùokzHµ[ zx~DDĬ})(în±Xot­0ƒ®†£+tƒ¥\hð|vX(!äÒÒ¦·™šš¢QÍḬ̀6*,È¿s+™ôˆ¨èA39 ÿHe3PHB\dpß…s§O9ذ³bÎx\<~5˜ÿŠÌnÖIǦ g~AHÁ˜}ön‚ÑσØ0Žz·@€¼x‘ëí]bbRneU~ölÙÇ…¨È‹…>>¥ ÙÙù))ù‡«¢˜y7²Ú¼ysž´\qƒöéÓG´Ç >\ÔÍ&õé§Ÿ".{ç¾wïÞŒ©±wî9|bE(:ãsDœ–¾}û¢Sa²æ¹7Ž)+KfÖƒ#33“ñÿè£Ø¹ŽfÍšÉæ/TER•áŸ>}ʆºd£#Û©©©ŒÖë~QÕâêºØ‡jÁj磺ÊfÌ(ײeËžWbéÒ¥œa8;‰¸ŠÒÎÖLŸO3I d +%ÑGÎXnaÆ`1Ty¶ 6:üûï¿Cqð wM--øøg·^Ãm¥¥ü³$4©«« R%vÈ’öË wÎÁÁ°|jGæ!ä²ýceàoË•kiiöVp÷vŠTqýƒ¯ oê›íå•y1"óLÈý‰ë¬4Mœâb®§&Ç#q8®_ ¹qs¿¼×Ì…ÿV»[pìaW®\9öÇÏ '`Î`œ ž30úé+&Lpp°ôg ÞÞDpq)ÉËÍÍË}•—›ï^Q||±¿)TÍ$`ûYÚ$ 0¡©_ò‘Èð1cFã·‚“ôî׿[ëWÿowýuù¿¡IÅñ…×RK.G¾wá!ôz˜\ŠÊZw,rÕ‘ˆ­6±¦ç¿™¥¯kbrÅ?1!šùösü@ËÌí³?u6pcv·àèøï}||vµm ÖÏrÃï?g™åÎí666frƒ¥Ú-z@U˜œüv{„àIN!Ë àyêTYFFþÿ{@±â½7ùÓ,ØýXÑKÃb+§òòò‚}™°úéСCii©[PÅ–-[Ø “'OVQQad¢·;wî|þüyÌž¨Þ¹sçò·7â´Ì›7ý¿¤¤mÔµkWÎb ¦óÃ?°O"ìÞ½û§Ÿ~bü= ÂY‡ƒ=¿&•¿¸*âYž¶sâŸþyûöm¬³ÑcQ¢ªÅÕó¶mÛ˜gN“Ô,-ÊÃòåËÑú………çÎûßÿþÇ´ˆPÎNÂY@Ní¢­/”>Ÿf’ 2ìr–y]Ú v¼z¤™OŠÚ =±:—,”ùÌgå-ÿg``u¹iª¶3'Ì9*y'4;Ÿ¸»»cÂdÎXYY]¼xQ²•g @A@4Á +;;ÛÁÁ¡0ƒc–Gnß¹õâÅóüü¼—/_$§$›¨oßfes줣­ 3ýuFá×Â&oþüÆ 5GamºAo&lÿ¯«» ýŽ^Õ¿Gœ4ù7jtÔu0///ÑÔð –0I¥OTTÔí©þ{­ªßwyW옜¹úPýdœŠuìRÓð]ö ªÇB†(®Ð°²sñ ½z-äjøYÏ =‹ £—üóý\ÓŸ™ëugÎ'òzÉ´ƒƒÙœIGûýU«gL©àè£GûÑpH0p‚S§N|Ó>–s'‡‡‡¯ØAæ×ARR RS zsáBéãÇoGEpr*{öô•ßÛwJ÷:#öòÀü±¸KÁ'”••@L˜3gÎÀ€1>‚nA˜÷—-[Ö¾pà’=N¼iÓ&اN:1ž°Ÿþy³fÍ`'>ÌŸˆÓâää4pàÀæÍ›óÍ7èÏœÅL}{ìØ±m*1nÜ8vÇŠ“NÇJ¨«« ¾Ÿ€¿¿¸*âYž¶S¡¶¶6óÀ·ß~‹…š¨jqõŒ&^¼x1ÊŽFz¡ú´@(˜¸?ýôSæýX`pö"ÎNÂY@Ní¢­/”>Ÿf’ [ ,ìQÊäJ°«[Lì8ªß×…ÅåtøÑ¡ÝΦ§3 Šÿ»ñs÷u«ÿø§&w ;WWWÔ3˜7ì={€­Œ¿4*Ë¢$ø£e™'0Ínß¾]ˆ ˜w$ÆÅÅ5=P+ÁA.§Ì™99;ù‡††,]®lh|ÈÚÖ‚aqq±ÿVóåÇ‘×&nè«´[.11±¾‰f°æÀ4p‚e{+Þëg`§Zï¯8dh}à=c·Dãó)»ìã^Ýd½Ã6fæ·1+­¾žeô­¢ñÏ ÍG­´°íâÔÞßÎÖw=㖚ϓžsD_4›[É Æ 
¾pá®¶mÁÌäççÇü~ÛÝ]p_¸-çþN°~H÷˜èë²½ü˜aåÉIÏž½zþüUÎãü#GÊâ ÓÒ r_æ=ÉÉ51yÍçåDz¡Êç¿W¯^mbb"ê&TY]MGµTy¨í|ÖA°„½wïÄ0{™™™eee¯_¿ÎÊÊÂ%ìÙýû÷Ù'*ëýåÇ/òŠíÎÆÏÛäÿñw'Zpè=ÞªÏDëog:˜÷ºŸžóïûð–ÃÇëèè¼×ÓBll à ¬l,lO_½vÅ¿ÕÿTRLÅʾlÕª}òuðM„ß×öŠ_¨1ÆÇÇç¨ó¾†ð‘$†ìµ¬p軹Æ;l¢¶ZÇÌÓ w(l©qø2ãð%Fá …)잪8E#àÛy¦›4ͼ<Ý£#¯ñß-bà¨móÑ•œ@à÷ð/_Vp‚9“øs ŸJb˜‘QyXhA^ÞËÜ—/Ÿ=Ë}ð Ï×·èòå"®O%ݬÁO%UI °âLJJuª¬®¦£Zª<Ôv>ë Xª‚¤§§úÂDÝRj ŸJªØ€yó¦°¸¸Läé‰K °C}‚`Y[[³‡I3ÐÓ×Vß¡öo|X9.6jÜߟ^»v­nv Õ†ùøxÛŸ7i ßNô ð—Þ÷І–nÑûz¦þ*£+ëF)é‡)–× š¼;ð€ ;ýÛæùÍ<³_êr°»è+Õ“œ{`¢¿Òr>V60(?®ÈÇ»à¿Ï#Õþ‡•ÓÙ†'j$ñz¡R}U¹ÁÒ‚--­]»v:uª¤¤¤ô´¸¸ØÕkW€0ï«ZIˆ»îÓºÙ-38ãeÕp>©zíú¨Å‡-í\Ϲ¹žwsÝgd5hºÞ¨åV3v_žµÏ_aÐäÞrÏ}¯lûå_:³×j[Y[x{yÄD…KµUÀHjrüÅ“è‹fs&%$$X­œ'ú Nð÷·]¯] ŽŠ¼ŠðÒÑ‚;©÷ïÞ|XÉ ²2<ÊJü(3çqÖ“œl!'þz”ž•ùýέúª)ÐðQ/´ n„¦ éեĄ˜‰þWÛííëës)À©p‚·Ol¦$ÜˆŠ¼~5˜ùu;~Æ‘‘ õ¿UÐí/¯3pºîðyºÓWikë;:‰À`ó‘‘ '@úq1fúöööË?oËù»r@û _0„Œç³!Á4ü[É,3ÀÒ?ýÁØûÌŒû0üÙ™+$ëd>„'x ؃wœÑ‘IhÑ¢ÌKâj»½¯\½Ô 8ûòãäÄXór‚¸˜ëa!Þ—/ºuqv²wr<áâìpÞÍÕÇÛãjh`|lÿ“†¢¯j†¨ˆÕ‡í¿~-*ÂBƒ +òzX ¿wB|Ttäµ”$)^ŠP³ã–Æ$@´ iIõ¥ÆhA“Á °,æ?*ò*Ìöµ°+ø…©Nˆ‹„¿ ÷ê€\Ó¸%!i‚S?€á_}‘L ˆ„½§Ä‘hAÓ·BÓÄ5šH«u Z@BBBBBBB´€„„„„„„„h ÑÞ´ FN¨’¼§ò-ø€@ B“Ç[Z@›'$Mêém¡)<†NB"Û$I´€¤ uwª’¦ ôj’ ee¥$$[ˆ4ƒcT$Õ¢˜1CƒýIH·- !Z@BB´€„„h Ñ¢$$D HˆÔ-8p ”SNž¸cq,Ëêxº?™’÷‘hk¿æ”snOžxqÊñ™?M$D Hš4-02Ê5?òÈÅ9ÙË3<À/øJ` ÿcöl­}{‰ÔÏnH€µÕÖüWr…¢¿`ãµJ Ä™ó¡~>—:vìèìt²Î ›“ã hô÷õ$/ƒ %g`¨objhwÒnþâE‹–-!`8æJÇSöGŽš™3F†Ý‚´”dg§§¬ùW¸™štèPibBê‘#ÒÒ!ÙæèË—.ôíÛGÈݸi•ÅÈÃ[µjÕ±C‡éÉ_¿Ò0ilÕÅ'Ö/r£Ù~¾@yñеk@N ¼|9ÛÏGþö;Ñ’* QÎ÷Â_FZ ­]äãuÕÄä9x€ù‘ÇŒù77BÀÜ_€a8i§%®Òœóä5B vïÚþÛ˜_ÙK¯ËgöêÙ³Y³fíÛ·ÿu´œ©±~mlhij—AÖ®_¯´Xù¸µ…ŽîÁ“ö¶ó•.ZºdÑ’ NpÊÙÁÐØÀî¤Í\¥6m”–è镯Fß:jž`m˘[›—‰7R™û p LÝÓ‚];ÕÌŸ+änÜ´ÄÚêXB\$’Ò|P„¦F ¶ªª‚é²ý|ÁÂE+1…¯®àl?Ÿ·PI}Çv¢$|îI^ qøÀ_FZ`föÄý| Gͼ‚Ûíl¼«KCìlïyy†¿§´`ìØßvkì`/‡ÿ2LAaúiÇà _÷³ûöî:ôû·mš{vN˜0Žl¼ ‚vY¼t™’²²•õq}Ý“vó””/[êäìhzØsåœó—,_.tì€-°´Ì ¾‡íK0“"¸ýúxe0TàÌé'Q·ÅÑ‚}{÷ôîýióæÍtñÂYÆ“…è%㣦ªÒ¥Kç6mZÏž¥œËi9~ÿmÌ‘L…Ül€¤1sg\‰¹sfã’ñOMŽ_ºtq§N>ù¤ýŽíªŒg ¿÷¸q¿·mÛ¶eË–cÆŒfWá‚Y-ˆ-<%1!ZaÆt³k×.(2«Ë_»Î;·nÝzêŸSâc#DãÞˆFn%$1`À—È0²­}p¯„ºöóO&ÆúlÄà ¿îݺM“ÿSWçë©£½Ÿ%aRµ/ã³WkwŸ>}Z´h,9;Ùëéüì³~L¬KçøÓÔÌʵk.YÂöóù */_Æöspß5¯:v@´€Dœ¤$Å^³=nøÈ~áò¥ëN§R«ï`+8(P[»(( ÈìðSö†BÅQDÿ`8¤¢plÛº¹gÏ6E¶ÖŒ' ÁðâüS±Pi~ÇŽ[µj5~Üï>^ÿ@¯?§üѺu«Î:­[»Š33½zõtq²g/[´hîëíÁiK\ääF¶iÓc~øða˜_ÏÉ“'1ZÖ®YUe®˜¤0Å—™,ZºdþâE˜1 8i‹IÓä°¡­âüUrq´ âúKYŒÕ¿pþQZJ²ž^iJRŠ¥Å+ÆSG§ ´ éF*¢fV-,$àF|Ô¦ë¿ò-ŸÝ\Žýý7Øfìýª•ËECbèÂÇD… ¹ÙË—-A\¤~5ø×_åV,_Êø£+Ž=*48á—.YÌx~ùåö'm`¤c£Ã—(/ÂrA”pDœž‚èàÁá!ðVÝæMFËUd„`ÖÌJ æ‰Æ=fn†á&!qPŸÌL`øaægI¨[[kË/¾øüfê & «nSA­öóaCø˜šÀ F%sûþ1iBÈP™­*›Úµk7eÊì%Rí1Àä¯X³zòbÑ~>gR•œ€h‰`Ôÿýwèø…»Zg |¼® >npÑ= Ì â„%†¹Þ—¯ùz‡] Dï×Õ-”–À^ž;ëâçs ÇàA%ï ˆó_±| ÖngazÁfüÅøƒõ9öÛýÂÌ,œ™Íöóùï6ÿ? 
Åüu쨙è½°~Ìðgn4Lš8žñŸ7Wñ×Ñrîg!à Uæ à ½dàe–ŠÃK–Ì[¨dqÜBßPOßP×ÊÆRqþü%+–UÉ ÄÑ‚Øè[º\ ½fp35 $à°iatÔ­¸˜›©ÉÉ·Ò’ôõKDÍFTäUvuÛ¬Y3ž´Àß÷2ãF'?™§ì0†EÝlRÝ»wG\Æíës©G·‰ 5\JóÈ'–颴€³ â´ð„ô÷fÜ(2«äØÇÛƒqƒptíÚE("/ß’ f¯¹g¸Ÿº…E72<Äz¢ŒŒjhY³z~áæÜöçÙ¾Qal0¡Kq±ª`«WÏ_´P°ŸÏY°€' Z@"*àåcÆŒÆou‚œ°»««[`büÂÒ"ÓË3üm òöºŠ¿@|}B/ºGK{OóïŽþybØÈF 0¼1c2n0¬Úw·n]Ù³„NŽ'83Ó²eKéÿÎx^À¿ÿÏàߣGw˜ÿË—ÎsTôìØ±#««Æ uUæŠhAÍÐå%Š æÿcn¦£wP[wÿ‘£fs•”ŸM–€œ=“c`PräHýÉQ·ß­×Sb¢ná/ƒøØ›a¡÷ª<[ îV½è¥à sÖlʺµ«ÿ^·FÔÍ&…Xœ‰ú³âêìøóO?¶iÓ†ÙuûðÃyfXœž"]0YÁ]@6?Œ8ÚÛ}úi¯³gœ%'îvÖeܸß1ûöícyìˆäº=fnöyÿþð™&ÿçžÝ;ÙD6¬_‹(ìÝ–šjßjà`ÎÎQZ ØÏç-\¸z-Ñ’ú{ÁâXVÈ•Š÷€xz\w;oò6|ü}C(ao"ø;Øß¥}ô¡Ð4KxÊfþÅù‹N.ï´$ø›¸›§]9Ÿwt°ÅÜñóÏ?2>GÍ÷Ý·­[·æ©E\®˜›˜ïȺWƒ(Ïš7×ÐH_sïSCÓÃF{÷ï121œ5oÞâ¥Ëd¸‰pòÄ‹[iï'9ˆ¿èÿðœÛcøÜˆOcßX «S–x#õÂùG5H Ø-œKðï¿F+êÜ-`Áª—v  ÂÔÄ 6:üVZb\ÌuÑ|ŠË°8-<á9w 0úÂB8£ ŸˆuÎÍ•ÿ¡*«ãGÁÑ%×-‚ üê«•+–õéÝ›½·zÑÝ Õe~ÄÁØXõN *9ÁªÙóç‰öóÙóç¯\»–n"Ô-ÐÕ-p?#x9^À^êÊ÷õ =awW”`°ÙÙX úØX[ôêÙ³fiA÷îÝÎuýw œOwäP«b’Û~øùx¶hÑœMm¯Ön¯Ëî` Þ—/²©uíÚ…s·@\®Þ9?– ¼Ìœ`†¢âíýÛwªéÒùkæÌ³fêèîÔØ®£{pÆìÙ‹—,A0©hAIB\ZHð}Á×1Ç ØKcãâøØ›gÏäð§Ÿ|ÒžÝ$½D0,sßÞqûl•P Ñ‘×Ú·oÏÜêt ªXºt1{}̘ÑË–*³[ ¢g :vè€årrb,ŒôÔ?§ð§â´EçFø‰ÆG^…ž-ت²é÷߯øyÁÂ]ò8‡,1þ;w¨7c”ñ™¼ä§þéåéÜÒ¼S§NUÖ-þÕ9¸ïÝö~ˆ÷,ùúëÁ‰ Ñ<«E´AkŠ0œ@aζŸOŸ5Kaö,¶Ÿ+(*®\³šŽ’ÔÛ{ Ξ¾Á2…ÿ^`p ÄüÈcï0Î'Ö¬^1pàWGÍý½ ÇŽš}5àËukWI6óü1»ýÎÇÕÊå#G wq²‡18ag5~ÜïŒÿüysääFz¸Ÿ½xá,s“óÅ_ýï]FC‡~¿oïn÷ g°ât;ã<{–ÂÐï¿cþ £½¥ _9µ½;[0)\ºè<[ .W„3 /ƒ¬^»<@cÏÎM[6híל:cÆÂ%Ê ò ûökmSÛ¢µw¼ÂŒuÖËðÞßt–€€(°œžÖÖ¹±17ù< È^ªlÙØ¶m[q—‚§åg*L<ÞÏ„ùÇÌƘñt ª€ Sœ=³]%à`MÄåEà:|²k‡:ãyÔüpß¾}š5k£»g÷Nþ´@œž´àF|ôôéÓZ·n‚ 4oÞœñ¿™zcÛÖÍÌ ÿ¯¾ƒÍÆBB\¤¸ÉËÄXÿóþýQ¨ÊcË–UÖí?‡?û¬kPfL?xà¿—¡®0êyV‹hƒÖ-ج¢Àöstï«W(L›9“íçÓf*lUS%Z@R?´â~!ú¢{4˜Õñt?ŸPÆÁüÈ£@ÿ+ö'osÒ,¬7oZÿÅçý[T ¤*8µgnòôòÃažhÀ5=ãàwyòäI­ZµêÔ©£¸'˜Sì¦&&,†°,û裺wë6uêPæ/,/0™Â¿Gî(›RøcÒF ¨ò 9WΧN"$½ÎH6ùyÔèuÖa®TÛ®:Y^~þÂ…—/÷ô8?wÁüÉòSÕ·«ªlÝ´zíê_FËò:#HXȽ°Ð{º:e§Ÿ±´ÁÆ:7éF*û®ÃºyÛx­Æ®í¢î÷Z¼½<úôéSï%?~¬±‘~¯¥ar¿²ý|Ê´iK–/޼yuÑRå)ÓäÙ~>ü×߈Ô-p¿äïb ŸwÎ-ÎÃ=28(žzzïï7`¼kêåÇŽ¶½zõ”ºèåÇ2 –Gc'MZºbé¸?&Íš3ÇÁÞö„•­µ…Õ±iÓ§ÿ6~¼òå1ã'lSS%•€„ßONLIIJ¹ŸfbRøàZØÝ›©É †%uI °²÷òtu¿²tÉb¶kaW~ÿmÌÒ¥‹ë…ÜL½qðÀÞ/¿ü‚}F±ÁÊž½Zãþøý|üä?æ/Zè}ùâ%s/œ½pîôLEÅß'L@?ÿmÂD­}ZÞHC´€¤viDW·ÐÅ)¹r €y ÁþämæHAÿTÒÅYX³ºŸ?=rÄð¹sf“ñ®ÕO%mVÙüÓH¹¥Ë—:Ÿ:yÂö¸å±Ž1=vÔÌÊÒ|î‚ù?ü2bÓ–ÍW}d£}ý’Ëž™•VäícçÜ3G jö ŠMjÊÖØµ½S§NmÛ¶ýkÚT w$W—(¤ª[xöéÝ»ÊGÈkévíÞU±7¶~Ÿ§ÇÅsgO;¹8;œvq<çæª¼téÃGîÐØ™–’@´€¤>iDG§ÈÚê!{Ë@Tš¦­Ú´ñï:´iÓfâ„ñâÞ†DRƒÌV?Àï²WÅ lì¼›+~áÆ¢Êß×ÿùUçÃʇ•:9=ú<}X™¤Ž™¬~Ò˜øØˆ¨ˆ°kaW®†âî¸˜ë‰ ÑøWò¶Ñ’Z¡$$ODi Iã¢$D HHˆ- ©9Z€nDBÒè…hIÓ¡4ÞIdž'+h@ Ñ@ D @ - @´€@ Ñ@ D @ - @´€@ Ñ@ D RãåË—wïÞåãIhš@ -‹üüüÝ»wkUBCCÃÀÀ Žóçìì,Uøòòrýû÷sþkhh(î/ž(,,Ü] Tˆ¾¾¾ä Í|õ3À‰””55µ½{÷IyðàA]]]¡ñD·nÝ>|ȸ=zsXý49+DªZâl¸;w2ލ¨(T©ÚQ†6J„gÿ§ˆ³DÕ,&@h@´@SSSÔÓ××—YO`µqåÊÆÓÕÕUAA«1ø«¨¨0S0gHÐ ø`‘é 0ÆÄbå +‹ÉÓŒ]»DGGKОÍ,Ga SòÕŒÞ'NÀ'))iäÈ‘žžžp'''C£Ï ),,äŒ..ó€¢¢"|6nÜxÿþ}ÆÄkÇŽ ÈØÉ“'%Ô§8hii1ŽÜÜ\>;:Õä+ñññT³DoÞ¼ÌsÉn´Ü»wÝBàì œ5jÔÙ³gÙ”‡hthY¶lÙæÍ›ÑÐȪ¥¥¥ÌRe-‰k8Ö^9r„ <`À€iÓ¦ F_½zµ¹¹9"Þºu«Jí¢Ñ9Û]Bš|ÊÎY"©ŠI 4-`§ }}}¸™!=qâÄýï 8ѬZµŠqèèèH 6ìÁƒìeii)̆¶¶¶½½½à<%j«8ÓTUU……fܧN’`á~üñG6¤Ýµkט(ׯ_‡£¨¨ˆÉ¹„ÝQOqÑ9³±fÍš+ïΰkbd¾¸¸˜q;;;_½zU\}VÙL°v|¬{uhØ ì%³R‡ê—¨Ê…,ã–ÐÁ„hn{´àÑ£G™üsF1bz#ãvssóóó“¡BøÔ’¸†¯…ÉDVѯov˜šš í·cÈ<}ú4//oÏž=‚ nNí¢Ñ9+Y\š<ËÎY"©ŠI Þ§ÝfH‹›ÐEgm S¿ÐdCé8==]2-àL«Á¼ ÇlN°!™ êׯ_oݺ¬Ù›&œÀòWÔS\tÉÖ—uR 6KœõYï»ÉÉÉ;vì(++“¬ú%âI øp ¶fXwLL ³Qij‡HU!s¶@CCÃÚÚZ0!£¾bÅ 6¤­­-ûfÃ)S¦deeI®Ì_˜^Ab˜[*¬ñãŒ.šy¦–°f:þ<iiiðÌÈÈPWWG‰%;;; õ).WÌIo¤ ô$ÂÀååå…²ÄV~dd¤OÑèß}÷sŒ¿Ê§ª_"æ$€o±žHM³È0þzòä ggàÄâÅ‹Ñ LC:ÌáyÎè6l­A³§OijBø×’¸†³´´D>‘%ðN ­!‰¸(†cÎ%hçÓ8Óä_vq%’ª˜¡AÓ‚¦Xˆêœx¯ftBÃAm<*B D Þ0'ÛçÌ™³qãÆ‚‚‚:ŽNhP`6«´µµùß›'¢@ ˆ@ Z@ h@ ¢@ ˆ@ Z@ h@ Ñ@ D @ - @´€@ Ñ@ D @ - @´€@ Ñ@ µM Ç¿ÿþ½{÷jhh”––Öqþœë1:W…íß_T—5P^þ¯ŽN]+%põ¥’:èuÖÁø+ª~–øT]mŽ>x.›ŠË—Kwí*,çlÍœœ7€Ä™d££ßjçT„t™Çóĉq=Dœ'c'0Gš‹Ÿ?Ùóùóó÷ì)q…JJùâÒdòŸC‡Š˜Œ‰S$®‡ˆöyβ‹zŠ«:þ]‘³ÝEG‡¸\»VÆh—ÜîUÒ‚àà²#GŠ% X$ˆšGoDÓ0…âlΡÝ@FG|ük‡©´ãwÙ²\¢8¨KËŠ*JJzæðô¬Ø]NN~ 
Õ>>¥yyofÏÎG_‚çâÅùh)ÉrÎ8rsß tRÕ<¡vi,+Lø–-[tttÊÊ*ºQii©«««¶¶¶½½=cŒ¯_¿îçç—ŸŸ?}úôââbwww†.pBUUaÞí.:_½zUteϺE—û#FŒ`ïe¸¹¹A¯Tјššž:uJ¶*czs~þŒw%*|W ŠýÒ«W˘™¿¨0tYL¾ÌàŸ8ñ3&!Ó¦½’ i¾5 §N•0ÑQnÌhÚÚEöö%ì4·uk!{oÇÍíí,Ó½û‹ØØŠÙS^`àÛÁÏéÉœÑùg‰ÙëL°jÕÛ¹© àŽN‘¸Öä\Õq*úñÇ\¶áììJ`“Ä¥)NLf7Ìà²}ýz™Ÿ_:áô鯈»{)c Ä¥ –;!˜ˆ¨"q=D´Ïs–]\Í‹Vÿ®ÈÙƒsĉ˃h»‹,7Ì*lI090ÀlÍÃŒ1$@\»‹f¾!Œp>¦ê¤Õ>bDž "ôL¦hè¥pýWÃH¬t õ;û()aO…³æ u±[àëë{èÐ!Ö®GDDÀ6§§§3ÿ–””à_,âan<(áHëÆø`“åi×wíÚUè5¸éÇ:@T²ô=^4ó+yø¸`šLtL(e¨Úôôr6}Á쾂©i1þ+ÇLÁ.)D=ùƒ3:ÿ,ñ/{)²Œ›³59m§"v-.3MqŠFÊ[¿¾àÅ I³fIÉ¿ocS˜+¬ü˜'.MÎü‹*×CDû:°¦ß±£°¬Líœ}þõë ^‡¥eqfæ\ãÉ“7`¨â¶ÁdØ-à¬yBÝDpuu9¨l--ÆçæÍ›ì¿ Ë–-+//_»víÞ½{%$¸mÛ6v·ÀÅÅ%$$„q³·!²³³ÙdÙ¤ÞTŽ~ø¡¨èm?°··—*:»[àääTS´`Û¶ÿ誋KIHH™¸I tþÙ³·Ù|ŸiŠ2}vœÜ¼ùßàYfNNo× G³ƒ°]»Œ›Ó“?8£óÏÿ²×™"NóÀÙš‚“,úÓ•8ùßÊéܹҠ 2qiŠS„l¼|ùfÆÉS'¨À²eX~­][ÀæM\šœÆOT‘¸"Úç9Ë.®æE«ŽWälwÎÑQ«´å…’°’† dë!s·E\»‹f¾~GGxxÙž=o÷ $÷:Ní?ü ÚñnNþïú¿·w)»Ã<~üfÍš‚§OßlÞ\ðêUÌ@E忳̮ۻٻÅ‘\ó„Z¤FFFãÇ?sæ cYÿþûccc˜[MMMæß˜˜Æ*ÃsssÁ½(222ÔÕÕ÷íÛ§¡¡aggÇúãR³‡B²Ož<'Ü{+4srrà³aà Pøœ8qBÚè (///C}YXTÜ¢{=¾Ž´´×•%*WW¯¸¹¥¡QhgWÑCOŸ.Á¿Ì¨›4é:+.srÞäç¿Ù½»"$¡„û”̆siZ[¿½/hl\ÌÜ’42*b|þ¸U‰”Ïœy;øÿú«b«žJJùì"NOþàŒÎ?KüË^gŠ˜ÖLH¨HÊÁ¡„i#ÑÖdÀܘgîÍ#§"ämÅŠ¶álmKÞõyŽ49=‘SŠ€€2,ª`KÄeJQ'•#®˜]®q¦õš½ÁobR,AgÍsöyÎJWó¢UÇ¿+r¶»èèØ¹³sÄ1ÚÙâc`ŠkwqÀbŠP"P( ù|øðí|Ð5}ý"Ø?qÍÁ9´ëwt|÷].|Ø› Òj³×AÙÏÓ0,pÊ”WYYå,µ<8Ybl¹œ\E§’ ‹yzaàÀ\yùW’kžP»» µq_ ÑÏ̬¸if©–@h ÃP“¼E ZP+`ö'´µµËÊj÷y•>xNBBBBòÞ Ùõ&·[@ h@ ¢@ ˆ@ Z@ h@ ¢@ ˆ@ Z@ h@ ¢@ ˆ@ Z@ h@ „ÆF \IHHHHHH- !!!!!!!Z@BBBBBBR‹´àƒw š%!!!!!¡Ý‚·ä€j–„„„„„„hÑ¢UÑâ $$$$$$D ˆ- !!!!!!!Z@´€„„„„„„hÝD !!!!!!Z `ûA´€„„„„„¤©ï- !!!!!!!Z@BBBBBBÒ„i@ „&¢@ ê›T>³ cÄwO@Ô…º:Hí=ë1ïgÙeë6M­3p–¥šU×” ÐThÁ… FÕ²eËÎ;/Z´(''§á'Ùæ&&VuæµúëF{õk©ÎŠÙš£:Ú%„i"´€H ˘0aÂ¥K—^½zõìÙ3UUÕqãÆ- Z ³®¤¤¤±cÇÂ1hРçÏŸ- Z@´€@xÏh [´h!íÀݓĥ……E—.]ºuëæîîÎx–––nÙ²¥k×®;v422’0z/Ywffæˆ#Ú´i³cÇÖ377WAA¡uëÖ?ÿüóƒ¤5xü3/òÒ'IÚ@__¿oß¾mÛ¶…Cœ'g1á@QL6++K‚vqe.//ìØ18ð ·„²WŸèéé&''ÿú믂qvqÅä¬Ož™”ÐëdîÉï]g>|8†„—‘‘!''ײeKüÂ-¹‡ŽŽÎO?ý”——'N»——×!C0Ÿôë×ÏÆÆFÂÐWLQOÎ|Š«OžµD -OOϯ¿þºúë6fHX[[<˜ñܹsç”)S0˜1ª««KK ,X ¦¦â²uëVÖsݺu«W¯†§¶¶¶¢¢b,)83ÏRœv›6mÚýû÷±\fw_D=ÅM‘ªªªH…]¼x± DÑèwîÜÁÚ%8p ܵ´œrrrú@âóï âê“gæ%(jRAIIIBôyóæ1!UTTæÏŸ/¡ê˜ˆ¨ ô¨'OžHÐ"uñâÅ’’t9eeeÉ´@´˜œžœù¬f!ˆˆVx=zô¸|ùrÐf“B³fÍÏ>}úܺu‹gtQ7V-ÙÙÙp`éÀzöìÙ“Y½|ù²sçÎ5E D3ÏRœv\ʈó7E2ÅÄo÷îÝe°œÑ÷ìÙ3vìXMMÍZÝe-//ïÝ»wYYÙ¢E‹¼½½%„äßÄÕ'ÏÌKPÔ¤:Œ´„肃 n U‡ˆîîîŸ}öY•›sX»ëéé%&&¢êd(&§'g>«ÙC¢ܸ~ý:æ5,jÊ²Šº1«¾~ýZæè}ô¬HD0MÁçÃ?¬½Ìs^ŠÓ.î¸Á)’-¦d;$®,œÑSSSñWZZZ­Î›111&L€cÀ€¶—¥ê 2ÉRh—B‚¢&ÛD£ .6$gÕ!b§N°d¯R{DDÄÌ™3Á-ÚµkÇÜÃ’ª˜œžœù”ª‡D xáüùóXLxxxÈ®˜ÇdŠuäíÛ·9£cœ€0à‹‹‹áÈÉÉa£#‡=Ú-@š………Õ­5i,Á›7oØKqÚyZÎbÂÁ¿¢+qâĶmÛ$‡”ÐD‹).“BÝFZE¯3ˆja£ ¡ª›;w. 
·„ªcòvçÎü177W‚öÙ³g'$$”””œ>}3Œ´Åäôä̧„§–¢U/ò^¼x!ÕF¥è3ÎÉÄÆÁ÷;tè xžüßÊÓjíÛ·gC^¸pë„´°°`=322†ÞºukÁ'^½z…Ep»víúöíkkk[ͲWi ´µµÛ¶m[¥vž–€³˜p¨©©¡˜(¬ÐíR!íâ̘hô‰'2Ù³±±™4i’„²WhßÏ>û kPEEÅ€€€*‹ë ¢Å—=¡n#­¢Æ×8i¢ æç¬º‡Ž9²E‹£FJOO—Pul”óçÏ/Z´H‚v€æÍ›2ÄÏÏOÚbrzræSBáSKÑBÃnÂêMa4RghŤžL - - - žL - - - žL - @´€@ Bã¥ïõ‡•묰”ÃÚè ¸Bø—…>—L jŒøøøL˜0¡U«V]ºtY¼x1}X¹JïãJŸKn4Ú黈Db„Ú¥“'O3(((xúô©††óU\¢D AŸK&Z@´€@h*´@EEE­Zµ’vˆ¾¿V÷íWÑ×ÚpjçŒ~ïÞ½‰'¢aGáf<³³³Gމè»wï–!óô¹d>ŠªÙ?ésÉÿÒç’ ¢‚xùò¥–––lŸ%}O?¬ÌùíWÎ,qjçŒþÅ_øùù›šš²•‰UTT—!óô¹dþŠd!ésÉ|† }.™@h„´€™î{öìÉ.pÿmVæüö+g–8µKˆÎl½`Uĸ»wïÎ~%V†ÌÓç’ù+’¹r†¤Ï%ó2ô¹d¡qî¼xñb÷îÝXIÈÆ*ªœ à‡•9¿ýÊ™%NíœÑ/]º„(r%¸°®fæésÉô¹ä*;ÿô¹dP³´€Yà¶nݺ–hAƒý°ò¿ÿÿÛ¯l¶?ÉÊ©3z=ÜÜܰ–‚eCbaÄÜIÜ-àŸyú\2EÕ§ô¹di‡ }.™@hT´`ÆŒááá0cOž|¸ Öå=ý°2ç·_}’•S;gô:„„„ K‚!•••‘O”HðÄ¢„ÌÓç’eVTMZ@ŸK¦Ï%Mœ;wî—_~iÙ²%¸?ÆüÇ¥ÝÒ|?¬ÌùíWBŸdåÔÎÝÊÊ Ó*óðáÃlÈììlæA---v/ZBæésÉô¹dÉ´€>—L·„Ú¢„ºÄóçÏ1{JÝ®46¦QJ@"D Û¶mÃÒSOOoÖ¬YdHˆ- D š4tuu?þøã¡C‡ ½c€ Ñ¢h@ ¢@ ˆ@xohÁ”)SèÎ@ D þuuuýú믉ÐÔiA^^^Ÿ>}BBBˆÐÔiÁÆ—/_þoUŸ!ÐÈiAtttÏž=Ÿ={F´€@ „¦N † fggG<€@ ¢_Î¥z$¡‰Ò!Š á’@ Ñ@ M@ Z@ h@ ¢@ ˆ@ ZP«Še}fA¶—%ÔÙ#|YXX|üñÇu%z0„@ µN »Œê†0±¤ŠË'p¯^½bccë±Þˆ.¡&iAýçû}¦~øaýÖÑ@ 4ZÀ¹Í€K ‹.]ºtëÖÍÝÝñ,--ݲeK×®];vìhdd$!‚—¬;33sĈmÚ´Ù±cë™››«  ÐºuëŸþùÁƒÒQÎ,ñWĹÑâåå5dÈ-Zôë×ÏÆÆFr~ Š .++‹ñ”——?vìø…[B%- B­ÐØÅæÍ›ñÅû÷ï/++“1ˆÚu¼‚‚kkëÁƒ3ž;wîœ2eJFFL¬ººº´´`Á‚jjj………[·ne=×­[·zõjxjkk+**J[vÎ,I«H(ó`B/^,))¹s玲²²äJSUUEšP·xñbƱ „ª8p Ü m_‡@ œ0xýúõ7&L˜°qãÆ¡ÏŸ?‡Ö±Y³fŒgŸ>}nݺÅ3º¨Ü%;;,¬YÏž={bqÇË—/;wî,m©9³$­"¡Ì÷íÛWOO/11e¯²ÒEøíÞ½;ë¿gÏž±cÇjjjÖì¾@ ˆH‡GuèÐA¶-‡*í:øȇÌÑ?úè£òòr†Á¦ à/øÈpŸ3KÒ*Ê|DDÄÌ™3A8ÚµkÇÜPi¬"–<©©©ø+--h@ ê“<}úô“O>©%ZлwïÛ·osF‡¡4ϰ‘ÅÅÅpäää°Ñ»uëÖ"´ˆGš………2——3KÒ*g­½½½;uê$¹ÒEø…RÖÑ¢E .do+- BÝтٳgÇÄÄ”––>xðî+VÈ`‡øÐ‚íÛ·O:¶6//o×®]‚áû÷ïÄ^4ÈÚÚfQØèJJJ¸,**RUUe=7lذeË$ˆÌ‹ÚÑ*Á™%i •u˜PRRrúôé^½zI®%uuu(‚:(evˆó4>'-€¥Ü¸qc—.]:tè ¯¯/˜ˆ““SûöíÙ.\Àê!-,,XÏŒŒŒáÇ·nÝZðW¯^aaÝ®]»¾}û¢ Ò–3KÒ*ª%ÐÔç!Cüüü$Ó555(‚:(e<'NœÈ¤occ3iÒ$ •L BÍÓB½µx@ - - ÑÑ@ - @´€@ Ñ<5¤o×ÞÓÌËüô)@ 9-ð÷÷—““kӦͧŸ~ #W÷æ§6¾M\gƬn>¬\ý¯"Ix·„l5Ij"„FH ÂÂÂÀÜÝÝ 322V­ZU÷æ¹6¾M\gæªn>¬L´€@ uA äååeVYß&ægtÉŸ!LSÜ'˜@__¿oß¾mÛ¶eÞhÄùUhqv‘3ó¢i‚oÉÉɵlٿ웋¬{÷ªðÔÐЖpF¿wïÞĉ[µj5vìX¸%äSªbr†$Z@ `º733ëÕ«¬ø´iÓ˜·ôó_PÖÆ·‰9ÁÿgˆÅiG0”úþýûÏŸ?7nÜ¿b¾ -ÕbZ4Íyóæ1ßPVQQ™?> l†ùýì³ÏdXý‹Fÿâ‹/üüüŠ‹‹MMMÕv x³úB „÷†|ôÑGkÖ¬yöììú† f̘!Õü^ß&愸è@ 4rZðÓO?±´+iiiA­~›XHgt Ÿ!~óæM•Ñë†téÒ…­ÁÝ‚§=zôpssò>//O”VÑ@ ZÀ==½eË–±7¤²µñmbNpF—ðâÎ;GDDHŽ^g´`îܹÌ7”·mÛwíÑ‚:„„„ ˜‚5ÏY!Š)ó¾Q@xïilêúõë;uêÔºukyyyiÖÆ·‰9Áó3Ä ´µµÛ¶m[¥öº¡>9rd‹-F•žž^{´ÀÊÊ æ­yøða¡4…*„h@ - @´€@ Ñ@ D @ - @h”´@æ#ú¢žU~v¡r¨©©9gΜRLäDKK‹† @ -à0$ >ùä“FI dN­ñôéÓ®]»2ï`nÅDNŸgÏžÑÈ!¢ؼyó¶mÛˆÔŒŒŒV®\Ù Š¹bÅ ccc9@´@/^¼èܹ³Ð7~êœße÷mbQ[È|ñч.´4²—â¾¶, //¯!C†´hÑ¢_¿~666ljœŸ÷ÁâñãÇûøø4¨bz{{O˜0F@ -ÆÁƒ•••ÂÚšó»Ìâ¾MÌi/™HDIIIB‰$|ëY°ñ/^,))¹sçŽäZ’ðb$òôéÓULä¹¢‘C D þŠ‹‹ÿ¯½3¢Ê×>ÙȾ/$@®z‡„;q™a CȦa_‚ÄÁÙÂ"Jâ ŒÈ\"̨„! 
ŽŠˆ€DP‚€`dF2 Lä‚ òaؘ@LH ïoÊiÚêêJ§³t'ùýž÷éçôéS§Î©®®óÖ©êú‡……>|Ølf\fs±‰5ÇK¥¤¼ªÆ< £-›¾xñbÙ>â ÔÝâ€ÅÎÎÎJEûéfee¥1 ™Û‚•+Wöë×ÏNº¡—Y'6±éxiZRs¼Ô‰õ¬"''gРAíÛ·÷ôô\±b…¾-0÷‘j¶ÀºÉl¶@ÍÍ›7;tè°gÏ;é†f\fÍØÄ2Ú•——KââÅ‹Æã¥RR^CBBŒk–AQQÃ[XÏæÈÊÊò÷÷·ÎÄÅÅÉâvÕÍmÛ¶qo¶àglÚ´)::Z£:]DЌˬ›8"""##CÆ<)o<^*%%SÁùŽ;îØ½{·á­N¬g ¹¹¹ëׯ ³Î¤¥¥%%%ÙU7Ç¿téR~9Ø‚[tíÚuÆ öc 4ã2kÆ&Þ¼y³œsËvzzºj¼”Å;wî,çÜÆ5¯]»ÖÛÛÛ’XÏ*Ö­['c³³³sTTÔŽ; +2¦F[PXXdøÍ»)õKÆ×5[Gjjªþ_“„„žr€-l` [ØÀÔ–cÇŽÅÇÇ{TÓ«W¯ãdzZ¨-¸÷Þ{_xá…âj$qß}÷±Z¨-pssS¬[U0ÉÝÝíÐBmÁ Aƒ.\x¹I?lÇVO9ÛØ‚üüü_üâÊã{%¡ÄêÅ´D[ЧOåÞ‚¢¢¢”””Þ½{³Z¨-0¾·àêÕ«ò–íÐBmATTTJJJQQQqqqjjê=÷Üs«:."´([päÈ‘¸¸8÷j$qôèQl@ µ€-l´x[p!„BÍNØ„B5E[Pýw†ʤ§§xyyXRÒò:-Y¤Õ¨KÍ5>%eÒС½í¼ñ¦’6§¦Nâg†B-Ôœ;÷Y¿~ÝÜÝ]|§LUY™Ûø¶ ,,øàÁê·NË©ãÈªÙø‚‚/‚‚üÏžÝeç7•´YZ^X¸×®vúµŒ!„-¸¥G‰}úéÅÅ_ÊH˜øè’%3ÿ€îààÐ@ƒD#Œ¬š_ºtÖ„ ößxM%% NK›ÝälÁ¸q>½SJÊ«¤9R „°ÖØ7ñJZœA§NÿS«Ö\»vhÚ´±r~éçç-c¡áŒ³sç{ÜÝ]çÎ`8 _¾üåÀqnn®²Š¼¼OU3á5·kÖyæÌ®˜˜ûZ·v‘WI›!†´$dAY<:únÕI¼j½ší47V™k|\\çíÛÿjÏïß¿ûŠó%!¯’6þ(+keÏž]ô¿wÍiÖ©¹‡˜Û”nʶ:wî³Úî!↠{XÊÈ«a{"„¶ v¶ÀÓÓ]ŽÑ[àííY«S´gŸÐ§Ïƒr–JfϯdŽÙoÖ¬'ÊÊ̘1ÎPɤIÃ'N*™‹=3dHïÚž jÖ9|xŸääÇ%súôÄ#úê¬JI©dÔ¨þ:k×i§å'²ÁÁþ_ØsãOœØqGiéþn—´ê ˆ´_¿×š+Ò¬SsÑ\ܸ›cÇ>RÛ=Ä` dêlWBØKmÁ£ö˜:uŒ²Å<ùä''ÇZLÛ·osüøÇªL95ÌÏܸ߭`¨$44H9Ó-.þ2 À·¶GmÍ:3%­?²*%åU5æ©Ö®ÓNËm³³Ó¹vÞøùó{JŠúÃÊÊ\gýeÍ­È´NÍ=Dsqãn†„Ôå"Bbâ£Ø„¶ N·†…/[6×0>Y(±¦w):::*ƒ¢|d8"KI‘ 9ªëñ–µ5ë4Î4s#«iI͵ë´ÓêÙûlü7ßl‘bÇŽm1½_²ÆÙs+2­SsÑ\ÜònrË!B5”-0Ö{ïýiÀ€‡jÕšvíB¾ýöÓAñüù=ª“c)YVvÀêC¹f~†Lƒ¡‘¥¼ü+I\¼˜m<²*%åUu&*Ã’ñ¸¥ÓNË×9+k¥7~̘£G÷WM׋¶mK¯ñÞs+2­SsÑ\ܸ›¦“"7o~Í!„Ü<õÔP–ä™ù—¶mCöî}»V£õœ9Iýúu“®\Ù7oÞ“Jæ¨Qý%ÿêÕÉÉ*™thƒñGãÇ2ÜhNš+Ò¬SsÑ\ܸ›ª[(|srÞkÐ?2 „¶à°rÓxhh»»kLÌ};v¬ªíÁ´¢âà”)£äÄ×××ËðçÆ3gvEGßíæö³ïKJrä<ÒÓÓ=<7&æ¾sç>uérÏk¯=-h é„`6H3d°ffÏž]¶n}­¤$G@ròã=zD*±ÃpÉ!„š†-ˆŽ¾Ûðì¼;3ÄØÉH¿dÉÌððP7åÁ8–kÖŒ5 Y§ä¤§§úûgfþE§=Û¶¥GEÝåââ|ÛmmW¯^ 3û¢S§*³flbÍÁ5Æ.+;`ñÈ’pÉ!„°¶ÀÇÇëÊ•}Jº¸øK__/{8 —UðÐwßm¿téïÊy°åÁšulªNÉ™;wBiéþŒŒ‘‘ÿ­Óã?úèÕŠŠƒ'NlÓÙ§S§*¨’flbÍÁ:q„}üñë;Þi|µ¢ÆH!„°Ø!¸ßüZµ¶Üp¶@5þY¬YǨꔱUÕOíUÅñS)<9þÊ)Qm›èèøÓø'‡9CÍrÉG‚êf‚ºØK>2·vÍvj¶gëV?9/”4§”k\‘ælæÚ3 C޹AÔ´dm›dy7­þÞmÛ#:ëb ,Üëtj»páG›"ö¢ÆÙ„² [ðᇾr„Ú²ÅÏŠ&Ê‚çÏ«ÏÛµs,+ ®ã}aµ:@ß¼yë­æÚCB¥…š³ª’mÚ8nÜè+g‡W®[±"ýYY~ú[)0ðV¦áÜZƳòòë¼x1ÈxUJÊ«tDu §Œ‚–4ÉònZý½Û¶G:u6-0Þôk›8Ñ}Á‚ŸÝÔ²m÷ „ìÒ¼òŠWX˜ãþýÖ á£F¹Í™ãqõjprò­kÌ“'»O›æ!ãM^^àØ±n m srü o5מ˜è&™2Àß"§Ù__‡ìlYܸG–¯HQZšWR’»þV6Ìmöì3gÎô´’ᔑá#í”òƃ¨RR2U+ºã§Ý»-j’åÝ´ú{·mtêl„½N¿¶ãÇCCk?Þ}éR/." „li 4ï neBQQ°å©3g‚¢£]ÜÜ~vGzIIðèÑnžžááNkÖøX1õªÙNs•,Zäåáá ¿öüüŸîœOMõ4ž‹6mϪUÞrÄ÷÷wX¾ÜÛŠ)*,üñtÙpׂæV:u*°K—V]»ºœ>ýSÉÍ›œ¹‘ÓîôtoÕ *‹K”9ƒÖ®õñö¶¨Ivó‹/ücb\¬ûÞmÛ#suZØ£:îu5šŒ„WÃ[Ù8²AŒ/3a B6›-háºt)XŽÈ°"ñúwÎÛ§bc[ïÚåל¾q;ì‘XžrˆÂØX3gzÈ©çâÅ^ÆžC!„°-Q/½äåååpï½Î'N²5Bµ[-l` ¤ó­jî~zzº———%%-)Ó-°¥-0úóÕ-¶oßÞ³gOWW×ÀÀÀ±cÇ^¼x±yØ‚°°°ƒ2ض cÛÃ?,Π´´´  à¹çž‹m¶ÀÁÁ9ÀX?¶]½zÕÕÕÕæ}»víÚ´iÓ‚‚‚üüü–.]ªdž={¶sçÎîîîsçÎ5táòåËtssëÔ©S^^ž¡ƒ?.Þ¦0-SK7=00088833³^êhb¶ ¸¸855µG6?9~öÙgûôésæÌõgÏž­dŽ9rÖ¬Yeee3fÌ04lÒ¤I'N”ÌE‹ 2ĺ3{S[ Σ´´4###22²^êhJ¶@9Á =yò¤ÍǶöíÛ?~\•”ŸŸ/‰sçÎ& >{ö¬âiêË\ºtINNNØh‰³EEEÏ?ÿ¼œ¦Û¼o2WVVª2oܸ! ùÈЧj«Ãîªn&¨‹-°ä#l4g[PU}o›››ÍûÖ®]»o¿ýV•|þüyÕl”,++«ãÀ\+[póæMl4[[ðØc}ùå—ßÿý¬Y³¢££m>¶Í™3§_¿~2ü_¹reÞ¼yJæ¨Q£$_ŒKrr²¡a“'Ož6mšËËË;vlCÛ‚€€€œœl4[ y;ý¦M›î¿ÿþÖ­[ûûû'$$œ:uÊæc›x”)S¦úúú.Y²DÉ;;ÛNÆ¡F ¬¬Yç™3gdÀkݺµ¼JÚt£?øH”Å£££•ˆMæ6²f;-oRÿþýW¬X! 
y•tm·ç¶mÛ¢¢¢\\\n»í¶Õ«W×Ëw$]Þ½{·’Þ¹s§8~“ÍÄL™2eüøñUuˆ T¿4Z`eÍ:‡žœœ,™Ó§O1b„¾-PJJ%£FÒY»N;-iÒ‰'"""JKK;tè éÚnÏààà>ú¨¢¢B–MLL¬—ïÈÇÇGܤ’...öõõå7 ÐlÁBCC íÇ4Z`eÍ:3%­o ”’ò*C¯ÎÚuÚiI“„ùóçÇÆÆ¦¤¤X±=ÃÃÃ/^|øðaq5îεZ>ðÀ’PE €¦j ~ûÛß¾ñƶõ*-°²fÆ™†ÑΜ-0-©¹vvZÒ$á›o¾‘·Ç޳b{æää 4HÌ–§§§r1‚Ùlöˆeù b£Ñh•5ë 4df dP///—ÄÅ‹mRR^CBBŒk–±ßØÙè´Ó’& cÆŒ=z´*JdmÉÊÊò÷÷¯—Ù‚èèè={ö(é]»vqo@3±úŽÁ&}k´ÀÊšu6löìÙ’9sæLI+™2´Kyc[ ””LÕÚï¸ãÃyúí´¤IìØ±ãõë×###:TÛn&$$äææVTT¬_¿>,,¬^¾£åË—?øàƒùÕÄÄÄðO€¦g j<´[Ðh•5ë}:00ÐøÂ„éTjóŠ!À f À”ÔÔÔ!C†ÔWm2Æ«žrh:U€-l`  IÚ‚7nܼy“ ÐÒmAeeåœ9s~øáãGþÙôôt//¯ú½?ßòÚšÐÿŒ£B›ƒXÏØ¡Nóñ>;w‰qss“ºwï^XXhÎ ,,ìàÁƒ|Óú˜F…Ö Ö¬ëZîlÊìÝ»·mÛ¶™™™ßÿ½|òôÓO_ºtÉæÎÀÁÁ¯¹FL£B› Ö¬ë°?Ñ¿ÿ÷Þ{O?üðƒ|ôí·ß'''ÙÊh>véÌ™3111­[·–W㇔ÂK–, ÷ððDuZ¾vUfzzz`` l±P5Ö j’ffÍ:óóó»téâîîþüóÏÚpùò庹¹uêÔ)//Ïx]¦Q¡Ík6ë°·aéÏþsXX˜ráæÍ›GŽ ˜?¾òÖf=üy;‡®Ä;ž>}úˆ#Œ‹ 0à»ï¾»té’*ºquZ^RÞÎ;·´´4###22²ÆeUMÒ Á¬Ygbb¢tPº9{ölC&Mš4qâDÉ\´h‘ê±K¦qžÌkÖŒÞÄ£°?[ÖÑÑñ©§ž*,,Tâ]»vM2:äçç'oå<ÕVÎ@ÕNãhË’6.f2Ölë[ÀlÊE„ .¨& „÷ßßÏÏoãÆrËÓ‘‡K—.É@nIIK¢B›ÆzlAͶàܹsrªš0Þzë-9ßݲe‹Ê1@½3sæÌ’’’Å‹?¯À¶àÔ©S† ƒ7nüêW¿òøâ öîÝ«r P¿¼ôÒK^^^÷Þ{ï‰'Ø`K[pòäÉsçÎ=ztÕªU2ü¿òÊ+‘‘‘¯¾úêÛo¿ýî»ïnذ᫯¾*//ç hþ¶àøñã999ááá>>>ßÿ}iii@@@zzºx…âââË—/—••ÙIˆEhX[°{÷î¶mÛöïß?::zþüùׯ_OMMíÖ­ÛÙ³gq-Ë„††<øÓO?ݼy³¿¿aaaQQ‘OffæÕ«WÙî-Å888Œ3f×®]%%%±±±/¼ðBeee·nÝfÍš¥< š¿-(++°ÿþÂÂÂÕìܹÓÇÇ'...00pÙ²eØ€–b *++¯\¹b|GáÕ«W·lÙòÌ3Ϭ\¹òèÑ£lw€a ÕsŠämyyù¥jÄð#€d [Ø‚`Ë–- èÙ³§Ë–——/\¸oZ ²çëûlã´ùúõë,ªfРA¦!:¥f©-77×Ðì&±akå!***dÿ7WlÍš5õ IÎéF}àÀýû÷3Æð©xsËÊaѧ#H=û«QjNOO߸qãÚµkßxã y{ìØ1ÅŽüûßÿ®Çs¾ž?Gs¦äæÍ›Ï<óL]¦LlØAáoû›ø¶Õ«W›«¹ÚüòË/\$4k–†-\¸°î³¹aMŒæžc¡-8r䈸%ŽzÐämA¯^½®W# ç:!—,¿ˆ õ(5?üðÃòVÎ#SRR&NœxñâE¥ÀÓÕX=P_³uÙŒ6é b 222¬¸=¢.mÞ·oßsÏ=7oÞ³õþÌüF˜-°Il€æl â™ù ú¼9ûy„>@s³ÆoyÎ?3ÐlmÏùo çü4ÃÙÀ¶°€-l` [ØÀ@“³ï#„B¨Ù [€B!l*=ýI//·V­Z©òSR† Úź:Mk«cyiIjê¾,„ÂÔ` ZýÍLw¶»9……ù<ø²*³  #(ÈûìÙôz\‘ÎØ_£-–H{ Wó}!„¶ æÙqeêÔ~3g`»›“ƒƒƒiæÒ¥‰&ÄÕ×*Æ{èôé×å;’WI[7»”—––È÷…BØëmAQÑš€¯3gV4¡ ´mÛ¼¨¨ÿrqqºí¶àÕ«Wï3öææTŒ?Š‹‹Ú¾ý9ÃÛ“'_¿ÛÕÕ%6ö$­³vÍÚÄ ÖU2åÕð]œ=›Þ¹ó]îî­çÎh(ß¿ÿoV¬˜ y•´q%YYózöŒâ÷ƒBØëmÁÂ…#»×Ë Úh öùè£Ùïž8ñgUãë×è,.m((È0¼½óÎ6;v<_^þβe÷èñ«7g †1Ø‚‘#˜5ëѲ²·gÌ`(/]ŽˆhWZú·ÚJZu]CZÅï!„°VÚÆÂÂüþߦe ÂÃ/%Íg`ŸÞÊ+6³³ÓëLó¯^}GÎïkk Œ/"ŒNPw~þJIœ;—n\~þü„ØØÿIIQß`XYùž‹‹¿„ÂXi V®œØ¯ß¯›ÜÊÉùã AÑíÛzzº*3ê–\°ÄX=[°uëÜÈÈvNNކµè¯Ý¯àèè 8ï?úæ›Wäí±cËLï‚d¶!„°VÚ‚›7×uèÐvÏžšî–ÊÊšçïïi“Ù‚¸¸(Y»ám›6¾7ά¨x÷Ê•7MWgÝu ãÏŸÿ«élÁ˜1ÝF~pìØî¦·\poBa ¬´›6%GGßUïWâA ssÿ$cðúõÓÃÂümroAZZbRÒ­"øúzdgÿAQ!•‹osss1þ'B|üÝkÖL’ÄêÕ¿ëÕë㛽¯k „ ÆSjê!CºØIc:ó”C„Â` B!l¶!„˜§B!„š°!„´(Uÿ¿€í€B¨mÑáne;öP||‡‡“¨W¯àãÇbLmü&a BÙf¶@5Ý{¯Ï /ü²¸¸·H÷Ýçƒ-¨‹Ækút\uPƒ8Ic B5%[àææX^ÞGIKÂÝÝÉNFú%K"ÃÃÝ=<œ$!9—/÷80TZÛ©“o^^Õü‡ñ,ˆqUiU’“žØ:8¸uff'ölÛvT”·‹‹Ãm·¹¯^}·NIqƵ­ŽŒÜöÌ™8’gÏÆuîì'|îÜ_ÚÙ¿ÈŠQ’WIó@!Ô¨¶`РЅ ;È +’Ä!aöp +«0 Íwßõ¸t©W’3iÒm'þWYÙËý¬‘¦íÔ±ª:%G†äÒÒ‡32ôÒiø†>úmEEŸ'bÛ[b †¯ÁŒÙvÖ¬;¥G3fü·¡RD„—4©COIó@!Ô¨¶ ?¿ç/~ᩜmKBÞÚ‰-P©¡¡®rz-‰ââÞ.ÖÙU’#A2Þ;99è´'<Ü}ñâˆÃ‡»IIË/"舠 ÖÊÖ>w®§q;çÏ¿+660%å.~!„Ûôé¬Ü[PTÔK†¢Þ½ƒíóv¶EŽŽò‘ƒC+ëlÎZô=PNNÌ A¡íÛ»yz:)“üu¿W :\r_ITVö5.ÿÍ7U‡K~ˆB![Þ[põjykŸ¶ ];·²²‡Í¾yógBéÑÅ‹ñõe Êʺßßߥ^úÜúüyÙ‚1cÚÝnìØöüB5¶-ˆŠòNI¹«¨¨WqqïÔÔ_Þs\DPåLž|û´iw\¹Ò;/¯‡jÈ p‘³yÃÛˆ¯ŒŒ»ÅCÌ™sg}Ù‚„„°Üܯ ¬_ÿë°0×zéã¨Qí¤…bÅ’“oÝ[pðàƒ;z]¿Þ72ÒëСù „j([ yßþ‘#Ýãâ‚ÜÝD’8z´»}Ú‚’’Þríééî¾fÍ=Æ-ZÔÁÃÃÉ°ÈæÍäD<0°uzzT}Ù‚uë~-nÃÙÙA\ÔŽÑõÒÇ3g⢣ýÜÜÿ‰¤ônõê»{õ æ7€B¨ag B!„-@!„¶!„B-Ø@ [ØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°Õ¶`õÊe!„jáºe Z@ãÒC°Õuþl¶@ÞûïÿC!„P£©!†`«ëÄ „BØlB!ÔtlÁWû¿<óõvIdgïÉüäã/v~¾âõ×c B¡g öÿóó ioڼɓ4>éÙyÏb B¡–xaS懟|²U'ŽêÓõž¤¤¦<7ïÙ÷×½Ó¨¶@¹;ÑÁÁÁËË+22bâ“ãs¾Ìæ‹D!„Úüß×ûOüûk%übÚ‘o—~<¯êÆ™¿Î!‹Äõ8rä¨Å/ÿ©±m’øú_û3?ü qìèÝŸmç»D!„Ô|¸nÙço%ïùâóW3ÖlúýýUoô«úçûUÅyUU%kæmÓ¦”Ù›õvÝm,Rk[`ÐïŸ~êÑGüÇÈ|5tH‚W5Æ&È[ÃRxa~»vm###>Ú¼AÉ?v4÷©‰Innnýúöù×Áö „BØs–,õ³ ß½p_ÕÊ>¹‹]xãwU¹k«.ÿ¿ªª‹Ç÷n‘¬¯ëE¥¼õ¶àóÝ;‚ƒƒ”ôø'Æ=Ô½Û¾dù÷Ï|0&iüㆥdÔß›½ëëífÊïïŽú•’?õ™ÉÄtýâó]bzlÔÈáì!„°:e.ÿ-áâÊQ}ºß–©ÝÊÖŽ/xsRÕõŸ¿ |ºk׎ºØÃ“Ž­·ßù—³³³’ Ù±ýc%ýéö­Ê„†²Ôþþý?W899)évíÚnÏÚ¢¤ÅI²O 
„Âh~š¶êÍœ¥#ò—ÄN>à½'ï¯zwäŽY±ûÿðhÕ;ƒoúÓ'Û³¶}œY—Û=Af B‚ƒ•´Œ÷ÇŽæ.†ÕR†·RÀø9‹ì!„°¦­~ûý׿Ž+Ië6lØÐÃ#ªÞxxÃïîmâÃU+ûfMN^ôÊï¼›™>åÿ²×Yg Tž N÷<öØ#†Ù‚Ÿ~b˜-·ú¶ ,,toö.ö„BÈܾþõy›V>ÿÂ’å'6¤üqTç…n¯z­ÓÊQ÷=½bì¹w/\ú—ÿÌÇï;xàïVØÓy+þ‰p óÃÆ%Ž1þ'Âã}¨û¾d‹ºu{à‰ÇõmÁŒéÏ<Ô½Û®ÛŽÍݺeS¿¾}Ø'Ba Œ3s}¹)s³$þ9û7Ïuou~aä‹ýn>tXÞ’¸+«ÝõÁª=ŸïªËý ¦ž v÷(³ý‘žœ0^€áÓù†$ ò¬FòVßüû›¯gΘªüCá—¿¼kÙ+bŸ@!„-Ðütÿ“®;ft|¾{«WÆüöÒÿ>ðåÂÏ¥ßyoÝ?>˜ÿÿþý/ëêÔôµ°!„²‰-8üêˆìqûæþöëͯ¼85qIú›Jþ‡ëÓ¿Úÿ¥uêxlB!d×¶À 9‹ÿòÆ;ï×±N}O€-@!„š€-Ø´uËçkç×±Îç °!„P°_çæ¼¿êźÔY£'àÞ„B¨É\D¨K–xlB!Ôümåž[€B5ÿÙ‚Z-xËT@£ÓCp]êlÅWØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°€-l` [ØÀ¶°ØÀ¶° ÇÿÏ·Ž‘ölö«IEND®B`‚Cython-0.26.1/docs/src/quickstart/sage.png0000664000175000017500000016142312542002467021177 0ustar stefanstefan00000000000000‰PNG  IHDRÝ  ’€þsRGB®Îé pHYs  šœtIMEÙ  MxJÀbKGDÿÿÿ ½§“â“IDATxÚìX[ǯ­x¯­ØÝz¯Ý­((¶XØÝ!© ‚€‚„ Hww7¢Ò¡R‚4ß÷Âèܹ[Ì.K(ïÿù?ûœ9sfNÌ™™ßœ=;ûÇÿP( …B¡P(TSël …B¡P(ª‰¡D„Þ'D£Ñh4F£ÑèÆ÷3­Èåh4F£Ñh4r9F£Ñh4\Ž\ŽF£Ñh4FÿÚ\6rq?Gô/z›aWiÚ"ṃF£Ñhts°£Ã;gG[¾q9yƒoB.oЬ›ÁüÞ…\Ž= F£Ñè舰@s3³WFub®ð/€ÀèÛÅùÅåì[´p°—Ÿ@sÖ<Î;Gí2¹„½bùR:õÚ±c1Öñ;w&¸ç–Ôõ;¶5—wïÞý‘š*À´˜Øf–Ûîß·wá‚ùD+Í›'|`¿™ È,À¯†\uu´†F>ŠÀ®.œ?ËsaŸÇž1c:çp(|jij>>ÑP##áxº;C7=wö´íªU+ÈÅ)S&ÉFŒþâù3xЄ‡Š½{vmX¿–¹²ì²`Ù˜<÷UÞj]ÿbpî9ìNFj×"Î)0”óÐÁýx¹D£Ñh4ºAEg-/óË™ÙqÀ€þäÈ<°H¯^=ù5¿\PPüɪ£Ã;>r¹á ½I'‹£G2{mL§^šÿA¸˜ï\½z¥ÖõúÏWf®W¿~}¥n^Ìâ@f}úôqr°![ LæãåJ&ƒú*+݇$îÛWx·>îÚµ ù½»p(˜´ÔuXknfBÝP€7ùɰئMæ"A<–•e™ËÆä‹y¨5_ŠÁ¹ç°;©]‹<§ˆž€—K4F£È6ÖoE¨“È©Žxeb Éø6^ÌDåb"_ÆËaÏPb²è|ärð”)“ žëcÉ3gNg. ËzE„öïß@ù§Zÿ=ÂÔãåp,/^Ø­[·…HôgØ–¡•H„eHöDCmØÐ¡`ÝÚÕ7o\­Ï1 ô=uòøìY39€CÁ :‡%rhv‹¦Æ†Óþù»S§N 9=s,“çvà¡Ö|)çžÃîd¤v-–ÅC£Ñh4Ý@\ðÍüÆpB\$€ $ ÆÔøÃåýúõõöäÏ$ƒ©ãåN6üårÍÇæÏŸ á9³Ÿik2†]½âª*Mž4Âð ¼;kÖ þÒÃÚÄø(í§{÷îÅn¼œú­,²Û F£Ñè†0 K(§&àçüòsgO-\0ßÅÉ.>6âµ1̓/\~`¿ÄâÅ ü~Ì…eκK—¿Èoíy˜ë3jÔHÅû²cÇŒfYvõºpþ,Ð 1ä|ãú•¾}/^8Û@\¾vÍj;KÀè§ZÝ»wgYk ‰ÝäŒaxÒØ'±‡]@´){ïv=Ÿ‹Ïž9I¶»p.˜—ë°¡CÏŸ;Í—wëÚ„ »:ÛÃáÀåÌY°lLžÛ·Z׿œ{»NKm(âœ/^´žÓðЉF£ÑhtƒnÊ6Öoˆ×°Ž‹ wv´¥¾€?\ž è3`@ÿ¶mÛéüñk~9<^ˆlÜбcÇž={0¿ t( À󨹒‚\›6m”åYV]½¬ÞšÁVÄO ½ leiÞ@\®ª¢d YŒ9BWG‹e­á0‹‰nê\+G9‹GU† LNcàùuêÔiÞìô©ô¹ü±ÆÃ… Aú÷ïFœ¹œ! –Éóã¹Öõ,çžÃ®Ó²|Ë&‘ œŸàÑh4F×ßÄ{ߘ›ŒNüГ—÷$¢'/Y²HEYÛ¡%_dŽF£Ñht#ÛÄØø;,ÄŸ`tScCbôD˜—×ÿ?y,o Œê̱¹! 
K˜yïî­#†SßÚþûµØ/q,¨ë6ç?¥B£Ñh4Mð˜Ùkcs3â—~ ïOÄñò4J*4`ù.H4Ž—£Ñh4n|¿³¶°·cýÛHär4F£Ñh4ºé\ŽF£Ñh4F73.‡4F£Ñh4Ý$þÁå P( …B¡P¨¦ÕÿP( …B¡P(T“ ç—£Ñh4F£Ñø»O4F£Ñh4¹¹F£Ñh4F.G£Ñh4F£ÑÈåh4F£Ñh4r9F£Ñh4F.o¹GF£Ñè_Ú||-Þ.ÑMn>p¹½ûë†&ȨÈÐ_‚t㣘ÝÜŸÆP( …ú•ÿw…_7D¼]¢š¼?×—Ë­ íííš WžÔœqÜÚÉëè—K=³Qqäzùi;T××z gšÉ: ö—ÝõÅ‹û‡ 0¸ZòpJJŒùð>öcRܧ ÉŸÞ§&'¥¥~LOû”‘žœ™žR㌟NOÈŒ´dHÉ 1lÂæ°>Z P( U§àFÖ˜\ÞÈgx|[f®—[Øë:::êÙÐÔÁÁaá±þa!ÍÊÃÂÂŽHÌØõHú™¯¶M¬¥_Šu@š¡Ë{•×aâWM§lU~kët®cê°`Ÿ:o˜P~|bíCÛ"""˜?ÕÄWq…æ?¹ü”§|zÀ Ø•‘š•ž“ñ9'“Á «²2SÐ!q Íñ"‚B¡P(ärärTÓp¹É»Ç®®®òzçìì캃.:Ú?33sÍÙÑ¡ÁÍ Êc¢#fízxXÁpüŽQÔ~U?‘»n»ïPô>ó4DÃ&ážAà$1ŧÖZF6SÄ Y-Á#”Kˆ?ݵžù¸Esâ¨× –ˆO®…rYÙ*Ârr¤«ÁövÅ&&¥ææ%D$™ 6 asär …Bñ]eeeñµ*--m&\>dgÀëQô\Þ•ó9ÇÆÖú™ÞS•ŠÚ:šÖï,323øÏåo”?2¼[TTÔ8ãåOM”óòòÖžÒlÐ<1>ꌼÉa¹wvA†®u’ ]?=wþ l{Ó b¯Š¯¸‚÷=ÓhY£‰[”'mU•1.ðóŒ£?dNòÍAAAÃçÓík¨Ÿ>>>ðÉ-šGýcR\òÇ„Ô䤌ôä"—­zü¸ÂаÌÛ»èCRaVVAî×ü¼ÜüÂBøÌ“—¯®%õšd6IMùü1v‚ …BñWß¾}:OKKk>\ÎøðPYõ¥°4ãkqVÞ÷âÒ êªÀ^]Ë[¬ª««ýü}uõµ³²²JJ¿Ã³eqq1„_ê¹{¸UVVòËŸ™É”?4*((Ð2UhœñrÈ뉉\nnîšs£ƒýš—[9x.•|úÊãÓÝ碗К(ªü·¸êÚÓÏ%ïÛ=²Š¿¢¶KÙç†aÄýב²¯"Z'—»:ÛG†'ÄEråO÷n Tž3UWWWuþ?~*¾’øtrrºÖ¹sÍ£‹øÊÐÐPúhþ“Ëã“?%¦§~ÌÊLÚ–—¯/,ÈÏýZhbR®¦öcÈû,Dñmp¹eDX M.—ݵf¤||*Ïž À­uõ²¡¡¡ÊÜ©£%¶ÌÑÑQªO襤$é~}íííµÄ–× ùÖ4/CŸ>ħ$¿ÏHKÎÉJ'&®|ýZ˜Ÿ_heUN"øýûÕEEEJJÕ_>k?­ü_•“‘‘žœ’œ;A.G.o’"Ç£P¿«à¦ŸM„?תYq¹¡ca¿ùÆÒª¾éYÿα±rü2t峎#¯ä|-B.oѽ÷ë—§Úšeåå»än…ê¹|P·M¼o+©î+¡àò%·à…Ä^¸\ÕàŠ»»›Ò‹Kp¶È¾8'Ìc“{ùùùÆŒŸêÆwÒÒÒ–@Â@ù ­½,÷Ÿð ÷â$ä¨lX“ûú cüý|šË¬×Ÿx:t­Ü ù§úzÚ/ ôÌÍLÞY[ؾ{kmeaöÊèÜ­‡Ä”¤ CkÒ8ª$ó:¸ÜÆú }.ñâ…ö¡m~~~ê«„uuuÊ‹‹‹u•kÐ\x ´@yVVÖË—†€æ×: xxxhm^±˜m.¯™Ä’™‘ò9'h[Q¡*5µHG§@üåËnl\™ûõ;45«¾~)VTüÁë° l˜šòvBŸËây㪨¨¨aÆ1„›3—sØI||ü²eËjX$â«««/^¼Ø­[·îÝ»_¾|y‹o¸&âöH5g“eÈÍ̓vîÕ«—””T}ʉB¡b€îï™™™ÀŸ>}Љ‰!¿ë¯¨¨€EˆLMM… V5!—;ùô6“Rv B9¡œÜ’¢ïD˜\^XXhnn.##—÷›7ojkk“_& —7C¹º9§¥¥>0 Wxü>×/ዉw pùYÝГšʯC32Ó­ßYÖ‹ËåžöðpWxq.''ç¶þ81ˆÏ;ú222néí‡OiÝýéééÒÏöAðÝÞÞ>&&„f/>Ôö|WïHÍj÷Cì“Ì…ÌQîÅ)xn¹8Ž74iøü+AELù’‚½µƒ½µ—‡KP Oxh@dx|ú{9:ÚÌÜ¡´çžíɧÁ‡Ó ’2Ž.v§Ïå€×Ùš" }}}UfO('Ñœ„rø„0´¶¦ÈH©}P€žæe(ùcbZʇ¬ÌÔ/Ÿ³µõõËccŠ!`bRQ\T¬®^3‚^Pöó7 åÉŸŠ‰ð—ÏÙ°aZíóFær•Ç3„Q.Ÿ2e 0b^­®_¿>uêT"þÉ“'³fÍJ¯ž>}Ê[|Ã5·Gª©¸œ,ÃÖ­[÷ìÙóµV»vízöìÏåD¡P|à8Üâ¿×ª¼¼œaâ €8Dª’’H€ÎËK`ù~hˆç —Ÿ{”Ý}ú“:kDår®ŠÄpKÍÏÏ"wppÈÍÍ…v €§êE¹¼¹ÉÈØ°¸øÛ~E›àŒÔÏʼn…oÒ¤Œ#¥£ÞÄžÕô),,ÐÖÑäËoköôô{~N†ëÏvÀùpMg{JJÊUímÉÉÉWžŠÃçe­­ð,{IsëÇO©¯µ ÷¡  `;;»ëZ»/>Ùû¹¬Y³·ËO·Öîì r¬É÷YÍçýCÙÙÙ›.Oôñöäöµƒ^OO—Ê_¾¬®®ÎËýLŸË/+šx{{‚‡ûEG†ÄÇF@$áØè°ÃÒús÷k]ѽjNøŠA8p¹í»·Àî\Œ—÷ññÑ\?ÏÛÛ[eæ*šÃ± ¡ZH£¹aWãåµ\þ1+3íë—l@í¤÷E^^?&‘»º–ÖŒŽçåëëÿ;——¯.ÏÉ)„lÖ¼•… —?~üxðàÁíÚµô %"©ïÏgþ{ÈËË÷éÓG@@@BB‚ú|*±­^½ÚÌÌŒ!L&€«öºÔêàÁƒ°HÄÃ5ýÌ™3½zõêÖ­›¢¢"™””´nݺ?ÿü³C‡+W®$¿6¥‰¹"r±±±?~<$†Mˆë&s5©ûïØ±#YMØI§NˆðìÙ³‰0„……y‹gh"®êé544†Ú¾}{¨”§§§®®îÈ‘#‰Í¡§‘÷ËË—/÷îÝ ì[XXÈœ5»v†ºïÝ»·  àýû÷©ÍN'ë:µ ;ÜÕˆH@ó¹sç²ìQ(ªI7µœœ:)áüÍÈÈàÀåp£ß°~-C ;`àŠË«ÿ÷¿IbN‚s5¸âr®ŠÄpK555%/ïÌ‚˜W¯^ݨÕëׯÉçvñp+466¾zõê­[·\]]I'UUUp#“––†4,_†ƒâ,'j¥¥%{œõ]?XeX¦=²M¼û*úá»§gµü ¿ªœy¼>Ï>ÞwÙ³ëÞ¿Z½öóÑÚ„„„“WÃ3Ü®ûSCBB.ilçv¼€^\v\XXØ!•…°·SjkÓ‰ýŸQ_W“ããõðyîIMîç4× o¾: М>ý“T­¡¡NB¹¦¦&€Å·Â|®à@paê|FD„Ž\(œÁEï;;ÚÒÿÝçñòµs¼¼¼4×̆OåéãH4(‡«˜TïÞ¶¶¶ÄZâ“ëñòÔÙ?¹<5õÛýûÕäÌò„„’‚¢¢oE@áW¥¦~ÏÌ,ÎÏûÏyÊÊU°I6G.DKKK+**‚“úôétÆËaqÍš5ÙµTºxñ"sʲ²2ÀǼ¼<†0™àܹs°-ìZV¬Xqþüy".1Ë—/‡néOŸ>MDŽ7®wß¿ÏÏÏ?uêÔŽ;˜eEØåÜW@¸Á#åþýûÙ “1¢¢¢°[ÈJ%%%‹D<Ô«  €,g Þ⚈«º@úM›6ÁS1ô·{÷îýõ×_bbbäâ¬Y³ˆd·oß^¶l4,9@ö‘#G˜³f×ÎpG²^§6;¬ëá@œ)''´(OC¦Ñ\=‹ëñr —çåØÛ—©ªVikW™™U¤¤|‡–çå§¥}wp(:ÏÌ,Š-ªù…h]\èC„¡¨mÛ¶¥Éåð0F„áÙLHHˆ¹»»¹¹Íœ9“9Lîªÿþ°-Ž0`†½‘³·Y ÊÙ»wofàcYv¹ 8:Ü]8T“á3xð`â2 ôôt"¾uëÖäÍ mÚ´á-ž¡‰¸ª ¤ÿòå ™ža‘ÜŠG„¯™³f×ÎyÄ!kj³ÓɺÎÃD-<óìÛ·/·V,Ó P¨¦\C’’’˜ß"G\Ù>~üw@â÷3uÎ/ ö›ðÝ»œã!@^-áºÊÌå222äÀô·nÝÂnÉ­Ü=Ü>|xÿÂ1î¢NàÓh)£¨Öñ/Ü>½ ÊÐvH|d˜˜ockÍ —×ü{NLˆ‡»›Ø•)>|Ø«0388x˽1T˜‘éçç·ñæ__ßõׇûøøœVÞìàà@¼|®d/ ƒõׇÁ6Ü{¹9ÂßßSíþIX‡|·Ü èyQ]Rˆ(ÿQ¯¨P--Mx^,ÿð>–«m=|§l{`šiœa”nþÚóÓ²£ÚRªFa!þq1á°søûzúx¹zy¸€Í-mo©™Ð,'ÇËŸ¬˜îîîþdÅ4ʉ9åpa¢Î5Wþg‘ÆÃÃûùå?汘˜”|+,(úø±$<¼ÔÙ¹²øüùûÏ7—×¼›åËç"—‚Ë9Ïca¹X'—SèC’U×®]»~ý:s˜Ü 
)Ë@<ó›¶¼½½çÎÛ¹sg‚Œ[µjE³Àìrîºnݺž={6ÌÒÒ²N._ºté7Èùå°Èßñr–MD³.u,"é©ßÌ’mHÍš];3dM¿ŸÐóòò<*»–‡ÿ´§úcR\qñ7øäv†º£«W —‡dÓ ’Ô¼ù2â’†Óª æ¦>ÞnÄts0zlt0}Ï5ýµÇ5!†~›ãå5À½tê»wï¤z÷ú÷‡ž½{1ü Tyê77·'Ëþæ~~ùß}ª¨TffÄD“þVøï XÀjjU™Evv¥Üþî“>o‘£§ n©š1c´ s˜:^N‚…Eãå…±±1\òà \KøØåBö‡Œ$?\Þ¡CêürX$Âüš_β‰hÖ…&8å¿~P³f×ÎÔñr¸¼ðË©e JGGgûöíœÓ P¨&Qjj*ù+ªˆù\qy½åäÃä?gkÉ=õ¥/1íûøz½ç?é8þÙI‹ÿñï},pñdž_N/'crrr`±ÎxÎãå÷îÝã0mESaá¡oÌ ¿¾rO¼þ<ø’~ð­—!ÚÖ!p,ôôŸ]»qy÷Þ¡¡!*³žØ|û<©° öJq3ä¸öò“ŠÝÝÝ.¨Š×Êëóòr{7àrÕwAGŸR8÷,ôÁ›¨ñ›äTµ<Ý£"jÅ!ñ3Pti5ó!«eOÜ5dçv¼üñ’É×’’’H(·¶¶Vš<ŒDs­«—Ÿ={¦&<ÖÕÕ•«ñr†÷$‚ƒƒ¾ÇÄü "ÿœó„rˆ|ù²"-­è?ïI¬yy}.ïÖ­9çy’­[·.§Vk×®¥þ<œØCnnn×®]ËËËÂÔ,Μ9CNR_¹råÙ³gÉaQæùå=zôxóæ \î¡y·nÝJøØå"..ý¿¬¬ ŽQ¯^½XV“ºŸ©S§’ïc¹qãÆ?ÿüCÄ?~üP›x¿ ÈŸÞsÏ®‰hÖ…&ÃE|õêÕïß¿¯¨¨€ ÍÈœ5»v>þÕ¤è÷Âà @s;;;æ½Á'`úò³BAAA‡eÖ¸¸8ßzr¨iÿTȾf¼\õµOÊ¥çaguB%ø]{qá‰çD1¥×µõLl¼||=}üÌlÜä5ßÎÝûhòÖÓviÈ?6 ö#~$J'#8Ô¶,x¼`œ`Ò}iÊá¤z¼`<œ9J‡š”¿|ùRqü`ˆÑÚºÒÏÏOrtWžÿW[÷ýþýê·o˳³¼ÝȨâë—oN?þõ“»ÿ"ïÞ½û矲[¤¾eÏž=Ô!"Íëׯ ‰j˜š€×¾}ûþª`‘ˆ‡‹û©S§»wﮬ¬LD¢ 6¬M›6j>¤Ïåìr122=ztÛ¶mÇý™e5©û¾½hÑ¢NµZ¼x1yש®®†«d·Z]ºt‰úžrúñ욈f]hÂ1\»eddˆw¡L˜0ÁØØ˜9kví ‡x÷îÝPw8è ïc©?—3”î4ýû÷'ÞOø,{ …jZÁÓ;<äaÀИZ‘ã»p$ï Mû¿BÞa9]ÿ6è<ÕøÁ«´âÒ'‡|¨ì0Hñî#~Ž—ƒòòòLMMÎáò.%%ÀMþ(nm°êz­ @>Æpˆ‡ûñ>À†Ë—/3p9Ü>\\\ˆw¥+))………a·äYÀâîn&¯Œ4ž¨ºº9{yyJìߣ¤r_GW“@ó°°PÞ¹à»ìÄàÐÐÐ7„£¢¢„õlh4?|w@ù¾[5ÿp©¨w¡Éÿì“àò®UÌ£TÞÄ^{¾KÉç”VðÝMWÌçÔ·Yy‚˜Ê´sê.=oµæªýQÓ׿AÞq1á4¹ÜÕÂNµ­µh>ìÛ·o¯  kÌãääD|*M ç§ÂØÖÚº üøÄ>!Áþ4/C“â“?%¦§~ÌÊL•“­"Ð\Q±*&ºøë×o¹¹ßr²‹ÔÕ«"¿ÇÇä~Î)PU­}g¢llÂæ°~ý9Y¯¯–””TUUe£êl®–“5WeÀ^„B5+}üø±¬¬ ¸3==½¢¢¢²²2##(?}úD¾‰µi¹¼†• KõÌÂÅO9ÿ9I¿ãHƒK´…–éLØd öÔîSjÎÿ~…ÿûÌÎΖ••Å^×È !Ð\û™¦®þSÉ#êÅåàš±m!€ÅC·×ÚÙÙ54/<Ò/<<|çõùo7-”S¹üÖ‹À±"÷'mU¹ò,èœNˆ¸¼—ø}o ¿}*~{•ý¶ß÷¹ã±FÊuÕu— âNI©ÙÙXúÒ/g@sqhm¹µPNù|8cD ”oYNÊ)\—ü±f*KFz²,pùO4WV®òö*.,Ì/ÈÏÿúµ 9¹ÐѱÄÖ¶„)‡d6I­™\ž;i4.6lXtt4sUgsµœ¬¹*ö"ªY©ºº(<55µ¸¸˜ÿýûwˆR'¿$lr.'UU]ý½´´‚é2Í–Ë---¡=á GGG‡|?ªIÐ\^AæÒ•‹õårpXhÐâcý}}}g¼\ìâtûoT›ÊÉùå#×ËOSº~OYâŒü¸M ‡”Ý?Ú¡à-r×c­´ÛÊ®+®»,½ê¼à¼Íxqµy;å_è¹»:rõ>–£æ€àÌŸÜB9yÔ?¼ýø!>ùÓû´Ô²µ´-ûÍ««ÞX”8Ø“óˉx2YÍ›X>½‡Ía'Æå( …B5—ϱEßÍ–Ë===¥¥¥¯]»öòå˲²2ìKM¢°°PÉ#ʉ?Ú«/—ƒ#ƒíß8ã倿¯í´›”\îåë?g÷C-=S sÓ7榷•µÇlŸ³_{ã ÛÍ·E¼b/|ÒbòÝëeEÈhëhÚÛY‡ùq5XN8.&Üê¹&œ}HH©EóŒ´ä¬ŒÔì¬ôœìŒÏ9™ †HX••™š‘ž ‰aØ6OJŒm´/ÝP( …j*.oã-µÅöçzq98*"dÙ‰A ÝAÞ¹5(ÿñâÈØˆÈð @/?âÓüÍ›£×ÕgïT˜ "7t­ìè r3Åå6’‘QP1zùjÄLÿ…¨Pû 0PSxñâÅþa,?ŽüËÍÅÐR†ûÑ’'ŽzRb ‰æÉŸÞ§&'p§§}òÎLO©qÆO§§@$€;$€dÉ?¡6‡àE…B¡PÈåÈ娦äòÚç kèêîó®YA91dh &^RâïíébokenfblôÂÈPßÄØà¹©ƒ½µ—kxhýŸ{2 r,ü<»¾ý}=! 
o/7È+ÐßÛÕÙ>"<(8Ð76š‹—£ó÷Bƒ …B!—£Ñ<›?\Þb M5éÀßA>À;Þîð ¬ñÉÐ~šÊ̇F.G£ÑhtÓp¹šÚ÷g:v¶éAï£"ââbbâcc`mB\Œ¬lE³år¼'ý~íŸuýæ™ÂsÏ]¼A]‹'Ïž™9w®Ô-iÞ¸\Y¹@C=ËÄ8ÆÎÆÏÅÉÃÝÕ k=Ü\îÝ+C.G7š=ÝΜ;;cŽð‘Ç=Ü©«Ü]9G˜ìçÛvïÚµOˆœ€rxø$ûù¬ùó‘ËÑh4º™û°ä–H ñ¼p¹¼|yhpâc"q‚¿uŸåGEÆS\ i8Œ—7þ"—ÿ~\~îÂ@“§:š²r÷ž¿Ðݾs×Aè×’5PþÒØ@IEQïù3ñ;.]¹Ì-—ËÈ”8Øù¨ªæˆk¨gü­¡‘DNLq¤áŠËâyûú:ýû÷cÿÞ\®¤ ÷ÏßSÛ·oߥË_Ë—-±zkÖ<¹œ·æ¢³Õ‘ãÇwìÞCöóm;vî’Ø»ko ”“ý|ëŽí'ND.G£ÑèfîÄø¨ ë×2@9Ä@6 °G;v\³zUxh,²qC§N{õêyñÂYên©fYNæ\è´si̲¨Û·m•“½K¦‘•¹1œð–ô !!¡víÚ9ÂØè…¼Ü½!Cù¾³¶ “A­{öì- ºY$&*´á¸êuðÈ‘{÷jëx¨LÀÊácG¦žÓár5µÏ–oB ðX# Ð\Qñ„ÍÍÂMM¢ ×Óýhgã×ø\~òÄÑÖ1„o.Ÿ>íÅû²ŽöÖзED6£·4.÷psÜ-±oÇž=d?ß±c÷> ²ŸoÙ¾mïþý SÏ‘ËÑh4ºy:6:lö¬™$ïAbxœÇàŸôÎ:ƒÀî·o²âccäåËc£cµ4¿‘²²ÀåÑ‘qàË™ã÷ïÛ»pÁ| C?yó„ì— “bø1b÷ˆÃ_<pì·wÏ.xáËaÏÞž.‘áA§NŸ4qË­NŸ:1WxŽ—‡ `îæMwl' ¼téb(—a·d锓ºÈ¹˜K[gQ¡LŸ>í¡ª„¨*B8.&œsÁV,_êéî|îì©Î;¯Zµ‚\œ2e2™lÑÂPN0øÐÁý ú=0÷Ã’Û÷ìdQTº¯ÿ\¨Eõ¡’®ÀÊŽ:¡œ—Û¾ó7zG`·Á‹D7W™77µ‡_È9-5¿uö€3i?wÀà‰`tu4‰HRÌ‹D̑ǺwïÖ±c‡5«Wº:Û±D78{ïÝ•f“ \œl×­]ݹ³xýº5°HÄ»¹ØÃ³\×®]á¡îø±ÃD¤©±°ðìN:ÁƒÖÌ™ÓÉqhj‘˜+Â!švv´Y¹r9T³G÷îPe2;ÀÊ;¶uëÖ­C‡K/t°³bÞÖÉÁJˉ>ïË: žtñ‡¶}àfL„Þ ’É|¼\9£6 c=¸¦ @rmÚ´a¹‡ú;Ø[a`å^½za(¡«³=vv´¥î–Z`šå$9´ËÒÒ)*„…„„K€O×Y° o2ža‘ÚJPk" |ièù95h.)¹m×Nͧš Jò JrÚÏ´¶lßNÊÙq¹ƒõ¥+V–A€æ5ob¹[¦¤X`oëëhïíîê %rrß™¹ °ÒÂÌÄÉá6$(V‡‡&x{}äm s<õ=ì(™\456œöÏß:u"Æ®ˆI#Ür9Í‚QÉÈŒ Ìnsšå$¹m:EŸ8~b®\¾ÀsÁ˜[‰N9ùËåûInÙ±ý‘†š¬ü=¹;êÕÄwî”<Â;—…ëë}“+VUÉÓÒL·³ñû9ðìfo竀μ¬,ƒYÎc±±~CŽïB Ðärc£çDØøåó>½{3£Õ£‡ÊãÆe“»‚'.Ø–ê÷îÝ‹ÃÞ`‘´A9»uëÆÌå,+Â.š†ô¦Æ?jjôœÌá^èa þÝ»3l¨«£Ù¯__ÎéÓ§÷™Ó'îé´- µÔÍ«d$Ô‘ÈrÙ¹c|B˜åÌšÇ÷µ™Œa‘ÝVupùž½bÛ·QûùÖ;¨ohA.G£Ñèô>–çúy‰ñ5ï):ðKruN±0φ˜ÈðxòÍår²Q‘qoßdÕÉåpgŠ &ÂAÞÆËÉñWG‡w°È™ ¨*†û%ÆG……ø³Ûmý¹nÛÞž.Ìû²/§&£YNêx9Wí@§¨V–æCê 0äι-s+‘»‚u\¿Á üè6q%e©[7U(=x¨|ëÎMeU%ÑmÛ9ÂÃ<Í'žî5ï):·±ö77 ñü=Ä8;zÞ»[FÎcqqò0x‘XçürvÓµ™©ï¿#¹ê=»wìÙ½“9LîªuëÖ,wñ°È°·Ç'MšÐ±cGòŒfÙåBÓ ›“»e~z¤n¥ö@YP°–¦:ç?ÕҞݵK—þýûÉËÞåܶ²2· «–.]|úÔqr'{wÁ&ä„~ßúL⯅ò=›Å·2÷óÍââ»%öá<4nq\®¨Xïéñ‰úÿAÄsrQE¥4<4ÁìuN\>aÂøS'š2®\¹œŒïÒå/rÒXBb79_yþü¹û$öpæÂn]»>ÑP‹‰ 8^³zK¬¬s~9Ëx†‚;{já‚ù.Nv@~ï¬- /"J¸lé’@/0ÃürênÙ•“!2žÛvøÏ{KX52nÜØ¨ˆ`š șˡÖ?¦×/ZpðÀ>Îí_çq©ÊE¶l¹+sçòÕ‹r÷e7lÞ,"ºYAQîêõ˲r÷DÄÄ–äöwŸrrÅ€à–oB¨SYˆ)æä¢Âý"G/}½|äòÇtž³„7vÌ#5æðÆËþɆEãåÅ-év¶–žîNö¶VÌådW`v¹Ð4¤g9^Þ§Oo 3–›@9a+m- úÄ£p_¦GœÛ’ 6tû¶­ýúö…ÞNDê=Ó‚æ’¹{ ’‘[5-—P¾QLŒìçë7mÚ¸yÙÏ7ŠŠîÞ»—ùÑ ¹F£g.'ÈÛÙ1•Ds r uÊ!RG§ 4$ÎûX,ß¼lӦ̀ýá¾KÆŸ=sR@@€\XÝÔ¹V Ø‘>Öx8p ì¶ÿ~7o\å#—3,!.òü¹ÓÄ‹PFù@U‘œl½aúŽ;öìÙ€Ö²Ü-»r2äÂs;PͲ¨"7Ü»ûïî\»rIt³ÍäÌåäûX6‰l ¾7†ï\~úìYñë7¯ž:sBúŽÔZ‘’‡€Ô×mÚtûŽôù‹g¤oÝ\·IäÜÅ ÜÎc›½Š$шHýß™ß-ÓPÏv°÷¦ó>rñÏ?ÿ$çi0/B2aáÙĬë9sf,2ìÁöÝ›?;w&¦;SÃÔ,¶n%'RÏœ9]|«(9¸Î<¿žeen»:Û%/©}€¤ šìraØ„]ÒÏ›'üÎÊL_~èàþÙ³fš½¾Ô×Ó†"ñÇìcðB—‘/]²Èàù3€ìûò÷ºvíZgÛWžË—ÎÿœaòHxà‘—½;jägGšÍÂ|@ùÅå’GŽˆ“ý|ÍÆ;÷îR‡Oöóµ"ž8Ž\ŽF£Ñ-ŽËÁÞž½½>ÊÉV¼4üJr90ú3‚èÈ8ò_?±íí¬…„„ZV×iÄ{ÿtáyGOX¹xùªuëöîßèè³KbϪuk/]¾pöÜ)É#’3ç-àËÁ–oƒ­,ƒ͵Ÿ¦:9x‘ÀèêY®Îî/ž¿çŠËØGÌÚg¹H}gȪ•Ë©/9!Òܽ#4ü£„”05 €È5kV Ô $SÁó€8èÄñ#?fqÜ» ^­[·ê=}ê8}.g— M.wr°Y±|i‡ ²Àâð¬øó•#NðTE¼çdØÐ¡@Ìä¶ r´·fGŸÒR× O•C‡QR”«³mïÜ–@Ž4¯\¹ž*ɵÐVkV¯¤Ù,Ì”_\>mÎ\²Ÿ¯\»vÛÎð`fcýfëöm+×®!ûùŒ¹ø¿Bh4Ý"¹ÜËãSLTlltldx¼ªj‰›k²¯÷‡„¸àr%ŲÎå{w úz»/\0_Bb7ryùæ-éÅ+VHX²rŶ];ím­ÞY[X½5{kñj“˜ØÂ¥K÷ìݳ`é2éÛÒ^×ÏËÁ-߆‚»¹¸9;z**Z˜‡Y[z¸¹@¤¼|1K.o ÿ}ܰ~í©“Ç˜Ã¿´ tûõëÛ„ÿÐ9WxŽôÍkͼ•Î]¼°hùrèç‹W,ß¼e‹Á ]}=m]ÍgÚOÖmذ`Éèçó—,=ñ‚§»r9F·8.+(”ÙÚ¤×Αøñ2 ólbZ9éÛ|ׯ]îÞ½»€@ÍÛ#‘ËÈ@Û×n\«M<~ÔÉÁÆÚÊÂì•‘‰±Á+C sÓ=Ïœ}åúÕøØÞ¸,'÷ÝÄ(¦v.² ñ2–ÏßÓÊI7—÷ïßÏàù3æð¯è-b›mß½±|ójö¬™[·ˆ6 —{¸9]¼pvÈÁä«›­¶OŸ=ýÏla‰ýÆ/Ÿëë>Õzòè±úƒ'Õ´µ4¶nß6uƬSgN»»: —£Ñht årðýûåFF_ÈY+ÌÆvD7švGG†„‡xûz»ûx¹Â'„ÃBü£"‚amB\$oóXËÊ–èh§³V˜Ý8\þ;ùÔÉc]»víÔ©Ó²¥K8LJáÜ\Ìâªm!²_ß¾u¾à¥ù 9`·‹“­ƒ]Í—BðHóÆÜ>!lokåìhk9?` —£ÑhôïÆåhôïgf.G£?#—£Ñh4r9\ŽF#—£Ñh4¹¦Çåð‰F£Ñh4Ý ýƒËQ( …B¡P(¥(›¾+*Ê‘ËQ( …B¡P¨árš)ÿøãär …B¡P(ª¹œæT[är …B¡P( ¹…B¡P( …B.G.G¡P( …B¡Ë£"‚>T…Oär …B¡P(ªi¸<.&BAáþ…  …B¡P( ¹…B¡P( …B.G.G¡P( …B¡šË»ë4r9 …B¡P( Õ€\þm!—£P( …B¡P Åå4SòË322$%%ÇŽ;tèÐAƒ 
2dذa#jĈÃk1cÆlÞ¼944´²²2$$dÞ¼y°ÕèÑ£ásܸq&L˜X+"0}úô+V;v,**ª¢¢‚9ßòòòׯ_/Z´ö 9B£F"v5~üøI“&Áþ—-[¶råÊ}ûöÀNŠŠŠ¤¤¤þþûoH<䧆þÔ0ŠˆÃao3fÌ8þ|p¯³œðØ@p¹††W\Þv+¹…B¡P( ¹¼‘¸ÜÇÇGHHØw×®] 4¹\II‰àrÎ\«\]]ûôéC ®Ïž=»]»v+V¬xùò%`:çì<<<:wîLryrròˆ#€ËÅÅÅirùéÓ§‰y,ÜrùŽÛqðiçŸËeË+ª™¥/91tKêå•Õ |OšÜðµÛçM×bFo$ê?ûpؽä‚âJ†Œ¾}¯<­–4j{àñ@ Ùø”ìRšùÒIÀPž€ØB( ÄÀVxú¡P( …B5 —<àuøðáG½wïž E÷(’••Õ××ÿúõ+À´ššà2.ÿ_í¯<zôèñçŸvèÐaܸq@ÉŸ>}ª¬ýüü`’Ëa“AƒÖ‹‰‰yzz³ÁÕ«W/_¾|鿺\«‹/;vlذaP5xðà–ËÓ¾Üì?S2´´¼Š%Ë–•W‰\‹f†oÑ1³ärh§£Ê‰Ì«ž /ü‰æDÌ™xj‚¿÷‡ä}« “o êâ–?lkÀ?C‡l<÷P( …B¡šŒËSRRDDDz÷îÝ••ºuëÖ¥Kâí+C‡555-))ÑÒÒ‚š\ÊÏÏ_»v-ìö)))I̯³lAAAT.ÿðáÀ`vell™À%$bæ§“/ý‚™¹¸Ù¸x5/Š[Áe Eý½«ÿ7~ ïWx˜°Û4;.'é°µ‚½ÔÕÕˆË utt.¯ówŸÿ«ý뢩S§víÚuõêÕÄ7Oœ8‘ššZç({xxx÷îÝYr9Aöu›¤¡ÿ§BT˜ÖµÉ‚ðÁû Ì=D,‹Kþ3§¨¤"‡n `Çå@ùÌ3LÈ7ÀP·*)ûwÏßK« r¤“/ý‚ ñ½=0(î^òPÈåXT¬>r9 v›æÂåuJCC€¸_¿~JJJ$—÷ìÙóÞ½{œ¹ |Ó¦M;v(óæÏܹsNž<™Í—£¢¢ýÉ÷—“\»¢9â΃¨0]YU½èd8,zFð…Ëa;.'S6—Ö>8ƒ…\ŽEÅê#—£ð0a·i .ÿöí°rzz:ñÉY?uîܹ֭[—«ªªÂž={\x}àÀ@䬬,€ì¬ŸÊ®<عsç &À†©©©ååå~~~C† à¾}ûvdddJJJZZs¾ pyïÞ½¸Š!,,ìââ2o•™™I”ÿ™ V•””Ðárgx ‘/8Îr ôì7žÿ™.Ò_¤fê£ÇÊó‘›ü½ÎÎóX8äK§`DÚÖ™ð)´ÉßÌý3^õPÈåXT¬>r9 v›ÆærYYÙAƒ ýW~ªÿþD€\5°V:ujÕªUß¾}•””€˜MLLÚµk1:t:tèèÑ£GÕjäÈ‘£~jøðáÝ»w|‡ÏóçÏZô¿ŸÓÍ!%ì᯿þ@â'³A ¥¥¥½½½©\žœœÎŒ\ÎÇvcXµjg`õ?ý=´.çÜàXÍê"SÿþßT‡©ñ¯œÜ±%.wvv>zôè¦M›Ö³Ò:ŠÖR´¦V»ví277/))IKKn'Vé™÷¶qãFHsåÊ___êëPÊË˹Ϟ=+**JdݹˆˆK7Ð]“¿w–¥mÐ*Ô¿¨à¾Sçoò{ú2…æ¯i8Ô…ºjÅö€†kú{ ¤§ñsóËOÝŒ=ß½÷D§ ‹=_ ñzæ¢2ÇÔÙàܯzŒw5×ý´TÌ·¢ÊÆ,_ž÷x;5x>Yšê0ñvRðýÊÉù¤häŽÍßú6,—WVVæççX§p¯ôôô‚‚â/îKKK³³³SSS9¤‡µ_¿~-++c(졨¨(++‹Ý†Ÿ? íÓ§p9à5p9”¼ªª ûL©‡`çPrú\º£—ÌüÈ,}ñéˆ!b5ßuó ’}AY_ËÈ'ŒÝDLh!ã=à ٧î´Ù6\x2üŠæÇ„oÔWžQK³3h¸xÀ^™øä¬RúùÖ™€¡.ßd1^áÿ µD.çû ƒzùcnÎ\ÎnŸMåuµªªnŠönŸlö»­’ؘ¨Ñ„\Þ„T´ù`È …„Ï_k¾öLøP´é@o$Ô´ŠNÖ–V…EB}^ˆü…¸¼>§Æ¯Âåõ¼RñýÊÉÛI\þ+)&&†àòµk×\Žà…jRQQS½uëÖ}ûö=xð <š’«ž?>|øðvíÚ7N__ŸÊåDXKKkèСíÛ·Ÿ8q¢µµõ•+Wú÷ïég̘ÎLóDøõë׳gÏîØ±cçΗ/_žÀ_.7~›1mµwŸINðib™A&xù&ãï^‚“k·|‚ò¨£>VQ³0·Éš±Æö©›Àî»BÓ\úÿí"z(ÄÅû ™¸  *8OÄ/<¦°Î Gò…Ü¡ ÄåC/¯¨†°¹-‡UÔ,l]sVlˆO*ª¨¬þò}ËáP£7¼Ü]èÜíØÕ‚H×ý´šŸ—WÉ©'A²#W¢~,>JZ³;ˆ·ÒÒ,*dDÂóZ†vÛz$4*¾æÿˈ*ظäpxšš½Þç­}6Üw¿U˜§Soxǯý¨¯ÌÃ÷Ë~[Í®‚Ô£ ¹C‚Ëyèf\åTA6ƒ8Y:ãå ϲm9Ô‚Ýaª?—šáJ³Žð<¥bèWœO™K÷âò ÊÃòY>o³,lÕÔ~™ ]ˆ·Sƒ!SÎ%¬ódáÜ&t® q¥¢ÙÙ5®ÊÃᤠfWçõœeŸ§sv×yÅÃñòÿ€ÛÍÍíöíÛ7oÞ”’’ºs玌ŒŒl­ä~ b¤¥¥oܸqõêÕsçÎŽ·«°Åû÷ïaˆh¨–,x4…s¾W¯^?nÀ³gÃb`àÓ@€™°áÄð÷ï߉ÅÄÄDb?­ZµjÓ¦ ;. 
!‹ŠŠ`‘LÉ._¶-À;0—\ô È%®¼ ñÑñßÈm9¬¢f·ŸÄÿþŽ%+§tñžïvœçdz«±-Y `#êbá·Š¾Sy+-çsuuÍäNwÆ/ë9ߥB£ ¨UXA©ó&BÓ\B(é© ÈŠ@úN©£‚ MÒ0ãå!`j•YRR{{e …ç­‚Ô£ ¹C‚ËyèfÜrùÞ3Éiß«ªª³?—]–‰Ûr8´Î#;t–Û‡”ï°jø7v1t…C-ئú7ìBQ?ËŸœCaC(P,C¿¢ʰkjI GÀ¯Üüò›Š üârÎ%¬ódáÜ&MÅå4KÅî¨qËåìN š½—fãp8R®xzr9 …¢«ˆˆˆþù§mÛ¶ƒ ÒÐÐ` d]]ÝaƵk׎øÛæÆåìfR_qE}!—¡EúÔå5/ÏZ,æYÐs‚S«²€ëµðF_Ø3\‚œÌùRÆÃÝŽæ¿u°¬C2΋\•–·?Ö›ÐÜÚ,l®yãÃûȼs‰7£AÛRßÒÀŒÛºæ¬ÞØwªó€j^À÷þc1Ï$æ<?(oo*ü_]?zã¡›ÑÏÝ3 wÏéðQsÝ{Mt=ßàk^9•XVü¡Î'¡i.ƒf¸j¿Leîñ™ÙÕ‚åaâK¿‚=C 2Îu„4!µ¯dèW4ORÌmE*6±ð ö3nQÍD–›ÓœzNçÒDóda×&\]âJE³TìŽýòp8)hö^úÃîìfwÅãÜ£ËQ(Ôï£ú'£´Ì¸ér»ª™þjgìftúR3oüúœ ñïxUüåNnËÃÇëyà ¹…BýÂ7 v£˜Mâ_«ð¿n;ÿêGЫܱaÑ-á0ýÆÝ¹…Bá@Nݲ°ÍZ°Ù¯Ï$§QóܯÉÇ—•WÑYÕœG¡~¡3ìf¿VãóëŒàoÕxæTö@ŽZ=ËÓ@×ó†S³àòñãÇgeeaÀbcv#eee–›s˜êZç,X–zòäI¯^½úõëghhˆg> ÕÒx¹«ß¿…÷+¬««Ë/.§–0$$dêÔ©mÚ´aH™=zôhìy( …B¡P¨fÇå #F|­ÕÈ‘#‰˜±cÇš£2„¤^}}}â_» Lýßor?Öµ"÷«¬¬¬€’™G©###‘+**øÈåd 'Mš¤ªªJþ?"¡êêê-[¶ØÛÛcÏC¡P( …B5G.?s挮®î³gÏΞ=KÄ´©U«Z‘Ô M„ gΜ9yòä/_¾P)6)++nÛ¶-¹Uuíÿ†1“ôÒ¥K!SüÍ—“%„3¾ÌõرcšššØíP( …B¡PÍ”Ë===×®]»fÍ///"fìØ±VVVÄØ3K†UsçÎ566þßÇËmll¬­­!À°3Iÿñ_ñ…ËÉ0<30Œ—S§Ñ£P( …B¡P͑˫ªª†ÔŠœš€>räH†d@jaaáÏŸ?SWYZZö®0}\ÎÌÓÌYpµŠš&88xÒ¤I­ZµbøwCB%%%ØùP( …B¡PÍŽËQ( …B¡P(ärär …B¡P( ¹…B¡P( …B.G.G¡P( …B¡ËQ( …B¡P(ärär …B¡P( ¹…B¡P( …B!—£P( …B¡PÈå( …B¡P( ¹…B¡P( …B.G¡P( …B¡PÈå( …B¡P(r9 …B¡P( …B.G¡P( …B¡ËQ( …B¡P(r9 …B¡P( …\Ψ?êý]åä䈉‰ñ½` ”¾>[ñ·¦|s‘x«Z‹6 …B¡P¨ßËMMM{÷îÍGÄi¶\Î÷šÖ_,‹„Љ\ŽB¡P(ª%ry“#N£qy3„9äKl7 …B¡P¿<—ó SË‘/‘ËQ( Uç…›¨6é¯Äå,ã©‘œGÙgϞݩS§?ÿüsÞ¼yoÞ¼a¹‡AƒA²+V°ÌÔÙÙyùòå={ölÓ¦ ìjÆŒ666 I‡ºÒ‹¹¹¹gΜò´oß~Ô¨QÊÊÊÕÕÕœkš““søðáþýû·k×nàÀ’’’Ô¼`Ÿ„òwîÜyãÆ©©©ìrgh ÎugW¤ú vÕg'u iÛ¶-ìsêÔ©•••S^^>iÒ$ˆxXËÕÁýøñãæÍ›ÿúë/H)//«<==………;vìØ¯_?mmmæMÖ­[›tíÚuïÞ½_¿~åÐCê<ˆ( ÕüU'[4• ¦iÎ%Ä&ú›¹¼FçÎc^{ûömæýi¥K—2gjkkÛºuk†ý@Œ……ECpùèÑ£òRPPàPSÙaÆ1¬0`@ff&‘ ´´À”º–šç¦à\wú\NÿX°«>KÕY÷K—.‘>„Å[·n‹W¯^åöà RÓÀã 45FWW—a!!!j8 p,X6QA¡P(är„Nl"äòæÎåìÒ8::’œ—T+øZµjåîîΰá?ÿüóéÓ§ÏŸ?‡……1ïaUVV.***..~ðà±â‚Ë'L˜Å(,,@{B¥ˆ8,›¨ÎŠ P(r9B'6rù¯Êåëׯ'"ÉùäH'Æ–––t C¨¼¼œXÛ±cGúNŸËIT†&b€&9d$MÎE EbqÊ”)DaaarŽ õe—;CSðVw†H®Ž‡ê3«Îºƒ<<<à"Û´iCŒ…{ÿŸ½3‹â¼ÿ¸„+ÆË}šVÏ*¨©¨5µ"¦šŠA#`+jmÿFã•hlŽšh¼ŒŠxð`ÅŠÓ¦Mcæ_Õ6@c//))á9¢pEJw³éÚµ+“ [·n¼’Ù¡P,™7šõؤk!Ð}EÔöáïÿ;õÿ÷šu°¢¢‚NëëëÙòòr^/ I<B¶Í„Pƒ•‘ÈŽ¼Ò‰!bÔZxyë{ymm­5d&G‰ºº:^FÅyMF¹TUZ\\LÆÆÌ¹Šÿš£Æ^Îýšc“zʃ¬v(TÙ'‹f}ç6éZt_µ}g¸ví•™™ÙÌ*6UUÅþš˜˜‘pG^/o!mÕÖéL—uïå©zyMM Â.VCkkk €ýŸ*RZà¬Y³ØEÉÉÉ-êåMMÓ©S'&°ªªJiE]»veƒª/ç …f}ç6éZˆyUˆï;ÃèÑ£Ù¢üýý›ÙAµ!ìéO?ýÄÚ?bii©4‹ÈŽ@[÷ò»·¯ûûÍêîädlllaÑõ¶?T¯¼\Ç-”9üÛLMM{ôpÙ´a]+Ú¤ø!bgŽ:wîÜ·oŸysÿvãúeý¹ñÐÃ!5/777gŸ={Æ„\½z•—˜ìäNµ¾õÖ[Là±cǘï¿ÿ^ÕšfáÆtéÒ…9%›–Ú–ör¥=8p w¹³"ŠëËÙåÝj» ¶ïJ›ÄK£ñµP;°jûNìÛ·IcooÏœ?^|›ã剄 ¡‘gBÆŒ£4‹˜Ž€x¹‡Ç¨™3}b/D?|pd.pë¦7Þ¦W^®ã:::¬ýòßI‰wNøþ­·þØV¼œ9HL¸õÃ÷Gý|ß³µµ½xVO¼\‡Ôp¼¼ÿþLø?ÿùÏòòòÄÄÄÁƒ«òf©Tš››ËFFF2}ûö•6BLHLLL“¼üW¿úszøðᢢ"ÞªíîÇ"æÛÓÅ‹3&L())ùî»ï˜ÓßýîwL‚ÀÀ@v¯º½IIIéÕ«—H/WÛw¥M╦ñµP;Œjû^\\L,(ÄÙÙùñãÇÌRûßüæ7•••Z¼¸ª³wïÞ>|úô©››²cÇ¥YÔv ÃËMMMïÅßTu1&zܸ±¯¼òŠ™™ÙèÑîÿ¹vùÖÍ«ôWúöÍkl ±²²¤’æùóæXZZ¾üòËž'$ܽ¡-éÔq )Aô™b$/ùþÝw¦þ¹cÇ—­­­V,_ª!Ïyáܵ‘Z÷röñÏÌûm/^lèž½{÷211qrrüò‹Ï˜À”ä„Ù³}iкvíòчËT ,[ÔªVØÛÛ Œ€þ©áxypp0oÅ­¢Sþþ÷¿çnfÇæ7ožâ‚ÝåË—«­”¾hÑ"^!ìÂŒ7nèÒË•ö”T›D“×Bzq³3¯ÕÕÕ¬2°3èj½\mß•6I±4Í®…ÚaTÛ÷ùóç3áááÜ—»ÿ V.®ªÁämI>bĹ\®4‹ÚŽ€axùðáoxMšx$âàý{·yQ½zý&<ì gV’¼=yÎð™¾lélš¥KÏö÷¥ƒEïÿŸû¨‘W.] 
ß%µš9ã]mI§Ž[H:KÞyòD”Ú†ÌùëïÇŽ!g½~õÉ«H‰lÑùröq)ö¼5/–ä;8h+¹/ÅN›öø÷ùsÝ݆åέëÌ@©X¦(º¥aɨýRÃñr"((¨wïÞt Õ½{÷¹sçfgg󧤤Œ;ÖÜܼk×®&LàæýöÛoÝÝÝ;vìHŠ3|øðC‡‰©”.“É>üðC懩%k×®MJúùniêÔ©ºôrU=-..^±bݱÐ(YXX3›ú±úùùuëÖbÿö·¿±kô©(á«í»Ò&©ú½Ï¦^ 1Ã(Ðw’Zæ7ƒ† Æ,³©««c~b‰ý½O­\\U—’lûwÞéÔ©¹þ‚ Ø­f”¢ö"€x9ÙØüysz÷îEŽŽ$aÜÉfö‘˜pÛÒÒ’¢Ïœ dÜ£czîîäD’GÇNNŽg£ž%©²¶¶Ò–tê²…«V®èÙ³ÇÇ«>´µ±9¾ ¼KF«˜ØÎÎ.æÜiæøÜÙ“zåå)É ôÁÊ‹up°ÿä_«HÁ¹)ɘ©ñ…³ËuõÊEµ# ÿCÚö¼è’ŠŠ F {ôèÑÐåM&´s/giï“ÑzOû˨‘#˜ï¾=8tˆ[ÇŽÙ¯2ácF{lÛº‘¶nÙ0ÙË“ ämcÅ&Öâf#:h¡­­í©ßÓÁ±¨#..ÎAÛ6ÓñgŸ®f á>¨@Æþ™}›/'æÅFJ"ÆÛ­[7gçî!;·+ö‚}¨X^EªF@ÿ‡^þó#—ÌoÉd²ÂÂÂeË–1!~~~x9èÞËÙÉQSSSvò’ìöîíë$ÄñwþêϾov»º¦ƒÁƒýðýQv.6îòlØ¢-ìØñev:ùZÜTÈ¿V¯ôÛÇI„'wÏŸ=ÅÖNrIüyuû¸VñòþcþŸÿ½{÷úä_«Øykz,ùàý±cF_8†Ê9y"ŠÒkK:uÙÂ)~ÛÍm0©<¥¹zåâû ÿùÒK/uìØ‘·öƒyÌùÛìqãÆÞ¸~ù?×.SÉlíô§Œä‘tðÖ[TtÇ®]»°+j´½Ëmj¼¿ß,î~,lì$ωgNýð )~wÈ×ì2’Ÿ§¸¾\ÕÀòôWÕèÿÂËÁÿ¨­­ òðð°²²2iÄÙÙyöìÙ=ÂàÀË@—^öÍÇÿÁ¢«±±±½½Ý´¿L%f¢v|èìÜÂþµz%׺¾üâ3²«}ßìfCRS—.YäääHj¯½Ö›YF¢éÔe ÉüfÏöup°§2mmlfÎx—lïߟò§?þA1qRâ’Η_~ÙÊÊ’»y9h¿~}©ªë³OW+J䋾òÊ+ÚÝ…YmBÅöíóúÜ€¿±;¨pëݺeï_}•F7-{CCØ•èäñ _w³èºê£ÂËk³ªÐÿ!…—@_÷©·¿vÙUgC /¼^/‡—^/‡—cHÛŠ—/^¼¸EKnjù-מ67€í-©˜¢pðrx9†Èà‡Ô ¼\wáfŸ½9’ôàÁƒõë×/]ºôË/¿¼}û¶nDPã¼ñññëÖ­£ÖnذááÇJCÄäzþüy`` …Ð3+ INNþâ‹/(dûöí¥¥¥Ü›”x1^ˆÒÖRQuÿþ}n wïÞeO/^¼¨˜WÇ^ðrH'†^/ײH}üñÇ7oÞ”ËåùùùÌO¾ë³—ïÛ·/;;›Z{íÚ5j¹Ò1¹ÂÂÂNœ8A!ôÌôZ1dõêÕIIIBÏäؤĪ:«*|÷îÝ{÷îݵk7%Ý0?üY[[KwðrÐ.¤zû` IŸ[ˆ!2Ô!mc^ÎÌz~úé§Ììæ™3gHû+ÀÍ›››»mÛ6f~Z*•RÈãÇéxÅŠÇg×±ð²‹ÉÅ’••Eê¶dÉz~úô)S`ttôG}ÄX871“‘‘Q]]-ÐH**¢ê(„ªVÛ< ?{öìÊ•+IXïÞ½{úôi:^µjÕ­[·KSUBdd$…(½EEEŸþ¹pˆ@.fV›žSWB’MLÏÔ ÞÍŒÚÄÜ ûÒUú"¾‚¯+hCt(Ð&çËIz=zôïÿ›ŽIk®]»&“ÉÄkË–-¤Vr¹<99yíÚµBvåÊ•šššK—.q×—s³‹ÉÅBQÈD‘š3E]¾|™öàÁžbþôÓOGŽ¡òÉ®XµR¬ŽJ`«£òÕ6[#¹5åâÖ®XšÒ¨=d¨ŠcH6¼iÓ&îJÅá\|ðã¬T>ÝÀ( IHH ‰_¾|yTTÂÒ¤Ä eee?üðÝ{{9¥‰Ѹö†ŽÙ”äè4DT)]Ðââb¥^ÎÒo¾ù†)*==n ëâ¾t•¾…¯ ðë †~yù?þøÙgŸ‘¢Ñ)=SÈíÛ··nÝJ‚{úôiUâEÒÆÎ43¹(„Qy²"U^.&71Å8"%`<ò…êU ÙÙÙì:Åêè@±LæñjäÕ®XšÒ”Jyff&©$3k«*Dm.1óå,t«ÀÈ«À|¹@bê2wú_ñBÐm ÷ÿ-«V­¢6%yöÑ£G÷ï߯4¯âÒ×æÍ›é """--›Xñ¥«ôE(|E¾®/oq/_¶lÙÇÉfî߿ϕ’§OŸR„•””ðò’*ݸqƒì‡ Y¿~}\\•såÊ®ôp³‹ÉÅ‹âÍ—«ÒÁÀ‘ÿݹsgåÊ•ªª£\luÌt¬póTÕÈvWšp ,×®][½zuJJŠ@ˆ˜\d·ìqÆtC^4Î(çææ®[·îüùóÜ›”˜¡²²òÔ©S[¶lè @HH{JǦLOO§v‘âk’7¤Ä‘#Gâãã·oßÎK¬øÒUõ"¸‚/4ý:àåZöòÓ§OÔÈ™3g¸ëÂ?þøãèèh:•H$Ë—/çù Ið×_MbÄ.ÎVº´——]L.–ÌÌLÒ¬%K–ÐsVV–°?ݾ}›Ê¡Äôœ ª‘Š+Â…›§Öêx¥ —ÀÍÎ¥ººZiˆÚ\ÅÅÅÌrvz¦cJ£Â$þä“Oèú²³Â bó¾„@]£R7_(ûþ{{“œœÌžÒ1ãñb¾å©8¤/ÏÐ ‰âÊoÅ—®ª!¼zíåí6d]DºC¯­­ÅP^/­vÉV¬X!ü]Xx9ðr¼x9^@ïùÊïðÐøÁÌ»ÎéÏCŸÛf 6˜Æ|wšÔYx9x9¼^/Gwàå"ø®=¾·Ï¾ãÒx9ðr4" /o^ÞF?oÊËJÅ?N:¡TÎÐwt¿½õ4“n <àåðr¨¼Ý—ÃËÕØÉG.oŸrfx}Ç¥ofß©˜¢€‹ó¼\ƒ(x9¼^/Gwàå†ïåj‡’d')gí³ï¸ôÍì;ÏËEhhÍÌ^¿ÝºC3òÂËáåP+x9¼^®^®?·3x9.½>x¹ÚcCòráE,z¸”n /Gƒ!²ðrôröc ^®7-ðrx9¼\÷oU‘ÎÝ5ŸðÞMyyNa9%˜»5š 9}SJ!1w3Ù¹[ÏRHUü­•G›£¶ºÌ¨ÅÂÅ닪ñN¯x¬xÚLµ~yhÑ{´è‚ª4§ÍÍìo“ÆíÛ˜Ä'yÏkäµõ/^Èäu%ÕY?•h«%:ör^JêTfÞsê V†]à] Åëh^.üÏèöàåëׯ766¦g½ýïAKô¹ÜFFF]»vì]cFO½¾ûГúz5yÿ4㆘*¨Ìæ{ù© tìòä§R É-*gC(–B®&=ki6 /×L[Ô˵è¦ðr1¼ÔðìÔõÔOö^\¹;f÷qBšÞ¦½œ9þ(ä| ä?ÏòþJD^~/ש—롉êLÎêêê^}õÕÀÀÀ_ÿú×tÜ®¼üçÂËËïܹ³páBGGG’'x9¼¼9“Ð÷îÝ6l{Z__ß«W¯øøx6ËoûÛ¨¨(zÕ•––}:}¦¶¢ ìÚµ‹7F!šÉÙ‰'ÜÜÜè€dâäÉ“l¸L&[¼x±µµu·nÝ6nÜÈ›uã NUUÕœ9sº6@§l‚;vy˜šš<øîÝ»úÓwÅë¾zõê÷Þ{9NOO÷òòêܹ³¹¹ù[o½•——§t”&k—þÔ©Sýû÷§ëBWg÷îÝÂÝ©¬¬ôõõ}å•Wììì¾úê+]¾)šßw{9áêêÊŠ8ó¸qã¸YŽ;ÆMLo???æxæÌ™Û¶mãÆ’¸Ó#sL¥¸¸˜"íf œ±ù^.2ÏžŸåV¿:<–¢Ç­„ç$ë6Ï3)™@n‚ø¤RJ`7(ÆcÊõ„äRnóø¿ÕI{ù;Ÿ#/,­bN?Þw™ÒËk&#> ¿ÊRlÃx®;ÁæZ±çÇÄŒüjY-=î?ΧSžÅz~,‰ŠK{^^M÷`Š I>üýýÿþ÷¿·îÄ!×Q¸vÒT/Ÿ8qâ7ß|C{÷î¥c6|åÊ•üã©¿¤‹-R:Vìé’%K(/i\nnîŸþô§¥K—² Hמ>}Z^^þÙgŸqç[½ïŠ×=33ÓÞÞž9îׯ_LL ùèóçÏßÿ}ò¥¹T%ÓÿKOw\G­®®¦^³ Tu‡®ïÛo¿ýS#$îº|S4¿ïº÷ò­[·²CAï ºõåf©¨¨à&ÎÉÉqpp`Žé¶‡N¹±ôîëÛ·/;_îááqöìY¹\ΫQ8£V¼|Ó¦$z0ì³°—?Í©b½œ‰]ñEJq‰ìfüs®ŽsüsU媪ªû2ðÑ9K\´2_Nôì†Fv_|@Ççï<¦ç#—R…7üÈ+áJ9©@BúOÞk~˜öïâÓj0ƒÿª9SãÎñ³7žÿ¡’™irî²J¹òÊo.3!)Y…ûIXÜ„UG©Ì/_¿'Íçf|˜Uô×Mg(v×É:}ð¤‰]²ë"™µÄçË“ÿɬÉù*â?bbu?_¾?ºá¾ôѳ¢5á?Ò#ýY‘þ¬cÙuüÝ¥g}yðò¿¾¹pãÁ3F¤˜ØCc² ËÖ-\JnX¤q¹áE’]PÊšbkÍ—ÿ|7þSɦˆ«Ô˜“×RéôI^‰˜' §×-uyMø¥‡5ÄÒ©°š‹oü¹[éÜ”%åÕ÷Òó¸… ·œçoKCOÞYz?Ä¥PlÜý'ܼ'®=ÜqõCqWPë×bë/çËþ 
ªÎËÙ¡~Ôx!ö¾+ìå48tOãCÇ$JsŠEŽ^;Z_®xУG””æ˜ì“>[ýúŒ£ðì¤Ir–žžnccÃLoÓ3K¥R&ª{÷î>öÔÑÑ155õçÏ•‡œœØ………Ì1鈉‰‰þô]ÑTd2™©©©bíÔrµ~ÃM¦ÿ—ÞÙÙ9((ˆ»ÔA ;tAé¥Â.™Ð团ù}×½—ÓÝ‹•• Éq=g‘µb’lö}AŠÛ/vìØ‘‰-**Z¸pá¯ýë.]º oÚ´‰n«ÄdÔØË§MS³ã KɸöLÝ-z.;û# Å>yV%°@…ŽÓ¤?ß´P öƒc´îåäß 7GoÒñ=iÃ÷IþØPrrf…l…ü#è##£V÷rÆQš³ÈxéÒ¥¼ÏòeË–1QÆÆÆŠÓrª¼œ×ÖÖ*z†ªôúÐw¥óåìÌe\\œ»»{§Nx—›—KU2ý¿ô7nÜðòò"}$Ïûᇄ»Ã»¾º|S4¿ïº÷rbòäÉ¡¡¡ôþÚ±c/KII‰ªùr{{ûÊÊJµõæççûí·#GŽœ:uªÈŒÍß¿œ;_Îó”]ø{Ÿ®ø5Pž—3ŸOJ£´âå+¿iø¦lô­Œ·V­–Õæ—TR`va¹¼¶nª£N±Ëwÿo¥ ¥¡ŠbNé€N)k±ã?<¢¨¶¤þÔ—‡YEìÊuæA!L*äѳâÈ+©ï~qœ›ñý\Úø!ÓLKx°Ö.«ûùr™¼am;ÇLúãå2¹²±ú¥è8×°²‚¤œYB ëË™ãW×(Æ ¸ÒX Ôîh³=êFôG•ãö¼¬JiË?üeË¿¿tëá³çåÕÜ÷~ý/{ý!§×"¯ ¶öc¡ºžä=ç­EQëå C]+f¾œIù¬ ôJbæ.‰½öîåÎÎÎOŸ>}ÑzhýËÌ9÷›ŽR©”>oê|yZZ;_N§:ðrm}ï“eõêÕ³fÍbgˆI}ž?^__O"¥jßLUÉôüÒÿïÏ_}ýÉ“'ÙÕ;ªºCTé|¹Þmî{Ÿ ‰ÄÍÍ­wïÞ¼U+”…×Bîúr:8}ú47öÒ¥KìrÊ›››û¿ÿ—”°3âÂ_èêw…„íYi¬€|·„—OZI žSXþÏà†i¹K‰ ß¹›IÇï}½ªFÎZ¸H/Wª¶ŒÒÝNËýó§QÜØ™ëNž¿“ɬbg®¨*MÑËßþ$Ji¿„cáåŠV÷ÉÞ‹O4®(©høÔÉë©-íåµ;.|´û¼â²fz¹ŠYÖ¬^Î>BO6lj$rAvFã\þ¹[éŸ6^‘çË_nZ"æ jýI•—+mpS½|Ý¡+wÒrJ+jزM ^{÷ò/¾øbâĉ=’Ëå÷îÝ›>}ºÒZnKc­ËÙÞ½{'MšÄK3a„}ûö5¼°V­R\_Þ­[7vÝ·ƒ‹/öôôÌkä­·ÞúàƒÔzy“­E½œÌéÎ;ï¿ÿ>w?KKËï¿ÿ¾ººš|”®5›˜7ª’i÷UÑÝ÷Ýw“’’jjjNœ8amm-ܺ þóŸóá®/xS´s/§¥Á\¹r¥bf?–²²²C‡q÷c¡ƒÁƒÓ¡×$Ý“jÓk’î”Ø¼4ø©©©µµµŸ~úéøñãÅd$lmmoݺE—)::º©Ë¸óåÜ)ó¶èåì’fAË®“ ܸ7bä•»–ü‹™u, ‚Î ¬cQª¶”¥¤ñSöÉO¥Üïn²ÉÿŠdö‡!ñãåñé ÍþxߥŽÕ½—ëó:fíõ¾3ñªrJþCNI’ôÅK¤ætÌv¤…̬” z¹Ì =Í/ãg­»Ž…÷X¹;†»°D¸åÌ=ÉÇ¡?‹ìö¨j½\ílQ/g–ë°æ½õèõæ¯caÿúæÂÁÆoÁм«iï^^WW÷å—_2[‹ 0€û™×F½|èС§Nâ¥ùᇘogÊd2RUr‹_ýêW›7ofb׬YÓ¹sgÅÁ©¬¬üë_ÿÚ¥:`ÿ¥®ç^Î,½  4hùòåÜÝTÈœ~ýë_»¸¸²­â€ªdúïå‡~ýõ×MLLú÷ïæÌáîí½÷Þ{;v$½#gWá ¼)ôÖËÅ RÊ“÷ìÙ3;;[194Ý3û—Óm0oÿrÒîwÞyÇ‚†”^–$îlÔùóçéîÈÊÊŠ.i÷üùó‹ŠŠÄd$œœœ^zé%ºXGŽisó寕>©LH.ýÍÈØfzù¾³÷ç³é1ë¼Û®ªiÙõ‹™ï}Þ}”ç½æ‡¿üû(~ïS•ÏÙ|†™/«¬Y¶;–‰½“–÷IXÜ;Ÿ{kåQfQÍýÇb¼üý¯/4n=Q6wëÙ?}tdÆÚë¿»A· bbéñ¼¼aê÷¯›ÎèÆËâ¾´úèYÑ¿Ã~d¿ý¦ÊHÊ«n`6E\Õ—}ì&¹ Y#ÕG»Ï¯=xù»‹÷YsZýÍf㿈ƥ ‡c~€BH˜Z®Á{Oß%Û#;ßþý >zÐÁ³‚R Ü«ì+ƒŠ!ÂÎ|ï“ùVhcl¡¿÷™SXvîVú®n}¾ÿÇBκ/öÌþy.iüâ¬Ú–?n¼£8~õ!©ùÖ#×òŠËÕÊ«ðli/gÖœPƒéöccDÜ“¼Ås‡šN÷ÿ÷BU×Òž†EÇÓ®Ü}þ›S Ó4,íÚËÛ ØÄ¿÷©ÝðàÁ«¯¾Ú†úÞ*DDD°k¢´²”«uؼœ«ÑÜ}ý[1–ÈKÀ; }Ü}è—7.î9”ÕL/ÿëŸÿÅO"ëù±„BH‘kþ»bu溓¼ô¿Ü'±àÃÐK“ÐÜ@ßõ§r‹*˜Ù¯Àïo3§üx/‹YM[TVs7ÓçËãb¼œ ‚Î]¹ÿ´¤¢†J+(©¤¼‹w^»5ò6û_rÝüÞçw“ ·íû©¸â;Õû$2{ž0Ë~^èjŸÄ ÈÿÜÏø©¢JFW¡¤¢únZÎÎn1QwR³ysÉÒÆÉf oÑoºq/=—^4bôx^V•žK}?àœ}ëöIÌ}ª½}i$IÍi0©j¢²ZFVzà\‚È–yðrÒãŸè HïGºan„^¨ûñ+ØÒ^¾áÛ¸Ô¬B¦ÁäèÔSÅÓøÓU ),­än£ªk»Žßº—žG÷{Ì>‰Ô/¹¾^9CßÑ}>‹-***ÊÉÉ™8qââÅ‹áåóèwïÞuss»uë–Áxy¢…~—§<š³zA~ nç7øîþï AÎÐwt_-[·nµ¶¶îܹóŒ3ÊÊÊàå^njjÊýMn”øu2ú÷}ôç/‡—ÃËÑxx9¼\/äìÔ©Œ|ˆy˜—·Û¾ãÒ7³ïóåxÀËÑ`x9¼^®ešd'&gí¹ï¸ôÍì;€—ã/GƒáåðrýõòöúÞ>ûŽKàåxÀËÑ`ˆ,¼¼ x9x9ðr4^/‡—€—ÃËáåh<º//Ç^ŽÃËáåÍôòï}ÕJË^ŽÙ 4fx9ðràå-îåëÖaô€—/‡—/^/^¼^¼x9¼=ôòoh㈗㠴]àåÀËáåÀË€—ÃË€—/oi/ôèш#ÌÌÌ<<<222t9¬Ô¼¶¼¼’òµk×–••Ñó˜1ctéßðr/ÿSSÓòòr: 5733ƒ—xy+xùðáÃ7nÜHj¾víÚ‘#G2••• lšÇûùù988¸4ˆ[Î!C¢££¹^¾|yàÀ¬yóàyy||ü”)SlllLLLœœœ–,YRUUÅ&¸pႇ‡Ç+Œ=:66V|^/oK^žššJjnjj:bĈôôôŠŠŠM›6‘X‡††²Rîââ\PPPSSsûöíiÓ¦±ÙwïÞý—¿ü…[à?þñuœJ_Η;;;ïß¿¿°°P.—ß¿ܸq .dbãâ⬬¬>\VVVZZzðàA:½~ýº˜¼^ÞÆ¼œ¥¼¼|Æ ƒ Ú¸q#Ù9îççG¦.ËÚÚ://9­­­uppÈÌÌéåQQQܺ1°³³cŽ'MšÂݱcÇäÉ“Åäðò¶çåeeeëÖ­ñ—Lí1bÄÞ½{™Óððp:¥@1•Ši3å?~üÁƒËËËsrr,XÀ†Sâýû÷Êåòû÷ï7náÂ…šI°Ò(¥õRóè.‚¹ ¥¥¥”€N¯_¿Îæ:Ù?~üÌ™3ts¥•—¾Ú’ÇŽK­ÊÎή­­ÍÊÊ¢û¥÷Þ{OLg›3’Úº Pv^®_^îçç·iÓ&Uez{{‡„„pC¶oßÎ ¿¿ÿš5k¸±Gõôôl¾—«-™\°W¯^III)))½{÷¦S‘•Ši3å T*sQQQÜôôt;;;-z¹Òz'MšÄ» ;vì˜>>{öìá†p×—Ïž=["‘´„—«-ùîÝ»}úôa¾ùúë¯ÇÇÇ󘘘ð¾ *¾ÍúæåžžžŠë˽¼¼Z×Ë;uêTXXÈMÿþûïóªVuà倗óÉÈÈèÞ½;³‹L&#Á%gcSRR\\\"""ÊËËÉ€ÃÃùû±deeõë×/444??_.—SQ$ñÇo¾3 —L:NRÎîxñâž}ûR ¹%P‚°°0Åe!bÚ¬o^~ùòeKKKf?âСCtÊ®ªo-/§;œwÞyG*•Ò0Òó|`eeÅ«ZÕU9’ªv“ÔÊU€ÊðrýòòûyÏš5ËÎÎÎÔÔtàÀäÜØÄÄD///Òq33377·ÈÈH®ÐdggÏŸ?ŸÜòÚØØx{{s·ÖnŽ& ”L­]´h71úúúrCŽ?NyŒŒkWÛf}órâܹs£FêØˆ»»{LLŒH{n9/¯ªªZºt)ÝÔ[[[“£Ó]¯jUW^x9ðr}órfÊP÷ÏÀË€—ÃË€—/‡—/^/D+[J ÒC›ë£T*577Ÿ7ožŒ$õ‚ú’‘‘?1€—·G/×n]:¯ï€ª«« `$©ýû÷÷óóßÀËáåm‰ììlSSÓK—.ÌHÆÆÆRrssñWðòÖäÞ½{^^^æææ®®®‰„kx.\ðððx¥‘Ñ£G“à ‹ ÷·â£¢¢†JÅvíÚÕÓÓ3!!A¼MJ¥R+++ÊN…DGG‹ìNMMͺuë† 
Ò¹sgcccoooê·FU«>(0>>~Ê”)666&&&NNNK–,©ªªâ¦Ù¼yóرc l$ÝÝÝñWðòV#99™ìóðáÃeee%%%aaad~¬áÅÅÅ‘Ï1±¥¥¥¤Óëׯ‹´I2TÊB)û¡C‡©:16™™™I‰×¯_ŸŸŸOZ|úôigggjŒ˜‘…/]º4==]&“UWW§¦¦†„„Œ1BL½Híß¿¿°°P.—ß¿ܸq .䦙0aBhh¨$ ѤI“ðWðòVƒ,–œŒ²}ûvÖðÈÕx±;vì˜oÞ<èK@@õ…·º^¼^¼x9¼x9ðrx9ðràåðràåÀËáåÀË€—ÃË€—/‡—/^/^¼^¼x9¼x9ðrx9ðràåðràåZ€šÒ¤ô‰ÄÉÉ©C#º¬WƒB:ph¡×“ðhH¥Rssóy󿵓ÑPCFFþi÷Šðòvíå...gÏž­««Ó/YHË9ðhøúú0 ºººŒ†*hú÷ïïç籆—ðr­ù‘‘Q}}}«kž˜¨Àhdgg›šš^ºt©ýŒ†±±±4¹¹¹íÜËß}÷Ýàà`™LFÙé9((ˆBðw¼Ü0½üñãÇ~~~fffƒ ⥼wïž———………¹¹¹«««D"áê…T*õññ±²²¢Ø¡C‡FGGóD„‡øQ®÷Â… ¯42zôhr8aûaé **ŠšJÅvíÚÕÓÓ3!!A¼B ÷W­– ÆæÍ›ÇŽÛF# €7îïï?wî\^2ww÷ÀÀÀ¦¾ßZ¥¶^á÷‚@É4D‘‘‘nnn/¿ü2]¦I“&%&&jåPSS³sçÎñãÇS.z¦c Áßeðrôr—ààà‚‚ú¼¿}ûö´iÓØØääd''§Ã‡—•••””„……‘½±V‘™™éèè¸~ýúüüüªªªÓ§O;;;ÇÅʼn´:„ë¥*ȘØÒÒÒƒÒéõë×Eš(ie¡Œ”ýСCÔªNL›EöW­«Šš0aBhhh{ z¥1bïÞ½Ìixx8*êfHH n“†W¸UÂõ ¿„Kæ$½Ý(öáÇÍ|#0mÞµkëåt¬ÔË[åk¼\›^îçç·iÓ&Uez{{“qC¶oßÎ~üûûû¯Y³†{ôèQOOÏæ{¹p½äj¼Ø;vLž}ú0ßU}ýõ×ãããÓPcšú½Oµ­¨Wø½ \²âHîÞ½›·\ÕÿC/o ##£{÷îÌ2™Œ4…,MIIqqq‰ˆˆ(//' çî’••Õ¯_?RÉüü|¹\NE‘>>¼ù^.\ïåË—---™H˜]Dè”]+¼`Á‚7ß|3))‰šTTTtàÀ=z(î@B©pfß ªŽ×€‘#GnÛ¶7Ó,²¿{¹ª} o4¨:’cv?Ç‹/öíÛ— XoS÷In•p½Âïá’y#IoFIÞÎ6TuXXXó`KÕë óñàåmÛË_4îÊ|87 ™hXX˜âr” ¼ùæ›III”±¨¨èÀ=zôЗ»¹¹mܸñÎ;åååµµµiiisæÌñõõÕ–—«Ú'‘tœ†"66–9½xñbß¾}© Í¿:T£‘‘ÑâÅ‹…“9û$^n˜^þ¢q‘ùó绸¸˜ššÚØØx{{³êÉpüøqŠ%qäRQQ±páBccckkë©S§&$$èÀ˯]»F"Þ³gO“N: 2dÆ ìjœÊhR½ª~WhÖ¬Y‹-â†Ð©âý€fWG)t³Ä¦‰‰‰Áï ^Þæ½4 ²íTWWëI{ªªªúöíËûU#x9¼ÜÀ‘J¥æææóæÍÓ“öP{x«ùàåðr/‡—Ж½œ´[í^àå-èåD/ðò–òò+—bD>àå^/^/ðrx9ðrx9€—ÃË€—Ã˼^¼^¼x9¼x9ðrx9ðràåðràåÀËáåÀË€—ÃË€—/‡—ÐÞ½¼C‡Mê¡D"qrrêÐHSGGƒ,ŒT*577Ÿ7ožnÆM¸Úô¨¶é±ÒÁàëøúRu={ö”ÉdªÚ@¯zdddàxy“?§]\\Ξ=[WW§K'ÐOYlf«|}} P]]­®i0÷Nmz¬ôÙË5ËH¹lmmCBBTE¯ÿþýûûùùá㼼ɷFFFõõõmQ¶ôªUÙÙÙ¦¦¦—.]2xׄ—·õ×y3½|ãÆ=zôàN™óŠŠ¥÷Bnn.>´//¿wïž———………¹¹¹«««D"á~FJ¥R+++Š:thtt4ï#–G“>žUe¡Àøøø)S¦ØØØ˜˜˜899-Y²¤ªªJd¥ÂmNNNžkìå”Ñ$'8~ü8µÊÈÈH1±p›ïß¿OýíÒ¥ ³ù¹sç^zé%1Ý©ªªZºti÷îÝ­­­É,I´Õª‚¿+$|¨ £FêØˆ»»{LL E÷ .tpp`Ú}ú´³³s\\œ¢éjÜ{öl‰D¢Ÿ^nbbRSS/€—· /ÏÈÈèÞ½;³‹L&‹'gcSRR\\\"""ÊËËKKKÃÃùû±deeõë×/444??_.—SQ$ñÇ×/ïÓ§OXX˜f?f/€—·R©tÖ¬Yvvv¦¦¦<|ø07611ÑËË‹tÜÌÌÌÍÍ-22’ë¬ÙÙÙóçÏ'w§¼666ÞÞÞ±±±ÍwÜÊh’—?~œZedd¤Ù¦éo/‡—/o‹^ÎLëþÀËáåÀËáå^/^/ðrx9ðrƒðò¦né-‘Hœœœš¿¸V¶.DÇÛ–öæèééé“'OîÒÈÛo¿‘‘¿b¼^Þš6éââröìYzS—^®cc6`/þü9]÷uëÖ­]»¶gÏž¥¥¥øCÀËáå­f“FFFõõõzb±ðrÝ@F>{ölnȬY³6n܈?d¼^®’Çûùù988˜™™ 4ˆ×Â{÷îyyyYXX˜››»ººJ$®MJ¥R+++Š:thtt4Ï;5þM{áz/\¸àááñJ#£GŽ–]6¢¢¢¨©Tl×®]===ijp5îQ@@]nbÿ¹sç6ÿâÖÔÔ"2¤sçÎÆÆÆt•½½½iôZúEEåüùóܳgÏŽ;Èx9¼\¥”»¸¸Ãݾ}{Ú´illrr²““ÓáÇËÊÊJJJÂÂÂÈeY›ÌÌÌttt\¿~}~~~UUÕéÓ§ãââD:®ÂõRdÆLliiéÁƒéôúõë"½œÌ˜²PFÊ~èÐ!êU'¦Í"û«AhäGŒ±wï^æ4<<œN)°ù×—,|éÒ¥ééé2™¬ºº:555$$„ oéו­­m^^7$77×ÎÎÈx9¼\9~~~›6m°:Ò8nÈöíÛY›ô÷÷_³f 7öèÑ£žžžÍ÷ráz'MšÄ‹Ý±cÇäÉ“Ez9%æå>}º˜6‹ì¯="²³³{õê•”””’’Ò»wo:ÕÊõ577§;ÍòvP˜¼¦¦¦r¹œFŽéîîNtc`ff†?d¼^®{{ûgÏž©Šµ±±ÉÍÍå†/²fFyyúXRRbiiÙ|/®×ÊÊŠ›““CYDz¹b^[[[1mÙ_ zÄçêê:lذ«W¯jëú2dâĉ'Ož,**ÒåëŠõòßýîw¤æðr^/Wƒ‰‰ ãOJ166®­­å†PbÖ&)¯âdª‘‘Qó½\¸^¥±Ô‘^.W¸Í"û«AXF7¢Åë›™™9sæLkkkªËÅÅe̘1«V­Ò£c /‡—7 áùrE»âÎò:::Êd2µUhàåÂõ*/'õdŽ·©¬¬ž/§AÓf‘ýÕ G AAAsæÌ Öú….//ß¹s§›››‡‡‡˜,ÍYÇ‚ï}ðrxyÓðõõؽÎÇÇgÏž=ÜîªèÙ³gK$’–ðráz===×—{yy±,•J¹±QQQ\/'7åÆîÚµkÆŒ¼(]–-²¿ôˆ¸{÷nŸ>}˜o²¾þúëäÐ-tÅ‹‹‹;vìØÒ¯+¥û$nذÈx9¼\9Ý»wgöc‘Éd¤ƒdllJJŠ‹‹KDDDyy9ùbxx8w‘¬¬¬~ýú…††æççËår*ŠÔsøðáÍ÷ráz/_¾liiÉìmÂì©B§ìšì ¼ùæ›IIIÔ¤¢¢¢ôèÑCq?ÊH…Óåpvv¦êx 9rä¶mÛxóî"û«A¨1$åìn/^ìÛ·/¥Tɦ¦››Ýwݹs‡J«­­MKK›3gÝŒµôëŠù]¡¯¾úª¸ü®/‡—«G*•Κ5ËÎÎÎÔÔtàÀä»ÜØÄÄD///’H333’¼ÈÈHÞ."óçÏ'£¼666ÞÞÞÜ­Ä5örµõž;wnÔ¨Qqww‰‰a£***.\èàà`lllmm=uêÔ„„®—K$WWW*– §*¨"ÅÚoÞ¼Ù«W/EÓ_ zDã¿hÑ"nb:åÙs]]¥W\ /̵k×HÄɉ)c§N† ²aÃWã4‰GQ;wîÜ¥K—·ß~›÷O ÀËáå MråÊòòåË—c(/78/gækuÿÜÞÐÆX-[¶ÌÅÅEqq ¼¼í{9h;¼öÚkÇŽÃ8^/^¼^¼xy;óòz³kŠT*577Ÿ7ožÎšGuQjS^¹reÔ¨QÜVõìÙ“·5x‡ö¹ÿ àåðrór__ßTWWë¬yTWÿþýýüüÔ¦ôòòânB­²µµ —^/7(/ÏÎÎ655½té’Ž›Kõæææ ¤INNîׯ_}}=·U7nìÑ£wÊ^x9¼\,÷îÝóòò²°°077wuu•H$¬MðfŽýýýçΫ›†mÞ¼yìØ±Jo¢¢¢†J îÚµ«§§gBB‚øbÏŸ??yòänݺ™™™õîÝ{ÕªU¼ùxÂÝÝ=00P 
Ù³g‡††òZUYYioo¿cÇx9àåðò¦‘œœìäätøðá²²²’’’°°02]Ö&kjjFŒ±wï^æ4<<œN)P7m›0aÏ}YÙ¥»ˆƒ–––R³:äèèHY,¹>õ7;;»¶¶6++‹î=Þ{ï=^šI“&©*áÙ³g={öä3h[¶lqvvf£à倗ÃËEáííÍ[½}ûv®M’¿öêÕ+)))%%¥wïÞt*²ä*ß6ºaHKKSZ2wNš ÓéÓ§k6döt+ ¤Î’^«Ê²lÙ²¯¾úJ±Uô\UUåàà@c/¼^Þlllx ©É¼y6çêê:lذ«W¯ê²m&&&Jçæ©y¼6çääØÚÚŠ)³¾¾>88ø7Þèܹ³ÀÝBuuµ©©©Òž?NÊ^RR¢ÔˉÀÀ@º£`ÖÆÀË/‡—‹ÂØØ¸¶¶–"—Ëmrt#:n›€—+¶™óÒ(uîåË—Ó Æ¹s犋‹™om*í¯€—¯[·Ž QÚ*6/y9³<^x9¼\¶¶¶yyyÜÅùò   €€€9sæ‹/Y+ëXRSS•–¬8_noo/¦LKKË'OžpC¨ ÅV©ZÇB÷ ={öTº˜‡[ÈöíÛ«ªªà倗ÃËEáãã³gÏno}ùÝ»wûôéSVVVZZúúë¯ÇÇÇë¬mßûܹs'7d×®]3fÌSf§N ¹!ï¿ÿ¾¢=SJ¿÷Icõ׿þUÕ}WßIë·lÙ/¼^.Š””—ˆˆˆòòr2ïððpî~,¤ã$å±±±ÌéÅ‹ûöíK)uÓ6}™ýX¨yÔR’`ꈘ2gÏžýÎ;ïH¥R¹\NÏ|ð•••¢=9RqŸÄúúú~ýú=xð@­—tç`oo/¼^.–ÄÄD///Òq33377·ÈÈHÖ&gÍšµhÑ"nb:õõõÕMÃ~WH"‘¸ººRƒ©ÙÔxê‚È2«ªª–.]Ú½{wccckkkrtzž=ÇÄÄ(ý]¡ï¿ÿ~òäɪJæ"“Ézöì /¼^nÐ=À€÷§å qïÛ·/ï×”FŽyåÊ\ÀËáåÚ†™ÍÕìY'H¥Rssóyóæé¬Æ€€ª‘êÅ[^øÿöî.ª:ÿÿ¸ÄM/ËEmS»ì*iEˆ¦åc ±‹»¨™·MÔµßÏÜ2ÿý¹›÷g¿.ºº)–×4¯Y –â /¤y©´TP V1¼ Ìpg. ¿Ž;çr„×óñyð˜9×ï|ÏÌ™÷Μärr9@.Èåär€\ËÉå¹¼Î$&&†„„4¹îN®×•Õ5¶_í±{ÕH鄎;êõzG=#ÓË\ÙÙÙìD¹ü.Èå¡¡¡;v쨨¨¸Ãë%—;Ïî¯,I'´nÝzñâÅŽzF¦ðÁíþR¹¼Þqss3™L<Ïê­ÜÜ\OOϽ{÷Ú~8™={v‡”‡Ì­>±¤¦¦Ê¼/^¤¹¼þæò&6”cwïÞÝ»wï{¯‹ŠŠ’„g;û¥K—†ÚªU«€€€¹sçVw½®´ÙG¾øâ‹!!!o¾ùfYYYµîèíܹsРA2ÜËË«sçÎÓ¦M³½®r½'Ož”y[´hÑ´iÓž={nÙ²EÙþ¬¬¬aÆùûû{{{?úè£Û·o·jÕœ9súöík·µ¥¥¥mÚ´ùøãår9oÞ<ö#€\^s¹£$'öïß/IqݺuEEE………kÖ¬‘»‡²šñ™gž‘QÅÅÅ.\˜8q¢ëëue^ؾ}ûO?ýôêÕ«ƒ!==½_¿~“&MªÖb="IÆÒ¹¹¹F£1'''..î•W^qf½?ÿü³Œ]±b…V«•Å®_¿^zÒÒþ³gÏðÁyyyå·nÝ*Kç+[õì³Ï.[¶ÌQ'ȇ™E§Ó9ê™Å‹8Pýs{@.¯§¹\’œÕ‰Ëüñ Aƒ¬ftå@ìíÈåIIIÊ!gΜ ªÖb|Dòq¥E‹άWâûG}d”-í=zôÌ™3•c¢££•CBBBNŸ>í¨$Í·mÛvÁ‚Žz&##C‚;¹ËïÊ\îïïouRò… ­f<þ|½ÊåVg­˜L&77·j-Öî#’åÄÇÇ÷êÕË×××ö´õõJ@·êI¹k™·M›6¹¹¹Ê±~~~Ê!–Ãáv;A>KHv7ŸWcÛ32ÜÓÓ“ý —ß•¹ÜÝÝÝh4*‡  ˆV3ºr—Û‘Ë]\‹£G4uêÔž={¦¤¤h4ówd¥7”¹\e½v{Ò2VºÔö,«ÏUærIÞ’ËÍGúÉå€\Þð—ÔV°®·¹Üîp??¿sçÎ)‡dff:™Ëƒ‚‚¬ŽˆKOZÆ[]€Ü–dnYzk,X ‹*++«îy,äòzË£££mÏ/‰‰iœ¹ÜÇÇçêÕ«Ê!¯¿þº“¹|øðáóçÏWŽZ´h‘eì˜1cÕ[¥þ½O3N'á{îܹ¶‘Õ©|ï€\^¯sù¾}ûüüüÌ×ck×®•»hœ¹\ÒóàÁƒ³²² ƒü}ã7”×TQ_ïéÓ§ƒƒƒ?ùä­V[RR"O’   ËØœœœnݺIìÎËË“…ggg/]ºôñÇW.Jå:‰Ê» .lÓ¦mc"""T¾ÌÊ÷>¹¼^çr‘’’òä“O6».22r×®]µ¬›ØS+óÞ¾\^VV6eÊ”víÚ¹»»HFÏÈÈp2—‹ôôô˜˜˜æÍ››¯_.{Ï=÷XÆæææN˜0!44ÔÓÓ300066ÖêRñ*¿+¤¼«×ë;vìh5P¶šúï ‘Ë¹TVVÖ}÷ÝW­YFŽùÐCY~ÉÈIòq",,lÔ¨Qô9 —“ËQ%Ï+W® †´´´þýûÏ™3§ºQÞÛÛ{üøñÕš+..Næ’yÙ€\N.GerròSO=e>#(<<|É’%ô —“Ër9@.'—är€\N.oCBB]™»f×6q†ú…À«u™v'§”G!%;;›Èåäòz'44tÇŽvÇÖìZàõ3—Ë£xðÁ¹B9 —“Ëë#777“Édw”£ßμ}†¯×ë%mËßùóçËÚÊå"55Uý=Èå 6—ëtº÷Þ{/<<Ü×××ÝݽmÛ¶±±±»wï®ó†5±a5Áœ9súöíkwÞ;w4¨U«V^^^;wž6mšóÇÔ­ÎÜW .|æ™gd¬ü•Û2D9ÁñãÇcbbZ¶léííݽ{÷ÄÄDËrââ⬎…=zܸqV«ˆŒŒœ7o{ @.ot¹\Rø”)SΜ9£×ë%¼fff.^¼ø‰'ž¨n†¾}éÜѨgŸ}vÙ²evGI^_·n]nn®ÑhÌÉÉ‘LüÊ+¯T«ÍŽrù¢E‹,¹\n+sùÉ“'CBBd½EEE+W®lÑ¢…e92¥ôêòåËÍwW­Z%w­b½Î8p { @.ot¹ÜÛÛ[räíHÏM¨­\.!øôéÓÎ,D Dd×sù!CæÏŸo9eÞ¼y2Dù GRµrú (—#Ÿ:uêtâĉŒŒŒÎ;Ë]ÛUȨöíÛ³§äòF—ËÃÃß{î¹äääüüüÚÍåµBeɶǛ…ÉdŠïÕ«—¯¯¯ÊçW¾Ùiwl`` Õ©á’¼­¦Ü¿÷îÝ{öìyàÀ»K.//÷ôôdOÈå.—Ÿ={vøðá’ CCCûôé3mÚ4'3ºë‡ÃoG.Ÿ:uªß””FcþΨÁ`°,ÇÉ6× —»»»Fååz-¢®s´dr9 —7Ò\nQ\\|ôèÑ… öèÑ£wïÞ®§ç;pKff¦íp??¿sçÎ)‡Èdµ{¼Ü®Ö­[_ºtI9ÄöxùüùóãââÆŽow!œÇÈå=—[h4šfÍšÕVzvQ ¾÷éããsõêUå×_ýäòaÆ-]ºT9Äêüòü±k×®EEE………<ð€| ²]È¢E‹øÞ' —7Æ\Þ£GÙ³gÿðÃÅÅÅF£ñôéÓcÇŽ9rdýÏ厮“8f̘Áƒgee ùûÆoøûûß\ž‘‘º~ýzéIIÞ«V­R^E⸄òÔÔTóÝ={ö„……É”V ‰ˆˆà:‰€\ÞsùÁƒ%ˆwìØÑÃÃÃÇÇ'<<|Ö¬Yz½¾ž4O%;ú]¡²²²)S¦´k×ÎÝÝ= @2º$f's¶‹'Þ¤¥¥ÅÄÄH÷òò’<6l°Ì>bĈɓ'+'–»VŸvíÚÅï ry#Íåw5ɵ=ôó¿TŸÉlj°°0«ß —“ËïYYYÞÞÞãǯû¦˜‹×ìïuqqqòXä±Y¹œ\r9¹ —“Ë@.'—ärr9ÈåärÔºÚúµ£Û÷«IärryÞärr9¹¼Î :4>>^¯×˃’¿óçÏ—!t¹üÎeëK—.ImÕªU@@Àܹs-£²²²† æïïïííýè£nß¾]9—ÊÏÎÛ¤¶ÓØ]¯ ?zôè‹/¾èááòæ›o–••9óXt:Ý{ï½îëëëîîÞ¶mÛØØØÝ»w;9ïÂ… Ÿyæi€ü•Û2Äùn<~üxLLLË–-¥¯ºw˜hy¼qqqV¿Ç9zôèqãÆñÂärrù-ùXbèš5kŠ‹‹/\¸0qâDóð³gÏðÁyyy‹·nÝÚ¾}ûýû÷W™¿Ïåv×+ÃeEŸ~úéÕ«W Czzz¿~ý&MšäÌc‘>eÊ”3gÎèõúòòòÌÌÌÅ‹?ñÄNæòE‹Yr¹Üv>—Ÿ?¬[·®¨¨¨  `åÊ•-Z´°<^YŽ´aùòåæ»«V­’»Õ ýäòF‘ËçÍ›g;|ôèÑ3gÎTIHHˆŽŽ®Å\nw½2<))I9DrvPP3ÅÛÛ[’qÍúaÈ!óçÏ·œÇ"m“!NÎ+Ÿä€rÈ‚ ”777·S§N'NœÈÈÈèܹ³ÜåU Èåärë|þüyÛámÚ´±Š~~~µ˜Ëí®W†[µb2™ÜÜÜœy,áááÏ=÷\rrr~~¾+RÝY/^¼¨"]gµœýû÷wïÞ½gÏžà% 
Èåär;1´¢¢Âv¸‡‡‡íIäVùØÅ\nw½ÎÌëÈÙ³g‡ Ó‡††öéÓgÚ´iÕÍè5ÈåîîîF£Q9Ä`0Ø.'ê:^Ï€\N.¯F Öëõ5›Wâ»ÉdR)--u满.ær‹âââ£G.\¸°G½{÷¾Ý}غuëK—.)‡Ø/Ÿ?~\\ÜØ±cãããyIr9¹ÜÙÈ;f̘ÄÄDõy=<<ì~QrjVV–rHRRÒÌå¦Y³f·»‡ ¶téRå«óËüñÇ®]»>ðÀò™W5 —“ËŠ¼999ݺu[¶lY^^žÁ`ÈÎΖèùøã+§‘¬¹råJÛÓQ&NœØ¿ÿ'NÈŒùùù«W¯îСÃÈå=zô˜={ö?üP\\l4OŸ>=vìØ‘#GÞî>ÌÈÈ ]¿~½¬W’÷ªU«”×c‘8.•ššj¾»gÏž°°0™’6 —“ËŠ¼¹¹¹&LÄééék —f›6m’±nnnV )))™4iRÛ¶mÝÝÝ^zé¥cÇŽÝ\~ðàA â;vôðððññ Ÿ5kV•gãÔŠ´´´˜˜‰ã^^^òñ`Æ –61bòäÉʉåîø´@.¿›r9@.ÇÝÏ|\¼fÈåär€\ËÉå¹ —“Ër9@.'—är€\N.y.îj !—¹ — —ärär€\~Ww\“&ù4õN=Gu=zx{ç·j¥yá…¢ÌÌ z²fÊÊLo¿]zÿýZ/¯ünÝ Ö®Õ±ä)ÔPŸí¼ºÙŽ–l)öH —“Ëijõ$&êCB´›7ëKJL……¦uëtaayy&z²ºL¦ÊŠ&N,9s¦B§«;wdÕ­Zi¼¼ò;wÖN›VZ^~sÞ“'2¶E ´­gÏÂ-[ôu¾ç’–””Ürt¼¨ÈQèäìÿúWŨQÅmÛ^ûßîo~S ™^9VeUÙ“5ÞF:<÷ËÂà |}5îîùÒ¶ØØbiI­l#•V X´w¯¡f[A½ÍÒ€ ô=zH“¤»dEiiF˼2—UçìÛgxøáóí¸¸Ù@ʱ£GWR+Û—ýFÃÞoðênÀ¯nõWŠò\Û3Rª|¥ËA.o˜¹¼}{í§Ÿê®^5 •ééÆ~ý 'Mº±ÇÙ¿ß {Ïuët" MkÖèäî¡C7÷tÉÉz)¹±i“~Û6½¼©X–|ölEp°öƒÊòòL²ÚºU/+’:Ó*Ù­Ëþ˼ނ“ìŒd¯Üûôí[(css+ŒÆÊœœ Ùo¾òÊýæÏ?WÈŠV¬Ðiµ¦âbÓúõ×Ú\ç{®fÍ4¥¥Ö¹\öÔN¾m‡†jãã˯\1É{Ï‘#Æ?üáæ›„ú6RïIW¶‘¼çM™RzæL…^_)oÉ™™‹—?ñD¡ëÛH½Uòpä{À€"y—’’È"O<'·‚z›¥ò†-(Ý(Ý%ñHÖûÓO7Þ¼—,)ÿýïoyo~íµÉ–L ËY¾üÆ»æªU:¹«s.]«o_ö {¿Á«»a¿ºÕ_)êÁZý•B.¹¼Áæò¤¤[v|²_ ÒZŽ^È®M9öãËå¼e^ÙÁÉž×r£ysòˆÂÌ™·œ§‘ ‹Ž.rrkµÞ ÊUö>²vyC2ß–·‡>ºe^YTï¹"" åH9DÞ<=jÕ¨QÅÿø‡Ã3^Ô·‘zOº²¼½¯mtç{Àùm¤Þ*/¯üûï¿–Eäýµ¤Ä$oZíÚi·lÑ»Þfi€tU«^~ùÆ»µ<½4—.ݘ]âHÛ¶Z –‰%£tê¤=q˜‘aìÜY+wìõíË~£aï7xu7ìW·ú+¥Ê\®òJ!—ƒ\Þ`s¹ò¿Ï•׿yãævãuîﯹxñ–Ð… –£¼æÝLo¹qÏ=7wmÚXï¼ L~~N!–UX­WeÙûÈŠâãË{õ*ôõÕØþPvyVóÊÝ:ßsÉ[K‡ÚmÛô¥¥7ŽÖ˜/8à̼ғ¿üâðm@}©÷¤+Û(<¼à¹çŠä3?ßÎ{¡+ÛH½UòaÆêý›7ë<#H½ÍÒÛVIc”‡Ð>øàF¤Ø¹óÚÙVKØ¿ßн{AÏž…TãñêÛ—ýFÃÞoðênدnõWJ•¹\å•B.¹¼Áær•îîùFã-£ †J|«Élo™Ìöä9«ý‘#v×kYøÔ©¥²sLIÑk4&“Éz¬ú¼uhëV}däóDûõ+ܳÇ¢ufFéIƒ¡z}eÙFê½áÊ6:{¶bøð koÌ¡¡Ú>} §M+µ¼#º²Ô[%k´:*VRbjÖLãz›e JOŠ´4c—.7O9]¸°ÜvQQ…RÕzb¨o_ö {¿Á«»a¿ºÕ_)ÎärG¯r9Èå1—Û=Z#;>göÁÁZ½¾†­jÝúæ¿mùùiλ¥U™™Ê£5VÇc¤ÍõpÏ•šj8Щÿ)×àˆše©÷¤+ÛÈ¢¸Øtô¨QÞÆzô(°dre©·*"¢°ÆïÜêm¶{DM:P9D>YɆ“æÉF¹zÕú°ÜüùåòŽ>vlI||ym1e¿Ñ°÷¼ºö«›\r9¹¼6ß_££íœÝSäÌ^c̘âÄľ- V¼t©Ãó&}|4V;Í×_/±Œ>¼Xv Ê±‹•×Ã=Wllñš5N}uhäÈâÙ³žª¾Ô{Ò•mdK£¹ùêÊ6RoÕôé¥VÿéNNÖ?þx¡ëm–X$[²¤|È[¾ ¶zµN¿e‹ÞrޝÅ?»v-0?ï $8ÙõíË~£aï7xu7ìW·3IÚÃ#ßî·HÌåÕý2@.¿‹ß_÷í3øùݸ€ÔÚµ:¹k9»N}¯‘“SÑ­[Á²eåyy×¾‡ž]!o!Nîa32Œ¡¡ÚõëuÅÅ×öƒ«VÝrÙ³\œ•U!‹•¿o¼Qªü¾ÿéÓ×¾ïÿÉ'×®PRbúüs]PÖöaªÿLš+cyùåâ'Œzýµ6OœXòôÓE&çö¥ÒuíÚݸbƒÌ.o ò~l«¾Ô{Ò•mÔ£Gä‰~0Ê’ÆkÝ>vl‰„ ×·‘z«._¾ö¾h~¼¥¥7®ç`õͪšµÙrÅóW¬¤U²ä“'oy./¯”m!Áè‹/ny#•YämÛr »={ aa²gž9êÛ—ýFÃÞoðênدngr¹,|åJ]EE syDDᇖ›Hæ —7†\.RRôO>Yج™F*2²p×.ƒó{ÜÜŠ JäÃÓ3?0Puñ]ii×®Ë+o3^^ù²ÃݰáæpËÊLS¦”ÊÔÝ=? 
@#ïòþ¤\uzúµy›7¿qõ\y¶_—¹ó¹|õj]—.òp~ýkí_ÿZjuÙDuòæ7bD±¼½IO>üðµ÷-åX•m¤Þ“®l£ƒ ò¶×±£ÖÃ#ßÇG^0kV™å?Ô.n#õV:uíêÈò.+¨{÷‚„g¯ö­Þfiž,ê·¿-ðöΗ.:vÌÎQ±·Þ*•U[}—K¶ÎäÉ·\ûLîZ2A•ÏõíË~£aï7xu7àW·3¹|Ó&½ô†›[¾£ó[Ôsù÷ß;uÒÖì] —£ÈÛÞ}÷ié¶Q ²¦­÷ß/“·6ÏIðê@.· †¢¨†Wì7(Šª?û €\ŽšHNÖ?õÔÿü†‡,YRNŸ°\?¢&´j¥¹ë¾£ öô$¯n€\~/¯Ÿm¾Ÿ®<"¶Ï+öl_z’­Ë9^Ër9¹h(¹¹ —¨¹œóËr9@.'—är€\N.È幜\Ër9¹¨—¹ü3à.×@r9Ÿp÷"—ärr9@.Èåär€\Ëïd.oÒ¤ Û"++ËÛÛ{üøñVÃÏœ93hРæ×=ÿüóÙÙÙµµÆÃ‡7Î×××«õª+++{ûí·ï¿ÿ~//¯nݺ­]»V9VzOú°› Èåäò»/лت‘#G>ôÐCåååÊZ­644ô½÷ÞÓh4ùùùï¾ûnÇŽ k¥Á’k§OŸžžžnÛòºZ¯:“É4`À€‰'ÊgNwìØ±¡C‡*'Þ{ðÁGÅNËïÄy, /—çææzzzîݻצ;ß3fŒrȈ#fÏž}»[^WëU·víZ« n+55UzòâÅ‹ìG¹¼ê‡ÑäßìŽ:zôè‹/¾èááòæ›o–••YÍèh YYYÆ ó÷÷÷öö~ôÑG·oß®{òäÉAƒµhÑ¢iÓ¦={öܲe‹Õäî¥K—$üµjÕ* `îܹ–Q;wî”ye¸——WçΧM›f9¶íb«Äœ9súöíkÛQQQ²^å;vزvóq]­W½¯hûÑÅVddä¼yóØr¹³ÃQ.oß¾ý§Ÿ~zõêUƒÁžžÞ¯_¿I“&U9£8{ölppð|——'Q~ëÖ­²¨ýû÷›ÇþüóÏrwÅŠZ­¶¸¸xýúõþlsù3Ï<³fÍ™àÂ… 'N´Œ’TºnݺÜÜ\£Ñ˜““÷Ê+¯¸Þ*³gŸ}vÙ²e¶ó¶nÝZ>'(‡\¼x1((èvçãºZ¯z_ɇ4Éå ð¹®wïÞÛ¶m³]ìâÅ‹%Á³ärWsyRR’rÈ™3g¬¡£}Úv^OOOùp"7""""##å†^¯÷òòºÝù¸®Ö«ÞWÒ€ûï¿_>–””$''·k×nË–-V ÉÈÈ4Ï~Ë]Íå–³VÌL&“›››3 ¸M›6¹¹¹Ê!~~~æÛî­N;–»¶¹üüùó¶K–6ÄÇÇ÷êÕË|»ç«Ô¬Uf:N%—?öØc‘ï|.¿ÃëUï+i•Õ“jóæÍææ)•——Ë”ìG¹ÜÕ\^å@G XÒ­í©Þ–Lïîîn4•ÓKô´]rEE…í’§NÚ³gÏ””F#ÝѼ5h•z.ol籨÷U@@@QQ‘rú’’’fÍš‘˹¼~åòàà`½^ïhu+­ŽÅ^¸pÁÉ%ûùù;wN9$33³VZe" ´Þؾ÷©ÞWÎärÎcäò;”Ë]3fLbb¢£Õ >|þüùÊ!‹-r2[ûøø\½zU9äõ×_¯•V™9úÞ§ÝëΚ5ëvçãºZ¯z_MŸ>ÝêI•œœüøã[M&›•ï}rùÈå]»v]¹r¥í '999ݺu“€›——g0²³³—.]jÉm§OŸþä“O´ZmII‰4>((ÈÉ\.yqðàÁYYY²XùûÆoØ^Ë¥f­2stDóïû¼ÿþûšëý¾£‹NÖ8×ÕzÕûêòåË<ðÀºu늊ŠJKKÍWk±ú–påõÃê\'Ë«ÎåMì©V.ß´i“¤F777Û‰sss'L˜ c===cccSSS-cÓÓÓcbbš7on¾~yJJÊ=÷ÜãL./++›2eJ»víÜÝÝ$£gddÔV«*ÿ®PåõË;J›}}}¥ÙÏ?ÿ¼|*°š@> È=<>šjår»óZM`·1@ÃÏå–3UÌÕ´éµÄ¼{·!2²Pn<úhÁÉ“7Î&ùúkC¯^…ÞÞ7'v>—;š×*—Ûm ÐðsyûöÚÂB“íŒÝº¬^­ëÝ»Ð2¤MmB‚®´Ô¤žÅï¹'_wíP{ååË&ËpGóÚ/·Û çòéÓK'M*ÑjMEE¦ü£,<üÆ©#óç—7k¦1ŸÍb¤]»VWRrmÊ+tŽrù¯­]³FWVfzç2ËpGó¶l©ÉʪøñG£¿¿F¥1@ÏåCå”)¥‹½½óûö-Ì̼qzwA)4Tkþö§YJŠþᇠ<<òCB´ –[NG±:5åóÏušÀ@MR’Þ2ÜѼï¾[æã£iÑB³`A¹Jc€žËryçrå—,ëgñD@ÃÏ幜\Ër9¹ — —äò:wæLñ Aß4o¾Aêùç¿ÉÎ.v~Þ&M>·*Ë¨Š Óܹ™<²ÝÛ;AJnÈ]hûý÷ù'¹ÿþ-žž ¿úÕÆ§ŸþúË/Q.¹¬ÌøöÛé2—WB·nÛÖ®=«²R)YŽ“K6ÓjõmÚ|¥l°3ó>œ?nÜa_ß V3þ{çôè‘"¶U«¤^ØŸ™Yä|?«/Y¥Ÿ÷íË1âÛöí7K›ýü6FGï“!Î?"õ6« —׉§¡¡›ß{ï”F£ÏÏ×½û“ ÎçrG£&Oþ1*jÏ·ß^•„]^^!±oß=þóQóØðð”?ÌLKÓ––^ûÝwWeb îæ±&Så€_K ”,«ÓU;¦:ô J3fÏþéå—9³d iÉÿø½Uû«œW>!LŸžžž^`ûÀχ„lÚ¼9·¤Ä(¸nݹ°°myyåNö³Ê’ÕûYFuê´eÅŠì+WtÒWß|“'IzãÆ_œyDêmV @.¯M’ÇŒùN9dĈo%溞˛7ßpáB™rˆÜmÑ"ÉÑô¿üRÚºõ—æÛkמUâJ&ɸ’8Y²ÙO?¶l™tþ|©JûÍëè÷êµS"¬rÈgŸ›1ãDuû¹º¹|ܸÃòÉG9äԩ®]·:óˆÔÛ¬>€\^›¢¢öìÜyI9dÇŽ‹}ûîq=—ûùm¼té–c«/–ùûoTÉ‹’•Í·Ü·wož“mذá|DÄ.õlmY²ÙsÏíûË_Ž«·ßѼŽxÓ¦‰%%·äã¢"ƒ¥aÎ÷sus¹-i†¯ïg‘z›ÕÇËkSëÖ_ڦ砠¯œÏå2±»û²œØØƒ'NXFÍœyòé§¿>r$¿¼¼BJnÈÝwÞ9i»I{»w_êÙs§å,—M’Ë øÚÇgƒTïÞ»·m»à¨ }úìùüó»£l—,¶n½ ´Z½zÞµ;¯JPnÖ,±´Ô:Å~YÝ~v”Ëõ³­uëÎýáœyDêmV @.¯Mžž õïbFD슌Ü-7ôú /¯'gé¥ý^‘Ø}õªnéÒ¬6m¾úáeì°a‡”_U”»¶qÓRÑÑûŒÆß •ÜÿÉ—……†’crrn»v›¶l±ÍÕ„†n6?g–,S†…m³œ`]å—,•óª§gé@IüÊ!Ÿ}vÎòmTçûÙn“ÔûY);»¸cÇd«/h:zDêmV @.¿-¹ü±ÇvšOQ¨V.·²jÕ¿ øúFßÍþ©K—­))KK’­·o¿(QÛîÕùù:‰à’5?ü0ÓÒ*‰€Êi6oε{Ř1ß½ÿ~†£öØ.ùŸÿ̼ï¾d®B%;šW==Ëdž6oÛvAoQ‘AÚo¾˜LuûÙ™SV”ýlÊ;uÚ²zõY'‘z›ÕÇËk“‹ç±X),4øøÜ8³YâáÃùʱß}wU:šwïÞ<ËØ€€/%*ÇJ²oÖ,Ñj–Ë—Ëýý7JÜTo•rÉ¿úÕF«K.:?o•éyëÖ ‘‘»ï½7Qª_¿Ô={.‡„lªn?;“Ë•ýlqì˜VV·pá™j="•6W9€\^k\üÞ§ Ó–¼èå• ×W(ÇêtjGâ•c#"v9“ËÿçNþéOGªl•rÉv¯}®…µÙ™ôœšzyàÀ}Õígg–¬ìg3IÌ~~׬9[­ÞPosuÇËkÎîõûfÍʨÙÒV¯>k9¿¢C‡ÍßËñr¹Û±c²£y÷ï¿Ò¥Ë«ûMŸžnuKrrîãßr‹„þM§NVÙ*å’«‚ÍëLzŽ=h ÊÎ÷³3KVö³X¿þœ·w¢EgœÙF*½aÕæêŽ —לù÷nÞ?C£ÑK9ú]!»•£¢öHJ;¾Ô`0åå•/^|¦mÛ›ßGœ;7³sç-æóË¥víºôÀ[çÍ;m±ë“O²³³‹eÞ‚ýÆ¿Èz×­»‘Å/_.—‰ånQ‘AæÝºõBûö›“’Î+×.«þÝïöÚ>"õ%«‡`ççµ›ž_~ùЉò!+«xâÄ#O?ýµÉT½~¶»dõ~ž3ç'7·Ïí^4Æ™G¤Òæ*ÇËkÓÏ?ÅÄ|ãë{ã÷á%Ù ‹¶yqӦܾþÕ¯6zx|!IqøðC·½^¶,«G”¦M¥äÆòåÙ–Q_}Y2_HÈ&™W– ËÙ±ã¢rÞS§  ú¦eË$/¯„îÝw$$X_ ±W¯v/žXå’UB°3󪜳zõÙ.]¶Jƒýëä¿þ5Íê"ƒUö³£%«÷³£3sÌ?ÌYå#Ro³úXr9@.'—är€\N.È幜\Ëryíærär€\€\ËËr9r9@.@.ÈåÈå¹\á3 ¹‹s9ŸŠÐ0Ër9¹ —ärr9@.Èåär€\ËÉå¹ —“Ër9@.'—är€\N.È幜\Ër9¹ —ärr9@.Èåär€\ËÉå¹ —“Ër9@.'— 7— ÆÝšËËr9r9@.@.ÈåÈå¹¹ — —ärär€\€\ËÉå¹ 
[binary image data omitted]
... in general, a ``nogil`` block may contain only "white" code. .. figure:: htmlreport.png Note that Cython deduces the type of local variables based on their assignments (including as loop variable targets) which can also cut down on the need to explicitly specify types everywhere. For example, declaring ``dx`` to be of type double above is unnecessary, as is declaring the type of ``s`` in the last version (where the return type of ``f`` is known to be a C double.) A notable exception, however, is *integer types used in arithmetic expressions*, as Cython is unable to ensure that an overflow would not occur (and so falls back to ``object`` in case Python's bignums are needed). To allow inference of C integer types, set the ``infer_types`` :ref:`directive` to ``True``.
Cython-0.26.1/docs/src/quickstart/jupyter.png0000664000175000017500000022056013143605603021757 0ustar stefanstefan00000000000000[binary PNG image data omitted]
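As a small illustration of the ``infer_types`` directive mentioned above (a sketch added for clarity, not part of the original quickstart sources; the function and variable names are invented for the example), the directive can be enabled with a comment at the top of the ``.pyx`` file, after which integer locals used in arithmetic may be inferred as C integers instead of falling back to Python objects::

    # cython: infer_types=True

    def sum_squares(int n):
        # With infer_types=True, Cython is allowed to infer C integer types
        # for ``total`` and ``i`` even though they appear in arithmetic
        # expressions; without the directive they would stay Python objects.
        total = 0
        for i in range(n):
            total += i * i
        return total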
©T6eÊñÑ£;8:ݽ›»bEìСmÚµ3—˜?ÿ̾}wss?03jÓà‚1á^’AD“»tRWLƲÿ>y–ˆâ§ѱä”gEź:¼¯öÕÐx; 3Ã\x”>ã÷“&½w»¹yÚV-Ç2¤ââhCC]GGãÓ§ùøØçä”._«ÔÔ¹sml ºwW½š þ5]¾¾ö&&zò‡ ÃK>øàtnn™½½Ñ¨Qí?ýÔ¿>íÿ8-ò!hé@K‡|Z:äCÐÒÕuÑ}òzâÐR«¸$²§IDAT^^^ iõ»àZªáû©Çyšv»c@¥‘‘ xP€º300hߥG6Ȱ,Û€Í4A<à)`†û?뇠¥C>-Kõ›cȇ ¥C>-ò!xóqë¦Õ=‹|Z uYò!xó)fBÕS"äCÐRȳ"Åÿò!h 焔懆ùìÿÍÔ^YùrIEND®B`‚Cython-0.26.1/docs/src/quickstart/demo.pyx0000664000175000017500000000255713143605603021241 0ustar stefanstefan00000000000000from time import time from math import sin cdef double first_time = 0 def timeit(f, label): global first_time t = time() f(1.0, 2.0, 10**7) cdef double elapsed = time() - t if first_time == 0: first_time = elapsed print label, elapsed, (100*elapsed/first_time), '% or', first_time/elapsed, 'x' # Pure Python py_funcs = {'sin': sin} py_funcs.update(__builtins__.__dict__) exec """ def f(x): return x**2-x def integrate_f(a, b, N): s = 0 dx = (b-a)/N for i in range(N): s += f(a+i*dx) return s * dx """ in py_funcs timeit(py_funcs['integrate_f'], "Python") # Just compiled def f0(x): return x**2-x def integrate_f0(a, b, N): s = 0 dx = (b-a)/N for i in range(N): s += f0(a+i*dx) return s * dx timeit(integrate_f0, "Cython") # Typed vars def f1(double x): return x**2-x def integrate_f1(double a, double b, int N): cdef int i cdef double s, dx s = 0 dx = (b-a)/N for i in range(N): s += f1(a+i*dx) return s * dx timeit(integrate_f1, "Typed vars") # Typed func cdef double f2(double x) except? -2: return x**2-x def integrate_f2(double a, double b, int N): cdef int i cdef double s, dx s = 0 dx = (b-a)/N for i in range(N): s += f2(a+i*dx) return s * dx timeit(integrate_f2, "Typed func") Cython-0.26.1/docs/src/quickstart/build.rst0000664000175000017500000000566013143605603021402 0ustar stefanstefan00000000000000Building Cython code ==================== Cython code must, unlike Python, be compiled. This happens in two stages: - A ``.pyx`` file is compiled by Cython to a ``.c`` file, containing the code of a Python extension module - The ``.c`` file is compiled by a C compiler to a ``.so`` file (or ``.pyd`` on Windows) which can be ``import``-ed directly into a Python session. There are several ways to build Cython code: - Write a distutils ``setup.py``. - Use ``pyximport``, importing Cython ``.pyx`` files as if they were ``.py`` files (using distutils to compile and build in the background). - Run the ``cython`` command-line utility manually to produce the ``.c`` file from the ``.pyx`` file, then manually compiling the ``.c`` file into a shared object library or DLL suitable for import from Python. (These manual steps are mostly for debugging and experimentation.) - Use the [Jupyter]_ notebook or the [Sage]_ notebook, both of which allow Cython code inline. Currently, distutils is the most common way Cython files are built and distributed. The other methods are described in more detail in the :ref:`compilation` section of the reference manual. Building a Cython module using distutils ---------------------------------------- Imagine a simple "hello world" script in a file ``hello.pyx``:: def say_hello_to(name): print("Hello %s!" % name) The following could be a corresponding ``setup.py`` script:: from distutils.core import setup from Cython.Build import cythonize setup( name = 'Hello world app', ext_modules = cythonize("hello.pyx"), ) To build, run ``python setup.py build_ext --inplace``. Then simply start a Python session and do ``from hello import say_hello_to`` and use the imported function as you see fit. 
Using the Jupyter notebook -------------------------- Cython can be used conveniently and interactively from a web browser through the Jupyter notebook. To install Jupyter notebook, e.g. into a virtualenv, use pip: .. sourcecode:: bash (venv)$ pip install jupyter (venv)$ jupyter notebook To enable support for Cython compilation, install Cython and load the ``Cython`` extension from within the Jupyter notebook:: %load_ext Cython Then, prefix a cell with the ``%%cython`` marker to compile it:: %%cython cdef int a = 0 for i in range(10): a += i print(a) You can show Cython's code analysis by passing the ``--annotate`` option:: %%cython --annotate ... .. figure:: jupyter.png Using the Sage notebook ----------------------- .. figure:: sage.png For users of the Sage math distribution, the Sage notebook allows transparently editing and compiling Cython code simply by typing ``%cython`` at the top of a cell and evaluating it. Variables and functions defined in a Cython cell are imported into the running session. .. [Jupyter] http://jupyter.org/ .. [Sage] W. Stein et al., Sage Mathematics Software, http://sagemath.org Cython-0.26.1/docs/src/quickstart/install.rst0000664000175000017500000000457113023021033021733 0ustar stefanstefan00000000000000Installing Cython ================= Many scientific Python distributions, such as Anaconda [Anaconda]_, Enthought Canopy [Canopy]_, Python(x,y) [Pythonxy]_, and Sage [Sage]_, bundle Cython and no setup is needed. Note however that if your distribution ships a version of Cython which is too old, you can still use the instructions below to update Cython. Everything in this tutorial should work with Cython 0.11.2 and newer, unless a footnote says otherwise. Unlike most Python software, Cython requires a C compiler to be present on the system. The details of getting a C compiler vary according to the system used: - **Linux** The GNU C Compiler (gcc) is usually present, or easily available through the package system. On Ubuntu or Debian, for instance, the command ``sudo apt-get install build-essential`` will fetch everything you need. - **Mac OS X** To retrieve gcc, one option is to install Apple's XCode, which can be retrieved from the Mac OS X's install DVDs or from http://developer.apple.com. - **Windows** A popular option is to use the open source MinGW (a Windows distribution of gcc). See the appendix for instructions for setting up MinGW manually. Enthought Canopy and Python(x,y) bundle MinGW, but some of the configuration steps in the appendix might still be necessary. Another option is to use Microsoft's Visual C. One must then use the same version which the installed Python was compiled with. .. dagss tried other forms of ReST lists and they didn't look nice .. with rst2latex. The newest Cython release can always be downloaded from http://cython.org. Unpack the tarball or zip file, enter the directory, and then run:: python setup.py install If you have ``pip`` set up on your system (e.g. in a virtualenv or a recent Python version), you should be able to fetch Cython from PyPI and install it using :: pip install Cython For one-time builds, e.g. for CI/testing, on platforms that are not covered by one of the wheel packages provided on PyPI, it is substantially faster than a full source build to install an uncompiled (slower) version of Cython with :: pip install Cython --install-option="--no-cython-compile" .. [Anaconda] http://docs.continuum.io/anaconda/ .. [Canopy] https://enthought.com/products/canopy/ .. [Pythonxy] http://www.pythonxy.com/ .. [Sage] W.
Stein et al., Sage Mathematics Software, http://sagemath.org Cython-0.26.1/docs/src/quickstart/index.rst0000664000175000017500000000015412542002467021404 0ustar stefanstefan00000000000000Getting Started =============== .. toctree:: :maxdepth: 2 overview install build cythonize Cython-0.26.1/docs/src/tutorial/0000775000175000017500000000000013151203436017210 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/tutorial/caveats.rst0000664000175000017500000000172013143605603021373 0ustar stefanstefan00000000000000Caveats ======= Since Cython mixes C and Python semantics, some things may be a bit surprising or unintuitive. Work always goes on to make Cython more natural for Python users, so this list may change in the future. - ``10**-2 == 0``, instead of ``0.01`` like in Python. - Given two typed ``int`` variables ``a`` and ``b``, ``a % b`` has the same sign as the second argument (following Python semantics) rather than having the same sign as the first (as in C). The C behavior can be obtained, at some speed gain, by enabling the cdivision directive (versions prior to Cython 0.12 always followed C semantics). - Care is needed with unsigned types. ``cdef unsigned n = 10; print(range(-n, n))`` will print an empty list, since ``-n`` wraps around to a large positive integer prior to being passed to the ``range`` function. - Python's ``float`` type actually wraps C ``double`` values, and the ``int`` type in Python 2.x wraps C ``long`` values. Cython-0.26.1/docs/src/tutorial/pxd_files.rst0000664000175000017500000000317212542002467021726 0ustar stefanstefan00000000000000pxd files ========= In addition to the ``.pyx`` source files, Cython uses ``.pxd`` files which work like C header files -- they contain Cython declarations (and sometimes code sections) which are only meant for inclusion by Cython modules. A ``pxd`` file is imported into a ``pyx`` module by using the ``cimport`` keyword. ``pxd`` files have many use-cases: 1. They can be used for sharing external C declarations. 2. They can contain functions which are well suited for inlining by the C compiler. Such functions should be marked ``inline``, example: :: cdef inline int int_min(int a, int b): return b if b < a else a 3. When accompanying an equally named ``pyx`` file, they provide a Cython interface to the Cython module so that other Cython modules can communicate with it using a more efficient protocol than the Python one. In our integration example, we might break it up into ``pxd`` files like this: 1. Add a ``cmath.pxd`` function which defines the C functions available from the C ``math.h`` header file, like ``sin``. Then one would simply do ``from cmath cimport sin`` in ``integrate.pyx``. 2. Add a ``integrate.pxd`` so that other modules written in Cython can define fast custom functions to integrate. :: cdef class Function: cpdef evaluate(self, double x) cpdef integrate(Function f, double a, double b, int N) Note that if you have a cdef class with attributes, the attributes must be declared in the class declaration ``pxd`` file (if you use one), not the ``pyx`` file. The compiler will tell you about this. Cython-0.26.1/docs/src/tutorial/numpy.rst0000664000175000017500000002762313023021033021111 0ustar stefanstefan00000000000000======================= Working with NumPy ======================= You can use NumPy from Cython exactly the same as in regular Python, but by doing so you are losing potentially high speedups because Cython has support for fast access to NumPy arrays. Let's see how this works with a simple example. 
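As a rough sketch of how the ``integrate.pxd`` declarations above could be implemented (added for illustration only; ``MyPolynomial`` and the simple rectangle rule are invented for this example and are not part of the original tutorial sources), the matching ``integrate.pyx`` might look like this::

    cdef class Function:
        cpdef evaluate(self, double x):
            # Base class: subclasses override evaluate() with the actual
            # function to be integrated.
            return 0.0

    cdef class MyPolynomial(Function):
        cpdef evaluate(self, double x):
            return x * x - x

    cpdef integrate(Function f, double a, double b, int N):
        # Rectangle-rule integration; f.evaluate() is dispatched through the
        # fast cpdef mechanism rather than a generic Python call.
        cdef int i
        cdef double s, dx
        s = 0
        dx = (b - a) / N
        for i in range(N):
            s += f.evaluate(a + i * dx)
        return s * dx

Other Cython modules could then cimport these declarations (for example ``from integrate cimport Function, integrate``) so that calls go through the efficient C-level interface, while plain Python code still imports and uses them as usual.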
The code below does 2D discrete convolution of an image with a filter (and I'm sure you can do better!, let it serve for demonstration purposes). It is both valid Python and valid Cython code. I'll refer to it as both :file:`convolve_py.py` for the Python version and :file:`convolve1.pyx` for the Cython version -- Cython uses ".pyx" as its file suffix. .. code-block:: python from __future__ import division import numpy as np def naive_convolve(f, g): # f is an image and is indexed by (v, w) # g is a filter kernel and is indexed by (s, t), # it needs odd dimensions # h is the output image and is indexed by (x, y), # it is not cropped if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: raise ValueError("Only odd dimensions on filter supported") # smid and tmid are number of pixels between the center pixel # and the edge, ie for a 5x5 filter they will be 2. # # The output size is calculated by adding smid, tmid to each # side of the dimensions of the input image. vmax = f.shape[0] wmax = f.shape[1] smax = g.shape[0] tmax = g.shape[1] smid = smax // 2 tmid = tmax // 2 xmax = vmax + 2*smid ymax = wmax + 2*tmid # Allocate result image. h = np.zeros([xmax, ymax], dtype=f.dtype) # Do convolution for x in range(xmax): for y in range(ymax): # Calculate pixel value for h at (x,y). Sum one component # for each pixel (s, t) of the filter g. s_from = max(smid - x, -smid) s_to = min((xmax - x) - smid, smid + 1) t_from = max(tmid - y, -tmid) t_to = min((ymax - y) - tmid, tmid + 1) value = 0 for s in range(s_from, s_to): for t in range(t_from, t_to): v = x - smid + s w = y - tmid + t value += g[smid - s, tmid - t] * f[v, w] h[x, y] = value return h This should be compiled to produce :file:`yourmod.so` (for Linux systems). We run a Python session to test both the Python version (imported from ``.py``-file) and the compiled Cython module. .. sourcecode:: ipython In [1]: import numpy as np In [2]: import convolve_py In [3]: convolve_py.naive_convolve(np.array([[1, 1, 1]], dtype=np.int), ... np.array([[1],[2],[1]], dtype=np.int)) Out [3]: array([[1, 1, 1], [2, 2, 2], [1, 1, 1]]) In [4]: import convolve1 In [4]: convolve1.naive_convolve(np.array([[1, 1, 1]], dtype=np.int), ... np.array([[1],[2],[1]], dtype=np.int)) Out [4]: array([[1, 1, 1], [2, 2, 2], [1, 1, 1]]) In [11]: N = 100 In [12]: f = np.arange(N*N, dtype=np.int).reshape((N,N)) In [13]: g = np.arange(81, dtype=np.int).reshape((9, 9)) In [19]: %timeit -n2 -r3 convolve_py.naive_convolve(f, g) 2 loops, best of 3: 1.86 s per loop In [20]: %timeit -n2 -r3 convolve1.naive_convolve(f, g) 2 loops, best of 3: 1.41 s per loop There's not such a huge difference yet; because the C code still does exactly what the Python interpreter does (meaning, for instance, that a new object is allocated for each number used). Look at the generated html file and see what is needed for even the simplest statements you get the point quickly. We need to give Cython more information; we need to add types. Adding types ============= To add types we use custom Cython syntax, so we are now breaking Python source compatibility. Consider this code (*read the comments!*) :: from __future__ import division import numpy as np # "cimport" is used to import special compile-time information # about the numpy module (this is stored in a file numpy.pxd which is # currently part of the Cython distribution). cimport numpy as np # We now need to fix a datatype for our arrays. I've used the variable # DTYPE for this, which is assigned to the usual NumPy runtime # type info object. 
DTYPE = np.int # "ctypedef" assigns a corresponding compile-time type to DTYPE_t. For # every type in the numpy module there's a corresponding compile-time # type with a _t-suffix. ctypedef np.int_t DTYPE_t # "def" can type its arguments but not have a return type. The type of the # arguments for a "def" function is checked at run-time when entering the # function. # # The arrays f, g and h is typed as "np.ndarray" instances. The only effect # this has is to a) insert checks that the function arguments really are # NumPy arrays, and b) make some attribute access like f.shape[0] much # more efficient. (In this example this doesn't matter though.) def naive_convolve(np.ndarray f, np.ndarray g): if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: raise ValueError("Only odd dimensions on filter supported") assert f.dtype == DTYPE and g.dtype == DTYPE # The "cdef" keyword is also used within functions to type variables. It # can only be used at the top indentation level (there are non-trivial # problems with allowing them in other places, though we'd love to see # good and thought out proposals for it). # # For the indices, the "int" type is used. This corresponds to a C int, # other C types (like "unsigned int") could have been used instead. # Purists could use "Py_ssize_t" which is the proper Python type for # array indices. cdef int vmax = f.shape[0] cdef int wmax = f.shape[1] cdef int smax = g.shape[0] cdef int tmax = g.shape[1] cdef int smid = smax // 2 cdef int tmid = tmax // 2 cdef int xmax = vmax + 2*smid cdef int ymax = wmax + 2*tmid cdef np.ndarray h = np.zeros([xmax, ymax], dtype=DTYPE) cdef int x, y, s, t, v, w # It is very important to type ALL your variables. You do not get any # warnings if not, only much slower code (they are implicitly typed as # Python objects). cdef int s_from, s_to, t_from, t_to # For the value variable, we want to use the same data type as is # stored in the array, so we use "DTYPE_t" as defined above. # NB! An important side-effect of this is that if "value" overflows its # datatype size, it will simply wrap around like in C, rather than raise # an error like in Python. cdef DTYPE_t value for x in range(xmax): for y in range(ymax): s_from = max(smid - x, -smid) s_to = min((xmax - x) - smid, smid + 1) t_from = max(tmid - y, -tmid) t_to = min((ymax - y) - tmid, tmid + 1) value = 0 for s in range(s_from, s_to): for t in range(t_from, t_to): v = x - smid + s w = y - tmid + t value += g[smid - s, tmid - t] * f[v, w] h[x, y] = value return h After building this and continuing my (very informal) benchmarks, I get: .. sourcecode:: ipython In [21]: import convolve2 In [22]: %timeit -n2 -r3 convolve2.naive_convolve(f, g) 2 loops, best of 3: 828 ms per loop Efficient indexing ==================== There's still a bottleneck killing performance, and that is the array lookups and assignments. The ``[]``-operator still uses full Python operations -- what we would like to do instead is to access the data buffer directly at C speed. What we need to do then is to type the contents of the :obj:`ndarray` objects. We do this with a special "buffer" syntax which must be told the datatype (first argument) and number of dimensions ("ndim" keyword-only argument, if not provided then one-dimensional is assumed). These are the needed changes:: ... def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g): ... cdef np.ndarray[DTYPE_t, ndim=2] h = ... Usage: .. 
sourcecode:: ipython In [18]: import convolve3 In [19]: %timeit -n3 -r100 convolve3.naive_convolve(f, g) 3 loops, best of 100: 11.6 ms per loop Note the importance of this change. *Gotcha*: This efficient indexing only affects certain index operations, namely those with exactly ``ndim`` number of typed integer indices. So if ``v`` for instance isn't typed, then the lookup ``f[v, w]`` isn't optimized. On the other hand this means that you can continue using Python objects for sophisticated dynamic slicing etc. just as when the array is not typed. Tuning indexing further ======================== The array lookups are still slowed down by two factors: 1. Bounds checking is performed. 2. Negative indices are checked for and handled correctly. The code above is explicitly coded so that it doesn't use negative indices, and it (hopefully) always access within bounds. We can add a decorator to disable bounds checking:: ... cimport cython @cython.boundscheck(False) # turn off bounds-checking for entire function @cython.wraparound(False) # turn off negative index wrapping for entire function def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g): ... Now bounds checking is not performed (and, as a side-effect, if you ''do'' happen to access out of bounds you will in the best case crash your program and in the worst case corrupt data). It is possible to switch bounds-checking mode in many ways, see :ref:`compiler-directives` for more information. Also, we've disabled the check to wrap negative indices (e.g. g[-1] giving the last value). As with disabling bounds checking, bad things will happen if we try to actually use negative indices with this disabled. The function call overhead now starts to play a role, so we compare the latter two examples with larger N: .. sourcecode:: ipython In [11]: %timeit -n3 -r100 convolve4.naive_convolve(f, g) 3 loops, best of 100: 5.97 ms per loop In [12]: N = 1000 In [13]: f = np.arange(N*N, dtype=np.int).reshape((N,N)) In [14]: g = np.arange(81, dtype=np.int).reshape((9, 9)) In [17]: %timeit -n1 -r10 convolve3.naive_convolve(f, g) 1 loops, best of 10: 1.16 s per loop In [18]: %timeit -n1 -r10 convolve4.naive_convolve(f, g) 1 loops, best of 10: 597 ms per loop (Also this is a mixed benchmark as the result array is allocated within the function call.) .. Warning:: Speed comes with some cost. Especially it can be dangerous to set typed objects (like ``f``, ``g`` and ``h`` in our sample code) to ``None``. Setting such objects to ``None`` is entirely legal, but all you can do with them is check whether they are None. All other use (attribute lookup or indexing) can potentially segfault or corrupt data (rather than raising exceptions as they would in Python). The actual rules are a bit more complicated but the main message is clear: Do not use typed objects without knowing that they are not set to None. More generic code ================== It would be possible to do:: def naive_convolve(object[DTYPE_t, ndim=2] f, ...): i.e. use :obj:`object` rather than :obj:`np.ndarray`. Under Python 3.0 this can allow your algorithm to work with any libraries supporting the buffer interface; and support for e.g. the Python Imaging Library may easily be added if someone is interested also under Python 2.x. There is some speed penalty to this though (as one makes more assumptions compile-time if the type is set to :obj:`np.ndarray`, specifically it is assumed that the data is stored in pure strided mode and not in indirect mode). 
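As a rough signature sketch (reusing the ``DTYPE_t`` typedef and the decorators from the earlier examples, and leaving the function body unchanged), the fully generic variant would only differ as follows::

    cimport cython

    @cython.boundscheck(False)
    @cython.wraparound(False)
    def naive_convolve(object[DTYPE_t, ndim=2] f, object[DTYPE_t, ndim=2] g):
        ...
        cdef object[DTYPE_t, ndim=2] h = np.zeros([xmax, ymax], dtype=DTYPE)
        ...

Whether the extra generality is worth the speed penalty mentioned above depends on which
buffer providers your code actually needs to accept.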
Cython-0.26.1/docs/src/tutorial/strings.rst0000664000175000017500000010451713023021033021430 0ustar stefanstefan00000000000000.. highlight:: cython Unicode and passing strings =========================== Similar to the string semantics in Python 3, Cython strictly separates byte strings and unicode strings. Above all, this means that by default there is no automatic conversion between byte strings and unicode strings (except for what Python 2 does in string operations). All encoding and decoding must pass through an explicit encoding/decoding step. To ease conversion between Python and C strings in simple cases, the module-level ``c_string_type`` and ``c_string_encoding`` directives can be used to implicitly insert these encoding/decoding steps. Python string types in Cython code ---------------------------------- Cython supports four Python string types: :obj:`bytes`, :obj:`str`, :obj:`unicode` and :obj:`basestring`. The :obj:`bytes` and :obj:`unicode` types are the specific types known from normal Python 2.x (named :obj:`bytes` and :obj:`str` in Python 3). Additionally, Cython also supports the :obj:`bytearray` type which behaves like the :obj:`bytes` type, except that it is mutable. The :obj:`str` type is special in that it is the byte string in Python 2 and the Unicode string in Python 3 (for Cython code compiled with language level 2, i.e. the default). Meaning, it always corresponds exactly with the type that the Python runtime itself calls :obj:`str`. Thus, in Python 2, both :obj:`bytes` and :obj:`str` represent the byte string type, whereas in Python 3, both :obj:`str` and :obj:`unicode` represent the Python Unicode string type. The switch is made at C compile time, the Python version that is used to run Cython is not relevant. When compiling Cython code with language level 3, the :obj:`str` type is identified with exactly the Unicode string type at Cython compile time, i.e. it does not identify with :obj:`bytes` when running in Python 2. Note that the :obj:`str` type is not compatible with the :obj:`unicode` type in Python 2, i.e. you cannot assign a Unicode string to a variable or argument that is typed :obj:`str`. The attempt will result in either a compile time error (if detectable) or a :obj:`TypeError` exception at runtime. You should therefore be careful when you statically type a string variable in code that must be compatible with Python 2, as this Python version allows a mix of byte strings and unicode strings for data and users normally expect code to be able to work with both. Code that only targets Python 3 can safely type variables and arguments as either :obj:`bytes` or :obj:`unicode`. The :obj:`basestring` type represents both the types :obj:`str` and :obj:`unicode`, i.e. all Python text string types in Python 2 and Python 3. This can be used for typing text variables that normally contain Unicode text (at least in Python 3) but must additionally accept the :obj:`str` type in Python 2 for backwards compatibility reasons. It is not compatible with the :obj:`bytes` type. Its usage should be rare in normal Cython code as the generic :obj:`object` type (i.e. untyped code) will normally be good enough and has the additional advantage of supporting the assignment of string subtypes. Support for the :obj:`basestring` type was added in Cython 0.20. 
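As a brief illustrative sketch of these typing rules (the variable and function names are made up for the example)::

    cdef bytes data = b'abc'       # always the byte string type
    cdef unicode text = u'abc'     # always the Unicode string type

    def handle_text(basestring s):
        # accepts str and unicode (any Python text string type),
        # but not bytes
        return s

Untyped (:obj:`object`) arguments remain the most permissive choice, since they
also accept subtypes of the string types.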
String literals --------------- Cython understands all Python string type prefixes: * ``b'bytes'`` for byte strings * ``u'text'`` for Unicode strings * ``f'formatted {value}'`` for formatted Unicode string literals as defined by :PEP:`498` (added in Cython 0.24) Unprefixed string literals become :obj:`str` objects when compiling with language level 2 and :obj:`unicode` objects (i.e. Python 3 :obj:`str`) with language level 3. General notes about C strings ----------------------------- In many use cases, C strings (a.k.a. character pointers) are slow and cumbersome. For one, they usually require manual memory management in one way or another, which makes it more likely to introduce bugs into your code. Then, Python string objects cache their length, so requesting it (e.g. to validate the bounds of index access or when concatenating two strings into one) is an efficient constant time operation. In contrast, calling :c:func:`strlen()` to get this information from a C string takes linear time, which makes many operations on C strings rather costly. Regarding text processing, Python has built-in support for Unicode, which C lacks completely. If you are dealing with Unicode text, you are usually better off using Python Unicode string objects than trying to work with encoded data in C strings. Cython makes this quite easy and efficient. Generally speaking: unless you know what you are doing, avoid using C strings where possible and use Python string objects instead. The obvious exception to this is when passing them back and forth from and to external C code. Also, C++ strings remember their length as well, so they can provide a suitable alternative to Python bytes objects in some cases, e.g. when reference counting is not needed within a well defined context. Passing byte strings -------------------- It is very easy to pass byte strings between C code and Python. When receiving a byte string from a C library, you can let Cython convert it into a Python byte string by simply assigning it to a Python variable:: cdef char* c_string = c_call_returning_a_c_string() cdef bytes py_string = c_string A type cast to :obj:`object` or :obj:`bytes` will do the same thing:: py_string = c_string This creates a Python byte string object that holds a copy of the original C string. It can be safely passed around in Python code, and will be garbage collected when the last reference to it goes out of scope. It is important to remember that null bytes in the string act as terminator character, as generally known from C. The above will therefore only work correctly for C strings that do not contain null bytes. Besides not working for null bytes, the above is also very inefficient for long strings, since Cython has to call :c:func:`strlen()` on the C string first to find out the length by counting the bytes up to the terminating null byte. In many cases, the user code will know the length already, e.g. because a C function returned it. In this case, it is much more efficient to tell Cython the exact number of bytes by slicing the C string:: cdef char* c_string = NULL cdef Py_ssize_t length = 0 # get pointer and length from a C function get_a_c_string(&c_string, &length) py_bytes_string = c_string[:length] Here, no additional byte counting is required and ``length`` bytes from the ``c_string`` will be copied into the Python bytes object, including any null bytes. 
Keep in mind that the slice indices are assumed to be accurate in this case and no bounds checking is done, so incorrect slice indices will lead to data corruption and crashes. Note that the creation of the Python bytes string can fail with an exception, e.g. due to insufficient memory. If you need to :c:func:`free()` the string after the conversion, you should wrap the assignment in a try-finally construct:: from libc.stdlib cimport free cdef bytes py_string cdef char* c_string = c_call_creating_a_new_c_string() try: py_string = c_string finally: free(c_string) To convert the byte string back into a C :c:type:`char*`, use the opposite assignment:: cdef char* other_c_string = py_string This is a very fast operation after which ``other_c_string`` points to the byte string buffer of the Python string itself. It is tied to the life time of the Python string. When the Python string is garbage collected, the pointer becomes invalid. It is therefore important to keep a reference to the Python string as long as the :c:type:`char*` is in use. Often enough, this only spans the call to a C function that receives the pointer as parameter. Special care must be taken, however, when the C function stores the pointer for later use. Apart from keeping a Python reference to the string object, no manual memory management is required. Starting with Cython 0.20, the :obj:`bytearray` type is supported and coerces in the same way as the :obj:`bytes` type. However, when using it in a C context, special care must be taken not to grow or shrink the object buffer after converting it to a C string pointer. These modifications can change the internal buffer address, which will make the pointer invalid. Accepting strings from Python code ---------------------------------- The other side, receiving input from Python code, may appear simple at first sight, as it only deals with objects. However, getting this right without making the API too narrow or too unsafe may not be entirely obvious. In the case that the API only deals with byte strings, i.e. binary data or encoded text, it is best not to type the input argument as something like :obj:`bytes`, because that would restrict the allowed input to exactly that type and exclude both subtypes and other kinds of byte containers, e.g. :obj:`bytearray` objects or memory views. Depending on how (and where) the data is being processed, it may be a good idea to instead receive a 1-dimensional memory view, e.g. :: def process_byte_data(unsigned char[:] data): length = data.shape[0] first_byte = data[0] slice_view = data[1:-1] ... Cython's memory views are described in more detail in :doc:`../userguide/memoryviews`, but the above example already shows most of the relevant functionality for 1-dimensional byte views. They allow for efficient processing of arrays and accept anything that can unpack itself into a byte buffer, without intermediate copying. The processed content can finally be returned in the memory view itself (or a slice of it), but it is often better to copy the data back into a flat and simple :obj:`bytes` or :obj:`bytearray` object, especially when only a small slice is returned. Since memoryviews do not copy the data, they would otherwise keep the entire original buffer alive. The general idea here is to be liberal with input by accepting any kind of byte buffer, but strict with output by returning a simple, well adapted object. This can simply be done as follows:: def process_byte_data(unsigned char[:] data): # ... 
process the data if return_all: return bytes(data) else: # example for returning a slice return bytes(data[5:35]) If the byte input is actually encoded text, and the further processing should happen at the Unicode level, then the right thing to do is to decode the input straight away. This is almost only a problem in Python 2.x, where Python code expects that it can pass a byte string (:obj:`str`) with encoded text into a text API. Since this usually happens in more than one place in the module's API, a helper function is almost always the way to go, since it allows for easy adaptation of the input normalisation process later. This kind of input normalisation function will commonly look similar to the following:: from cpython.version cimport PY_MAJOR_VERSION cdef unicode _ustring(s): if type(s) is unicode: # fast path for most common case(s) return s elif PY_MAJOR_VERSION < 3 and isinstance(s, bytes): # only accept byte strings in Python 2.x, not in Py3 return (s).decode('ascii') elif isinstance(s, unicode): # an evil cast to might work here in some(!) cases, # depending on what the further processing does. to be safe, # we can always create a copy instead return unicode(s) else: raise TypeError(...) And should then be used like this:: def api_func(s): text = _ustring(s) ... Similarly, if the further processing happens at the byte level, but Unicode string input should be accepted, then the following might work, if you are using memory views:: # define a global name for whatever char type is used in the module ctypedef unsigned char char_type cdef char_type[:] _chars(s): if isinstance(s, unicode): # encode to the specific encoding used inside of the module s = (s).encode('utf8') return s In this case, you might want to additionally ensure that byte string input really uses the correct encoding, e.g. if you require pure ASCII input data, you can run over the buffer in a loop and check the highest bit of each byte. This should then also be done in the input normalisation function. Dealing with "const" -------------------- Many C libraries use the ``const`` modifier in their API to declare that they will not modify a string, or to require that users must not modify a string they return, for example: .. code-block:: c typedef const char specialChar; int process_string(const char* s); const unsigned char* look_up_cached_string(const unsigned char* key); Since version 0.18, Cython has support for the ``const`` modifier in the language, so you can declare the above functions straight away as follows:: cdef extern from "someheader.h": ctypedef const char specialChar int process_string(const char* s) const unsigned char* look_up_cached_string(const unsigned char* key) Previous versions required users to make the necessary declarations at a textual level. If you need to support older Cython versions, you can use the following approach. In general, for arguments of external C functions, the ``const`` modifier does not matter and can be left out in the Cython declaration (e.g. in a .pxd file). The C compiler will still do the right thing, even if you declare this to Cython:: cdef extern from "someheader.h": int process_string(char* s) # note: looses API information! However, in most other situations, such as for return values and variables that use specifically typedef-ed API types, it does matter and the C compiler will emit at least a warning if used incorrectly. 
To help with this, you can use the type definitions in the ``libc.string`` module, e.g.:: from libc.string cimport const_char, const_uchar cdef extern from "someheader.h": ctypedef const_char specialChar int process_string(const_char* s) const_uchar* look_up_cached_string(const_uchar* key) Note: even if the API only uses ``const`` for function arguments, it is still preferable to properly declare them using these provided :c:type:`const_char` types in order to simplify adaptations. In Cython 0.18, these standard declarations have been changed to use the correct ``const`` modifier, so your code will automatically benefit from the new ``const`` support if it uses them. Decoding bytes to text ---------------------- The initially presented way of passing and receiving C strings is sufficient if your code only deals with binary data in the strings. When we deal with encoded text, however, it is best practice to decode the C byte strings to Python Unicode strings on reception, and to encode Python Unicode strings to C byte strings on the way out. With a Python byte string object, you would normally just call the ``bytes.decode()`` method to decode it into a Unicode string:: ustring = byte_string.decode('UTF-8') Cython allows you to do the same for a C string, as long as it contains no null bytes:: cdef char* some_c_string = c_call_returning_a_c_string() ustring = some_c_string.decode('UTF-8') And, more efficiently, for strings where the length is known:: cdef char* c_string = NULL cdef Py_ssize_t length = 0 # get pointer and length from a C function get_a_c_string(&c_string, &length) ustring = c_string[:length].decode('UTF-8') The same should be used when the string contains null bytes, e.g. when it uses an encoding like UCS-4, where each character is encoded in four bytes most of which tend to be 0. Again, no bounds checking is done if slice indices are provided, so incorrect indices lead to data corruption and crashes. However, using negative indices is possible since Cython 0.17 and will inject a call to :c:func:`strlen()` in order to determine the string length. Obviously, this only works for 0-terminated strings without internal null bytes. Text encoded in UTF-8 or one of the ISO-8859 encodings is usually a good candidate. If in doubt, it's better to pass indices that are 'obviously' correct than to rely on the data to be as expected. It is common practice to wrap string conversions (and non-trivial type conversions in general) in dedicated functions, as this needs to be done in exactly the same way whenever receiving text from C. This could look as follows:: from libc.stdlib cimport free cdef unicode tounicode(char* s): return s.decode('UTF-8', 'strict') cdef unicode tounicode_with_length( char* s, size_t length): return s[:length].decode('UTF-8', 'strict') cdef unicode tounicode_with_length_and_free( char* s, size_t length): try: return s[:length].decode('UTF-8', 'strict') finally: free(s) Most likely, you will prefer shorter function names in your code based on the kind of string being handled. Different types of content often imply different ways of handling them on reception. To make the code more readable and to anticipate future changes, it is good practice to use separate conversion functions for different types of strings. 
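For example, a module that receives both file system paths and log messages could define one thin wrapper per content type, so that the decoding policy for each kind of text can later be changed in a single place (the helper names below are purely illustrative)::

    cdef unicode to_path(char* s, size_t length):
        # hypothetical helper: paths are expected to be valid UTF-8
        return s[:length].decode('UTF-8', 'strict')

    cdef unicode to_log_message(char* s, size_t length):
        # hypothetical helper: log text may be slightly mangled,
        # so replace undecodable bytes instead of raising
        return s[:length].decode('UTF-8', 'replace')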
Encoding text to bytes ---------------------- The reverse way, converting a Python unicode string to a C :c:type:`char*`, is pretty efficient by itself, assuming that what you actually want is a memory managed byte string:: py_byte_string = py_unicode_string.encode('UTF-8') cdef char* c_string = py_byte_string As noted before, this takes the pointer to the byte buffer of the Python byte string. Trying to do the same without keeping a reference to the Python byte string will fail with a compile error:: # this will not compile ! cdef char* c_string = py_unicode_string.encode('UTF-8') Here, the Cython compiler notices that the code takes a pointer to a temporary string result that will be garbage collected after the assignment. Later access to the invalidated pointer will read invalid memory and likely result in a segfault. Cython will therefore refuse to compile this code. C++ strings ----------- When wrapping a C++ library, strings will usually come in the form of the :c:type:`std::string` class. As with C strings, Python byte strings automatically coerce from and to C++ strings:: # distutils: language = c++ from libcpp.string cimport string cdef string s = py_bytes_object try: s.append('abc') py_bytes_object = s finally: del s The memory management situation is different than in C because the creation of a C++ string makes an independent copy of the string buffer which the string object then owns. It is therefore possible to convert temporarily created Python objects directly into C++ strings. A common way to make use of this is when encoding a Python unicode string into a C++ string:: cdef string cpp_string = py_unicode_string.encode('UTF-8') Note that this involves a bit of overhead because it first encodes the Unicode string into a temporarily created Python bytes object and then copies its buffer into a new C++ string. For the other direction, efficient decoding support is available in Cython 0.17 and later:: cdef string s = string(b'abcdefg') ustring1 = s.decode('UTF-8') ustring2 = s[2:-2].decode('UTF-8') For C++ strings, decoding slices will always take the proper length of the string into account and apply Python slicing semantics (e.g. return empty strings for out-of-bounds indices). Auto encoding and decoding -------------------------- Cython 0.19 comes with two new directives: ``c_string_type`` and ``c_string_encoding``. They can be used to change the Python string types that C/C++ strings coerce from and to. By default, they only coerce from and to the bytes type, and encoding or decoding must be done explicitly, as described above. There are two use cases where this is inconvenient. First, if all C strings that are being processed (or the large majority) contain text, automatic encoding and decoding from and to Python unicode objects can reduce the code overhead a little. In this case, you can set the ``c_string_type`` directive in your module to :obj:`unicode` and the ``c_string_encoding`` to the encoding that your C code uses, for example:: # cython: c_string_type=unicode, c_string_encoding=utf8 cdef char* c_string = 'abcdefg' # implicit decoding: cdef object py_unicode_object = c_string # explicit conversion to Python bytes: py_bytes_object = c_string The second use case is when all C strings that are being processed only contain ASCII encodable characters (e.g. numbers) and you want your code to use the native legacy string type in Python 2 for them, instead of always using Unicode. 
In this case, you can set the string type to :obj:`str`:: # cython: c_string_type=str, c_string_encoding=ascii cdef char* c_string = 'abcdefg' # implicit decoding in Py3, bytes conversion in Py2: cdef object py_str_object = c_string # explicit conversion to Python bytes: py_bytes_object = c_string # explicit conversion to Python unicode: py_bytes_object = c_string The other direction, i.e. automatic encoding to C strings, is only supported for the ASCII codec (and the "default encoding", which is runtime specific and may or may not be ASCII). This is because CPython handles the memory management in this case by keeping an encoded copy of the string alive together with the original unicode string. Otherwise, there would be no way to limit the lifetime of the encoded string in any sensible way, thus rendering any attempt to extract a C string pointer from it a dangerous endeavour. As long as you stick to the ASCII encoding for the ``c_string_encoding`` directive, though, the following will work:: # cython: c_string_type=unicode, c_string_encoding=ascii def func(): ustring = u'abc' cdef char* s = ustring return s[0] # returns u'a' (This example uses a function context in order to safely control the lifetime of the Unicode string. Global Python variables can be modified from the outside, which makes it dangerous to rely on the lifetime of their values.) Source code encoding -------------------- When string literals appear in the code, the source code encoding is important. It determines the byte sequence that Cython will store in the C code for bytes literals, and the Unicode code points that Cython builds for unicode literals when parsing the byte encoded source file. Following :PEP:`263`, Cython supports the explicit declaration of source file encodings. For example, putting the following comment at the top of an ``ISO-8859-15`` (Latin-9) encoded source file (into the first or second line) is required to enable ``ISO-8859-15`` decoding in the parser:: # -*- coding: ISO-8859-15 -*- When no explicit encoding declaration is provided, the source code is parsed as UTF-8 encoded text, as specified by :PEP:`3120`. `UTF-8`_ is a very common encoding that can represent the entire Unicode set of characters and is compatible with plain ASCII encoded text that it encodes efficiently. This makes it a very good choice for source code files which usually consist mostly of ASCII characters. .. _`UTF-8`: http://en.wikipedia.org/wiki/UTF-8 As an example, putting the following line into a UTF-8 encoded source file will print ``5``, as UTF-8 encodes the letter ``'ö'`` in the two byte sequence ``'\xc3\xb6'``:: print( len(b'abcö') ) whereas the following ``ISO-8859-15`` encoded source file will print ``4``, as the encoding uses only 1 byte for this letter:: # -*- coding: ISO-8859-15 -*- print( len(b'abcö') ) Note that the unicode literal ``u'abcö'`` is a correctly decoded four character Unicode string in both cases, whereas the unprefixed Python :obj:`str` literal ``'abcö'`` will become a byte string in Python 2 (thus having length 4 or 5 in the examples above), and a 4 character Unicode string in Python 3. If you are not familiar with encodings, this may not appear obvious at first read. See `CEP 108`_ for details. As a rule of thumb, it is best to avoid unprefixed non-ASCII :obj:`str` literals and to use unicode string literals for all text. 
Cython also supports the ``__future__`` import ``unicode_literals`` that instructs the parser to read all unprefixed :obj:`str` literals in a source file as unicode string literals, just like Python 3. .. _`CEP 108`: http://wiki.cython.org/enhancements/stringliterals Single bytes and characters --------------------------- The Python C-API uses the normal C :c:type:`char` type to represent a byte value, but it has two special integer types for a Unicode code point value, i.e. a single Unicode character: :c:type:`Py_UNICODE` and :c:type:`Py_UCS4`. Since version 0.13, Cython supports the first natively, support for :c:type:`Py_UCS4` is new in Cython 0.15. :c:type:`Py_UNICODE` is either defined as an unsigned 2-byte or 4-byte integer, or as :c:type:`wchar_t`, depending on the platform. The exact type is a compile time option in the build of the CPython interpreter and extension modules inherit this definition at C compile time. The advantage of :c:type:`Py_UCS4` is that it is guaranteed to be large enough for any Unicode code point value, regardless of the platform. It is defined as a 32bit unsigned int or long. In Cython, the :c:type:`char` type behaves differently from the :c:type:`Py_UNICODE` and :c:type:`Py_UCS4` types when coercing to Python objects. Similar to the behaviour of the bytes type in Python 3, the :c:type:`char` type coerces to a Python integer value by default, so that the following prints 65 and not ``A``:: # -*- coding: ASCII -*- cdef char char_val = 'A' assert char_val == 65 # ASCII encoded byte value of 'A' print( char_val ) If you want a Python bytes string instead, you have to request it explicitly, and the following will print ``A`` (or ``b'A'`` in Python 3):: print( char_val ) The explicit coercion works for any C integer type. Values outside of the range of a :c:type:`char` or :c:type:`unsigned char` will raise an :obj:`OverflowError` at runtime. Coercion will also happen automatically when assigning to a typed variable, e.g.:: cdef bytes py_byte_string py_byte_string = char_val On the other hand, the :c:type:`Py_UNICODE` and :c:type:`Py_UCS4` types are rarely used outside of the context of a Python unicode string, so their default behaviour is to coerce to a Python unicode object. The following will therefore print the character ``A``, as would the same code with the :c:type:`Py_UNICODE` type:: cdef Py_UCS4 uchar_val = u'A' assert uchar_val == 65 # character point value of u'A' print( uchar_val ) Again, explicit casting will allow users to override this behaviour. The following will print 65:: cdef Py_UCS4 uchar_val = u'A' print( uchar_val ) Note that casting to a C :c:type:`long` (or :c:type:`unsigned long`) will work just fine, as the maximum code point value that a Unicode character can have is 1114111 (``0x10FFFF``). On platforms with 32bit or more, :c:type:`int` is just as good. Narrow Unicode builds ---------------------- In narrow Unicode builds of CPython before version 3.3, i.e. builds where ``sys.maxunicode`` is 65535 (such as all Windows builds, as opposed to 1114111 in wide builds), it is still possible to use Unicode character code points that do not fit into the 16 bit wide :c:type:`Py_UNICODE` type. For example, such a CPython build will accept the unicode literal ``u'\U00012345'``. However, the underlying system level encoding leaks into Python space in this case, so that the length of this literal becomes 2 instead of 1. This also shows when iterating over it or when indexing into it. 
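As a sketch of what this looks like in practice, on such a narrow build (and only there; wide builds report a length of 1) one would observe::

    s = u'\U00012345'
    print( len(s) )      # 2 on narrow builds
    print( s[0], s[1] )  # the two surrogate code units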
The visible substrings are ``u'\uD808'`` and ``u'\uDF45'`` in this example. They form a so-called surrogate pair that represents the above character. For more information on this topic, it is worth reading the `Wikipedia article about the UTF-16 encoding`_. .. _`Wikipedia article about the UTF-16 encoding`: http://en.wikipedia.org/wiki/UTF-16/UCS-2 The same properties apply to Cython code that gets compiled for a narrow CPython runtime environment. In most cases, e.g. when searching for a substring, this difference can be ignored as both the text and the substring will contain the surrogates. So most Unicode processing code will work correctly also on narrow builds. Encoding, decoding and printing will work as expected, so that the above literal turns into exactly the same byte sequence on both narrow and wide Unicode platforms. However, programmers should be aware that a single :c:type:`Py_UNICODE` value (or single 'character' unicode string in CPython) may not be enough to represent a complete Unicode character on narrow platforms. For example, if an independent search for ``u'\uD808'`` and ``u'\uDF45'`` in a unicode string succeeds, this does not necessarily mean that the character ``u'\U00012345`` is part of that string. It may well be that two different characters are in the string that just happen to share a code unit with the surrogate pair of the character in question. Looking for substrings works correctly because the two code units in the surrogate pair use distinct value ranges, so the pair is always identifiable in a sequence of code points. As of version 0.15, Cython has extended support for surrogate pairs so that you can safely use an ``in`` test to search character values from the full :c:type:`Py_UCS4` range even on narrow platforms:: cdef Py_UCS4 uchar = 0x12345 print( uchar in some_unicode_string ) Similarly, it can coerce a one character string with a high Unicode code point value to a Py_UCS4 value on both narrow and wide Unicode platforms:: cdef Py_UCS4 uchar = u'\U00012345' assert uchar == 0x12345 In CPython 3.3 and later, the :c:type:`Py_UNICODE` type is an alias for the system specific :c:type:`wchar_t` type and is no longer tied to the internal representation of the Unicode string. Instead, any Unicode character can be represented on all platforms without resorting to surrogate pairs. This implies that narrow builds no longer exist from that version on, regardless of the size of :c:type:`Py_UNICODE`. See :PEP:`393` for details. Cython 0.16 and later handles this change internally and does the right thing also for single character values as long as either type inference is applied to untyped variables or the portable :c:type:`Py_UCS4` type is explicitly used in the source code instead of the platform specific :c:type:`Py_UNICODE` type. Optimisations that Cython applies to the Python unicode type will automatically adapt to :PEP:`393` at C compile time, as usual. Iteration --------- Cython 0.13 supports efficient iteration over :c:type:`char*`, bytes and unicode strings, as long as the loop variable is appropriately typed. So the following will generate the expected C code:: cdef char* c_string = ... cdef char c for c in c_string[:100]: if c == 'A': ... The same applies to bytes objects:: cdef bytes bytes_string = ... cdef char c for c in bytes_string: if c == 'A': ... For unicode objects, Cython will automatically infer the type of the loop variable as :c:type:`Py_UCS4`:: cdef unicode ustring = ... # NOTE: no typing required for 'uchar' ! 
for uchar in ustring: if uchar == u'A': ... The automatic type inference usually leads to much more efficient code here. However, note that some unicode operations still require the value to be a Python object, so Cython may end up generating redundant conversion code for the loop variable value inside of the loop. If this leads to a performance degradation for a specific piece of code, you can either type the loop variable as a Python object explicitly, or assign its value to a Python typed variable somewhere inside of the loop to enforce one-time coercion before running Python operations on it. There are also optimisations for ``in`` tests, so that the following code will run in plain C code, (actually using a switch statement):: cdef Py_UCS4 uchar_val = get_a_unicode_character() if uchar_val in u'abcABCxY': ... Combined with the looping optimisation above, this can result in very efficient character switching code, e.g. in unicode parsers. Windows and wide character APIs ------------------------------- Windows system APIs natively support Unicode in the form of zero-terminated UTF-16 encoded :c:type:`wchar_t*` strings, so called "wide strings". By default, Windows builds of CPython define :c:type:`Py_UNICODE` as a synonym for :c:type:`wchar_t`. This makes internal :obj:`unicode` representation compatible with UTF-16 and allows for efficient zero-copy conversions. This also means that Windows builds are always `Narrow Unicode builds`_ with all the caveats. To aid interoperation with Windows APIs, Cython 0.19 supports wide strings (in the form of :c:type:`Py_UNICODE*`) and implicitly converts them to and from :obj:`unicode` string objects. These conversions behave the same way as they do for :c:type:`char*` and :obj:`bytes` as described in `Passing byte strings`_. In addition to automatic conversion, unicode literals that appear in C context become C-level wide string literals and :py:func:`len` built-in function is specialized to compute the length of zero-terminated :c:type:`Py_UNICODE*` string or array. Here is an example of how one would call a Unicode API on Windows:: cdef extern from "Windows.h": ctypedef Py_UNICODE WCHAR ctypedef const WCHAR* LPCWSTR ctypedef void* HWND int MessageBoxW(HWND hWnd, LPCWSTR lpText, LPCWSTR lpCaption, int uType) title = u"Windows Interop Demo - Python %d.%d.%d" % sys.version_info[:3] MessageBoxW(NULL, u"Hello Cython \u263a", title, 0) .. Warning:: The use of :c:type:`Py_UNICODE*` strings outside of Windows is strongly discouraged. :c:type:`Py_UNICODE` is inherently not portable between different platforms and Python versions. CPython 3.3 has moved to a flexible internal representation of unicode strings (:pep:`393`), making all :c:type:`Py_UNICODE` related APIs deprecated and inefficient. One consequence of CPython 3.3 changes is that :py:func:`len` of :obj:`unicode` strings is always measured in *code points* ("characters"), while Windows API expect the number of UTF-16 *code units* (where each surrogate is counted individually). To always get the number of code units, call :c:func:`PyUnicode_GetSize` directly. Cython-0.26.1/docs/src/tutorial/cython_tutorial.rst0000664000175000017500000001426212542002467023202 0ustar stefanstefan00000000000000.. highlight:: cython .. _tutorial: ************** Basic Tutorial ************** The Basics of Cython ==================== The fundamental nature of Cython can be summed up as follows: Cython is Python with C data types. Cython is Python: Almost any piece of Python code is also valid Cython code. 
(There are a few :ref:`cython-limitations`, but this approximation will serve for now.) The Cython compiler will convert it into C code which makes equivalent calls to the Python/C API. But Cython is much more than that, because parameters and variables can be declared to have C data types. Code which manipulates Python values and C values can be freely intermixed, with conversions occurring automatically wherever possible. Reference count maintenance and error checking of Python operations is also automatic, and the full power of Python's exception handling facilities, including the try-except and try-finally statements, is available to you -- even in the midst of manipulating C data. Cython Hello World =================== As Cython can accept almost any valid python source file, one of the hardest things in getting started is just figuring out how to compile your extension. So lets start with the canonical python hello world:: print "Hello World" Save this code in a file named :file:`helloworld.pyx`. Now we need to create the :file:`setup.py`, which is like a python Makefile (for more information see :ref:`compilation`). Your :file:`setup.py` should look like:: from distutils.core import setup from Cython.Build import cythonize setup( ext_modules = cythonize("helloworld.pyx") ) To use this to build your Cython file use the commandline options: .. sourcecode:: text $ python setup.py build_ext --inplace Which will leave a file in your local directory called :file:`helloworld.so` in unix or :file:`helloworld.pyd` in Windows. Now to use this file: start the python interpreter and simply import it as if it was a regular python module:: >>> import helloworld Hello World Congratulations! You now know how to build a Cython extension. But so far this example doesn't really give a feeling why one would ever want to use Cython, so lets create a more realistic example. :mod:`pyximport`: Cython Compilation the Easy Way ================================================== If your module doesn't require any extra C libraries or a special build setup, then you can use the pyximport module by Paul Prescod and Stefan Behnel to load .pyx files directly on import, without having to write a :file:`setup.py` file. It is shipped and installed with Cython and can be used like this:: >>> import pyximport; pyximport.install() >>> import helloworld Hello World Since Cython 0.11, the :mod:`pyximport` module also has experimental compilation support for normal Python modules. This allows you to automatically run Cython on every .pyx and .py module that Python imports, including the standard library and installed packages. Cython will still fail to compile a lot of Python modules, in which case the import mechanism will fall back to loading the Python source modules instead. The .py import mechanism is installed like this:: >>> pyximport.install(pyimport = True) Fibonacci Fun ============== From the official Python tutorial a simple fibonacci function is defined as: .. literalinclude:: ../../examples/tutorial/fib1/fib.pyx Now following the steps for the Hello World example we first rename the file to have a `.pyx` extension, lets say :file:`fib.pyx`, then we create the :file:`setup.py` file. Using the file created for the Hello World example, all that you need to change is the name of the Cython filename, and the resulting module name, doing this we have: .. literalinclude:: ../../examples/tutorial/fib1/setup.py Build the extension with the same command used for the helloworld.pyx: .. 
sourcecode:: text $ python setup.py build_ext --inplace And use the new extension with:: >>> import fib >>> fib.fib(2000) 1 1 2 3 5 8 13 21 34 55 89 144 233 377 610 987 1597 Primes ======= Here's a small example showing some of what can be done. It's a routine for finding prime numbers. You tell it how many primes you want, and it returns them as a Python list. :file:`primes.pyx`: .. literalinclude:: ../../examples/tutorial/primes/primes.pyx :linenos: You'll see that it starts out just like a normal Python function definition, except that the parameter ``kmax`` is declared to be of type ``int`` . This means that the object passed will be converted to a C integer (or a ``TypeError.`` will be raised if it can't be). Lines 2 and 3 use the ``cdef`` statement to define some local C variables. Line 4 creates a Python list which will be used to return the result. You'll notice that this is done exactly the same way it would be in Python. Because the variable result hasn't been given a type, it is assumed to hold a Python object. Lines 7-9 set up for a loop which will test candidate numbers for primeness until the required number of primes has been found. Lines 11-12, which try dividing a candidate by all the primes found so far, are of particular interest. Because no Python objects are referred to, the loop is translated entirely into C code, and thus runs very fast. When a prime is found, lines 14-15 add it to the p array for fast access by the testing loop, and line 16 adds it to the result list. Again, you'll notice that line 16 looks very much like a Python statement, and in fact it is, with the twist that the C parameter ``n`` is automatically converted to a Python object before being passed to the append method. Finally, at line 18, a normal Python return statement returns the result list. Compiling primes.pyx with the Cython compiler produces an extension module which we can try out in the interactive interpreter as follows:: >>> import primes >>> primes.primes(10) [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] See, it works! And if you're curious about how much work Cython has saved you, take a look at the C code generated for this module. Language Details ================ For more about the Cython language, see :ref:`language-basics`. To dive right in to using Cython in a numerical computation context, see :ref:`numpy_tutorial`. Cython-0.26.1/docs/src/tutorial/memory_allocation.rst0000664000175000017500000001022012542002467023456 0ustar stefanstefan00000000000000.. _memory_allocation: ***************** Memory Allocation ***************** Dynamic memory allocation is mostly a non-issue in Python. Everything is an object, and the reference counting system and garbage collector automatically return memory to the system when it is no longer being used. When it comes to more low-level data buffers, Cython has special support for (multi-dimensional) arrays of simple types via NumPy, memory views or Python's stdlib array type. They are full featured, garbage collected and much easier to work with than bare pointers in C, while still retaining the speed and static typing benefits. See :ref:`array-array` and :ref:`memoryviews`. In some situations, however, these objects can still incur an unacceptable amount of overhead, which can then makes a case for doing manual memory management in C. Simple C values and structs (such as a local variable ``cdef double x``) are usually allocated on the stack and passed by value, but for larger and more complicated objects (e.g. 
a dynamically-sized list of doubles), the memory must be manually requested and released. C provides the functions :c:func:`malloc`, :c:func:`realloc`, and :c:func:`free` for this purpose, which can be imported in cython from ``clibc.stdlib``. Their signatures are: .. code-block:: c void* malloc(size_t size) void* realloc(void* ptr, size_t size) void free(void* ptr) A very simple example of malloc usage is the following:: import random from libc.stdlib cimport malloc, free def random_noise(int number=1): cdef int i # allocate number * sizeof(double) bytes of memory cdef double *my_array = malloc(number * sizeof(double)) if not my_array: raise MemoryError() try: ran = random.normalvariate for i in range(number): my_array[i] = ran(0,1) return [ my_array[i] for i in range(number) ] finally: # return the previously allocated memory to the system free(my_array) Note that the C-API functions for allocating memory on the Python heap are generally preferred over the low-level C functions above as the memory they provide is actually accounted for in Python's internal memory management system. They also have special optimisations for smaller memory blocks, which speeds up their allocation by avoiding costly operating system calls. The C-API functions can be found in the ``cpython.mem`` standard declarations file:: from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free Their interface and usage is identical to that of the corresponding low-level C functions. One important thing to remember is that blocks of memory obtained with :c:func:`malloc` or :c:func:`PyMem_Malloc` *must* be manually released with a corresponding call to :c:func:`free` or :c:func:`PyMem_Free` when they are no longer used (and *must* always use the matching type of free function). Otherwise, they won't be reclaimed until the python process exits. This is called a memory leak. If a chunk of memory needs a larger lifetime than can be managed by a ``try..finally`` block, another helpful idiom is to tie its lifetime to a Python object to leverage the Python runtime's memory management, e.g.:: cdef class SomeMemory: cdef double* data def __cinit__(self, size_t number): # allocate some memory (uninitialised, may contain arbitrary data) self.data = PyMem_Malloc(number * sizeof(double)) if not self.data: raise MemoryError() def resize(self, size_t new_number): # Allocates new_number * sizeof(double) bytes, # preserving the current content and making a best-effort to # re-use the original data location. mem = PyMem_Realloc(self.data, new_number * sizeof(double)) if not mem: raise MemoryError() # Only overwrite the pointer if the memory was really reallocated. # On error (mem is NULL), the originally memory has not been freed. self.data = mem def __dealloc__(self): PyMem_Free(self.data) # no-op if self.data is NULL Cython-0.26.1/docs/src/tutorial/array.rst0000664000175000017500000001226213143605603021066 0ustar stefanstefan00000000000000.. _array-array: ========================== Working with Python arrays ========================== Python has a builtin array module supporting dynamic 1-dimensional arrays of primitive types. It is possible to access the underlying C array of a Python array from within Cython. At the same time they are ordinary Python objects which can be stored in lists and serialized between processes when using :obj:`multiprocessing`. 
Compared to the manual approach with :c:func:`malloc` and :c:func:`free`, this gives the safe and automatic memory management of Python, and compared to a Numpy array there is no need to install a dependency, as the :obj:`array` module is built into both Python and Cython. Safe usage with memory views ---------------------------- :: from cpython cimport array import array cdef array.array a = array.array('i', [1, 2, 3]) cdef int[:] ca = a print ca[0] NB: the import brings the regular Python array object into the namespace while the cimport adds functions accessible from Cython. A Python array is constructed with a type signature and sequence of initial values. For the possible type signatures, refer to the Python documentation for the `array module `_. Notice that when a Python array is assigned to a variable typed as memory view, there will be a slight overhead to construct the memory view. However, from that point on the variable can be passed to other functions without overhead, so long as it is typed:: from cpython cimport array import array cdef array.array a = array.array('i', [1, 2, 3]) cdef int[:] ca = a cdef int overhead(object a): cdef int[:] ca = a return ca[0] cdef int no_overhead(int[:] ca): return ca[0] print overhead(a) # new memory view will be constructed, overhead print no_overhead(ca) # ca is already a memory view, so no overhead Zero-overhead, unsafe access to raw C pointer --------------------------------------------- To avoid any overhead and to be able to pass a C pointer to other functions, it is possible to access the underlying contiguous array as a pointer. There is no type or bounds checking, so be careful to use the right type and signedness. :: from cpython cimport array import array cdef array.array a = array.array('i', [1, 2, 3]) # access underlying pointer: print a.data.as_ints[0] from libc.string cimport memset memset(a.data.as_voidptr, 0, len(a) * sizeof(int)) Note that any length-changing operation on the array object may invalidate the pointer. Cloning, extending arrays ------------------------- To avoid having to use the array constructor from the Python module, it is possible to create a new array with the same type as a template, and preallocate a given number of elements. The array is initialized to zero when requested. :: from cpython cimport array import array cdef array.array int_array_template = array.array('i', []) cdef array.array newarray # create an array with 3 elements with same type as template newarray = array.clone(int_array_template, 3, zero=False) An array can also be extended and resized; this avoids repeated memory reallocation which would occur if elements would be appended or removed one by one. :: from cpython cimport array import array cdef array.array a = array.array('i', [1, 2, 3]) cdef array.array b = array.array('i', [4, 5, 6]) # extend a with b, resize as needed array.extend(a, b) # resize a, leaving just original three elements array.resize(a, len(a) - len(b)) API reference ------------- Data fields ~~~~~~~~~~~ :: data.as_voidptr data.as_chars data.as_schars data.as_uchars data.as_shorts data.as_ushorts data.as_ints data.as_uints data.as_longs data.as_ulongs data.as_longlongs # requires Python >=3 data.as_ulonglongs # requires Python >=3 data.as_floats data.as_doubles data.as_pyunicodes Direct access to the underlying contiguous C array, with given type; e.g., ``myarray.data.as_ints``. 
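A minimal usage sketch, assuming a ``'d'`` (double) typed array::

    from cpython cimport array
    import array

    cdef array.array d = array.array('d', [1.0, 2.0, 3.0])
    cdef double* p = d.data.as_doubles   # raw pointer, no bounds checking
    p[0] = 42.0                          # writes straight into the array buffer

As with the earlier examples, any length-changing operation on ``d`` may
invalidate the pointer.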
Functions ~~~~~~~~~ The following functions are available to Cython from the array module:: int resize(array self, Py_ssize_t n) except -1 Fast resize / realloc. Not suitable for repeated, small increments; resizes underlying array to exactly the requested amount. :: int resize_smart(array self, Py_ssize_t n) except -1 Efficient for small increments; uses growth pattern that delivers amortized linear-time appends. :: cdef inline array clone(array template, Py_ssize_t length, bint zero) Fast creation of a new array, given a template array. Type will be same as ``template``. If zero is ``True``, new array will be initialized with zeroes. :: cdef inline array copy(array self) Make a copy of an array. :: cdef inline int extend_buffer(array self, char* stuff, Py_ssize_t n) except -1 Efficient appending of new data of same type (e.g. of same array type) ``n``: number of elements (not number of bytes!) :: cdef inline int extend(array self, array other) except -1 Extend array with data from another array; types must match. :: cdef inline void zero(array self) Set all elements of array to zero. Cython-0.26.1/docs/src/tutorial/related_work.rst0000664000175000017500000000522713143605603022435 0ustar stefanstefan00000000000000Related work ============ Pyrex [Pyrex]_ is the compiler project that Cython was originally based on. Many features and the major design decisions of the Cython language were developed by Greg Ewing as part of that project. Today, Cython supersedes the capabilities of Pyrex by providing a substantially higher compatibility with Python code and Python semantics, as well as superior optimisations and better integration with scientific Python extensions like NumPy. ctypes [ctypes]_ is a foreign function interface (FFI) for Python. It provides C compatible data types, and allows calling functions in DLLs or shared libraries. It can be used to wrap these libraries in pure Python code. Compared to Cython, it has the major advantage of being in the standard library and being usable directly from Python code, without any additional dependencies. The major drawback is its performance, which suffers from the Python call overhead as all operations must pass through Python code first. Cython, being a compiled language, can avoid much of this overhead by moving more functionality and long-running loops into fast C code. SWIG [SWIG]_ is a wrapper code generator. It makes it very easy to parse large API definitions in C/C++ header files, and to generate straight forward wrapper code for a large set of programming languages. As opposed to Cython, however, it is not a programming language itself. Thin wrappers are easy to generate, but the more functionality a wrapper needs to provide, the harder it gets to implement it with SWIG. Cython, on the other hand, makes it very easy to write very elaborate wrapper code specifically for the Python language, and to make it as thin or thick as needed at any given place. Also, there exists third party code for parsing C header files and using it to generate Cython definitions and module skeletons. ShedSkin [ShedSkin]_ is an experimental Python-to-C++ compiler. It uses a very powerful whole-module type inference engine to generate a C++ program from (restricted) Python source code. The main drawback is that it has no support for calling the Python/C API for operations it does not support natively, and supports very few of the standard Python modules. .. [ctypes] http://docs.python.org/library/ctypes.html. .. 
there's also the original ctypes home page: http://python.net/crew/theller/ctypes/ .. [Pyrex] G. Ewing, Pyrex: C-Extensions for Python, http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ .. [ShedSkin] M. Dufour, J. Coughlan, ShedSkin, http://code.google.com/p/shedskin/ .. [SWIG] David M. Beazley et al., SWIG: An Easy to Use Tool for Integrating Scripting Languages with C and C++, http://www.swig.org. Cython-0.26.1/docs/src/tutorial/external.rst0000664000175000017500000001161612542002467021575 0ustar stefanstefan00000000000000Calling C functions ==================== This tutorial describes shortly what you need to know in order to call C library functions from Cython code. For a longer and more comprehensive tutorial about using external C libraries, wrapping them and handling errors, see :doc:`clibraries`. For simplicity, let's start with a function from the standard C library. This does not add any dependencies to your code, and it has the additional advantage that Cython already defines many such functions for you. So you can just cimport and use them. For example, let's say you need a low-level way to parse a number from a ``char*`` value. You could use the ``atoi()`` function, as defined by the ``stdlib.h`` header file. This can be done as follows:: from libc.stdlib cimport atoi cdef parse_charptr_to_py_int(char* s): assert s is not NULL, "byte string value is NULL" return atoi(s) # note: atoi() has no error detection! You can find a complete list of these standard cimport files in Cython's source package `Cython/Includes/ `_. They are stored in ``.pxd`` files, the standard way to provide reusable Cython declarations that can be shared across modules (see :ref:`sharing-declarations`). Cython also has a complete set of declarations for CPython's C-API. For example, to test at C compilation time which CPython version your code is being compiled with, you can do this:: from cpython.version cimport PY_VERSION_HEX # Python version >= 3.2 final ? print PY_VERSION_HEX >= 0x030200F0 Cython also provides declarations for the C math library:: from libc.math cimport sin cdef double f(double x): return sin(x*x) Dynamic linking --------------- The libc math library is special in that it is not linked by default on some Unix-like systems, such as Linux. In addition to cimporting the declarations, you must configure your build system to link against the shared library ``m``. For distutils, it is enough to add it to the ``libraries`` parameter of the ``Extension()`` setup:: from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize ext_modules=[ Extension("demo", sources=["demo.pyx"], libraries=["m"] # Unix-like specific ) ] setup( name = "Demos", ext_modules = cythonize(ext_modules) ) External declarations --------------------- If you want to access C code for which Cython does not provide a ready to use declaration, you must declare them yourself. For example, the above ``sin()`` function is defined as follows:: cdef extern from "math.h": double sin(double x) This declares the ``sin()`` function in a way that makes it available to Cython code and instructs Cython to generate C code that includes the ``math.h`` header file. The C compiler will see the original declaration in ``math.h`` at compile time, but Cython does not parse "math.h" and requires a separate definition. 
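Such a declaration is not limited to functions; constants and macros defined
by the header can be declared in the same block with a matching type.  The
following is only a small sketch (the ``cos_pi()`` wrapper is made up for
illustration)::

    cdef extern from "math.h":
        double cos(double x)
        double M_PI        # a C macro, declared like a plain variable

    def cos_pi():
        return cos(M_PI)   # -> -1.0
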
Just like the ``sin()`` function from the math library, it is possible to declare and call into any C library as long as the module that Cython generates is properly linked against the shared or static library. Note that you can easily export an external C function from your Cython module by declaring it as ``cpdef``. This generates a Python wrapper for it and adds it to the module dict. Here is a Cython module that provides direct access to the C ``sin()`` function for Python code:: """ >>> sin(0) 0.0 """ cdef extern from "math.h": cpdef double sin(double x) You get the same result when this declaration appears in the ``.pxd`` file that belongs to the Cython module (i.e. that has the same name, see :ref:`sharing-declarations`). This allows the C declaration to be reused in other Cython modules, while still providing an automatically generated Python wrapper in this specific module. Naming parameters ----------------- Both C and Cython support signature declarations without parameter names like this:: cdef extern from "string.h": char* strstr(const char*, const char*) However, this prevents Cython code from calling it with keyword arguments (supported since Cython 0.19). It is therefore preferable to write the declaration like this instead:: cdef extern from "string.h": char* strstr(const char *haystack, const char *needle) You can now make it clear which of the two arguments does what in your call, thus avoiding any ambiguities and often making your code more readable:: cdef char* data = "hfvcakdfagbcffvschvxcdfgccbcfhvgcsnfxjh" pos = strstr(needle='akd', haystack=data) print pos != NULL Note that changing existing parameter names later is a backwards incompatible API modification, just as for Python code. Thus, if you provide your own declarations for external C or C++ functions, it is usually worth the additional bit of effort to choose the names of their arguments well. Cython-0.26.1/docs/src/tutorial/cdef_classes.rst0000664000175000017500000001252113023021033022346 0ustar stefanstefan00000000000000Extension types (aka. cdef classes) =================================== To support object-oriented programming, Cython supports writing normal Python classes exactly as in Python:: class MathFunction(object): def __init__(self, name, operator): self.name = name self.operator = operator def __call__(self, *operands): return self.operator(*operands) Based on what Python calls a "built-in type", however, Cython supports a second kind of class: *extension types*, sometimes referred to as "cdef classes" due to the keywords used for their declaration. They are somewhat restricted compared to Python classes, but are generally more memory efficient and faster than generic Python classes. The main difference is that they use a C struct to store their fields and methods instead of a Python dict. This allows them to store arbitrary C types in their fields without requiring a Python wrapper for them, and to access fields and methods directly at the C level without passing through a Python dictionary lookup. Normal Python classes can inherit from cdef classes, but not the other way around. Cython requires to know the complete inheritance hierarchy in order to lay out their C structs, and restricts it to single inheritance. Normal Python classes, on the other hand, can inherit from any number of Python classes and extension types, both in Cython code and pure Python code. So far our integration example has not been very useful as it only integrates a single hard-coded function. 
In order to remedy this, with hardly sacrificing speed, we will use a cdef class to represent a function on floating point numbers:: cdef class Function: cpdef double evaluate(self, double x) except *: return 0 The directive cpdef makes two versions of the method available; one fast for use from Cython and one slower for use from Python. Then:: cdef class SinOfSquareFunction(Function): cpdef double evaluate(self, double x) except *: return sin(x**2) This does slightly more than providing a python wrapper for a cdef method: unlike a cdef method, a cpdef method is fully overrideable by methods and instance attributes in Python subclasses. It adds a little calling overhead compared to a cdef method. Using this, we can now change our integration example:: def integrate(Function f, double a, double b, int N): cdef int i cdef double s, dx if f is None: raise ValueError("f cannot be None") s = 0 dx = (b-a)/N for i in range(N): s += f.evaluate(a+i*dx) return s * dx print(integrate(SinOfSquareFunction(), 0, 1, 10000)) This is almost as fast as the previous code, however it is much more flexible as the function to integrate can be changed. We can even pass in a new function defined in Python-space:: >>> import integrate >>> class MyPolynomial(integrate.Function): ... def evaluate(self, x): ... return 2*x*x + 3*x - 10 ... >>> integrate(MyPolynomial(), 0, 1, 10000) -7.8335833300000077 This is about 20 times slower, but still about 10 times faster than the original Python-only integration code. This shows how large the speed-ups can easily be when whole loops are moved from Python code into a Cython module. Some notes on our new implementation of ``evaluate``: - The fast method dispatch here only works because ``evaluate`` was declared in ``Function``. Had ``evaluate`` been introduced in ``SinOfSquareFunction``, the code would still work, but Cython would have used the slower Python method dispatch mechanism instead. - In the same way, had the argument ``f`` not been typed, but only been passed as a Python object, the slower Python dispatch would be used. - Since the argument is typed, we need to check whether it is ``None``. In Python, this would have resulted in an ``AttributeError`` when the ``evaluate`` method was looked up, but Cython would instead try to access the (incompatible) internal structure of ``None`` as if it were a ``Function``, leading to a crash or data corruption. There is a *compiler directive* ``nonecheck`` which turns on checks for this, at the cost of decreased speed. Here's how compiler directives are used to dynamically switch on or off ``nonecheck``:: #cython: nonecheck=True # ^^^ Turns on nonecheck globally import cython # Turn off nonecheck locally for the function @cython.nonecheck(False) def func(): cdef MyClass obj = None try: # Turn nonecheck on again for a block with cython.nonecheck(True): print obj.myfunc() # Raises exception except AttributeError: pass print obj.myfunc() # Hope for a crash! 
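If you prefer not to hard-code directives in the source file, they can also be
set when building.  The following is a minimal, hypothetical ``setup.py``
sketch (the module name ``mymodule.pyx`` is made up here); it passes the
``nonecheck`` directive through ``cythonize()``'s ``compiler_directives``
option::

    from distutils.core import setup
    from Cython.Build import cythonize

    setup(
        ext_modules = cythonize(
            "mymodule.pyx",
            # enable None checking for the whole module at build time
            compiler_directives={'nonecheck': True},
        )
    )
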
Attributes in cdef classes behave differently from attributes in regular classes:

- All attributes must be pre-declared at compile-time
- Attributes are by default only accessible from Cython (typed access)
- Properties can be declared to expose dynamic attributes to Python-space

::

    cdef class WaveFunction(Function):
        # Not available in Python-space:
        cdef double offset

        # Available in Python-space:
        cdef public double freq

        # Available in Python-space:
        @property
        def period(self):
            return 1.0 / self.freq

        @period.setter
        def period(self, value):
            self.freq = 1.0 / value

        <...>

Cython-0.26.1/docs/src/tutorial/appendix.rst0000664000175000017500000000265413143605603021564 0ustar stefanstefan00000000000000Appendix: Installing MinGW on Windows
=====================================

1. Download the MinGW installer from
   http://www.mingw.org/wiki/HOWTO_Install_the_MinGW_GCC_Compiler_Suite.
   (As of this writing, the download link is a bit difficult to find; it's
   under "About" in the menu on the left-hand side).  You want the file
   entitled "Automated MinGW Installer" (currently version 5.1.4).

2. Run it and install MinGW.  Only the basic package is strictly needed for
   Cython, although you might want to grab at least the C++ compiler as well.

3. You need to set up Windows' "PATH" environment variable so that it
   includes e.g. "c:\\mingw\\bin" (if you installed MinGW to "c:\\mingw").
   The following web page describes the procedure in Windows XP (the Vista
   procedure is similar): http://support.microsoft.com/kb/310519

4. Finally, tell Python to use MinGW as the default compiler (otherwise it
   will try for Visual C).  If Python is installed to "c:\\Python27", create
   a file named "c:\\Python27\\Lib\\distutils\\distutils.cfg" containing::

       [build]
       compiler = mingw32

The [WinInst]_ wiki page contains updated information about this procedure.
Any contributions towards making the Windows install process smoother are
welcome; it is an unfortunate fact that none of the regular Cython developers
have convenient access to Windows.

.. [WinInst] http://wiki.cython.org/InstallingOnWindows

Cython-0.26.1/docs/src/tutorial/pure.rst0000664000175000017500000003156013023021033020711 0ustar stefanstefan00000000000000
.. _pure-mode:

Pure Python Mode
================

In some cases, it's desirable to speed up Python code without losing the
ability to run it with the Python interpreter.  While pure Python scripts
can be compiled with Cython, it usually results only in a speed gain of
about 20%-50%.

To go beyond that, Cython provides language constructs to add static typing
and cythonic functionalities to a Python module to make it run much faster
when compiled, while still allowing it to be interpreted.
This is accomplished either via an augmenting :file:`.pxd` file, or via
special functions and decorators available after importing the magic
``cython`` module.

Although it is not typically recommended over writing straight Cython code
in a :file:`.pyx` file, there are legitimate reasons to do this - easier
testing, collaboration with pure Python developers, etc.

In pure mode, you are more or less restricted to code that can be expressed
(or at least emulated) in Python, plus static type declarations.  Anything
beyond that can only be done in .pyx files with extended language syntax,
because it depends on features of the Cython compiler.

Augmenting .pxd
---------------

Using an augmenting :file:`.pxd` allows you to leave the original
:file:`.py` file completely untouched.
On the other hand, one needs to maintain both the :file:`.pxd` and the :file:`.py` to keep them in sync. While declarations in a :file:`.pyx` file must correspond exactly with those of a :file:`.pxd` file with the same name (and any contradiction results in a compile time error, see :doc:`pxd_files`), the untyped definitions in a :file:`.py` file can be overridden and augmented with static types by the more specific ones present in a :file:`.pxd`. If a :file:`.pxd` file is found with the same name as the :file:`.py` file being compiled, it will be searched for :keyword:`cdef` classes and :keyword:`cdef`/:keyword:`cpdef` functions and methods. The compiler will then convert the corresponding classes/functions/methods in the :file:`.py` file to be of the declared type. Thus if one has a file :file:`A.py`:: def myfunction(x, y=2): a = x-y return a + x * y def _helper(a): return a + 1 class A: def __init__(self, b=0): self.a = 3 self.b = b def foo(self, x): print x + _helper(1.0) and adds :file:`A.pxd`:: cpdef int myfunction(int x, int y=*) cdef double _helper(double a) cdef class A: cdef public int a,b cpdef foo(self, double x) then Cython will compile the :file:`A.py` as if it had been written as follows:: cpdef int myfunction(int x, int y=2): a = x-y return a + x * y cdef double _helper(double a): return a + 1 cdef class A: cdef public int a,b def __init__(self, b=0): self.a = 3 self.b = b cpdef foo(self, double x): print x + _helper(1.0) Notice how in order to provide the Python wrappers to the definitions in the :file:`.pxd`, that is, to be accessible from Python, * Python visible function signatures must be declared as `cpdef` (with default arguments replaced by a `*` to avoid repetition):: cpdef int myfunction(int x, int y=*) * C function signatures of internal functions can be declared as `cdef`:: cdef double _helper(double a) * `cdef` classes (extension types) are declared as `cdef class`; * `cdef` class attributes must be declared as `cdef public` if read/write Python access is needed, `cdef readonly` for read-only Python access, or plain `cdef` for internal C level attributes; * `cdef` class methods must be declared as `cpdef` for Python visible methods or `cdef` for internal C methods. In the example above, the type of the local variable `a` in `myfunction()` is not fixed and will thus be a Python object. To statically type it, one can use Cython's ``@cython.locals`` decorator (see :ref:`magic_attributes`, and :ref:`magic_attributes_pxd`). Normal Python (:keyword:`def`) functions cannot be declared in :file:`.pxd` files. It is therefore currently impossible to override the types of plain Python functions in :file:`.pxd` files, e.g. to override types of their local variables. In most cases, declaring them as `cpdef` will work as expected. .. _magic_attributes: Magic Attributes ---------------- Special decorators are available from the magic ``cython`` module that can be used to add static typing within the Python file, while being ignored by the interpreter. This option adds the ``cython`` module dependency to the original code, but does not require to maintain a supplementary :file:`.pxd` file. Cython provides a fake version of this module as `Cython.Shadow`, which is available as `cython.py` when Cython is installed, but can be copied to be used by other modules when Cython is not installed. "Compiled" switch ^^^^^^^^^^^^^^^^^ * ``compiled`` is a special variable which is set to ``True`` when the compiler runs, and ``False`` in the interpreter. 
Thus, the code :: if cython.compiled: print("Yep, I'm compiled.") else: print("Just a lowly interpreted script.") will behave differently depending on whether or not the code is executed as a compiled extension (:file:`.so`/:file:`.pyd`) module or a plain :file:`.py` file. Static typing ^^^^^^^^^^^^^ * ``cython.declare`` declares a typed variable in the current scope, which can be used in place of the :samp:`cdef type var [= value]` construct. This has two forms, the first as an assignment (useful as it creates a declaration in interpreted mode as well):: x = cython.declare(cython.int) # cdef int x y = cython.declare(cython.double, 0.57721) # cdef double y = 0.57721 and the second mode as a simple function call:: cython.declare(x=cython.int, y=cython.double) # cdef int x; cdef double y It can also be used to type class constructors:: class A: cython.declare(a=cython.int, b=cython.int) def __init__(self, b=0): self.a = 3 self.b = b And even to define extension type private, readonly and public attributes:: @cython.cclass class A: cython.declare(a=cython.int, b=cython.int) c = cython.declare(cython.int, visibility='public') d = cython.declare(cython.int, 5) # private by default. e = cython.declare(cython.int, 5, visibility='readonly') * ``@cython.locals`` is a decorator that is used to specify the types of local variables in the function body (including the arguments):: @cython.locals(a=cython.double, b=cython.double, n=cython.p_double) def foo(a, b, x, y): n = a*b ... * ``@cython.returns()`` specifies the function's return type. * Starting with Cython 0.21, Python signature annotations can be used to declare argument types. Cython recognises three ways to do this, as shown in the following example. Note that it currently needs to be enabled explicitly with the directive ``annotation_typing=True``. This might change in a later version. :: # cython: annotation_typing=True def func(plain_python_type: dict, named_python_type: 'dict', explicit_python_type: {'type': dict}, explicit_c_type: {'ctype': 'int'}): ... C types ^^^^^^^ There are numerous types built into the Cython module. It provides all the standard C types, namely ``char``, ``short``, ``int``, ``long``, ``longlong`` as well as their unsigned versions ``uchar``, ``ushort``, ``uint``, ``ulong``, ``ulonglong``. The special ``bint`` type is used for C boolean values and ``Py_ssize_t`` for (signed) sizes of Python containers. For each type, there are pointer types ``p_int``, ``pp_int``, etc., up to three levels deep in interpreted mode, and infinitely deep in compiled mode. Further pointer types can be constructed with ``cython.pointer(cython.int)``, and arrays as ``cython.int[10]``. A limited attempt is made to emulate these more complex types, but only so much can be done from the Python language. The Python types int, long and bool are interpreted as C ``int``, ``long`` and ``bint`` respectively. Also, the Python builtin types ``list``, ``dict``, ``tuple``, etc. may be used, as well as any user defined types. Typed C-tuples can be declared as a tuple of C types. Extension types and cdef functions ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * The class decorator ``@cython.cclass`` creates a ``cdef class``. * The function/method decorator ``@cython.cfunc`` creates a :keyword:`cdef` function. * ``@cython.ccall`` creates a :keyword:`cpdef` function, i.e. one that Cython code can call at the C level. * ``@cython.locals`` declares local variables (see above). It can also be used to declare types for arguments, i.e. 
the local variables that are used in the signature.

* ``@cython.inline`` is the equivalent of the C ``inline`` modifier.

Here is an example of a :keyword:`cdef` function::

    @cython.cfunc
    @cython.returns(cython.bint)
    @cython.locals(a=cython.int, b=cython.int)
    def c_compare(a, b):
        return a == b

Further Cython functions and declarations
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

* ``address`` is used in place of the ``&`` operator::

    cython.declare(x=cython.int, x_ptr=cython.p_int)
    x_ptr = cython.address(x)

* ``sizeof`` emulates the `sizeof` operator.  It can take both types and
  expressions. ::

    cython.declare(n=cython.longlong)
    print cython.sizeof(cython.longlong)
    print cython.sizeof(n)

* ``struct`` can be used to create struct types. ::

    MyStruct = cython.struct(x=cython.int, y=cython.int, data=cython.double)
    a = cython.declare(MyStruct)

  is equivalent to the code::

    cdef struct MyStruct:
        int x
        int y
        double data
    cdef MyStruct a

* ``union`` creates union types with exactly the same syntax as ``struct``.

* ``typedef`` defines a type under a given name::

    T = cython.typedef(cython.p_int)   # ctypedef int* T

* ``cast`` will (unsafely) reinterpret an expression type.
  ``cython.cast(T, t)`` is equivalent to ``<T>t``.  The first argument must
  be a type, the second is the expression to cast.  Specifying the optional
  keyword argument ``typecheck=True`` has the semantics of ``<T?>t``. ::

    t1 = cython.cast(T, t)
    t2 = cython.cast(T, t, typecheck=True)

.. _magic_attributes_pxd:

Magic Attributes within the .pxd
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The special `cython` module can also be imported and used within the
augmenting :file:`.pxd` file.  For example, the following Python file
:file:`dostuff.py`::

    def dostuff(n):
        t = 0
        for i in range(n):
            t += i
        return t

can be augmented with the following :file:`.pxd` file :file:`dostuff.pxd`::

    import cython

    @cython.locals(t = cython.int, i = cython.int)
    cpdef int dostuff(int n)

The :func:`cython.declare()` function can be used to specify types for global
variables in the augmenting :file:`.pxd` file.

Tips and Tricks
---------------

Calling C functions
^^^^^^^^^^^^^^^^^^^

Normally, it isn't possible to call C functions in pure Python mode as there
is no general way to support it in normal (uncompiled) Python.  However, in
cases where an equivalent Python function exists, this can be achieved by
combining C function coercion with a conditional import as follows::

    # in mymodule.pxd:

    # declare a C function as "cpdef" to export it to the module
    cdef extern from "math.h":
        cpdef double sin(double x)

    # in mymodule.py:

    import cython

    # override with Python import if not in compiled code
    if not cython.compiled:
        from math import sin

    # calls sin() from math.h when compiled with Cython and math.sin() in Python
    print(sin(0))

Note that the "sin" function will show up in the module namespace of
"mymodule" here (i.e. there will be a ``mymodule.sin()`` function).  You can
mark it as an internal name according to Python conventions by renaming it to
"_sin" in the ``.pxd`` file as follows::

    cdef extern from "math.h":
        cpdef double _sin "sin" (double x)

You would then also change the Python import to
``from math import sin as _sin`` to make the names match again.

Using C arrays for fixed size lists
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Since Cython 0.22, C arrays can automatically coerce to Python lists or
tuples.  This can be exploited to replace fixed size Python lists in Python
code by C arrays when compiled.
An example:: import cython @cython.locals(counts=cython.int[10], digit=cython.int) def count_digits(digits): """ >>> digits = '01112222333334445667788899' >>> count_digits(map(int, digits)) [1, 3, 4, 5, 3, 1, 2, 2, 3, 2] """ counts = [0] * 10 for digit in digits: assert 0 <= digit <= 9 counts[digit] += 1 return counts In normal Python, this will use a Python list to collect the counts, whereas Cython will generate C code that uses a C array of C ints. Cython-0.26.1/docs/src/tutorial/queue_example/0000775000175000017500000000000013151203436022047 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/tutorial/queue_example/cqueue.pxd0000664000175000017500000000104112542002467024053 0ustar stefanstefan00000000000000cdef extern from "libcalg/queue.h": ctypedef struct Queue: pass ctypedef void* QueueValue Queue* queue_new() void queue_free(Queue* queue) int queue_push_head(Queue* queue, QueueValue data) QueueValue queue_pop_head(Queue* queue) QueueValue queue_peek_head(Queue* queue) int queue_push_tail(Queue* queue, QueueValue data) QueueValue queue_pop_tail(Queue* queue) QueueValue queue_peek_tail(Queue* queue) int queue_is_empty(Queue* queue) Cython-0.26.1/docs/src/tutorial/queue_example/queue.pyx0000664000175000017500000000453312542002467023746 0ustar stefanstefan00000000000000cimport cqueue cdef class Queue: cdef cqueue.Queue* _c_queue def __cinit__(self): self._c_queue = cqueue.queue_new() if self._c_queue is NULL: raise MemoryError() def __dealloc__(self): if self._c_queue is not NULL: cqueue.queue_free(self._c_queue) cpdef int append(self, int value) except -1: if not cqueue.queue_push_tail(self._c_queue, value): raise MemoryError() return 0 cdef int extend(self, int* values, Py_ssize_t count) except -1: cdef Py_ssize_t i for i in range(count): if not cqueue.queue_push_tail(self._c_queue, values[i]): raise MemoryError() return 0 cpdef int peek(self) except? 0: cdef int value = cqueue.queue_peek_head(self._c_queue) if value == 0: # this may mean that the queue is empty, or that it # happens to contain a 0 value if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return value cpdef int pop(self) except? 0: cdef int value = cqueue.queue_pop_head(self._c_queue) if value == 0: # this may mean that the queue is empty, or that it # happens to contain a 0 value if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return value def __bool__(self): # same as __nonzero__ in Python 2.x return not cqueue.queue_is_empty(self._c_queue) DEF repeat_count=10000 def test_cy(): cdef int i cdef Queue q = Queue() for i in range(repeat_count): q.append(i) for i in range(repeat_count): q.peek() while q: q.pop() def test_py(): cdef int i q = Queue() for i in range(repeat_count): q.append(i) for i in range(repeat_count): q.peek() while q: q.pop() from collections import deque def test_deque(): cdef int i q = deque() for i in range(repeat_count): q.appendleft(i) for i in range(repeat_count): q[-1] while q: q.pop() repeat = range(repeat_count) def test_py_exec(): q = Queue() d = dict(q=q, repeat=repeat) exec u"""\ for i in repeat: q.append(9) for i in repeat: q.peek() while q: q.pop() """ in d Cython-0.26.1/docs/src/tutorial/data.py0000664000175000017500000000311313143605603020474 0ustar stefanstefan00000000000000{ 'title': 'Cython Tutorial', 'paper_abstract': ''' Cython is a programming language based on Python with extra syntax to provide static type declarations. This takes advantage of the benefits of Python while allowing one to achieve the speed of C. 
In this paper we describe the Cython language and show how it can be used both to write optimized code and to interface with external C libraries. ''', 'authors': [ {'first_names': 'Stefan', 'surname': 'Behnel', 'address': '', 'country': 'Germany', 'email_address': 'stefan\_ml@behnel.de', 'institution': ''}, {'first_names': 'Robert W.', 'surname': 'Bradshaw', 'address': '', 'country': 'USA', 'email_address': 'robertwb@math.washington.edu', 'institution': '''University of Washington\\footnote{ Department of Mathematics, University of Washington, Seattle, WA, USA }'''}, {'first_names': 'Dag Sverre', 'surname': 'Seljebotn', 'address': '', 'country': 'Norway', 'email_address': 'dagss@student.matnat.uio.no', # I need three institutions w/ full address... leave it # all here until we get to editing stage 'institution': '''University of Oslo\\footnote{Institute of Theoretical Astrophysics, University of Oslo, P.O. Box 1029 Blindern, N-0315 Oslo, Norway}\\footnote{Department of Mathematics, University of Oslo, P.O. Box 1053 Blindern, N-0316 Oslo, Norway}\\footnote{Centre of Mathematics for Applications, University of Oslo, P.O. Box 1053 Blindern, N-0316 Oslo, Norway}'''} ], } Cython-0.26.1/docs/src/tutorial/profiling_tutorial.rst0000664000175000017500000003266213143605603023672 0ustar stefanstefan00000000000000.. highlight:: cython .. _profiling: ********* Profiling ********* This part describes the profiling abilities of Cython. If you are familiar with profiling pure Python code, you can only read the first section (:ref:`profiling_basics`). If you are not familiar with Python profiling you should also read the tutorial (:ref:`profiling_tutorial`) which takes you through a complete example step by step. .. _profiling_basics: Cython Profiling Basics ======================= Profiling in Cython is controlled by a compiler directive. It can be set either for an entire file or on a per function basis via a Cython decorator. Enabling profiling for a complete source file --------------------------------------------- Profiling is enabled for a complete source file via a global directive to the Cython compiler at the top of a file:: # cython: profile=True Note that profiling gives a slight overhead to each function call therefore making your program a little slower (or a lot, if you call some small functions very often). Once enabled, your Cython code will behave just like Python code when called from the cProfile module. This means you can just profile your Cython code together with your Python code using the same tools as for Python code alone. Disabling profiling function wise --------------------------------- If your profiling is messed up because of the call overhead to some small functions that you rather do not want to see in your profile - either because you plan to inline them anyway or because you are sure that you can't make them any faster - you can use a special decorator to disable profiling for one function only:: cimport cython @cython.profile(False) def my_often_called_function(): pass Enabling line tracing --------------------- To get more detailed trace information (for tools that can make use of it), you can enable line tracing:: # cython: linetrace=True This will also enable profiling support, so the above ``profile=True`` option is not needed. Line tracing is needed for coverage analysis, for example. Note that even if line tracing is enabled via the compiler directive, it is not used by default. 
As the runtime slowdown can be substantial, it must additionally be compiled in by the C compiler by setting the C macro definition ``CYTHON_TRACE=1``. To include nogil functions in the trace, set ``CYTHON_TRACE_NOGIL=1`` (which implies ``CYTHON_TRACE=1``). C macros can be defined either in the extension definition of the ``setup.py`` script or by setting the respective distutils options in the source file with the following file header comment (if ``cythonize()`` is used for compilation):: # distutils: define_macros=CYTHON_TRACE_NOGIL=1 Enabling coverage analysis -------------------------- Since Cython 0.23, line tracing (see above) also enables support for coverage reporting with the `coverage.py `_ tool. To make the coverage analysis understand Cython modules, you also need to enable Cython's coverage plugin in your ``.coveragerc`` file as follows: .. code-block:: ini [run] plugins = Cython.Coverage With this plugin, your Cython source files should show up normally in the coverage reports. To include the coverage report in the Cython annotated HTML file, you need to first run the coverage.py tool to generate an XML result file. Pass this file into the ``cython`` command as follows: .. code-block:: bash $ cython --annotate-coverage coverage.xml package/mymodule.pyx This will recompile the Cython module and generate one HTML output file next to each Cython source file it processes, containing colour markers for lines that were contained in the coverage report. .. _profiling_tutorial: Profiling Tutorial ================== This will be a complete tutorial, start to finish, of profiling Python code, turning it into Cython code and keep profiling until it is fast enough. As a toy example, we would like to evaluate the summation of the reciprocals of squares up to a certain integer :math:`n` for evaluating :math:`\pi`. The relation we want to use has been proven by Euler in 1735 and is known as the `Basel problem `_. .. math:: \pi^2 = 6 \sum_{k=1}^{\infty} \frac{1}{k^2} = 6 \lim_{k \to \infty} \big( \frac{1}{1^2} + \frac{1}{2^2} + \dots + \frac{1}{k^2} \big) \approx 6 \big( \frac{1}{1^2} + \frac{1}{2^2} + \dots + \frac{1}{n^2} \big) A simple Python code for evaluating the truncated sum looks like this:: #!/usr/bin/env python # encoding: utf-8 # filename: calc_pi.py def recip_square(i): return 1./i**2 def approx_pi(n=10000000): val = 0. for k in range(1,n+1): val += recip_square(k) return (6 * val)**.5 On my box, this needs approximately 4 seconds to run the function with the default n. The higher we choose n, the better will be the approximation for :math:`\pi`. An experienced Python programmer will already see plenty of places to optimize this code. But remember the golden rule of optimization: Never optimize without having profiled. Let me repeat this: **Never** optimize without having profiled your code. Your thoughts about which part of your code takes too much time are wrong. At least, mine are always wrong. So let's write a short script to profile our code:: #!/usr/bin/env python # encoding: utf-8 # filename: profile.py import pstats, cProfile import calc_pi cProfile.runctx("calc_pi.approx_pi()", globals(), locals(), "Profile.prof") s = pstats.Stats("Profile.prof") s.strip_dirs().sort_stats("time").print_stats() Running this on my box gives the following output: .. 
code-block:: none Sat Nov 7 17:40:54 2009 Profile.prof 10000004 function calls in 6.211 CPU seconds Ordered by: internal time ncalls tottime percall cumtime percall filename:lineno(function) 1 3.243 3.243 6.211 6.211 calc_pi.py:7(approx_pi) 10000000 2.526 0.000 2.526 0.000 calc_pi.py:4(recip_square) 1 0.442 0.442 0.442 0.442 {range} 1 0.000 0.000 6.211 6.211 :1() 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} This contains the information that the code runs in 6.2 CPU seconds. Note that the code got slower by 2 seconds because it ran inside the cProfile module. The table contains the real valuable information. You might want to check the Python `profiling documentation `_ for the nitty gritty details. The most important columns here are totime (total time spent in this function **not** counting functions that were called by this function) and cumtime (total time spent in this function **also** counting the functions called by this function). Looking at the tottime column, we see that approximately half the time is spent in approx_pi and the other half is spent in recip_square. Also half a second is spent in range ... of course we should have used xrange for such a big iteration. And in fact, just changing range to xrange makes the code run in 5.8 seconds. We could optimize a lot in the pure Python version, but since we are interested in Cython, let's move forward and bring this module to Cython. We would do this anyway at some time to get the loop run faster. Here is our first Cython version:: # encoding: utf-8 # cython: profile=True # filename: calc_pi.pyx def recip_square(int i): return 1./i**2 def approx_pi(int n=10000000): cdef double val = 0. cdef int k for k in xrange(1,n+1): val += recip_square(k) return (6 * val)**.5 Note the second line: We have to tell Cython that profiling should be enabled. This makes the Cython code slightly slower, but without this we would not get meaningful output from the cProfile module. The rest of the code is mostly unchanged, I only typed some variables which will likely speed things up a bit. We also need to modify our profiling script to import the Cython module directly. Here is the complete version adding the import of the pyximport module:: #!/usr/bin/env python # encoding: utf-8 # filename: profile.py import pstats, cProfile import pyximport pyximport.install() import calc_pi cProfile.runctx("calc_pi.approx_pi()", globals(), locals(), "Profile.prof") s = pstats.Stats("Profile.prof") s.strip_dirs().sort_stats("time").print_stats() We only added two lines, the rest stays completely the same. Alternatively, we could also manually compile our code into an extension; we wouldn't need to change the profile script then at all. The script now outputs the following: .. code-block:: none Sat Nov 7 18:02:33 2009 Profile.prof 10000004 function calls in 4.406 CPU seconds Ordered by: internal time ncalls tottime percall cumtime percall filename:lineno(function) 1 3.305 3.305 4.406 4.406 calc_pi.pyx:7(approx_pi) 10000000 1.101 0.000 1.101 0.000 calc_pi.pyx:4(recip_square) 1 0.000 0.000 4.406 4.406 {calc_pi.approx_pi} 1 0.000 0.000 4.406 4.406 :1() 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} We gained 1.8 seconds. Not too shabby. Comparing the output to the previous, we see that recip_square function got faster while the approx_pi function has not changed a lot. Let's concentrate on the recip_square function a bit more. 
First note, that this function is not to be called from code outside of our module; so it would be wise to turn it into a cdef to reduce call overhead. We should also get rid of the power operator: it is turned into a pow(i,2) function call by Cython, but we could instead just write i*i which could be faster. The whole function is also a good candidate for inlining. Let's look at the necessary changes for these ideas:: # encoding: utf-8 # cython: profile=True # filename: calc_pi.pyx cdef inline double recip_square(int i): return 1./(i*i) def approx_pi(int n=10000000): cdef double val = 0. cdef int k for k in xrange(1,n+1): val += recip_square(k) return (6 * val)**.5 Now running the profile script yields: .. code-block:: none Sat Nov 7 18:10:11 2009 Profile.prof 10000004 function calls in 2.622 CPU seconds Ordered by: internal time ncalls tottime percall cumtime percall filename:lineno(function) 1 1.782 1.782 2.622 2.622 calc_pi.pyx:7(approx_pi) 10000000 0.840 0.000 0.840 0.000 calc_pi.pyx:4(recip_square) 1 0.000 0.000 2.622 2.622 {calc_pi.approx_pi} 1 0.000 0.000 2.622 2.622 :1() 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} That bought us another 1.8 seconds. Not the dramatic change we could have expected. And why is recip_square still in this table; it is supposed to be inlined, isn't it? The reason for this is that Cython still generates profiling code even if the function call is eliminated. Let's tell it to not profile recip_square any more; we couldn't get the function to be much faster anyway:: # encoding: utf-8 # cython: profile=True # filename: calc_pi.pyx cimport cython @cython.profile(False) cdef inline double recip_square(int i): return 1./(i*i) def approx_pi(int n=10000000): cdef double val = 0. cdef int k for k in xrange(1,n+1): val += recip_square(k) return (6 * val)**.5 Running this shows an interesting result: .. code-block:: none Sat Nov 7 18:15:02 2009 Profile.prof 4 function calls in 0.089 CPU seconds Ordered by: internal time ncalls tottime percall cumtime percall filename:lineno(function) 1 0.089 0.089 0.089 0.089 calc_pi.pyx:10(approx_pi) 1 0.000 0.000 0.089 0.089 {calc_pi.approx_pi} 1 0.000 0.000 0.089 0.089 :1() 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} First note the tremendous speed gain: this version only takes 1/50 of the time of our first Cython version. Also note that recip_square has vanished from the table like we wanted. But the most peculiar and import change is that approx_pi also got much faster. This is a problem with all profiling: calling a function in a profile run adds a certain overhead to the function call. This overhead is **not** added to the time spent in the called function, but to the time spent in the **calling** function. In this example, approx_pi didn't need 2.622 seconds in the last run; but it called recip_square 10000000 times, each time taking a little to set up profiling for it. This adds up to the massive time loss of around 2.6 seconds. Having disabled profiling for the often called function now reveals realistic timings for approx_pi; we could continue optimizing it now if needed. This concludes this profiling tutorial. There is still some room for improvement in this code. We could try to replace the power operator in approx_pi with a call to sqrt from the C stdlib; but this is not necessarily faster than calling pow(x,0.5). 
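For completeness, here is a sketch of what that replacement could look like
(functionally equivalent to the last version above; remember that the C math
library may need to be linked explicitly on some systems, as described in the
"Calling C functions" tutorial)::

    # encoding: utf-8
    # cython: profile=True
    # filename: calc_pi.pyx

    cimport cython
    from libc.math cimport sqrt

    @cython.profile(False)
    cdef inline double recip_square(int i):
        return 1./(i*i)

    def approx_pi(int n=10000000):
        cdef double val = 0.
        cdef int k
        for k in xrange(1,n+1):
            val += recip_square(k)
        # sqrt() from the C math library instead of the ** operator
        return sqrt(6 * val)
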
Even so, the result we achieved here is quite satisfactory: we came up with a solution that is much faster then our original Python version while retaining functionality and readability. Cython-0.26.1/docs/src/tutorial/index.rst0000664000175000017500000000040112542002467021050 0ustar stefanstefan00000000000000Tutorials ========= .. toctree:: :maxdepth: 2 cython_tutorial external clibraries cdef_classes pxd_files caveats profiling_tutorial strings memory_allocation pure numpy array readings related_work appendix Cython-0.26.1/docs/src/tutorial/readings.rst0000664000175000017500000000253212542002467021544 0ustar stefanstefan00000000000000Further reading =============== The main documentation is located at http://docs.cython.org/. Some recent features might not have documentation written yet, in such cases some notes can usually be found in the form of a Cython Enhancement Proposal (CEP) on http://wiki.cython.org/enhancements. [Seljebotn09]_ contains more information about Cython and NumPy arrays. If you intend to use Cython code in a multi-threaded setting, it is essential to read up on Cython's features for managing the Global Interpreter Lock (the GIL). The same paper contains an explanation of the GIL, and the main documentation explains the Cython features for managing it. Finally, don't hesitate to ask questions (or post reports on successes!) on the Cython users mailing list [UserList]_. The Cython developer mailing list, [DevList]_, is also open to everybody, but focusses on core development issues. Feel free to use it to report a clear bug, to ask for guidance if you have time to spare to develop Cython, or if you have suggestions for future development. .. [DevList] Cython developer mailing list: http://mail.python.org/mailman/listinfo/cython-devel .. [Seljebotn09] D. S. Seljebotn, Fast numerical computations with Cython, Proceedings of the 8th Python in Science Conference, 2009. .. [UserList] Cython users mailing list: http://groups.google.com/group/cython-users Cython-0.26.1/docs/src/tutorial/clibraries.rst0000664000175000017500000005725612542002467022104 0ustar stefanstefan00000000000000Using C libraries ================= Apart from writing fast code, one of the main use cases of Cython is to call external C libraries from Python code. As Cython code compiles down to C code itself, it is actually trivial to call C functions directly in the code. The following gives a complete example for using (and wrapping) an external C library in Cython code, including appropriate error handling and considerations about designing a suitable API for Python and Cython code. Imagine you need an efficient way to store integer values in a FIFO queue. Since memory really matters, and the values are actually coming from C code, you cannot afford to create and store Python ``int`` objects in a list or deque. So you look out for a queue implementation in C. After some web search, you find the C-algorithms library [CAlg]_ and decide to use its double ended queue implementation. To make the handling easier, however, you decide to wrap it in a Python extension type that can encapsulate all memory management. .. 
[CAlg] Simon Howard, C Algorithms library, http://c-algorithms.sourceforge.net/ Defining external declarations ------------------------------ The C API of the queue implementation, which is defined in the header file ``libcalg/queue.h``, essentially looks like this:: /* file: queue.h */ typedef struct _Queue Queue; typedef void *QueueValue; Queue *queue_new(void); void queue_free(Queue *queue); int queue_push_head(Queue *queue, QueueValue data); QueueValue queue_pop_head(Queue *queue); QueueValue queue_peek_head(Queue *queue); int queue_push_tail(Queue *queue, QueueValue data); QueueValue queue_pop_tail(Queue *queue); QueueValue queue_peek_tail(Queue *queue); int queue_is_empty(Queue *queue); To get started, the first step is to redefine the C API in a ``.pxd`` file, say, ``cqueue.pxd``:: # file: cqueue.pxd cdef extern from "libcalg/queue.h": ctypedef struct Queue: pass ctypedef void* QueueValue Queue* queue_new() void queue_free(Queue* queue) int queue_push_head(Queue* queue, QueueValue data) QueueValue queue_pop_head(Queue* queue) QueueValue queue_peek_head(Queue* queue) int queue_push_tail(Queue* queue, QueueValue data) QueueValue queue_pop_tail(Queue* queue) QueueValue queue_peek_tail(Queue* queue) bint queue_is_empty(Queue* queue) Note how these declarations are almost identical to the header file declarations, so you can often just copy them over. However, you do not need to provide *all* declarations as above, just those that you use in your code or in other declarations, so that Cython gets to see a sufficient and consistent subset of them. Then, consider adapting them somewhat to make them more comfortable to work with in Cython. Specifically, you should take care of choosing good argument names for the C functions, as Cython allows you to pass them as keyword arguments. Changing them later on is a backwards incompatible API modification. Choosing good names right away will make these functions more pleasant to work with from Cython code. One noteworthy difference to the header file that we use above is the declaration of the ``Queue`` struct in the first line. ``Queue`` is in this case used as an *opaque handle*; only the library that is called knows what is really inside. Since no Cython code needs to know the contents of the struct, we do not need to declare its contents, so we simply provide an empty definition (as we do not want to declare the ``_Queue`` type which is referenced in the C header) [#]_. .. [#] There's a subtle difference between ``cdef struct Queue: pass`` and ``ctypedef struct Queue: pass``. The former declares a type which is referenced in C code as ``struct Queue``, while the latter is referenced in C as ``Queue``. This is a C language quirk that Cython is not able to hide. Most modern C libraries use the ``ctypedef`` kind of struct. Another exception is the last line. The integer return value of the ``queue_is_empty()`` function is actually a C boolean value, i.e. the only interesting thing about it is whether it is non-zero or zero, indicating if the queue is empty or not. This is best expressed by Cython's ``bint`` type, which is a normal ``int`` type when used in C but maps to Python's boolean values ``True`` and ``False`` when converted to a Python object. This way of tightening declarations in a ``.pxd`` file can often simplify the code that uses them. It is good practice to define one ``.pxd`` file for each library that you use, and sometimes even for each header file (or functional group) if the API is large. 
That simplifies their reuse in other projects. Sometimes, you may need to use C functions from the standard C library, or want to call C-API functions from CPython directly. For common needs like this, Cython ships with a set of standard ``.pxd`` files that provide these declarations in a readily usable way that is adapted to their use in Cython. The main packages are ``cpython``, ``libc`` and ``libcpp``. The NumPy library also has a standard ``.pxd`` file ``numpy``, as it is often used in Cython code. See Cython's ``Cython/Includes/`` source package for a complete list of provided ``.pxd`` files. Writing a wrapper class ----------------------- After declaring our C library's API, we can start to design the Queue class that should wrap the C queue. It will live in a file called ``queue.pyx``. [#]_ .. [#] Note that the name of the ``.pyx`` file must be different from the ``cqueue.pxd`` file with declarations from the C library, as both do not describe the same code. A ``.pxd`` file next to a ``.pyx`` file with the same name defines exported declarations for code in the ``.pyx`` file. As the ``cqueue.pxd`` file contains declarations of a regular C library, there must not be a ``.pyx`` file with the same name that Cython associates with it. Here is a first start for the Queue class:: # file: queue.pyx cimport cqueue cdef class Queue: cdef cqueue.Queue* _c_queue def __cinit__(self): self._c_queue = cqueue.queue_new() Note that it says ``__cinit__`` rather than ``__init__``. While ``__init__`` is available as well, it is not guaranteed to be run (for instance, one could create a subclass and forget to call the ancestor's constructor). Because not initializing C pointers often leads to hard crashes of the Python interpreter, Cython provides ``__cinit__`` which is *always* called immediately on construction, before CPython even considers calling ``__init__``, and which therefore is the right place to initialise ``cdef`` fields of the new instance. However, as ``__cinit__`` is called during object construction, ``self`` is not fully constructed yet, and one must avoid doing anything with ``self`` but assigning to ``cdef`` fields. Note also that the above method takes no parameters, although subtypes may want to accept some. A no-arguments ``__cinit__()`` method is a special case here that simply does not receive any parameters that were passed to a constructor, so it does not prevent subclasses from adding parameters. If parameters are used in the signature of ``__cinit__()``, they must match those of any declared ``__init__`` method of classes in the class hierarchy that are used to instantiate the type. Memory management ----------------- Before we continue implementing the other methods, it is important to understand that the above implementation is not safe. In case anything goes wrong in the call to ``queue_new()``, this code will simply swallow the error, so we will likely run into a crash later on. According to the documentation of the ``queue_new()`` function, the only reason why the above can fail is due to insufficient memory. In that case, it will return ``NULL``, whereas it would normally return a pointer to the new queue. The Python way to get out of this is to raise a ``MemoryError`` [#]_. We can thus change the init function as follows:: cimport cqueue cdef class Queue: cdef cqueue.Queue* _c_queue def __cinit__(self): self._c_queue = cqueue.queue_new() if self._c_queue is NULL: raise MemoryError() .. 
[#] In the specific case of a ``MemoryError``, creating a new exception instance in order to raise it may actually fail because we are running out of memory. Luckily, CPython provides a C-API function ``PyErr_NoMemory()`` that safely raises the right exception for us. Since version 0.14.1, Cython automatically substitutes this C-API call whenever you write ``raise MemoryError`` or ``raise MemoryError()``. If you use an older version, you have to cimport the C-API function from the standard package ``cpython.exc`` and call it directly. The next thing to do is to clean up when the Queue instance is no longer used (i.e. all references to it have been deleted). To this end, CPython provides a callback that Cython makes available as a special method ``__dealloc__()``. In our case, all we have to do is to free the C Queue, but only if we succeeded in initialising it in the init method:: def __dealloc__(self): if self._c_queue is not NULL: cqueue.queue_free(self._c_queue) Compiling and linking --------------------- At this point, we have a working Cython module that we can test. To compile it, we need to configure a ``setup.py`` script for distutils. Here is the most basic script for compiling a Cython module:: from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize setup( ext_modules = cythonize([Extension("queue", ["queue.pyx"])]) ) To build against the external C library, we must extend this script to include the necessary setup. Assuming the library is installed in the usual places (e.g. under ``/usr/lib`` and ``/usr/include`` on a Unix-like system), we could simply change the extension setup from :: ext_modules = cythonize([Extension("queue", ["queue.pyx"])]) to :: ext_modules = cythonize([ Extension("queue", ["queue.pyx"], libraries=["calg"]) ]) If it is not installed in a 'normal' location, users can provide the required parameters externally by passing appropriate C compiler flags, such as:: CFLAGS="-I/usr/local/otherdir/calg/include" \ LDFLAGS="-L/usr/local/otherdir/calg/lib" \ python setup.py build_ext -i Once we have compiled the module for the first time, we can now import it and instantiate a new Queue:: $ export PYTHONPATH=. $ python -c 'import queue.Queue as Q ; Q()' However, this is all our Queue class can do so far, so let's make it more usable. Mapping functionality --------------------- Before implementing the public interface of this class, it is good practice to look at what interfaces Python offers, e.g. in its ``list`` or ``collections.deque`` classes. Since we only need a FIFO queue, it's enough to provide the methods ``append()``, ``peek()`` and ``pop()``, and additionally an ``extend()`` method to add multiple values at once. Also, since we already know that all values will be coming from C, it's best to provide only ``cdef`` methods for now, and to give them a straight C interface. In C, it is common for data structures to store data as a ``void*`` to whatever data item type. Since we only want to store ``int`` values, which usually fit into the size of a pointer type, we can avoid additional memory allocations through a trick: we cast our ``int`` values to ``void*`` and vice versa, and store the value directly as the pointer value. 
Here is a simple implementation for the ``append()`` method:: cdef append(self, int value): cqueue.queue_push_tail(self._c_queue, value) Again, the same error handling considerations as for the ``__cinit__()`` method apply, so that we end up with this implementation instead:: cdef append(self, int value): if not cqueue.queue_push_tail(self._c_queue, value): raise MemoryError() Adding an ``extend()`` method should now be straight forward:: cdef extend(self, int* values, size_t count): """Append all ints to the queue. """ cdef size_t i for i in range(count): if not cqueue.queue_push_tail( self._c_queue, values[i]): raise MemoryError() This becomes handy when reading values from a NumPy array, for example. So far, we can only add data to the queue. The next step is to write the two methods to get the first element: ``peek()`` and ``pop()``, which provide read-only and destructive read access respectively:: cdef int peek(self): return cqueue.queue_peek_head(self._c_queue) cdef int pop(self): return cqueue.queue_pop_head(self._c_queue) Handling errors --------------- Now, what happens when the queue is empty? According to the documentation, the functions return a ``NULL`` pointer, which is typically not a valid value. Since we are simply casting to and from ints, we cannot distinguish anymore if the return value was ``NULL`` because the queue was empty or because the value stored in the queue was ``0``. However, in Cython code, we would expect the first case to raise an exception, whereas the second case should simply return ``0``. To deal with this, we need to special case this value, and check if the queue really is empty or not:: cdef int peek(self) except? -1: value = cqueue.queue_peek_head(self._c_queue) if value == 0: # this may mean that the queue is empty, or # that it happens to contain a 0 value if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return value Note how we have effectively created a fast path through the method in the hopefully common cases that the return value is not ``0``. Only that specific case needs an additional check if the queue is empty. The ``except? -1`` declaration in the method signature falls into the same category. If the function was a Python function returning a Python object value, CPython would simply return ``NULL`` internally instead of a Python object to indicate an exception, which would immediately be propagated by the surrounding code. The problem is that the return type is ``int`` and any ``int`` value is a valid queue item value, so there is no way to explicitly signal an error to the calling code. In fact, without such a declaration, there is no obvious way for Cython to know what to return on exceptions and for calling code to even know that this method *may* exit with an exception. The only way calling code can deal with this situation is to call ``PyErr_Occurred()`` when returning from a function to check if an exception was raised, and if so, propagate the exception. This obviously has a performance penalty. Cython therefore allows you to declare which value it should implicitly return in the case of an exception, so that the surrounding code only needs to check for an exception when receiving this exact value. We chose to use ``-1`` as the exception return value as we expect it to be an unlikely value to be put into the queue. The question mark in the ``except? 
-1`` declaration indicates that the return value is ambiguous (there *may* be a ``-1`` value in the queue, after all) and that an additional exception check using ``PyErr_Occurred()`` is needed in calling code. Without it, Cython code that calls this method and receives the exception return value would silently (and sometimes incorrectly) assume that an exception has been raised. In any case, all other return values will be passed through almost without a penalty, thus again creating a fast path for 'normal' values. Now that the ``peek()`` method is implemented, the ``pop()`` method also needs adaptation. Since it removes a value from the queue, however, it is not enough to test if the queue is empty *after* the removal. Instead, we must test it on entry:: cdef int pop(self) except? -1: if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return cqueue.queue_pop_head(self._c_queue) The return value for exception propagation is declared exactly as for ``peek()``. Lastly, we can provide the Queue with an emptiness indicator in the normal Python way by implementing the ``__bool__()`` special method (note that Python 2 calls this method ``__nonzero__``, whereas Cython code can use either name):: def __bool__(self): return not cqueue.queue_is_empty(self._c_queue) Note that this method returns either ``True`` or ``False`` as we declared the return type of the ``queue_is_empty()`` function as ``bint`` in ``cqueue.pxd``. Testing the result ------------------ Now that the implementation is complete, you may want to write some tests for it to make sure it works correctly. Especially doctests are very nice for this purpose, as they provide some documentation at the same time. To enable doctests, however, you need a Python API that you can call. C methods are not visible from Python code, and thus not callable from doctests. A quick way to provide a Python API for the class is to change the methods from ``cdef`` to ``cpdef``. This will let Cython generate two entry points, one that is callable from normal Python code using the Python call semantics and Python objects as arguments, and one that is callable from C code with fast C semantics and without requiring intermediate argument conversion from or to Python types. Note that ``cpdef`` methods ensure that they can be appropriately overridden by Python methods even when they are called from Cython. This adds a tiny overhead compared to ``cdef`` methods. The following listing shows the complete implementation that uses ``cpdef`` methods where possible:: cimport cqueue cdef class Queue: """A queue class for C integer values. >>> q = Queue() >>> q.append(5) >>> q.peek() 5 >>> q.pop() 5 """ cdef cqueue.Queue* _c_queue def __cinit__(self): self._c_queue = cqueue.queue_new() if self._c_queue is NULL: raise MemoryError() def __dealloc__(self): if self._c_queue is not NULL: cqueue.queue_free(self._c_queue) cpdef append(self, int value): if not cqueue.queue_push_tail(self._c_queue, value): raise MemoryError() cdef extend(self, int* values, size_t count): cdef size_t i for i in xrange(count): if not cqueue.queue_push_tail( self._c_queue, values[i]): raise MemoryError() cpdef int peek(self) except? -1: cdef int value = \ cqueue.queue_peek_head(self._c_queue) if value == 0: # this may mean that the queue is empty, # or that it happens to contain a 0 value if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return value cpdef int pop(self) except? 
-1: if cqueue.queue_is_empty(self._c_queue): raise IndexError("Queue is empty") return cqueue.queue_pop_head(self._c_queue) def __bool__(self): return not cqueue.queue_is_empty(self._c_queue) The ``cpdef`` feature is obviously not available for the ``extend()`` method, as the method signature is incompatible with Python argument types. However, if wanted, we can rename the C-ish ``extend()`` method to e.g. ``c_extend()``, and write a new ``extend()`` method instead that accepts an arbitrary Python iterable:: cdef c_extend(self, int* values, size_t count): cdef size_t i for i in range(count): if not cqueue.queue_push_tail( self._c_queue, values[i]): raise MemoryError() cpdef extend(self, values): for value in values: self.append(value) As a quick test with 10000 numbers on the author's machine indicates, using this Queue from Cython code with C ``int`` values is about five times as fast as using it from Cython code with Python object values, almost eight times faster than using it from Python code in a Python loop, and still more than twice as fast as using Python's highly optimised ``collections.deque`` type from Cython code with Python integers. Callbacks --------- Let's say you want to provide a way for users to pop values from the queue up to a certain user defined event occurs. To this end, you want to allow them to pass a predicate function that determines when to stop, e.g.:: def pop_until(self, predicate): while not predicate(self.peek()): self.pop() Now, let us assume for the sake of argument that the C queue provides such a function that takes a C callback function as predicate. The API could look as follows:: /* C type of a predicate function that takes a queue value and returns * -1 for errors * 0 for reject * 1 for accept */ typedef int (*predicate_func)(void* user_context, QueueValue data); /* Pop values as long as the predicate evaluates to true for them, * returns -1 if the predicate failed with an error and 0 otherwise. */ int queue_pop_head_until(Queue *queue, predicate_func predicate, void* user_context); It is normal for C callback functions to have a generic :c:type:`void*` argument that allows passing any kind of context or state through the C-API into the callback function. We will use this to pass our Python predicate function. First, we have to define a callback function with the expected signature that we can pass into the C-API function:: cdef int evaluate_predicate(void* context, cqueue.QueueValue value): "Callback function that can be passed as predicate_func" try: # recover Python function object from void* argument func = context # call function, convert result into 0/1 for True/False return bool(func(value)) except: # catch any Python errors and return error indicator return -1 The main idea is to pass a pointer (a.k.a. borrowed reference) to the function object as the user context argument. We will call the C-API function as follows:: def pop_until(self, python_predicate_function): result = cqueue.queue_pop_head_until( self._c_queue, evaluate_predicate, python_predicate_function) if result == -1: raise RuntimeError("an error occurred") The usual pattern is to first cast the Python object reference into a :c:type:`void*` to pass it into the C-API function, and then cast it back into a Python object in the C predicate callback function. The cast to :c:type:`void*` creates a borrowed reference. On the cast to ````, Cython increments the reference count of the object and thus converts the borrowed reference back into an owned reference. 
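Spelled out, the two casts look like this (a bare sketch with illustrative
names rather than a complete listing)::

    # caller side: Python object -> void*, a borrowed reference
    cdef void* context = <void*> py_predicate

    # callback side: void* -> Python object, an owned reference again
    func = <object> context

The ``<void*>`` cast does not keep the object alive by itself; here that is
safe because ``pop_until()`` holds the predicate as a function argument for
the whole duration of the C call.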
At the end of the predicate function, the owned reference goes out of scope again and Cython discards it. The error handling in the code above is a bit simplistic. Specifically, any exceptions that the predicate function raises will essentially be discarded and only result in a plain ``RuntimeError()`` being raised after the fact. This can be improved by storing away the exception in an object passed through the context parameter and re-raising it after the C-API function has returned ``-1`` to indicate the error. Cython-0.26.1/docs/src/reference/0000775000175000017500000000000013151203436017303 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/reference/extension_types.rst0000664000175000017500000004225613023021033023273 0ustar stefanstefan00000000000000.. highlight:: cython .. _extension_types: *************** Extension Types *************** * Normal Python as well as extension type classes can be defined. * Extension types: * Are considered by Python as "built-in" types. * Can be used to wrap arbitrary C-data structures, and provide a Python-like interface to them from Python. * Attributes and methods can be called from Python or Cython code * Are defined by the ``cdef class`` statement. :: cdef class Shrubbery: cdef int width, height def __init__(self, w, h): self.width = w self.height = h def describe(self): print "This shrubbery is", self.width, \ "by", self.height, "cubits." ========== Attributes ========== * Are stored directly in the object's C struct. * Are fixed at compile time. * You can't add attributes to an extension type instance at run time like in normal Python, unless you define a ``__dict__`` attribute. * You can sub-class the extension type in Python to add attributes at run-time. * There are two ways to access extension type attributes: * By Python look-up. * Python code's only method of access. * By direct access to the C struct from Cython code. * Cython code can use either method of access, though. * By default, extension type attributes are: * Only accessible by direct access. * Not accessible from Python code. * To make attributes accessible to Python, they must be declared ``public`` or ``readonly``:: cdef class Shrubbery: cdef public int width, height cdef readonly float depth * The ``width`` and ``height`` attributes are readable and writable from Python code. * The ``depth`` attribute is readable but not writable. .. note:: .. note:: You can only expose simple C types, such as ints, floats, and strings, for Python access. You can also expose Python-valued attributes. .. note:: The ``public`` and ``readonly`` options apply only to Python access, not direct access. All the attributes of an extension type are always readable and writable by C-level access. ======= Methods ======= * ``self`` is used in extension type methods just like it normally is in Python. * See **Functions and Methods**; all of which applies here. ========== Properties ========== * Cython provides a special (deprecated) syntax:: cdef class Spam: property cheese: "A doc string can go here." def __get__(self): # This is called when the property is read. ... def __set__(self, value): # This is called when the property is written. ... def __del__(self): # This is called when the property is deleted. * The ``__get__()``, ``__set__()``, and ``__del__()`` methods are all optional. * If they are omitted, an exception is raised on attribute access. * Below, is a full example that defines a property which can.. * Add to a list each time it is written to (``"__set__"``). 
* Return the list when it is read (``"__get__"``). * Empty the list when it is deleted (``"__del__"``). :: # cheesy.pyx cdef class CheeseShop: cdef object cheeses def __cinit__(self): self.cheeses = [] property cheese: # note that this syntax is deprecated def __get__(self): return "We don't have: %s" % self.cheeses def __set__(self, value): self.cheeses.append(value) def __del__(self): del self.cheeses[:] # Test input from cheesy import CheeseShop shop = CheeseShop() print shop.cheese shop.cheese = "camembert" print shop.cheese shop.cheese = "cheddar" print shop.cheese del shop.cheese print shop.cheese :: # Test output We don't have: [] We don't have: ['camembert'] We don't have: ['camembert', 'cheddar'] We don't have: [] =============== Special Methods =============== .. note:: #. The semantics of Cython's special methods are similar in principle to that of Python's. #. There are substantial differences in some behavior. #. Some Cython special methods have no Python counter-part. * See the :ref:`special_methods_table` for the many that are available. Declaration =========== * Must be declared with ``def`` and cannot be declared with ``cdef``. * Performance is not affected by the ``def`` declaration because of special calling conventions Docstrings ========== * Docstrings are not supported yet for some special method types. * They can be included in the source, but may not appear in the corresponding ``__doc__`` attribute at run-time. * This a Python library limitation because the ``PyTypeObject`` data structure is limited Initialization: ``__cinit__()`` and ``__init__()`` ================================================== * Any arguments passed to the extension type's constructor will be passed to both initialization methods. * ``__cinit__()`` is where you should perform C-level initialization of the object * This includes any allocation of C data structures. * **Caution** is warranted as to what you do in this method. * The object may not be fully valid Python object when it is called. * Calling Python objects, including the extensions own methods, may be hazardous. * By the time ``__cinit__()`` is called... * Memory has been allocated for the object. * All C-level attributes have been initialized to 0 or null. * Python have been initialized to ``None``, but you can not rely on that for each occasion. * This initialization method is guaranteed to be called exactly once. * For Extensions types that inherit a base type: * The ``__cinit__()`` method of the base type is automatically called before this one. * The inherited ``__cinit__()`` method can not be called explicitly. * Passing modified argument lists to the base type must be done through ``__init__()``. * It may be wise to give the ``__cinit__()`` method both ``"*"`` and ``"**"`` arguments. * Allows the method to accept or ignore additional arguments. * Eliminates the need for a Python level sub-class, that changes the ``__init__()`` method's signature, to have to override both the ``__new__()`` and ``__init__()`` methods. * If ``__cinit__()`` is declared to take no arguments except ``self``, it will ignore any extra arguments passed to the constructor without complaining about a signature mis-match. * ``__init__()`` is for higher-level initialization and is safer for Python access. * By the time this method is called, the extension type is a fully valid Python object. * All operations are safe. * This method may sometimes be called more than once, or possibly not at all. 
* Take this into consideration to make sure the design of your other methods are robust of this fact. Note that all constructor arguments will be passed as Python objects. This implies that non-convertible C types such as pointers or C++ objects cannot be passed into the constructor from Cython code. If this is needed, use a factory function instead that handles the object initialisation. It often helps to directly call ``__new__()`` in this function to bypass the call to the ``__init__()`` constructor. Finalization: ``__dealloc__()`` =============================== * This method is the counter-part to ``__cinit__()``. * Any C-data that was explicitly allocated in the ``__cinit__()`` method should be freed here. * Use caution in this method: * The Python object to which this method belongs may not be completely intact at this point. * Avoid invoking any Python operations that may touch the object. * Don't call any of this object's methods. * It's best to just deallocate C-data structures here. * All Python attributes of your extension type object are deallocated by Cython after the ``__dealloc__()`` method returns. Arithmetic Methods ================== .. note:: Most of these methods behave differently than in Python * There are not "reversed" versions of these methods... there is no __radd__() for instance. * If the first operand cannot perform the operation, the same method of the second operand is called, with the operands in the same order. * Do not rely on the first parameter of these methods, being ``"self"`` or the right type. * The types of both operands should be tested before deciding what to do. * Return ``NotImplemented`` for unhandled, mis-matched operand types. * The previously mentioned points.. * Also apply to 'in-place' method ``__ipow__()``. * Do not apply to other 'in-place' methods like ``__iadd__()``, in that these always take ``self`` as the first argument. Rich Comparisons ================ .. note:: There are no separate methods for individual rich comparison operations. * A single special method called ``__richcmp__()`` replaces all the individual rich compare, special method types. * ``__richcmp__()`` takes an integer argument, indicating which operation is to be performed as shown in the table below. +-----+-----+ | < | 0 | +-----+-----+ | == | 2 | +-----+-----+ | > | 4 | +-----+-----+ | <= | 1 | +-----+-----+ | != | 3 | +-----+-----+ | >= | 5 | +-----+-----+ The ``__next__()`` Method ========================= * Extension types used to expose an iterator interface should define a ``__next__()`` method. * **Do not** explicitly supply a ``next()`` method, because Python does that for you automatically. =========== Subclassing =========== * An extension type may inherit from a built-in type or another extension type:: cdef class Parrot: ... cdef class Norwegian(Parrot): ... * A complete definition of the base type must be available to Cython * If the base type is a built-in type, it must have been previously declared as an ``extern`` extension type. * ``cimport`` can be used to import the base type, if the extern declared base type is in a ``.pxd`` definition file. * In Cython, multiple inheritance is not permitted.. singular inheritance only * Cython extension types can also be sub-classed in Python. * Here multiple inheritance is permissible as is normal for Python. * Even multiple extension types may be inherited, but C-layout of all the base classes must be compatible. 
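As a sketch of the rich comparison dispatch described above (``Vector2`` is a
made-up class, and only ``==`` and ``!=`` are handled here)::

    cdef class Vector2:
        cdef public double x, y

        def __init__(self, double x, double y):
            self.x = x
            self.y = y

        def __richcmp__(a, b, int op):
            if not isinstance(a, Vector2) or not isinstance(b, Vector2):
                return NotImplemented
            if op == 2:      # ==
                return a.x == b.x and a.y == b.y
            if op == 3:      # !=
                return a.x != b.x or a.y != b.y
            return NotImplemented   # <, <=, > and >= are not defined here

Returning ``NotImplemented`` signals unhandled operations or operand types,
just as for the arithmetic methods above.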
==================== Forward Declarations ==================== * Extension types can be "forward-declared". * This is necessary when two extension types refer to each other:: cdef class Shrubbery # forward declaration cdef class Shrubber: cdef Shrubbery work_in_progress cdef class Shrubbery: cdef Shrubber creator * An extension type that has a base-class, requires that both forward-declarations be specified:: cdef class A(B) ... cdef class A(B): # attributes and methods ======================== Extension Types and None ======================== * Parameters and C-variables declared as an Extension type, may take the value of ``None``. * This is analogous to the way a C-pointer can take the value of ``NULL``. .. note:: #. Exercise caution when using ``None`` #. Read this section carefully. * There is no problem as long as you are performing Python operations on it. * This is because full dynamic type checking is applied * When accessing an extension type's C-attributes, **make sure** it is not ``None``. * Cython does not check this for reasons of efficiency. * Be very aware of exposing Python functions that take extension types as arguments:: def widen_shrubbery(Shrubbery sh, extra_width): # This is sh.width = sh.width + extra_width * Users could **crash** the program by passing ``None`` for the ``sh`` parameter. * This could be avoided by:: def widen_shrubbery(Shrubbery sh, extra_width): if sh is None: raise TypeError sh.width = sh.width + extra_width * Cython provides a more convenient way with a ``not None`` clause:: def widen_shrubbery(Shrubbery sh not None, extra_width): sh.width = sh.width + extra_width * Now this function automatically checks that ``sh`` is not ``None``, as well as that is the right type. * ``not None`` can only be used in Python functions (declared with ``def`` **not** ``cdef``). * For ``cdef`` functions, you will have to provide the check yourself. * The ``self`` parameter of an extension type is guaranteed to **never** be ``None``. * When comparing a value ``x`` with ``None``, and ``x`` is a Python object, note the following: * ``x is None`` and ``x is not None`` are very efficient. * They translate directly to C-pointer comparisons. * ``x == None`` and ``x != None`` or ``if x: ...`` (a boolean condition), will invoke Python operations and will therefore be much slower. ================ Weak Referencing ================ * By default, weak references are not supported. * It can be enabled by declaring a C attribute of the ``object`` type called ``__weakref__()``:: cdef class ExplodingAnimal: """This animal will self-destruct when it is no longer strongly referenced.""" cdef object __weakref__ ================== Dynamic Attributes ================== * By default, you cannot dynamically add attributes to a ``cdef class`` instance at runtime. * It can be enabled by declaring a C attribute of the ``dict`` type called ``__dict__``:: cdef class ExtendableAnimal: """This animal can be extended with new attributes at runtime.""" cdef dict __dict__ .. note:: #. This can have a performance penalty, especially when using ``cpdef`` methods in a class. ========================= External and Public Types ========================= Public ====== * When an extension type is declared ``public``, Cython will generate a C-header (".h") file. * The header file will contain the declarations for it's **object-struct** and it's **type-object**. * External C-code can now access the attributes of the extension type. 
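A hypothetical public extension type could thus be declared as follows; the
bracketed name specification clause (described below) names the generated
object struct and type object so that external C code can refer to them::

    cdef public class Counter [object CounterObject, type CounterType]:
        cdef public int count

Compiling the module then also produces a matching ``.h`` header that C code
can ``#include``.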
External ======== * An ``extern`` extension type allows you to gain access to the internals of: * Python objects defined in the Python core. * Non-Cython extension modules * The following example lets you get at the C-level members of Python's built-in "complex" object:: cdef extern from "complexobject.h": struct Py_complex: double real double imag ctypedef class __builtin__.complex [object PyComplexObject]: cdef Py_complex cval # A function which uses the above type def spam(complex c): print "Real:", c.cval.real print "Imag:", c.cval.imag .. note:: Some important things in the example: #. ``ctypedef`` has been used because Python's header file has the struct declared with:: ctypedef struct { ... } PyComplexObject; #. The module of where this type object can be found is specified along side the name of the extension type. See **Implicit Importing**. #. When declaring an external extension type... * Don't declare any methods, because they are Python method class the are not needed. * Similar to **structs** and **unions**, extension classes declared inside a ``cdef extern from`` block only need to declare the C members which you will actually need to access in your module. Name Specification Clause ========================= .. note:: Only available to **public** and **extern** extension types. * Example:: [object object_struct_name, type type_object_name ] * ``object_struct_name`` is the name to assume for the type's C-struct. * ``type_object_name`` is the name to assume for the type's statically declared type-object. * The object and type clauses can be written in any order. * For ``cdef extern from`` declarations, This clause **is required**. * The object clause is required because Cython must generate code that is compatible with the declarations in the header file. * Otherwise the object clause is optional. * For public extension types, both the object and type clauses **are required** for Cython to generate code that is compatible with external C-code. ================================ Type Names vs. Constructor Names ================================ * In a Cython module, the name of an extension type serves two distinct purposes: #. When used in an expression, it refers to a "module-level" global variable holding the type's constructor (i.e. it's type-object) #. It can also be used as a C-type name to declare a "type" for variables, arguments, and return values. * Example:: cdef extern class MyModule.Spam: ... * The name "Spam" serves both of these roles. * Only "Spam" can be used as the type-name. * The constructor can be referred to by other names. * Upon an explicit import of "MyModule"... * ``MyModule.Spam()`` could be used as the constructor call. * ``MyModule.Spam`` could not be used as a type-name * When an "as" clause is used, the name specified takes over both roles:: cdef extern class MyModule.Spam as Yummy: ... * ``Yummy`` becomes both type-name and a name for the constructor. * There other ways of course, to get hold of the constructor, but ``Yummy`` is the only usable type-name. Cython-0.26.1/docs/src/reference/language_basics.rst0000664000175000017500000006372013023021033023141 0ustar stefanstefan00000000000000.. highlight:: cython .. 
_language_basics: *************** Language Basics *************** ================= Cython File Types ================= There are three file types in Cython: * Implementation files carry a ``.pyx`` suffix * Definition files carry a ``.pxd`` suffix * Include files which carry a ``.pxi`` suffix Implementation File =================== What can it contain? -------------------- * Basically anything Cythonic, but see below. What can't it contain? ---------------------- * There are some restrictions when it comes to **extension types**, if the extension type is already defined else where... **more on this later** Definition File =============== What can it contain? -------------------- * Any kind of C type declaration. * ``extern`` C function or variable declarations. * Declarations for module implementations. * The definition parts of **extension types**. * All declarations of functions, etc., for an **external library** What can't it contain? ---------------------- * Any non-extern C variable declaration. * Implementations of C or Python functions. * Python class definitions * Python executable statements. * Any declaration that is defined as **public** to make it accessible to other Cython modules. * This is not necessary, as it is automatic. * a **public** declaration is only needed to make it accessible to **external C code**. What else? ---------- cimport ``````` * Use the **cimport** statement, as you would Python's import statement, to access these files from other definition or implementation files. * **cimport** does not need to be called in ``.pyx`` file for ``.pxd`` file that has the same name, as they are already in the same namespace. * For cimport to find the stated definition file, the path to the file must be appended to the ``-I`` option of the **Cython compile command**. compilation order ````````````````` * When a ``.pyx`` file is to be compiled, Cython first checks to see if a corresponding ``.pxd`` file exits and processes it first. Include File ============ What can it contain? -------------------- * Any Cythonic code really, because the entire file is textually embedded at the location you prescribe. How do I use it? ---------------- * Include the ``.pxi`` file with an ``include`` statement like: ``include "spamstuff.pxi`` * The ``include`` statement can appear anywhere in your Cython file and at any indentation level * The code in the ``.pxi`` file needs to be rooted at the "zero" indentation level. * The included code can itself contain other ``include`` statements. ==================== Declaring Data Types ==================== As a dynamic language, Python encourages a programming style of considering classes and objects in terms of their methods and attributes, more than where they fit into the class hierarchy. This can make Python a very relaxed and comfortable language for rapid development, but with a price - the 'red tape' of managing data types is dumped onto the interpreter. At run time, the interpreter does a lot of work searching namespaces, fetching attributes and parsing argument and keyword tuples. This run-time ‘late binding’ is a major cause of Python’s relative slowness compared to ‘early binding’ languages such as C++. However with Cython it is possible to gain significant speed-ups through the use of ‘early binding’ programming techniques. .. note:: Typing is not a necessity Providing static typing to parameters and variables is convenience to speed up your code, but it is not a necessity. Optimize where and when needed. 
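As a small illustration of such 'early binding' (the function and its
arguments are invented for this sketch), typing the variables of a numeric
loop lets Cython compile it down to plain C arithmetic::

    def mean_of_squares(double a, double b, int n):
        cdef double dx = (b - a) / n
        cdef double total = 0.0
        cdef int i
        for i in range(n):
            total += (a + i * dx) * (a + i * dx)
        return total / n

Leaving out the ``cdef`` declarations would still compile and run correctly,
only with Python objects and Python arithmetic inside the loop.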
The cdef Statement ================== The ``cdef`` statement is used to make C level declarations for: :Variables: :: cdef int i, j, k cdef float f, g[42], *h :Structs: :: cdef struct Grail: int age float volume ..note Structs can be declared as ``cdef packed struct``, which has the same effect as the C directive ``#pragma pack(1)``. :Unions: :: cdef union Food: char *spam float *eggs :Enums: :: cdef enum CheeseType: cheddar, edam, camembert Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper:: cpdef enum CheeseState: hard = 1 soft = 2 runny = 3 :Functions: :: cdef int eggs(unsigned long l, float f): ... :Extension Types: :: cdef class Spam: ... .. note:: Constants Constants can be defined by using an anonymous enum:: cdef enum: tons_of_spam = 3 Grouping cdef Declarations ========================== A series of declarations can grouped into a ``cdef`` block:: cdef: struct Spam: int tons int i float f Spam *p void f(Spam *s): print s.tons, "Tons of spam" .. note:: ctypedef statement The ``ctypedef`` statement is provided for naming types:: ctypedef unsigned long ULong ctypedef int *IntPtr Parameters ========== * Both C and Python **function** types can be declared to have parameters C data types. * Use normal C declaration syntax:: def spam(int i, char *s): ... cdef int eggs(unsigned long l, float f): ... * As these parameters are passed into a Python declared function, they are magically **converted** to the specified C type value. * This holds true for only numeric and string types * If no type is specified for a parameter or a return value, it is assumed to be a Python object * The following takes two Python objects as parameters and returns a Python object:: cdef spamobjs(x, y): ... .. note:: -- This is different then C language behavior, where it is an int by default. * Python object types have reference counting performed according to the standard Python C-API rules: * Borrowed references are taken as parameters * New references are returned .. todo:: link or label here the one ref count caveat for NumPy. * The name ``object`` can be used to explicitly declare something as a Python Object. * For sake of code clarity, it recommended to always use ``object`` explicitly in your code. * This is also useful for cases where the name being declared would otherwise be taken for a type:: cdef foo(object int): ... * As a return type:: cdef object foo(object int): ... .. todo:: Do a see also here ..?? Optional Arguments ------------------ * Are supported for ``cdef`` and ``cpdef`` functions * There are differences though whether you declare them in a ``.pyx`` file or a ``.pxd`` file: * When in a ``.pyx`` file, the signature is the same as it is in Python itself:: cdef class A: cdef foo(self): print "A" cdef class B(A) cdef foo(self, x=None) print "B", x cdef class C(B): cpdef foo(self, x=True, int k=3) print "C", x, k * When in a ``.pxd`` file, the signature is different like this example: ``cdef foo(x=*)``:: cdef class A: cdef foo(self) cdef class B(A) cdef foo(self, x=*) cdef class C(B): cpdef foo(self, x=*, int k=*) * The number of arguments may increase when subclassing, but the arg types and order must be the same. * There may be a slight performance penalty when the optional arg is overridden with one that does not have default values. Keyword-only Arguments ======================= * As in Python 3, ``def`` functions can have keyword-only arguments listed after a ``"*"`` parameter and before a ``"**"`` parameter if any:: def f(a, b, *args, c, d = 42, e, **kwds): ... 
* Shown above, the ``c``, ``d`` and ``e`` arguments can not be passed as positional arguments and must be passed as keyword arguments. * Furthermore, ``c`` and ``e`` are required keyword arguments since they do not have a default value. * If the parameter name after the ``"*"`` is omitted, the function will not accept any extra positional arguments:: def g(a, b, *, c, d): ... * Shown above, the signature takes exactly two positional parameters and has two required keyword parameters Automatic Type Conversion ========================= * For basic numeric and string types, in most situations, when a Python object is used in the context of a C value and vice versa. * The following table summarizes the conversion possibilities, assuming ``sizeof(int) == sizeof(long)``: +----------------------------+--------------------+------------------+ | C types | From Python types | To Python types | +============================+====================+==================+ | [unsigned] char | int, long | int | +----------------------------+ | | | [unsigned] short | | | +----------------------------+ | | | int, long | | | +----------------------------+--------------------+------------------+ | unsigned int | int, long | long | +----------------------------+ | | | unsigned long | | | +----------------------------+ | | | [unsigned] long long | | | +----------------------------+--------------------+------------------+ | float, double, long double | int, long, float | float | +----------------------------+--------------------+------------------+ | char * | str/bytes | str/bytes [#]_ | +----------------------------+--------------------+------------------+ | struct | | dict | +----------------------------+--------------------+------------------+ .. note:: **Python String in a C Context** * A Python string, passed to C context expecting a ``char*``, is only valid as long as the Python string exists. * A reference to the Python string must be kept around for as long as the C string is needed. * If this can't be guaranteed, then make a copy of the C string. * Cython may produce an error message: ``Obtaining char* from a temporary Python value`` and will not resume compiling in situations like this:: cdef char *s s = pystring1 + pystring2 * The reason is that concatenating to strings in Python produces a temporary variable. * The variable is decrefed, and the Python string deallocated as soon as the statement has finished, * Therefore the lvalue **``s``** is left dangling. * The solution is to assign the result of the concatenation to a Python variable, and then obtain the ``char*`` from that:: cdef char *s p = pystring1 + pystring2 s = p .. note:: **It is up to you to be aware of this, and not to depend on Cython's error message, as it is not guaranteed to be generated for every situation.** Type Casting ============= * The syntax used in type casting are ``"<"`` and ``">"`` .. note:: The syntax is different from C convention :: cdef char *p, float *q p = q * If one of the types is a python object for ``x``, Cython will try and do a coercion. .. note:: Cython will not stop a casting where there is no conversion, but it will emit a warning. * If the address is what is wanted, cast to a ``void*`` first. Type Checking ------------- * A cast like ``x`` will cast x to type ``MyExtensionType`` without type checking at all. * To have a cast type checked, use the syntax like: ``x``. 
* In this case, Cython will throw an error if ``"x"`` is not a (subclass) of ``MyExtensionType`` * Automatic type checking for extension types can be obtained whenever ``isinstance()`` is used as the second parameter Python Objects ============== ========================== Statements and Expressions ========================== * For the most part, control structures and expressions follow Python syntax. * When applied to Python objects, the semantics are the same unless otherwise noted. * Most Python operators can be applied to C values with the obvious semantics. * An expression with mixed Python and C values will have **conversions** performed automatically. * Python operations are automatically checked for errors, with the appropriate action taken. Differences Between Cython and C ================================ * Most notable are C constructs which have no direct equivalent in Python. * An integer literal is treated as a C constant * It will be truncated to whatever size your C compiler thinks appropriate. * Cast to a Python object like this:: 10000000000000000000 * The ``"L"``, ``"LL"`` and the ``"U"`` suffixes have the same meaning as in C * There is no ``->`` operator in Cython.. instead of ``p->x``, use ``p.x``. * There is no ``*`` operator in Cython.. instead of ``*p``, use ``p[0]``. * ``&`` is permissible and has the same semantics as in C. * ``NULL`` is the null C pointer. * Do NOT use 0. * ``NULL`` is a reserved word in Cython * Syntax for **Type casts** are ``value``. Scope Rules =========== * All determination of scoping (local, module, built-in) in Cython is determined statically. * As with Python, a variable assignment which is not declared explicitly is implicitly declared to be a Python variable residing in the scope where it was assigned. .. note:: * Module-level scope behaves the same way as a Python local scope if you refer to the variable before assigning to it. * Tricks, like the following will NOT work in Cython:: try: x = True except NameError: True = 1 * The above example will not work because ``True`` will always be looked up in the module-level scope. Do the following instead:: import __builtin__ try: True = __builtin__.True except AttributeError: True = 1 Built-in Constants ================== Predefined Python built-in constants: * None * True * False Operator Precedence =================== * Cython uses Python precedence order, not C For-loops ========== The "for ... in iterable" loop works as in Python, but is even more versatile in Cython as it can additionally be used on C types. * ``range()`` is C optimized when the index value has been declared by ``cdef``, for example:: cdef size_t i for i in range(n): ... * Iteration over C arrays and sliced pointers is supported and automatically infers the type of the loop variable, e.g.:: cdef double* data = ... for x in data[:10]: ... * Iterating over many builtin types such as lists and tuples is optimized. * There is also a more verbose C-style for-from syntax which, however, is deprecated in favour of the normal Python "for ... in range()" loop. You might still find it in legacy code that was written for Pyrex, though. * The target expression must be a plain variable name. * The name between the lower and upper bounds must be the same as the target name. for i from 0 <= i < n: ... * Or when using a step size:: for i from 0 <= i < n by s: ... * To reverse the direction, reverse the conditional operation:: for i from n > i >= 0: ... * The ``break`` and ``continue`` statements are permissible. 
* Can contain an else clause. ===================== Functions and Methods ===================== * There are three types of function declarations in Cython as the sub-sections show below. * Only "Python" functions can be called outside a Cython module from *Python interpreted code*. Callable from Python (def) ========================== * Are declared with the ``def`` statement * Are called with Python objects * Return Python objects * See **Parameters** for special consideration .. _cdef: Callable from C (cdef) ====================== * Are declared with the ``cdef`` statement. * Are called with either Python objects or C values. * Can return either Python objects or C values. .. _cpdef: Callable from both Python and C (cpdef) ======================================= * Are declared with the ``cpdef`` statement. * Can be called from anywhere, because it uses a little Cython magic. * Uses the faster C calling conventions when being called from other Cython code. Overriding ========== ``cpdef`` methods can override ``cdef`` methods:: cdef class A: cdef foo(self): print "A" cdef class B(A) cdef foo(self, x=None) print "B", x cdef class C(B): cpdef foo(self, x=True, int k=3) print "C", x, k When subclassing an extension type with a Python class, ``def`` methods can override ``cpdef`` methods but not ``cdef`` methods:: cdef class A: cdef foo(self): print("A") cdef class B(A): cpdef foo(self): print("B") class C(B): # NOTE: not cdef class def foo(self): print("C") If ``C`` above would be an extension type (``cdef class``), this would not work correctly. The Cython compiler will give a warning in that case. Function Pointers ================= * Functions declared in a ``struct`` are automatically converted to function pointers. * see **using exceptions with function pointers** Python Built-ins ================ Cython compiles calls to most built-in functions into direct calls to the corresponding Python/C API routines, making them particularly fast. Only direct function calls using these names are optimised. If you do something else with one of these names that assumes it's a Python object, such as assign it to a Python variable, and later call it, the call will be made as a Python function call. +------------------------------+-------------+----------------------------+ | Function and arguments | Return type | Python/C API Equivalent | +==============================+=============+============================+ | abs(obj) | object, | PyNumber_Absolute, fabs, | | | double, ... | fabsf, ... 
| +------------------------------+-------------+----------------------------+ | callable(obj) | bint | PyObject_Callable | +------------------------------+-------------+----------------------------+ | delattr(obj, name) | None | PyObject_DelAttr | +------------------------------+-------------+----------------------------+ | exec(code, [glob, [loc]]) | object | - | +------------------------------+-------------+----------------------------+ | dir(obj) | list | PyObject_Dir | +------------------------------+-------------+----------------------------+ | divmod(a, b) | tuple | PyNumber_Divmod | +------------------------------+-------------+----------------------------+ | getattr(obj, name, [default])| object | PyObject_GetAttr | | (Note 1) | | | +------------------------------+-------------+----------------------------+ | hasattr(obj, name) | bint | PyObject_HasAttr | +------------------------------+-------------+----------------------------+ | hash(obj) | int / long | PyObject_Hash | +------------------------------+-------------+----------------------------+ | intern(obj) | object | Py*_InternFromString | +------------------------------+-------------+----------------------------+ | isinstance(obj, type) | bint | PyObject_IsInstance | +------------------------------+-------------+----------------------------+ | issubclass(obj, type) | bint | PyObject_IsSubclass | +------------------------------+-------------+----------------------------+ | iter(obj, [sentinel]) | object | PyObject_GetIter | +------------------------------+-------------+----------------------------+ | len(obj) | Py_ssize_t | PyObject_Length | +------------------------------+-------------+----------------------------+ | pow(x, y, [z]) | object | PyNumber_Power | +------------------------------+-------------+----------------------------+ | reload(obj) | object | PyImport_ReloadModule | +------------------------------+-------------+----------------------------+ | repr(obj) | object | PyObject_Repr | +------------------------------+-------------+----------------------------+ | setattr(obj, name) | void | PyObject_SetAttr | +------------------------------+-------------+----------------------------+ Note 1: Pyrex originally provided a function :func:`getattr3(obj, name, default)` corresponding to the three-argument form of the Python builtin :func:`getattr()`. Cython still supports this function, but the usage is deprecated in favour of the normal builtin, which Cython can optimise in both forms. ============================ Error and Exception Handling ============================ * A plain ``cdef`` declared function, that does not return a Python object... * Has no way of reporting a Python exception to it's caller. * Will only print a warning message and the exception is ignored. * In order to propagate exceptions like this to it's caller, you need to declare an exception value for it. * There are three forms of declaring an exception for a C compiled program. * First:: cdef int spam() except -1: ... * In the example above, if an error occurs inside spam, it will immediately return with the value of ``-1``, causing an exception to be propagated to it's caller. * Functions declared with an exception value, should explicitly prevent a return of that value. * Second:: cdef int spam() except? -1: ... * Used when a ``-1`` may possibly be returned and is not to be considered an error. * The ``"?"`` tells Cython that ``-1`` only indicates a *possible* error. 
* Now, each time ``-1`` is returned, Cython generates a call to ``PyErr_Occurred`` to verify it is an actual error. * Third:: cdef int spam() except * * A call to ``PyErr_Occurred`` happens *every* time the function gets called. .. note:: Returning ``void`` A need to propagate errors when returning ``void`` must use this version. * Exception values can only be declared for functions returning an.. * integer * enum * float * pointer type * Must be a constant expression .. note:: .. note:: Function pointers * Require the same exception value specification as it's user has declared. * Use cases here are when used as parameters and when assigned to a variable:: int (*grail)(int, char *) except -1 .. note:: Python Objects * Declared exception values are **not** need. * Remember that Cython assumes that a function without a declared return value, returns a Python object. * Exceptions on such functions are implicitly propagated by returning ``NULL`` .. note:: C++ * For exceptions from C++ compiled programs, see **Wrapping C++ Classes** Checking return values for non-Cython functions.. ================================================= * Do not try to raise exceptions by returning the specified value.. Example:: cdef extern FILE *fopen(char *filename, char *mode) except NULL # WRONG! * The except clause does not work that way. * It's only purpose is to propagate Python exceptions that have already been raised by either... * A Cython function * A C function that calls Python/C API routines. * To propagate an exception for these circumstances you need to raise it yourself:: cdef FILE *p p = fopen("spam.txt", "r") if p == NULL: raise SpamError("Couldn't open the spam file") ======================= Conditional Compilation ======================= * The expressions in the following sub-sections must be valid compile-time expressions. * They can evaluate to any Python value. * The *truth* of the result is determined in the usual Python way. Compile-Time Definitions ========================= * Defined using the ``DEF`` statement:: DEF FavouriteFood = "spam" DEF ArraySize = 42 DEF OtherArraySize = 2 * ArraySize + 17 * The right hand side must be a valid compile-time expression made up of either: * Literal values * Names defined by other ``DEF`` statements * They can be combined using any of the Python expression syntax * Cython provides the following predefined names * Corresponding to the values returned by ``os.uname()`` * UNAME_SYSNAME * UNAME_NODENAME * UNAME_RELEASE * UNAME_VERSION * UNAME_MACHINE * A name defined by ``DEF`` can appear anywhere an identifier can appear. * Cython replaces the name with the literal value before compilation. * The compile-time expression, in this case, must evaluate to a Python value of ``int``, ``long``, ``float``, or ``str``:: cdef int a1[ArraySize] cdef int a2[OtherArraySize] print "I like", FavouriteFood Conditional Statements ======================= * Similar semantics of the C pre-processor * The following statements can be used to conditionally include or exclude sections of code to compile. * ``IF`` * ``ELIF`` * ``ELSE`` :: IF UNAME_SYSNAME == "Windows": include "icky_definitions.pxi" ELIF UNAME_SYSNAME == "Darwin": include "nice_definitions.pxi" ELIF UNAME_SYSNAME == "Linux": include "penguin_definitions.pxi" ELSE: include "other_definitions.pxi" * ``ELIF`` and ``ELSE`` are optional. * ``IF`` can appear anywhere that a normal statement or declaration can appear * It can contain any statements or declarations that would be valid in that context. 
* This includes other ``IF`` and ``DEF`` statements .. [#] The conversion is to/from str for Python 2.x, and bytes for Python 3.x. Cython-0.26.1/docs/src/reference/interfacing_with_other_code.rst0000664000175000017500000000032212542002467025555 0ustar stefanstefan00000000000000.. highlight:: cython .. _interfacing_with_other_code: *************************** Interfacing with Other Code *************************** == C == === C++ === ======= Fortran ======= ===== NumPy ===== Cython-0.26.1/docs/src/reference/Makefile0000664000175000017500000000417012542002467020751 0ustar stefanstefan00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html web htmlhelp latex changes linkcheck help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " web to make files usable by Sphinx.web" @echo " htmlhelp to make HTML files and a HTML help project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview over all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" clean: -rm -rf build/* html: mkdir -p build/html build/doctrees $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html @echo @echo "Build finished. The HTML pages are in build/html." web: mkdir -p build/web build/doctrees $(SPHINXBUILD) -b web $(ALLSPHINXOPTS) build/web @echo @echo "Build finished; now you can run" @echo " python -m sphinx.web build/web" @echo "to start the server." htmlhelp: mkdir -p build/htmlhelp build/doctrees $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in build/htmlhelp." latex: mkdir -p build/latex build/doctrees $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex @echo @echo "Build finished; the LaTeX files are in build/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: mkdir -p build/changes build/doctrees $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes @echo @echo "The overview file is in build/changes." linkcheck: mkdir -p build/linkcheck build/doctrees $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in build/linkcheck/output.txt." Cython-0.26.1/docs/src/reference/limitations.rst0000664000175000017500000000011612542002467022373 0ustar stefanstefan00000000000000.. highlight:: cython .. _limitations: *********** Limitations *********** Cython-0.26.1/docs/src/reference/special_methods_table.rst0000664000175000017500000005434113023021033024343 0ustar stefanstefan00000000000000.. _special_methods_table: Special Methods Table --------------------- This table lists all of the special methods together with their parameter and return types. In the table below, a parameter name of self is used to indicate that the parameter has the type that the method belongs to. Other parameters with no type specified in the table are generic Python objects. You don't have to declare your method as taking these parameter types. If you declare different types, conversions will be performed as necessary. 
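For instance, the index parameter of the ``__getitem__()`` method below is
declared as a C integer, so Cython converts the Python index object on entry
(``Squares`` is a made-up example class)::

    cdef class Squares:
        def __getitem__(self, Py_ssize_t i):
            return i * i

Passing a non-integer index then raises the usual ``TypeError`` during that
conversion, before the method body runs.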
General ^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __cinit__ |self, ... | | Basic initialisation (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __init__ |self, ... | | Further initialisation | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __dealloc__ |self | | Basic deallocation (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __cmp__ |x, y | int | 3-way comparison | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __richcmp__ |x, y, int op | object | Rich comparison (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __str__ |self | object | str(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __repr__ |self | object | repr(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __hash__ |self | int | Hash function | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __call__ |self, ... | object | self(...) 
| +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __iter__ |self | object | Return iterator for sequence | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getattr__ |self, name | object | Get attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getattribute__ |self, name | object | Get attribute, unconditionally | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setattr__ |self, name, val | | Set attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delattr__ |self, name | | Delete attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Arithmetic operators ^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __add__ | x, y | object | binary `+` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __sub__ | x, y | object | binary `-` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __mul__ | x, y | object | `*` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __div__ | x, y | object | `/` operator for old-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __floordiv__ | x, y | object | `//` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __truediv__ | x, y | object | `/` operator for new-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __mod__ | x, y | object | `%` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __divmod__ | x, y | object | combined div and mod | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __pow__ | x, y, z | object | `**` operator or pow(x, y, z) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __neg__ | self | object | unary `-` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __pos__ | self | object | unary `+` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __abs__ | self | object | absolute 
value | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __nonzero__ | self | int | convert to boolean | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __invert__ | self | object | `~` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __lshift__ | x, y | object | `<<` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __rshift__ | x, y | object | `>>` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __and__ | x, y | object | `&` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __or__ | x, y | object | `|` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __xor__ | x, y | object | `^` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Numeric conversions ^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __int__ | self | object | Convert to integer | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __long__ | self | object | Convert to long integer | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __float__ | self | object | Convert to float | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __oct__ | self | object | Convert to octal | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __hex__ | self | object | Convert to hexadecimal | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __index__ | self | object | Convert to sequence index | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ In-place arithmetic operators ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __iadd__ | self, x | object | `+=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __isub__ | self, x | object | `-=` operator | 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __imul__ | self, x | object | `*=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __idiv__ | self, x | object | `/=` operator for old-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ifloordiv__ | self, x | object | `//=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __itruediv__ | self, x | object | `/=` operator for new-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __imod__ | self, x | object | `%=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ipow__ | x, y, z | object | `**=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ilshift__ | self, x | object | `<<=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __irshift__ | self, x | object | `>>=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __iand__ | self, x | object | `&=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ior__ | self, x | object | `|=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ixor__ | self, x | object | `^=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Sequences and mappings ^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __len__ | self int | | len(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getitem__ | self, x | object | self[x] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setitem__ | self, x, y | | self[x] = y | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delitem__ | self, x | | del self[x] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getslice__ | self, Py_ssize_t i, Py_ssize_t j | object | self[i:j] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setslice__ | self, Py_ssize_t i, Py_ssize_t j, x | | self[i:j] = x | 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delslice__ | self, Py_ssize_t i, Py_ssize_t j | | del self[i:j] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __contains__ | self, x | int | x in self | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Iterators ^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __next__ | self | object | Get next item (called next in Python) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Buffer interface ^^^^^^^^^^^^^^^^ .. note:: The buffer interface is intended for use by C code and is not directly accessible from Python. It is described in the Python/C API Reference Manual under sections 6.6 and 10.6. +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __getreadbuffer__ | self, int i, void `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getwritebuffer__ | self, int i, void `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getsegcount__ | self, int `*p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getcharbuffer__ | self, int i, char `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Descriptor objects ^^^^^^^^^^^^^^^^^^ .. note:: Descriptor objects are part of the support mechanism for new-style Python classes. See the discussion of descriptors in the Python documentation. See also :PEP:`252`, "Making Types Look More Like Classes", and :PEP:`253`, "Subtyping Built-In Types". 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __get__ | self, instance, class | object | Get value of attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __set__ | self, instance, value | | Set value of attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delete__ | self, instance | | Delete attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Cython-0.26.1/docs/src/reference/directives.rst0000664000175000017500000000014412542002467022201 0ustar stefanstefan00000000000000Compiler Directives =================== See `Compilation `_. Cython-0.26.1/docs/src/reference/special_mention.rst0000664000175000017500000000013612542002467023212 0ustar stefanstefan00000000000000.. highlight:: cython .. _special_mention: *************** Special Mention *************** Cython-0.26.1/docs/src/reference/compilation.rst0000664000175000017500000005307613150045407022367 0ustar stefanstefan00000000000000.. highlight:: cython .. _compilation-reference: ============= Compilation ============= Cython code, unlike Python, must be compiled. This happens in two stages: * A ``.pyx`` file is compiled by Cython to a ``.c`` file. * The ``.c`` file is compiled by a C compiler to a ``.so`` file (or a ``.pyd`` file on Windows) The following sub-sections describe several ways to build your extension modules, and how to pass directives to the Cython compiler. Compiling from the command line =============================== Run the Cython compiler command with your options and list of ``.pyx`` files to generate. For example:: $ cython -a yourmod.pyx This creates a ``yourmod.c`` file, and the ``-a`` switch produces an annotated html file of the source code. Pass the ``-h`` flag for a complete list of supported flags. Compiling your ``.c`` files will vary depending on your operating system. Python documentation for writing extension modules should have some details for your system. Here we give an example on a Linux system:: $ gcc -shared -pthread -fPIC -fwrapv -O2 -Wall -fno-strict-aliasing \ -I/usr/include/python2.7 -o yourmod.so yourmod.c [``gcc`` will need to have paths to your included header files and paths to libraries you need to link with] A ``yourmod.so`` file is now in the same directory and your module, ``yourmod``, is available for you to import as you normally would. Compiling with ``distutils`` ============================ The ``distutils`` package is part of the standard library. It is the standard way of building Python packages, including native extension modules. The following example configures the build for a Cython file called *hello.pyx*. First, create a ``setup.py`` script:: from distutils.core import setup from Cython.Build import cythonize setup( name = "My hello app", ext_modules = cythonize('hello.pyx'), # accepts a glob pattern ) Now, run the command ``python setup.py build_ext --inplace`` in your system's command shell and you are done. Import your new extension module into your python shell or script as normal. 
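For completeness, a minimal :file:`hello.pyx` that this ``setup.py`` could build might look like the following sketch (the function name and message are purely illustrative)::

    def say_hello(name):
        print("Hello, %s!" % name)

After running the build command, ``import hello`` followed by ``hello.say_hello("world")`` should work from a normal Python session.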
The ``cythonize`` command also allows for multi-threaded compilation and dependency resolution. Recompilation will be skipped if the target file is up to date with its main source file and dependencies. Configuring the C-Build ------------------------ If you have include files in non-standard places you can pass an ``include_path`` parameter to ``cythonize``:: from distutils.core import setup from Cython.Build import cythonize setup( name = "My hello app", ext_modules = cythonize("src/*.pyx", include_path = [...]), ) Often, Python packages that offer a C-level API provide a way to find the necessary include files, e.g. for NumPy:: include_path = [numpy.get_include()] Note for Numpy users. Despite this, you will still get warnings like the following from the compiler, because Cython is using a deprecated Numpy API:: .../include/numpy/npy_1_7_deprecated_api.h:15:2: warning: #warning "Using deprecated NumPy API, disable it by " "#defining NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION" [-Wcpp] For the time being, it is just a warning that you can ignore. If you need to specify compiler options, libraries to link with or other linker options you will need to create ``Extension`` instances manually (note that glob syntax can still be used to specify multiple extensions in one line):: from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize extensions = [ Extension("primes", ["primes.pyx"], include_dirs = [...], libraries = [...], library_dirs = [...]), # Everything but primes.pyx is included here. Extension("*", ["*.pyx"], include_dirs = [...], libraries = [...], library_dirs = [...]), ] setup( name = "My hello app", ext_modules = cythonize(extensions), ) Note that when using setuptools, you should import it before Cython as setuptools may replace the ``Extension`` class in distutils. Otherwise, both might disagree about the class to use here. If your options are static (for example you do not need to call a tool like ``pkg-config`` to determine them) you can also provide them directly in your .pyx or .pxd source file using a special comment block at the start of the file:: # distutils: libraries = spam eggs # distutils: include_dirs = /opt/food/include If you cimport multiple .pxd files defining libraries, then Cython merges the list of libraries, so this works as expected (similarly with other options, like ``include_dirs`` above). If you have some C files that have been wrapped with Cython and you want to compile them into your extension, you can define the distutils ``sources`` parameter:: # distutils: sources = helper.c, another_helper.c Note that these sources are added to the list of sources of the current extension module. Spelling this out in the :file:`setup.py` file looks as follows:: from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension sourcefiles = ['example.pyx', 'helper.c', 'another_helper.c'] extensions = [Extension("example", sourcefiles)] setup( ext_modules = cythonize(extensions) ) The :class:`Extension` class takes many options, and a fuller explanation can be found in the `distutils documentation`_. Some useful options to know about are ``include_dirs``, ``libraries``, and ``library_dirs`` which specify where to find the ``.h`` and library files when linking to external libraries. .. _distutils documentation: http://docs.python.org/extending/building.html Sometimes this is not enough and you need finer customization of the distutils :class:`Extension`. 
To do this, you can provide a custom function ``create_extension`` to create the final :class:`Extension` object after Cython has processed the sources, dependencies and ``# distutils`` directives but before the file is actually Cythonized. This function takes 2 arguments ``template`` and ``kwds``, where ``template`` is the :class:`Extension` object given as input to Cython and ``kwds`` is a :class:`dict` with all keywords which should be used to create the :class:`Extension`. The function ``create_extension`` must return a 2-tuple ``(extension, metadata)``, where ``extension`` is the created :class:`Extension` and ``metadata`` is metadata which will be written as JSON at the top of the generated C files. This metadata is only used for debugging purposes, so you can put whatever you want in there (as long as it can be converted to JSON). The default function (defined in ``Cython.Build.Dependencies``) is:: def default_create_extension(template, kwds): if 'depends' in kwds: include_dirs = kwds.get('include_dirs', []) + ["."] depends = resolve_depends(kwds['depends'], include_dirs) kwds['depends'] = sorted(set(depends + template.depends)) t = template.__class__ ext = t(**kwds) metadata = dict(distutils=kwds, module_name=kwds['name']) return (ext, metadata) In case that you pass a string instead of an :class:`Extension` to ``cythonize()``, the ``template`` will be an :class:`Extension` without sources. For example, if you do ``cythonize("*.pyx")``, the ``template`` will be ``Extension(name="*.pyx", sources=[])``. Just as an example, this adds ``mylib`` as library to every extension:: from Cython.Build.Dependencies import default_create_extension def my_create_extension(template, kwds): libs = kwds.get('libraries', []) + ["mylib"] kwds['libraries'] = libs return default_create_extension(template, kwds) ext_modules = cythonize(..., create_extension=my_create_extension) .. note:: If you Cythonize in parallel (using the ``nthreads`` argument), then the argument to ``create_extension`` must be pickleable. In particular, it cannot be a lambda function. Distributing Cython modules ---------------------------- It is strongly recommended that you distribute the generated ``.c`` files as well as your Cython sources, so that users can install your module without needing to have Cython available. It is also recommended that Cython compilation not be enabled by default in the version you distribute. Even if the user has Cython installed, he/she probably doesn't want to use it just to install your module. Also, the installed version may not be the same one you used, and may not compile your sources correctly. This simply means that the :file:`setup.py` file that you ship with will just be a normal distutils file on the generated `.c` files, for the basic example we would have instead:: from distutils.core import setup from distutils.extension import Extension setup( ext_modules = [Extension("example", ["example.c"])] ) This is easy to combine with :func:`cythonize` by changing the file extension of the extension module sources:: from distutils.core import setup from distutils.extension import Extension USE_CYTHON = ... # command line option, try-import, ... 
ext = '.pyx' if USE_CYTHON else '.c' extensions = [Extension("example", ["example"+ext])] if USE_CYTHON: from Cython.Build import cythonize extensions = cythonize(extensions) setup( ext_modules = extensions ) If you have many extensions and want to avoid the additional complexity in the declarations, you can declare them with their normal Cython sources and then call the following function instead of ``cythonize()`` to adapt the sources list in the Extensions when not using Cython:: import os.path def no_cythonize(extensions, **_ignore): for extension in extensions: sources = [] for sfile in extension.sources: path, ext = os.path.splitext(sfile) if ext in ('.pyx', '.py'): if extension.language == 'c++': ext = '.cpp' else: ext = '.c' sfile = path + ext sources.append(sfile) extension.sources[:] = sources return extensions Another option is to make Cython a setup dependency of your system and use Cython's build_ext module which runs ``cythonize`` as part of the build process:: setup( setup_requires=[ 'cython>=0.x', ], extensions = [Extension("*", ["*.pyx"])], cmdclass={'build_ext': Cython.Build.build_ext}, ... ) If you want to expose the C-level interface of your library for other libraries to cimport from, use package_data to install the ``.pxd`` files, e.g.:: setup( package_data = { 'my_package': ['*.pxd'], 'my_package/sub_package': ['*.pxd'], }, ... ) These ``.pxd`` files need not have corresponding ``.pyx`` modules if they contain purely declarations of external libraries. Compiling with ``pyximport`` ============================= For generating Cython code right in your pure python module just type:: >>> import pyximport; pyximport.install() >>> import helloworld Hello World This allows you to automatically run Cython on every ``.pyx`` that Python is trying to import. You should use this for simple Cython builds only where no extra C libraries and no special building setup is needed. In the case that Cython fails to compile a Python module, *pyximport* will fall back to loading the source modules instead. It is also possible to compile new ``.py`` modules that are being imported (including the standard library and installed packages). For using this feature, just tell that to ``pyximport``:: >>> pyximport.install(pyimport = True) Compiling with ``cython.inline`` ================================= One can also compile Cython in a fashion similar to SciPy's ``weave.inline``. For example:: >>> import cython >>> def f(a): ... ret = cython.inline("return a+b", b=3) ... Unbound variables are automatically pulled from the surrounding local and global scopes, and the result of the compilation is cached for efficient re-use. Compiling with Sage =================== The Sage notebook allows transparently editing and compiling Cython code simply by typing ``%cython`` at the top of a cell and evaluate it. Variables and functions defined in a Cython cell are imported into the running session. Please check `Sage documentation `_ for details. You can tailor the behavior of the Cython compiler by specifying the directives below. .. _compiler-directives: Compiler directives ==================== Compiler directives are instructions which affect the behavior of Cython code. Here is the list of currently supported directives: ``binding`` (True / False) Controls whether free functions behave more like Python's CFunctions (e.g. :func:`len`) or, when set to True, more like Python's functions. 
When enabled, functions will bind to an instance when looked up as a class attribute (hence the name) and will emulate the attributes of Python functions, including introspections like argument names and annotations. Default is False. ``boundscheck`` (True / False) If set to False, Cython is free to assume that indexing operations ([]-operator) in the code will not cause any IndexErrors to be raised. Lists, tuples, and strings are affected only if the index can be determined to be non-negative (or if ``wraparound`` is False). Conditions which would normally trigger an IndexError may instead cause segfaults or data corruption if this is set to False. Default is True. ``wraparound`` (True / False) In Python arrays can be indexed relative to the end. For example A[-1] indexes the last value of a list. In C negative indexing is not supported. If set to False, Cython will neither check for nor correctly handle negative indices, possibly causing segfaults or data corruption. Default is True. ``initializedcheck`` (True / False) If set to True, Cython checks that a memoryview is initialized whenever its elements are accessed or assigned to. Setting this to False disables these checks. Default is True. ``nonecheck`` (True / False) If set to False, Cython is free to assume that native field accesses on variables typed as an extension type, or buffer accesses on a buffer variable, never occurs when the variable is set to ``None``. Otherwise a check is inserted and the appropriate exception is raised. This is off by default for performance reasons. Default is False. ``overflowcheck`` (True / False) If set to True, raise errors on overflowing C integer arithmetic operations. Incurs a modest runtime penalty, but is much faster than using Python ints. Default is False. ``overflowcheck.fold`` (True / False) If set to True, and overflowcheck is True, check the overflow bit for nested, side-effect-free arithmetic expressions once rather than at every step. Depending on the compiler, architecture, and optimization settings, this may help or hurt performance. A simple suite of benchmarks can be found in ``Demos/overflow_perf.pyx``. Default is True. ``embedsignature`` (True / False) If set to True, Cython will embed a textual copy of the call signature in the docstring of all Python visible functions and classes. Tools like IPython and epydoc can thus display the signature, which cannot otherwise be retrieved after compilation. Default is False. ``cdivision`` (True / False) If set to False, Cython will adjust the remainder and quotient operators C types to match those of Python ints (which differ when the operands have opposite signs) and raise a ``ZeroDivisionError`` when the right operand is 0. This has up to a 35% speed penalty. If set to True, no checks are performed. See `CEP 516 `_. Default is False. ``cdivision_warnings`` (True / False) If set to True, Cython will emit a runtime warning whenever division is performed with negative operands. See `CEP 516 `_. Default is False. ``always_allow_keywords`` (True / False) Avoid the ``METH_NOARGS`` and ``METH_O`` when constructing functions/methods which take zero or one arguments. Has no effect on special methods and functions with more than one argument. The ``METH_NOARGS`` and ``METH_O`` signatures provide faster calling conventions but disallow the use of keywords. ``profile`` (True / False) Write hooks for Python profilers into the compiled C code. Default is False. 
``linetrace`` (True / False) Write line tracing hooks for Python profilers or coverage reporting into the compiled C code. This also enables profiling. Default is False. Note that the generated module will not actually use line tracing, unless you additionally pass the C macro definition ``CYTHON_TRACE=1`` to the C compiler (e.g. using the distutils option ``define_macros``). Define ``CYTHON_TRACE_NOGIL=1`` to also include ``nogil`` functions and sections. ``infer_types`` (True / False) Infer types of untyped variables in function bodies. Default is None, indicating that only safe (semantically-unchanging) inferences are allowed. In particular, inferring *integral* types for variables *used in arithmetic expressions* is considered unsafe (due to possible overflow) and must be explicitly requested. ``language_level`` (2/3) Globally set the Python language level to be used for module compilation. Default is compatibility with Python 2. To enable Python 3 source code semantics, set this to 3 at the start of a module or pass the "-3" command line option to the compiler. Note that cimported and included source files inherit this setting from the module being compiled, unless they explicitly set their own language level. ``c_string_type`` (bytes / str / unicode) Globally set the type of an implicit coercion from char* or std::string. ``c_string_encoding`` (ascii, default, utf-8, etc.) Globally set the encoding to use when implicitly coercing char* or std:string to a unicode object. Coercion from a unicode object to C type is only allowed when set to ``ascii`` or ``default``, the latter being utf-8 in Python 3 and nearly-always ascii in Python 2. ``type_version_tag`` (True / False) Enables the attribute cache for extension types in CPython by setting the type flag ``Py_TPFLAGS_HAVE_VERSION_TAG``. Default is True, meaning that the cache is enabled for Cython implemented types. To disable it explicitly in the rare cases where a type needs to juggle with its ``tp_dict`` internally without paying attention to cache consistency, this option can be set to False. ``unraisable_tracebacks`` (True / False) Whether to print tracebacks when suppressing unraisable exceptions. Configurable optimisations -------------------------- ``optimize.use_switch`` (True / False) Whether to expand chained if-else statements (including statements like ``if x == 1 or x == 2:``) into C switch statements. This can have performance benefits if there are lots of values but cause compiler errors if there are any duplicate values (which may not be detectable at Cython compile time for all C constants). Default is True. ``optimize.unpack_method_calls`` (True / False) Cython can generate code that optimistically checks for Python method objects at call time and unpacks the underlying function to call it directly. This can substantially speed up method calls, especially for builtins, but may also have a slight negative performance impact in some cases where the guess goes completely wrong. Disabling this option can also reduce the code size. Default is True. How to set directives --------------------- Globally ::::::::: One can set compiler directives through a special header comment at the top of the file, like this:: #!python #cython: language_level=3, boundscheck=False The comment must appear before any code (but can appear after other comments or whitespace). One can also pass a directive on the command line by using the -X switch:: $ cython -X boundscheck=True ... 
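Several directives can be combined in a single ``-X`` option by separating the ``name=value`` pairs with commas; for example (the module name here is only illustrative)::

    $ cython -X boundscheck=False,wraparound=False yourmod.pyx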
Directives passed on the command line will override directives set in header comments. Locally :::::::: For local blocks, you need to cimport the special builtin ``cython`` module:: #!python cimport cython Then you can use the directives either as decorators or in a with statement, like this:: #!python @cython.boundscheck(False) # turn off boundscheck for this function def f(): ... # turn it temporarily on again for this block with cython.boundscheck(True): ... .. Warning:: These two methods of setting directives are **not** affected by overriding the directive on the command-line using the -X option. In :file:`setup.py` ::::::::::::::::::: Compiler directives can also be set in the :file:`setup.py` file by passing a keyword argument to ``cythonize``:: from distutils.core import setup from Cython.Build import cythonize setup( name = "My hello app", ext_modules = cythonize('hello.pyx', compiler_directives={'embedsignature': True}), ) This will override the default directives as specified in the ``compiler_directives`` dictionary. Note that explicit per-file or local directives as explained above take precedence over the values passed to ``cythonize``. Cython-0.26.1/docs/src/reference/index.rst0000664000175000017500000000071612542002467021154 0ustar stefanstefan00000000000000Reference Guide =============== .. note:: .. todo:: Most of the **boldface** is to be changed to refs or other markup later. Contents: .. toctree:: :maxdepth: 2 compilation language_basics extension_types interfacing_with_other_code special_mention limitations directives Indices and tables ------------------ .. toctree:: :maxdepth: 2 special_methods_table * :ref:`genindex` * :ref:`modindex` * :ref:`search` Cython-0.26.1/docs/src/userguide/0000775000175000017500000000000013151203436017341 5ustar stefanstefan00000000000000Cython-0.26.1/docs/src/userguide/source_files_and_compilation.rst0000664000175000017500000001372013143605603026003 0ustar stefanstefan00000000000000.. highlight:: cython .. _compilation: **************************** Source Files and Compilation **************************** .. note:: See :ref:`compilation-reference` reference section for more details Cython source file names consist of the name of the module followed by a ``.pyx`` extension, for example a module called primes would have a source file named :file:`primes.pyx`. Once you have written your ``.pyx`` file, there are a couple of ways of turning it into an extension module. One way is to compile it manually with the Cython compiler, e.g.: .. sourcecode:: text $ cython primes.pyx This will produce a file called :file:`primes.c`, which then needs to be compiled with the C compiler using whatever options are appropriate on your platform for generating an extension module. For these options look at the official Python documentation. The other, and probably better, way is to use the :mod:`distutils` extension provided with Cython. The benefit of this method is that it will give the platform specific compilation options, acting like a stripped down autotools. Basic setup.py =============== The distutils extension provided with Cython allows you to pass ``.pyx`` files directly to the ``Extension`` constructor in your setup file. 
If you have a single Cython file that you want to turn into a compiled extension, say with filename :file:`example.pyx` the associated :file:`setup.py` would be:: from distutils.core import setup from Cython.Build import cythonize setup( ext_modules = cythonize("example.pyx") ) To understand the :file:`setup.py` more fully look at the official :mod:`distutils` documentation. To compile the extension for use in the current directory use: .. sourcecode:: text $ python setup.py build_ext --inplace Multiple Cython Files in a Package =================================== To automatically compile multiple Cython files without listing all of them explicitly, you can use glob patterns:: setup( ext_modules = cythonize("package/*.pyx") ) You can also use glob patterns in :class:`Extension` objects if you pass them through :func:`cythonize`:: extensions = [Extension("*", ["*.pyx"])] setup( ext_modules = cythonize(extensions) ) .. _pyximport: Pyximport =========== .. TODO add some text about how this is Paul Prescods code. Also change the tone to be more universal (i.e. remove all the I statements) Cython is a compiler. Therefore it is natural that people tend to go through an edit/compile/test cycle with Cython modules. But my personal opinion is that one of the deep insights in Python's implementation is that a language can be compiled (Python modules are compiled to ``.pyc`` files) and hide that compilation process from the end-user so that they do not have to worry about it. Pyximport does this for Cython modules. For instance if you write a Cython module called :file:`foo.pyx`, with Pyximport you can import it in a regular Python module like this:: import pyximport; pyximport.install() import foo Doing so will result in the compilation of :file:`foo.pyx` (with appropriate exceptions if it has an error in it). If you would always like to import Cython files without building them specially, you can also the first line above to your :file:`sitecustomize.py`. That will install the hook every time you run Python. Then you can use Cython modules just with simple import statements. I like to test my Cython modules like this: .. sourcecode:: text $ python -c "import foo" Dependency Handling -------------------- In Pyximport 1.1 it is possible to declare that your module depends on multiple files, (likely ``.h`` and ``.pxd`` files). If your Cython module is named ``foo`` and thus has the filename :file:`foo.pyx` then you should make another file in the same directory called :file:`foo.pyxdep`. The :file:`modname.pyxdep` file can be a list of filenames or "globs" (like ``*.pxd`` or ``include/*.h``). Each filename or glob must be on a separate line. Pyximport will check the file date for each of those files before deciding whether to rebuild the module. In order to keep track of the fact that the dependency has been handled, Pyximport updates the modification time of your ".pyx" source file. Future versions may do something more sophisticated like informing distutils of the dependencies directly. Limitations ------------ Pyximport does not give you any control over how your Cython file is compiled. Usually the defaults are fine. You might run into problems if you wanted to write your program in half-C, half-Cython and build them into a single library. Pyximport 1.2 will probably do this. Pyximport does not hide the Distutils/GCC warnings and errors generated by the import process. Arguably this will give you better feedback if something went wrong and why. 
And if nothing went wrong it will give you the warm fuzzy that pyximport really did rebuild your module as it was supposed to. For further thought and discussion ------------------------------------ I don't think that Python's :func:`reload` will do anything for changed ``.so``'s on some (all?) platforms. It would require some (easy) experimentation that I haven't gotten around to. But reload is rarely used in applications outside of the Python interactive interpreter and certainly not used much for C extension modules. Info about Windows ``_ ``setup.py install`` does not modify :file:`sitecustomize.py` for you. Should it? Modifying Python's "standard interpreter" behaviour may be more than most people expect of a package they install.. Pyximport puts your ``.c`` file beside your ``.pyx`` file (analogous to ``.pyc`` beside ``.py``). But it puts the platform-specific binary in a build directory as per normal for Distutils. If I could wave a magic wand and get Cython or distutils or whoever to put the build directory I might do it but not necessarily: having it at the top level is *VERY* *HELPFUL* for debugging Cython problems. Cython-0.26.1/docs/src/userguide/extension_types.rst0000664000175000017500000006172013143605603023344 0ustar stefanstefan00000000000000.. highlight:: cython .. _extension-types: ****************** Extension Types ****************** Introduction ============== As well as creating normal user-defined classes with the Python class statement, Cython also lets you create new built-in Python types, known as extension types. You define an extension type using the :keyword:`cdef` class statement. Here's an example:: cdef class Shrubbery: cdef int width, height def __init__(self, w, h): self.width = w self.height = h def describe(self): print "This shrubbery is", self.width, \ "by", self.height, "cubits." As you can see, a Cython extension type definition looks a lot like a Python class definition. Within it, you use the def statement to define methods that can be called from Python code. You can even define many of the special methods such as :meth:`__init__` as you would in Python. The main difference is that you can use the :keyword:`cdef` statement to define attributes. The attributes may be Python objects (either generic or of a particular extension type), or they may be of any C data type. So you can use extension types to wrap arbitrary C data structures and provide a Python-like interface to them. .. _readonly: Attributes ============ Attributes of an extension type are stored directly in the object's C struct. The set of attributes is fixed at compile time; you can't add attributes to an extension type instance at run time simply by assigning to them, as you could with a Python class instance. (You can subclass the extension type in Python and add attributes to instances of the subclass, however.) There are two ways that attributes of an extension type can be accessed: by Python attribute lookup, or by direct access to the C struct from Cython code. Python code is only able to access attributes of an extension type by the first method, but Cython code can use either method. By default, extension type attributes are only accessible by direct access, not Python access, which means that they are not accessible from Python code. To make them accessible from Python code, you need to declare them as :keyword:`public` or :keyword:`readonly`. 
For example:: cdef class Shrubbery: cdef public int width, height cdef readonly float depth makes the width and height attributes readable and writable from Python code, and the depth attribute readable but not writable. .. note:: You can only expose simple C types, such as ints, floats, and strings, for Python access. You can also expose Python-valued attributes. .. note:: Also the :keyword:`public` and :keyword:`readonly` options apply only to Python access, not direct access. All the attributes of an extension type are always readable and writable by C-level access. Type declarations =================== Before you can directly access the attributes of an extension type, the Cython compiler must know that you have an instance of that type, and not just a generic Python object. It knows this already in the case of the ``self`` parameter of the methods of that type, but in other cases you will have to use a type declaration. For example, in the following function:: cdef widen_shrubbery(sh, extra_width): # BAD sh.width = sh.width + extra_width because the ``sh`` parameter hasn't been given a type, the width attribute will be accessed by a Python attribute lookup. If the attribute has been declared :keyword:`public` or :keyword:`readonly` then this will work, but it will be very inefficient. If the attribute is private, it will not work at all -- the code will compile, but an attribute error will be raised at run time. The solution is to declare ``sh`` as being of type :class:`Shrubbery`, as follows:: cdef widen_shrubbery(Shrubbery sh, extra_width): sh.width = sh.width + extra_width Now the Cython compiler knows that ``sh`` has a C attribute called :attr:`width` and will generate code to access it directly and efficiently. The same consideration applies to local variables, for example,:: cdef Shrubbery another_shrubbery(Shrubbery sh1): cdef Shrubbery sh2 sh2 = Shrubbery() sh2.width = sh1.width sh2.height = sh1.height return sh2 Type Testing and Casting ------------------------ Suppose I have a method :meth:`quest` which returns an object of type :class:`Shrubbery`. To access it's width I could write:: cdef Shrubbery sh = quest() print sh.width which requires the use of a local variable and performs a type test on assignment. If you *know* the return value of :meth:`quest` will be of type :class:`Shrubbery` you can use a cast to write:: print (quest()).width This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it will try to access width as a C struct member which may not exist. At the C level, rather than raising an :class:`AttributeError`, either an nonsensical result will be returned (interpreting whatever data is at that address as an int) or a segfault may result from trying to access invalid memory. Instead, one can write:: print (quest()).width which performs a type check (possibly raising a :class:`TypeError`) before making the cast and allowing the code to proceed. To explicitly test the type of an object, use the :meth:`isinstance` method. By default, in Python, the :meth:`isinstance` method checks the :class:`__class__` attribute of the first argument to determine if it is of the required type. However, this is potentially unsafe as the :class:`__class__` attribute can be spoofed or changed, but the C structure of an extension type must be correct to access its :keyword:`cdef` attributes and call its :keyword:`cdef` methods. 
Cython detects if the second argument is a known extension type and does a type check instead, analogous to Pyrex's :meth:`typecheck`. The old behavior is always available by passing a tuple as the second parameter:: print isinstance(sh, Shrubbery) # Check the type of sh print isinstance(sh, (Shrubbery,)) # Check sh.__class__ Extension types and None ========================= When you declare a parameter or C variable as being of an extension type, Cython will allow it to take on the value ``None`` as well as values of its declared type. This is analogous to the way a C pointer can take on the value ``NULL``, and you need to exercise the same caution because of it. There is no problem as long as you are performing Python operations on it, because full dynamic type checking will be applied. However, when you access C attributes of an extension type (as in the widen_shrubbery function above), it's up to you to make sure the reference you're using is not ``None`` -- in the interests of efficiency, Cython does not check this. You need to be particularly careful when exposing Python functions which take extension types as arguments. If we wanted to make :func:`widen_shrubbery` a Python function, for example, if we simply wrote:: def widen_shrubbery(Shrubbery sh, extra_width): # This is sh.width = sh.width + extra_width # dangerous! then users of our module could crash it by passing ``None`` for the ``sh`` parameter. One way to fix this would be:: def widen_shrubbery(Shrubbery sh, extra_width): if sh is None: raise TypeError sh.width = sh.width + extra_width but since this is anticipated to be such a frequent requirement, Cython provides a more convenient way. Parameters of a Python function declared as an extension type can have a ``not None`` clause:: def widen_shrubbery(Shrubbery sh not None, extra_width): sh.width = sh.width + extra_width Now the function will automatically check that ``sh`` is ``not None`` along with checking that it has the right type. .. note:: ``not None`` clause can only be used in Python functions (defined with :keyword:`def`) and not C functions (defined with :keyword:`cdef`). If you need to check whether a parameter to a C function is None, you will need to do it yourself. .. note:: Some more things: * The self parameter of a method of an extension type is guaranteed never to be ``None``. * When comparing a value with ``None``, keep in mind that, if ``x`` is a Python object, ``x is None`` and ``x is not None`` are very efficient because they translate directly to C pointer comparisons, whereas ``x == None`` and ``x != None``, or simply using ``x`` as a boolean value (as in ``if x: ...``) will invoke Python operations and therefore be much slower. Special methods ================ Although the principles are similar, there are substantial differences between many of the :meth:`__xxx__` special methods of extension types and their Python counterparts. There is a :ref:`separate page ` devoted to this subject, and you should read it carefully before attempting to use any special methods in your extension types. Properties ============ You can declare properties in an extension class using the same syntax as in ordinary Python code:: cdef class Spam: @property def cheese(self): # This is called when the property is read. ... @cheese.setter def cheese(self, value): # This is called when the property is written. ... @cheese.deleter def cheese(self): # This is called when the property is deleted. 
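As a small, self-contained sketch of this pattern (the class and attribute names are invented for illustration), a property can expose a private C attribute and validate assignments to it::

    cdef class Account:
        cdef int _balance

        @property
        def balance(self):
            # Called when the property is read.
            return self._balance

        @balance.setter
        def balance(self, value):
            # Called when the property is written.
            if value < 0:
                raise ValueError("balance must not be negative")
            self._balance = value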
There is also a special (deprecated) legacy syntax for defining properties in an extension class:: cdef class Spam: property cheese: "A doc string can go here." def __get__(self): # This is called when the property is read. ... def __set__(self, value): # This is called when the property is written. ... def __del__(self): # This is called when the property is deleted. The :meth:`__get__`, :meth:`__set__` and :meth:`__del__` methods are all optional; if they are omitted, an exception will be raised when the corresponding operation is attempted. Here's a complete example. It defines a property which adds to a list each time it is written to, returns the list when it is read, and empties the list when it is deleted.:: # cheesy.pyx cdef class CheeseShop: cdef object cheeses def __cinit__(self): self.cheeses = [] @property def cheese(self): return "We don't have: %s" % self.cheeses @cheese.setter def cheese(self, value): self.cheeses.append(value) @cheese.deleter def cheese(self): del self.cheeses[:] # Test input from cheesy import CheeseShop shop = CheeseShop() print shop.cheese shop.cheese = "camembert" print shop.cheese shop.cheese = "cheddar" print shop.cheese del shop.cheese print shop.cheese .. sourcecode:: text # Test output We don't have: [] We don't have: ['camembert'] We don't have: ['camembert', 'cheddar'] We don't have: [] Subclassing ============= An extension type may inherit from a built-in type or another extension type:: cdef class Parrot: ... cdef class Norwegian(Parrot): ... A complete definition of the base type must be available to Cython, so if the base type is a built-in type, it must have been previously declared as an extern extension type. If the base type is defined in another Cython module, it must either be declared as an extern extension type or imported using the :keyword:`cimport` statement. An extension type can only have one base class (no multiple inheritance). Cython extension types can also be subclassed in Python. A Python class can inherit from multiple extension types provided that the usual Python rules for multiple inheritance are followed (i.e. the C layouts of all the base classes must be compatible). Since Cython 0.13.1, there is a way to prevent extension types from being subtyped in Python. This is done via the ``final`` directive, usually set on an extension type using a decorator:: cimport cython @cython.final cdef class Parrot: def done(self): pass Trying to create a Python subclass from this type will raise a :class:`TypeError` at runtime. Cython will also prevent subtyping a final type inside of the same module, i.e. creating an extension type that uses a final type as its base type will fail at compile time. Note, however, that this restriction does not currently propagate to other extension modules, so even final extension types can still be subtyped at the C level by foreign code. C methods ========= Extension types can have C methods as well as Python methods. Like C functions, C methods are declared using :keyword:`cdef` or :keyword:`cpdef` instead of :keyword:`def`. C methods are "virtual", and may be overridden in derived extension types. In addition, :keyword:`cpdef` methods can even be overridden by python methods when called as C method. This adds a little to their calling overhead compared to a :keyword:`cdef` method:: # pets.pyx cdef class Parrot: cdef void describe(self): print "This parrot is resting." cdef class Norwegian(Parrot): cdef void describe(self): Parrot.describe(self) print "Lovely plumage!" 
cdef Parrot p1, p2 p1 = Parrot() p2 = Norwegian() print "p1:" p1.describe() print "p2:" p2.describe() .. sourcecode:: text # Output p1: This parrot is resting. p2: This parrot is resting. Lovely plumage! The above example also illustrates that a C method can call an inherited C method using the usual Python technique, i.e.:: Parrot.describe(self) `cdef` methods can be declared static by using the @staticmethod decorator. This can be especially useful for constructing classes that take non-Python compatible types.:: cdef class OwnedPointer: cdef void* ptr cdef __dealloc__(self): if ptr != NULL: free(ptr) @staticmethod cdef create(void* ptr): p = OwnedPointer() p.ptr = ptr return ptr Forward-declaring extension types =================================== Extension types can be forward-declared, like :keyword:`struct` and :keyword:`union` types. This is usually not necessary and violates the DRY principle (Don't Repeat Yourself). If you are forward-declaring an extension type that has a base class, you must specify the base class in both the forward declaration and its subsequent definition, for example,:: cdef class A(B) ... cdef class A(B): # attributes and methods Fast instantiation =================== Cython provides two ways to speed up the instantiation of extension types. The first one is a direct call to the ``__new__()`` special static method, as known from Python. For an extension type ``Penguin``, you could use the following code:: cdef class Penguin: cdef object food def __cinit__(self, food): self.food = food def __init__(self, food): print("eating!") normal_penguin = Penguin('fish') fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() ! Note that the path through ``__new__()`` will *not* call the type's ``__init__()`` method (again, as known from Python). Thus, in the example above, the first instantiation will print ``eating!``, but the second will not. This is only one of the reasons why the ``__cinit__()`` method is safer and preferable over the normal ``__init__()`` method for extension types. The second performance improvement applies to types that are often created and deleted in a row, so that they can benefit from a freelist. Cython provides the decorator ``@cython.freelist(N)`` for this, which creates a statically sized freelist of ``N`` instances for a given type. Example:: cimport cython @cython.freelist(8) cdef class Penguin: cdef object food def __cinit__(self, food): self.food = food penguin = Penguin('fish 1') penguin = None penguin = Penguin('fish 2') # does not need to allocate memory! Making extension types weak-referenceable ========================================== By default, extension types do not support having weak references made to them. You can enable weak referencing by declaring a C attribute of type object called :attr:`__weakref__`. For example,:: cdef class ExplodingAnimal: """This animal will self-destruct when it is no longer strongly referenced.""" cdef object __weakref__ Controlling cyclic garbage collection in CPython ================================================ By default each extension type will support the cyclic garbage collector of CPython. If any Python objects can be referenced, Cython will automatically generate the ``tp_traverse`` and ``tp_clear`` slots. This is usually what you want. 
There is at least one reason why this might not be what you want: If you need to cleanup some external resources in the ``__dealloc__`` special function and your object happened to be in a reference cycle, the garbage collector may have triggered a call to ``tp_clear`` to drop references. This is the way that reference cycles are broken so that the garbage can actually be reclaimed. In that case any object references have vanished by the time when ``__dealloc__`` is called. Now your cleanup code lost access to the objects it has to clean up. In that case you can disable the cycle breaker ``tp_clear`` by using the ``no_gc_clear`` decorator :: @cython.no_gc_clear cdef class DBCursor: cdef DBConnection conn cdef DBAPI_Cursor *raw_cursor # ... def __dealloc__(self): DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor) This example tries to close a cursor via a database connection when the Python object is destroyed. The ``DBConnection`` object is kept alive by the reference from ``DBCursor``. But if a cursor happens to be in a reference cycle, the garbage collector may effectively "steal" the database connection reference, which makes it impossible to clean up the cursor. Using the ``no_gc_clear`` decorator this can not happen anymore because the references of a cursor object will not be cleared anymore. In rare cases, extension types can be guaranteed not to participate in cycles, but the compiler won't be able to prove this. This would be the case if the class can never reference itself, even indirectly. In that case, you can manually disable cycle collection by using the ``no_gc`` decorator, but beware that doing so when in fact the extension type can participate in cycles could cause memory leaks :: @cython.no_gc cdef class UserInfo: cdef str name cdef tuple addresses If you can be sure addresses will contain only references to strings, the above would be safe, and it may yield a significant speedup, depending on your usage pattern. Controlling pickling ==================== By default, Cython will generate a ``__reduce__()`` method to allow pickling an extension type if and only if each of its members are convertible to Python and it has no ``__cinit__`` method. To require this behavior (i.e. throw an error at compile time if a class cannot be pickled) decorate the class with ``@cython.auto_pickle(True)``. One can also annotate with ``@cython.auto_pickle(False)`` to get the old behavior of not generating a ``__reduce__`` method in any case. Manually implementing a ``__reduce__`` or `__reduce_ex__`` method will also disable this auto-generation and can be used to support pickling of more complicated types. Public and external extension types ==================================== Extension types can be declared extern or public. An extern extension type declaration makes an extension type defined in external C code available to a Cython module. A public extension type declaration makes an extension type defined in a Cython module available to external C code. External extension types ------------------------ An extern extension type allows you to gain access to the internals of Python objects defined in the Python core or in a non-Cython extension module. .. note:: In previous versions of Pyrex, extern extension types were also used to reference extension types defined in another Pyrex module. While you can still do that, Cython provides a better mechanism for this. See :ref:`sharing-declarations`. 
Here is an example which will let you get at the C-level members of the built-in complex object.:: cdef extern from "complexobject.h": struct Py_complex: double real double imag ctypedef class __builtin__.complex [object PyComplexObject]: cdef Py_complex cval # A function which uses the above type def spam(complex c): print "Real:", c.cval.real print "Imag:", c.cval.imag .. note:: Some important things: 1. In this example, :keyword:`ctypedef` class has been used. This is because, in the Python header files, the ``PyComplexObject`` struct is declared with: .. sourcecode:: c typedef struct { ... } PyComplexObject; 2. As well as the name of the extension type, the module in which its type object can be found is also specified. See the implicit importing section below. 3. When declaring an external extension type, you don't declare any methods. Declaration of methods is not required in order to call them, because the calls are Python method calls. Also, as with :keyword:`struct` and :keyword:`union`, if your extension class declaration is inside a :keyword:`cdef` extern from block, you only need to declare those C members which you wish to access. Name specification clause ------------------------- The part of the class declaration in square brackets is a special feature only available for extern or public extension types. The full form of this clause is:: [object object_struct_name, type type_object_name ] where ``object_struct_name`` is the name to assume for the type's C struct, and type_object_name is the name to assume for the type's statically declared type object. (The object and type clauses can be written in either order.) If the extension type declaration is inside a :keyword:`cdef` extern from block, the object clause is required, because Cython must be able to generate code that is compatible with the declarations in the header file. Otherwise, for extern extension types, the object clause is optional. For public extension types, the object and type clauses are both required, because Cython must be able to generate code that is compatible with external C code. Implicit importing ------------------ Cython requires you to include a module name in an extern extension class declaration, for example,:: cdef extern class MyModule.Spam: ... The type object will be implicitly imported from the specified module and bound to the corresponding name in this module. In other words, in this example an implicit:: from MyModule import Spam statement will be executed at module load time. The module name can be a dotted name to refer to a module inside a package hierarchy, for example,:: cdef extern class My.Nested.Package.Spam: ... You can also specify an alternative name under which to import the type using an as clause, for example,:: cdef extern class My.Nested.Package.Spam as Yummy: ... which corresponds to the implicit import statement:: from My.Nested.Package import Spam as Yummy Type names vs. constructor names -------------------------------- Inside a Cython module, the name of an extension type serves two distinct purposes. When used in an expression, it refers to a module-level global variable holding the type's constructor (i.e. its type-object). However, it can also be used as a C type name to declare variables, arguments and return values of that type. When you declare:: cdef extern class MyModule.Spam: ... the name Spam serves both these roles. There may be other names by which you can refer to the constructor, but only Spam can be used as a type name. 
For example, if you were to explicitly import MyModule, you could use ``MyModule.Spam()`` to create a Spam instance, but you wouldn't be able to use :class:`MyModule.Spam` as a type name. When an as clause is used, the name specified in the as clause also takes over both roles. So if you declare:: cdef extern class MyModule.Spam as Yummy: ... then Yummy becomes both the type name and a name for the constructor. Again, there are other ways that you could get hold of the constructor, but only Yummy is usable as a type name. Public extension types ====================== An extension type can be declared public, in which case a ``.h`` file is generated containing declarations for its object struct and type object. By including the ``.h`` file in external C code that you write, that code can access the attributes of the extension type. Cython-0.26.1/docs/src/userguide/language_basics.rst0000664000175000017500000006342013143605603023212 0ustar stefanstefan00000000000000.. highlight:: cython .. _language-basics: .. _struct: .. _union: .. _enum: .. _ctypedef: ***************** Language Basics ***************** C variable and type definitions =============================== The :keyword:`cdef` statement is used to declare C variables, either local or module-level:: cdef int i, j, k cdef float f, g[42], *h and C :keyword:`struct`, :keyword:`union` or :keyword:`enum` types:: cdef struct Grail: int age float volume cdef union Food: char *spam float *eggs cdef enum CheeseType: cheddar, edam, camembert cdef enum CheeseState: hard = 1 soft = 2 runny = 3 See also :ref:`struct-union-enum-styles` There is currently no special syntax for defining a constant, but you can use an anonymous :keyword:`enum` declaration for this purpose, for example,:: cdef enum: tons_of_spam = 3 .. note:: the words ``struct``, ``union`` and ``enum`` are used only when defining a type, not when referring to it. For example, to declare a variable pointing to a ``Grail`` you would write:: cdef Grail *gp and not:: cdef struct Grail *gp # WRONG There is also a ``ctypedef`` statement for giving names to types, e.g.:: ctypedef unsigned long ULong ctypedef int* IntPtr Types ----- Cython uses the normal C syntax for C types, including pointers. It provides all the standard C types, namely ``char``, ``short``, ``int``, ``long``, ``long long`` as well as their ``unsigned`` versions, e.g. ``unsigned int``. The special ``bint`` type is used for C boolean values (``int`` with 0/non-0 values for False/True) and ``Py_ssize_t`` for (signed) sizes of Python containers. Pointer types are constructed as in C, by appending a ``*`` to the base type they point to, e.g. ``int**`` for a pointer to a pointer to a C int. Arrays use the normal C array syntax, e.g. ``int[10]``. Note that Cython uses array access for pointer dereferencing, as ``*x`` is not valid Python syntax, whereas ``x[0]`` is. Also, the Python types ``list``, ``dict``, ``tuple``, etc. may be used for static typing, as well as any user defined extension types. The Python types int, long, and float are not available for static typing and instead interpreted as C ``int``, ``long``, and ``float`` respectively, as statically typing variables with these Python types has zero advantages. While these C types can be vastly faster, they have C semantics. Specifically, the integer types overflow and the C ``float`` type only has 32 bits of precision (as opposed to the 64-bit C ``double`` which Python floats wrap and is typically what one wants). 
If you want to use these numeric Python types simply omit the type declaration and let them be objects. Grouping multiple C declarations -------------------------------- If you have a series of declarations that all begin with :keyword:`cdef`, you can group them into a :keyword:`cdef` block like this:: cdef: struct Spam: int tons int i float a Spam *p void f(Spam *s): print s.tons, "Tons of spam" Python functions vs. C functions ================================== There are two kinds of function definition in Cython: Python functions are defined using the def statement, as in Python. They take Python objects as parameters and return Python objects. C functions are defined using the new :keyword:`cdef` statement. They take either Python objects or C values as parameters, and can return either Python objects or C values. Within a Cython module, Python functions and C functions can call each other freely, but only Python functions can be called from outside the module by interpreted Python code. So, any functions that you want to "export" from your Cython module must be declared as Python functions using def. There is also a hybrid function, called :keyword:`cpdef`. A :keyword:`cpdef` can be called from anywhere, but uses the faster C calling conventions when being called from other Cython code. A :keyword:`cpdef` can also be overridden by a Python method on a subclass or an instance attribute, even when called from Cython. If this happens, most performance gains are of course lost and even if it does not, there is a tiny overhead in calling a :keyword:`cpdef` method from Cython compared to calling a :keyword:`cdef` method. Parameters of either type of function can be declared to have C data types, using normal C declaration syntax. For example,:: def spam(int i, char *s): ... cdef int eggs(unsigned long l, float f): ... When a parameter of a Python function is declared to have a C data type, it is passed in as a Python object and automatically converted to a C value, if possible. In other words, the definition of ``spam`` above is equivalent to writing:: def spam(python_i, python_s): cdef int i = python_i cdef char* s = python_s ... Automatic conversion is currently only possible for numeric types, string types and structs (composed recursively of any of these types); attempting to use any other type for the parameter of a Python function will result in a compile-time error. Care must be taken with strings to ensure a reference if the pointer is to be used after the call. Structs can be obtained from Python mappings, and again care must be taken with string attributes if they are to be used after the function returns. C functions, on the other hand, can have parameters of any type, since they're passed in directly using a normal C function call. Functions declared using :keyword:`cdef` with Python object return type, like Python functions, will return a :keyword:`None` value when execution leaves the function body without an explicit return value. This is in contrast to C/C++, which leaves the return value undefined. In the case of non-Python object return types, the equivalent of zero is returned, for example, 0 for ``int``, :keyword:`False` for ``bint`` and :keyword:`NULL` for pointer types. A more complete comparison of the pros and cons of these different method types can be found at :ref:`early-binding-for-speed`. 
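To make the distinction concrete, here is a minimal sketch (the function names
are purely illustrative) showing the three kinds of definitions side by side::

    def py_add(int a, int b):          # Python function: callable from Python and Cython
        return a + b

    cdef int c_add(int a, int b):      # C function: callable only from Cython/C code
        return a + b

    cpdef int both_add(int a, int b):  # hybrid: C call from Cython, Python call from outside
        return a + b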
Python objects as parameters and return values ---------------------------------------------- If no type is specified for a parameter or return value, it is assumed to be a Python object. (Note that this is different from the C convention, where it would default to int.) For example, the following defines a C function that takes two Python objects as parameters and returns a Python object:: cdef spamobjs(x, y): ... Reference counting for these objects is performed automatically according to the standard Python/C API rules (i.e. borrowed references are taken as parameters and a new reference is returned). The name object can also be used to explicitly declare something as a Python object. This can be useful if the name being declared would otherwise be taken as the name of a type, for example,:: cdef ftang(object int): ... declares a parameter called int which is a Python object. You can also use object as the explicit return type of a function, e.g.:: cdef object ftang(object int): ... In the interests of clarity, it is probably a good idea to always be explicit about object parameters in C functions. Error return values ------------------- If you don't do anything special, a function declared with :keyword:`cdef` that does not return a Python object has no way of reporting Python exceptions to its caller. If an exception is detected in such a function, a warning message is printed and the exception is ignored. If you want a C function that does not return a Python object to be able to propagate exceptions to its caller, you need to declare an exception value for it. Here is an example:: cdef int spam() except -1: ... With this declaration, whenever an exception occurs inside spam, it will immediately return with the value ``-1``. Furthermore, whenever a call to spam returns ``-1``, an exception will be assumed to have occurred and will be propagated. When you declare an exception value for a function, you should never explicitly or implicitly return that value. In particular, if the exceptional return value is a ``False`` value, then you should ensure the function will never terminate via an implicit or empty return. If all possible return values are legal and you can't reserve one entirely for signalling errors, you can use an alternative form of exception value declaration:: cdef int spam() except? -1: ... The "?" indicates that the value ``-1`` only indicates a possible error. In this case, Cython generates a call to :c:func:`PyErr_Occurred` if the exception value is returned, to make sure it really is an error. There is also a third form of exception value declaration:: cdef int spam() except *: ... This form causes Cython to generate a call to :c:func:`PyErr_Occurred` after every call to spam, regardless of what value it returns. If you have a function returning void that needs to propagate errors, you will have to use this form, since there isn't any return value to test. Otherwise there is little use for this form. An external C++ function that may raise an exception can be declared with:: cdef int spam() except + See :ref:`wrapping-cplusplus` for more details. Some things to note: * Exception values can only declared for functions returning an integer, enum, float or pointer type, and the value must be a constant expression. Void functions can only use the ``except *`` form. * The exception value specification is part of the signature of the function. 
If you're passing a pointer to a function as a parameter or assigning it to a variable, the declared type of the parameter or variable must have the same exception value specification (or lack thereof). Here is an example of a pointer-to-function declaration with an exception value:: int (*grail)(int, char*) except -1 * You don't need to (and shouldn't) declare exception values for functions which return Python objects. Remember that a function with no declared return type implicitly returns a Python object. (Exceptions on such functions are implicitly propagated by returning NULL.) Checking return values of non-Cython functions ---------------------------------------------- It's important to understand that the except clause does not cause an error to be raised when the specified value is returned. For example, you can't write something like:: cdef extern FILE *fopen(char *filename, char *mode) except NULL # WRONG! and expect an exception to be automatically raised if a call to :func:`fopen` returns ``NULL``. The except clause doesn't work that way; its only purpose is for propagating Python exceptions that have already been raised, either by a Cython function or a C function that calls Python/C API routines. To get an exception from a non-Python-aware function such as :func:`fopen`, you will have to check the return value and raise it yourself, for example,:: cdef FILE* p p = fopen("spam.txt", "r") if p == NULL: raise SpamError("Couldn't open the spam file") Automatic type conversions ========================== In most situations, automatic conversions will be performed for the basic numeric and string types when a Python object is used in a context requiring a C value, or vice versa. The following table summarises the conversion possibilities. +----------------------------+--------------------+------------------+ | C types | From Python types | To Python types | +============================+====================+==================+ | [unsigned] char, | int, long | int | | [unsigned] short, | | | | int, long | | | +----------------------------+--------------------+------------------+ | unsigned int, | int, long | long | | unsigned long, | | | | [unsigned] long long | | | +----------------------------+--------------------+------------------+ | float, double, long double | int, long, float | float | +----------------------------+--------------------+------------------+ | char* | str/bytes | str/bytes [#]_ | +----------------------------+--------------------+------------------+ | struct, | | dict [#1]_ | | union | | | +----------------------------+--------------------+------------------+ .. [#] The conversion is to/from str for Python 2.x, and bytes for Python 3.x. .. [#1] The conversion from a C union type to a Python dict will add a value for each of the union fields. Cython 0.23 and later, however, will refuse to automatically convert a union with unsafe type combinations. An example is a union of an ``int`` and a ``char*``, in which case the pointer value may or be not be a valid pointer. Caveats when using a Python string in a C context ------------------------------------------------- You need to be careful when using a Python string in a context expecting a ``char*``. In this situation, a pointer to the contents of the Python string is used, which is only valid as long as the Python string exists. So you need to make sure that a reference to the original Python string is held for as long as the C string is needed. 
If you can't guarantee that the Python string will live long enough, you will
need to copy the C string.

Cython detects and prevents some mistakes of this kind.  For instance, if you
attempt something like::

    cdef char *s
    s = pystring1 + pystring2

then Cython will produce the error message ``Obtaining char* from temporary
Python value``.  The reason is that concatenating the two Python strings
produces a new Python string object that is referenced only by a temporary
internal variable that Cython generates.  As soon as the statement has
finished, the temporary variable will be decrefed and the Python string
deallocated, leaving ``s`` dangling.  Since this code could not possibly work,
Cython refuses to compile it.

The solution is to assign the result of the concatenation to a Python
variable, and then obtain the ``char*`` from that, i.e.::

    cdef char *s
    p = pystring1 + pystring2
    s = p

It is then your responsibility to hold the reference p for as long as
necessary.

Keep in mind that the rules used to detect such errors are only heuristics.
Sometimes Cython will complain unnecessarily, and sometimes it will fail to
detect a problem that exists.  Ultimately, you need to understand the issue
and be careful what you do.

Statements and expressions
==========================

Control structures and expressions follow Python syntax for the most part.
When applied to Python objects, they have the same semantics as in Python
(unless otherwise noted).  Most of the Python operators can also be applied to
C values, with the obvious semantics.

If Python objects and C values are mixed in an expression, conversions are
performed automatically between Python objects and C numeric or string types.

Reference counts are maintained automatically for all Python objects, and all
Python operations are automatically checked for errors, with appropriate
action taken.

Differences between C and Cython expressions
--------------------------------------------

There are some differences in syntax and semantics between C expressions and
Cython expressions, particularly in the area of C constructs which have no
direct equivalent in Python.

* An integer literal is treated as a C constant, and will
  be truncated to whatever size your C compiler thinks appropriate.
  To get a Python integer (of arbitrary precision), cast immediately to
  an object (e.g. ``<object>100000000000000000000``).  The ``L``, ``LL``,
  and ``U`` suffixes have the same meaning as in C.
* There is no ``->`` operator in Cython.  Instead of ``p->x``, use ``p.x``
* There is no unary ``*`` operator in Cython.  Instead of ``*p``, use ``p[0]``
* There is an ``&`` operator, with the same semantics as in C.
* The null C pointer is called ``NULL``, not ``0`` (and ``NULL`` is a reserved word).
* Type casts are written ``<type>value``, for example,::

    cdef char* p, float* q
    p = <char*>q

Scope rules
-----------

Cython determines whether a variable belongs to a local scope, the module
scope, or the built-in scope completely statically.  As with Python, assigning
to a variable which is not otherwise declared implicitly declares it to be a
variable residing in the scope where it is assigned.  The type of the variable
depends on type inference, except for the global module scope, where it is
always a Python object.

Built-in Functions
------------------

Cython compiles calls to most built-in functions into direct calls to the
corresponding Python/C API routines, making them particularly fast.

Only direct function calls using these names are optimised.
If you do something else with one of these names that assumes it's a Python object, such as assign it to a Python variable, and later call it, the call will be made as a Python function call. +------------------------------+-------------+----------------------------+ | Function and arguments | Return type | Python/C API Equivalent | +==============================+=============+============================+ | abs(obj) | object, | PyNumber_Absolute, fabs, | | | double, ... | fabsf, ... | +------------------------------+-------------+----------------------------+ | callable(obj) | bint | PyObject_Callable | +------------------------------+-------------+----------------------------+ | delattr(obj, name) | None | PyObject_DelAttr | +------------------------------+-------------+----------------------------+ | exec(code, [glob, [loc]]) | object | - | +------------------------------+-------------+----------------------------+ | dir(obj) | list | PyObject_Dir | +------------------------------+-------------+----------------------------+ | divmod(a, b) | tuple | PyNumber_Divmod | +------------------------------+-------------+----------------------------+ | getattr(obj, name, [default])| object | PyObject_GetAttr | | (Note 1) | | | +------------------------------+-------------+----------------------------+ | hasattr(obj, name) | bint | PyObject_HasAttr | +------------------------------+-------------+----------------------------+ | hash(obj) | int / long | PyObject_Hash | +------------------------------+-------------+----------------------------+ | intern(obj) | object | Py*_InternFromString | +------------------------------+-------------+----------------------------+ | isinstance(obj, type) | bint | PyObject_IsInstance | +------------------------------+-------------+----------------------------+ | issubclass(obj, type) | bint | PyObject_IsSubclass | +------------------------------+-------------+----------------------------+ | iter(obj, [sentinel]) | object | PyObject_GetIter | +------------------------------+-------------+----------------------------+ | len(obj) | Py_ssize_t | PyObject_Length | +------------------------------+-------------+----------------------------+ | pow(x, y, [z]) | object | PyNumber_Power | +------------------------------+-------------+----------------------------+ | reload(obj) | object | PyImport_ReloadModule | +------------------------------+-------------+----------------------------+ | repr(obj) | object | PyObject_Repr | +------------------------------+-------------+----------------------------+ | setattr(obj, name) | void | PyObject_SetAttr | +------------------------------+-------------+----------------------------+ Note 1: Pyrex originally provided a function :func:`getattr3(obj, name, default)` corresponding to the three-argument form of the Python builtin :func:`getattr()`. Cython still supports this function, but the usage is deprecated in favour of the normal builtin, which Cython can optimise in both forms. Operator Precedence ------------------- Keep in mind that there are some differences in operator precedence between Python and C, and that Cython uses the Python precedences, not the C ones. Integer for-loops ------------------ Cython recognises the usual Python for-in-range integer loop pattern:: for i in range(n): ... If ``i`` is declared as a :keyword:`cdef` integer type, it will optimise this into a pure C loop. This restriction is required as otherwise the generated code wouldn't be correct due to potential integer overflows on the target architecture. 
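For example, the following minimal sketch (the variable names are illustrative)
is all that is needed for Cython to generate a plain C loop::

    cdef int i, n = 1000, total = 0
    for i in range(n):
        total += i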
If you are worried that the loop is not being converted correctly, use the annotate feature of the cython commandline (``-a``) to easily see the generated C code. See :ref:`automatic-range-conversion` For backwards compatibility to Pyrex, Cython also supports a more verbose form of for-loop which you might find in legacy code:: for i from 0 <= i < n: ... or:: for i from 0 <= i < n by s: ... where ``s`` is some integer step size. .. note:: This syntax is deprecated and should not be used in new code. Use the normal Python for-loop instead. Some things to note about the for-from loop: * The target expression must be a plain variable name. * The name between the lower and upper bounds must be the same as the target name. * The direction of iteration is determined by the relations. If they are both from the set {``<``, ``<=``} then it is upwards; if they are both from the set {``>``, ``>=``} then it is downwards. (Any other combination is disallowed.) Like other Python looping statements, break and continue may be used in the body, and the loop may have an else clause. The include statement ===================== .. warning:: Historically the ``include`` statement was used for sharing declarations. Use :ref:`sharing-declarations` instead. A Cython source file can include material from other files using the include statement, for example,:: include "spamstuff.pxi" The contents of the named file are textually included at that point. The included file can contain any complete statements or declarations that are valid in the context where the include statement appears, including other include statements. The contents of the included file should begin at an indentation level of zero, and will be treated as though they were indented to the level of the include statement that is including the file. .. note:: There are other mechanisms available for splitting Cython code into separate parts that may be more appropriate in many cases. See :ref:`sharing-declarations`. Conditional Compilation ======================= Some features are available for conditional compilation and compile-time constants within a Cython source file. Compile-Time Definitions ------------------------ A compile-time constant can be defined using the DEF statement:: DEF FavouriteFood = u"spam" DEF ArraySize = 42 DEF OtherArraySize = 2 * ArraySize + 17 The right-hand side of the ``DEF`` must be a valid compile-time expression. Such expressions are made up of literal values and names defined using ``DEF`` statements, combined using any of the Python expression syntax. The following compile-time names are predefined, corresponding to the values returned by :func:`os.uname`. UNAME_SYSNAME, UNAME_NODENAME, UNAME_RELEASE, UNAME_VERSION, UNAME_MACHINE The following selection of builtin constants and functions are also available: None, True, False, abs, all, any, ascii, bin, bool, bytearray, bytes, chr, cmp, complex, dict, divmod, enumerate, filter, float, format, frozenset, hash, hex, int, len, list, long, map, max, min, oct, ord, pow, range, reduce, repr, reversed, round, set, slice, sorted, str, sum, tuple, xrange, zip Note that some of these builtins may not be available when compiling under Python 2.x or 3.x, or may behave differently in both. A name defined using ``DEF`` can be used anywhere an identifier can appear, and it is replaced with its compile-time value as though it were written into the source at that point as a literal. 
For this to work, the compile-time expression must evaluate to a Python value of type ``int``, ``long``, ``float``, ``bytes`` or ``unicode`` (``str`` in Py3). :: cdef int a1[ArraySize] cdef int a2[OtherArraySize] print "I like", FavouriteFood Conditional Statements ---------------------- The ``IF`` statement can be used to conditionally include or exclude sections of code at compile time. It works in a similar way to the ``#if`` preprocessor directive in C.:: IF UNAME_SYSNAME == "Windows": include "icky_definitions.pxi" ELIF UNAME_SYSNAME == "Darwin": include "nice_definitions.pxi" ELIF UNAME_SYSNAME == "Linux": include "penguin_definitions.pxi" ELSE: include "other_definitions.pxi" The ``ELIF`` and ``ELSE`` clauses are optional. An ``IF`` statement can appear anywhere that a normal statement or declaration can appear, and it can contain any statements or declarations that would be valid in that context, including ``DEF`` statements and other ``IF`` statements. The expressions in the ``IF`` and ``ELIF`` clauses must be valid compile-time expressions as for the ``DEF`` statement, although they can evaluate to any Python value, and the truth of the result is determined in the usual Python way. Cython-0.26.1/docs/src/userguide/debugging.rst0000664000175000017500000002215313023021033022016 0ustar stefanstefan00000000000000.. highlight:: cython .. _debugging: ********************************** Debugging your Cython program ********************************** Cython comes with an extension for the GNU Debugger that helps users debug Cython code. To use this functionality, you will need to install gdb 7.2 or higher, built with Python support (linked to Python 2.6 or higher). The debugger supports debuggees with versions 2.6 and higher. For Python 3, code should be built with Python 3 and the debugger should be run with Python 2 (or at least it should be able to find the Python 2 Cython installation). Note that in recent versions of Ubuntu, for instance, ``gdb`` installed with ``apt-get`` is configured with Python 3. On such systems, the proper configuration of ``gdb`` can be obtained by downloading the ``gdb`` source, and then running:: ./configure --with-python=python2 make sudo make install The debugger will need debug information that the Cython compiler can export. This can be achieved from within the setup script by passing ``gdb_debug=True`` to ``cythonize()``:: from distutils.core import setup from distutils.extension import Extension extensions = [Extension('source', ['source.pyx'])] setup(..., ext_modules=cythonize(extensions, gdb_debug=True)) For development it's often helpful to pass the ``--inplace`` flag to the ``setup.py`` script, which makes distutils build your project "in place", i.e., not in a separate `build` directory. When invoking Cython from the command line directly you can have it write debug information using the ``--gdb`` flag:: cython --gdb myfile.pyx Running the Debugger ===================== .. highlight:: bash To run the Cython debugger and have it import the debug information exported by Cython, run ``cygdb`` in the build directory:: $ python setup.py build_ext --inplace $ cygdb GNU gdb (GDB) 7.2 ... (gdb) When using the Cython debugger, it's preferable that you build and run your code with an interpreter that is compiled with debugging symbols (i.e. configured with ``--with-pydebug`` or compiled with the ``-g`` CFLAG). If your Python is installed and managed by your package manager you probably need to install debug support separately, e.g. 
for ubuntu:: $ sudo apt-get install python-dbg $ python-dbg setup.py build_ext --inplace Then you need to run your script with ``python-dbg`` also. Ensure that when building your package with debug symbols that cython extensions are re-compiled if they had been previously compiled. If your package is version controlled, you might want to perform ``git clean -fxd`` or ``hg purge --all`` before building. You can also pass additional arguments to gdb:: $ cygdb /path/to/build/directory/ GDBARGS i.e.:: $ cygdb . -- --args python-dbg mainscript.py To tell cygdb not to import any debug information, supply ``--`` as the first argument:: $ cygdb -- Using the Debugger =================== The Cython debugger comes with a set of commands that support breakpoints, stack inspection, source code listing, stepping, stepping over, etc. Most of these commands are analogous to their respective gdb command. .. function:: cy break breakpoints... Break in a Python, Cython or C function. First it will look for a Cython function with that name, if cygdb doesn't know about a function (or method) with that name, it will set a (pending) C breakpoint. The ``-p`` option can be used to specify a Python breakpoint. Breakpoints can be set for either the function or method name, or they can be fully "qualified", which means that the entire "path" to a function is given:: (gdb) cy break cython_function_or_method (gdb) cy break packagename.cython_module.cython_function (gdb) cy break packagename.cython_module.ClassName.cython_method (gdb) cy break c_function You can also break on Cython line numbers:: (gdb) cy break :14 (gdb) cy break cython_module:14 (gdb) cy break packagename.cython_module:14 Python breakpoints currently support names of the module (not the entire package path) and the function or method:: (gdb) cy break -p python_module.python_function_or_method (gdb) cy break -p python_function_or_method .. note:: Python breakpoints only work in Python builds where the Python frame information can be read from the debugger. To ensure this, use a Python debug build or a non-stripped build compiled with debug support. .. function:: cy step Step through Python, Cython or C code. Python, Cython and C functions called directly from Cython code are considered relevant and will be stepped into. .. function:: cy next Step over Python, Cython or C code. .. function:: cy run Run the program. The default interpreter is the interpreter that was used to build your extensions with, or the interpreter ``cygdb`` is run with in case the "don't import debug information" option was in effect. The interpreter can be overridden using gdb's ``file`` command. .. function:: cy cont Continue the program. .. function:: cy up cy down Go up and down the stack to what is considered a relevant frame. .. function:: cy finish Execute until an upward relevant frame is met or something halts execution. .. function:: cy bt cy backtrace Print a traceback of all frames considered relevant. The ``-a`` option makes it print the full traceback (all C frames). .. function:: cy select Select a stack frame by number as listed by ``cy backtrace``. This command is introduced because ``cy backtrace`` prints a reversed stack trace, so frame numbers differ from gdb's ``bt``. .. function:: cy print varname Print a local or global Cython, Python or C variable (depending on the context). Variables may also be dereferenced:: (gdb) cy print x x = 1 (gdb) cy print *x *x = (PyObject) { _ob_next = 0x93efd8, _ob_prev = 0x93ef88, ob_refcnt = 65, ob_type = 0x83a3e0 } .. 
function:: cy set cython_variable = value Set a Cython variable on the Cython stack to value. .. function:: cy list List the source code surrounding the current line. .. function:: cy locals cy globals Print all the local and global variables and their values. .. function:: cy import FILE... Import debug information from files given as arguments. The easiest way to import debug information is to use the cygdb command line tool. .. function:: cy exec code Execute code in the current Python or Cython frame. This works like Python's interactive interpreter. For Python frames it uses the globals and locals from the Python frame, for Cython frames it uses the dict of globals used on the Cython module and a new dict filled with the local Cython variables. .. note:: ``cy exec`` modifies state and executes code in the debuggee and is therefore potentially dangerous. Example:: (gdb) cy exec x + 1 2 (gdb) cy exec import sys; print sys.version_info (2, 6, 5, 'final', 0) (gdb) cy exec >global foo > >foo = 'something' >end Convenience functions ===================== The following functions are gdb functions, which means they can be used in a gdb expression. .. function:: cy_cname(varname) Returns the C variable name of a Cython variable. For global variables this may not be actually valid. .. function:: cy_cvalue(varname) Returns the value of a Cython variable. .. function:: cy_eval(expression) Evaluates Python code in the nearest Python or Cython frame and returns the result of the expression as a gdb value. This gives a new reference if successful, NULL on error. .. function:: cy_lineno() Returns the current line number in the selected Cython frame. Example:: (gdb) print $cy_cname("x") $1 = "__pyx_v_x" (gdb) watch $cy_cvalue("x") Hardware watchpoint 13: $cy_cvalue("x") (gdb) cy set my_cython_variable = $cy_eval("{'spam': 'ham'}") (gdb) print $cy_lineno() $2 = 12 Configuring the Debugger ======================== A few aspects of the debugger are configurable with gdb parameters. For instance, colors can be disabled, the terminal background color and breakpoint autocompletion can be configured. .. c:macro:: cy_complete_unqualified Tells the Cython debugger whether ``cy break`` should also complete plain function names, i.e. not prefixed by their module name. E.g. if you have a function named ``spam``, in module ``M``, it tells whether to only complete ``M.spam`` or also just ``spam``. The default is true. .. c:macro:: cy_colorize_code Tells the debugger whether to colorize source code. The default is true. .. c:macro:: cy_terminal_background_color Tells the debugger about the terminal background color, which affects source code coloring. The default is "dark", another valid option is "light". This is how these parameters can be used:: (gdb) set cy_complete_unqualified off (gdb) set cy_terminal_background_color light (gdb) show cy_colorize_code Cython-0.26.1/docs/src/userguide/parallelism.rst0000664000175000017500000002135013023021033022366 0ustar stefanstefan00000000000000.. highlight:: cython .. py:module:: cython.parallel .. _parallel: ********************************** Using Parallelism ********************************** Cython supports native parallelism through the :py:mod:`cython.parallel` module. To use this kind of parallelism, the GIL must be released (see :ref:`Releasing the GIL `). It currently supports OpenMP, but later on more backends might be supported. .. NOTE:: Functionality in this module may only be used from the main thread or parallel regions due to OpenMP restrictions. .. 
function:: prange([start,] stop[, step][, nogil=False][, schedule=None[, chunksize=None]][, num_threads=None]) This function can be used for parallel loops. OpenMP automatically starts a thread pool and distributes the work according to the schedule used. ``step`` must not be 0. This function can only be used with the GIL released. If ``nogil`` is true, the loop will be wrapped in a nogil section. Thread-locality and reductions are automatically inferred for variables. If you assign to a variable in a prange block, it becomes lastprivate, meaning that the variable will contain the value from the last iteration. If you use an inplace operator on a variable, it becomes a reduction, meaning that the values from the thread-local copies of the variable will be reduced with the operator and assigned to the original variable after the loop. The index variable is always lastprivate. Variables assigned to in a parallel with block will be private and unusable after the block, as there is no concept of a sequentially last value. The ``schedule`` is passed to OpenMP and can be one of the following: static: If a chunksize is provided, iterations are distributed to all threads ahead of time in blocks of the given chunksize. If no chunksize is given, the iteration space is divided into chunks that are approximately equal in size, and at most one chunk is assigned to each thread in advance. This is most appropriate when the scheduling overhead matters and the problem can be cut down into equally sized chunks that are known to have approximately the same runtime. dynamic: The iterations are distributed to threads as they request them, with a default chunk size of 1. This is suitable when the runtime of each chunk differs and is not known in advance and therefore a larger number of smaller chunks is used in order to keep all threads busy. guided: As with dynamic scheduling, the iterations are distributed to threads as they request them, but with decreasing chunk size. The size of each chunk is proportional to the number of unassigned iterations divided by the number of participating threads, decreasing to 1 (or the chunksize if provided). This has an advantage over pure dynamic scheduling when it turns out that the last chunks take more time than expected or are otherwise being badly scheduled, so that most threads start running idle while the last chunks are being worked on by only a smaller number of threads. runtime: The schedule and chunk size are taken from the runtime scheduling variable, which can be set through the ``openmp.omp_set_schedule()`` function call, or the OMP_SCHEDULE environment variable. Note that this essentially disables any static compile time optimisations of the scheduling code itself and may therefore show a slightly worse performance than when the same scheduling policy is statically configured at compile time. .. auto The decision regarding scheduling is delegated to the .. compiler and/or runtime system. The programmer gives .. the implementation the freedom to choose any possible .. mapping of iterations to threads in the team. The default schedule is implementation defined. For more information consult the OpenMP specification [#]_. The ``num_threads`` argument indicates how many threads the team should consist of. If not given, OpenMP will decide how many threads to use. Typically this is the number of cores available on the machine. However, this may be controlled through the ``omp_set_num_threads()`` function, or through the ``OMP_NUM_THREADS`` environment variable. 
The ``chunksize`` argument indicates the chunksize to be used for dividing the iterations among threads. This is only valid for ``static``, ``dynamic`` and ``guided`` scheduling, and is optional. Different chunksizes may give substantially different performance results, depending on the schedule, the load balance it provides, the scheduling overhead and the amount of false sharing (if any). Example with a reduction:: from cython.parallel import prange cdef int i cdef int sum = 0 for i in prange(n, nogil=True): sum += i print sum Example with a typed memoryview (e.g. a NumPy array):: from cython.parallel import prange def func(double[:] x, double alpha): cdef Py_ssize_t i for i in prange(x.shape[0]): x[i] = alpha * x[i] .. function:: parallel(num_threads=None) This directive can be used as part of a ``with`` statement to execute code sequences in parallel. This is currently useful to setup thread-local buffers used by a prange. A contained prange will be a worksharing loop that is not parallel, so any variable assigned to in the parallel section is also private to the prange. Variables that are private in the parallel block are unavailable after the parallel block. Example with thread-local buffers:: from cython.parallel import parallel, prange from libc.stdlib cimport abort, malloc, free cdef Py_ssize_t idx, i, n = 100 cdef int * local_buf cdef size_t size = 10 with nogil, parallel(): local_buf = malloc(sizeof(int) * size) if local_buf == NULL: abort() # populate our local buffer in a sequential loop for i in xrange(size): local_buf[i] = i * 2 # share the work using the thread-local buffer(s) for i in prange(n, schedule='guided'): func(local_buf) free(local_buf) Later on sections might be supported in parallel blocks, to distribute code sections of work among threads. .. function:: threadid() Returns the id of the thread. For n threads, the ids will range from 0 to n-1. Compiling ========= To actually use the OpenMP support, you need to tell the C or C++ compiler to enable OpenMP. For gcc this can be done as follows in a setup.py:: from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize ext_modules = [ Extension( "hello", ["hello.pyx"], extra_compile_args=['-fopenmp'], extra_link_args=['-fopenmp'], ) ] setup( name='hello-parallel-world', ext_modules=cythonize(ext_modules), ) For Microsoft Visual C++ compiler, use ``'/openmp'`` instead of ``'-fopenmp'``. Breaking out of loops ===================== The parallel with and prange blocks support the statements break, continue and return in nogil mode. Additionally, it is valid to use a ``with gil`` block inside these blocks, and have exceptions propagate from them. However, because the blocks use OpenMP, they can not just be left, so the exiting procedure is best-effort. For prange() this means that the loop body is skipped after the first break, return or exception for any subsequent iteration in any thread. It is undefined which value shall be returned if multiple different values may be returned, as the iterations are in no particular order:: from cython.parallel import prange cdef int func(Py_ssize_t n): cdef Py_ssize_t i for i in prange(n, nogil=True): if i == 8: with gil: raise Exception() elif i == 4: break elif i == 2: return i In the example above it is undefined whether an exception shall be raised, whether it will simply break or whether it will return 2. 
Using OpenMP Functions ====================== OpenMP functions can be used by cimporting ``openmp``:: from cython.parallel cimport parallel cimport openmp cdef int num_threads openmp.omp_set_dynamic(1) with nogil, parallel(): num_threads = openmp.omp_get_num_threads() ... .. rubric:: References .. [#] http://www.openmp.org/mp-documents/spec30.pdf Cython-0.26.1/docs/src/userguide/limitations.rst0000664000175000017500000000447012542002467022440 0ustar stefanstefan00000000000000.. highlight:: cython .. _cython-limitations: ************* Limitations ************* This page used to list bugs in Cython that made the semantics of compiled code differ from that in Python. Most of the missing features have been fixed in Cython 0.15. The bug tracker has an up-to-date `list of remaining compatibility issues`_. Note that a future version 1.0 of Cython is planned to provide full Python language compatibility. .. _`list of remaining compatibility issues`: http://trac.cython.org/cython_trac/query?status=assigned&status=new&status=reopened&component=Python+Semantics&component=Python3+Semantics&order=priority&col=id&col=summary&col=component&col=status&col=type&col=priority&col=milestone Below is a list of differences that we will probably not be addressing. Most of these things that fall more into the implementation details rather than semantics, and we may decide not to fix (or require a --pedantic flag to get). Nested tuple argument unpacking =============================== :: def f((a,b), c): pass This was removed in Python 3. Inspect support =============== While it is quite possible to emulate the interface of functions in Cython's own function type, and recent Cython releases have seen several improvements here, the "inspect" module does not consider a Cython implemented function a "function", because it tests the object type explicitly instead of comparing an abstract interface or an abstract base class. This has a negative impact on code that uses inspect to inspect function objects, but would require a change to Python itself. Stack frames ============ Currently we generate fake tracebacks as part of exception propagation, but don't fill in locals and can't fill in co_code. To be fully compatible, we would have to generate these stack frame objects at function call time (with a potential performance penalty). We may have an option to enable this for debugging. Identity vs. equality for inferred literals =========================================== :: a = 1.0 # a inferred to be C type 'double' b = c = None # b and c inferred to be type 'object' if some_runtime_expression: b = a # creates a new Python float object c = a # creates a new Python float object print b is c # most likely not the same object Cython-0.26.1/docs/src/userguide/memoryviews.rst0000664000175000017500000005376213143605603022501 0ustar stefanstefan00000000000000.. highlight:: cython .. _memoryviews: ***************** Typed Memoryviews ***************** Typed memoryviews allow efficient access to memory buffers, such as those underlying NumPy arrays, without incurring any Python overhead. Memoryviews are similar to the current NumPy array buffer support (``np.ndarray[np.float64_t, ndim=2]``), but they have more features and cleaner syntax. Memoryviews are more general than the old NumPy array buffer support, because they can handle a wider variety of sources of array data. For example, they can handle C arrays and the Cython array type (:ref:`view_cython_arrays`). 
A memoryview can be used in any context (function parameters, module-level, cdef class attribute, etc) and can be obtained from nearly any object that exposes writable buffer through the `PEP 3118`_ buffer interface. .. _view_quickstart: Quickstart ========== If you are used to working with NumPy, the following examples should get you started with Cython memory views. :: from cython.view cimport array as cvarray import numpy as np # Memoryview on a NumPy array narr = np.arange(27, dtype=np.dtype("i")).reshape((3, 3, 3)) cdef int [:, :, :] narr_view = narr # Memoryview on a C array cdef int carr[3][3][3] cdef int [:, :, :] carr_view = carr # Memoryview on a Cython array cyarr = cvarray(shape=(3, 3, 3), itemsize=sizeof(int), format="i") cdef int [:, :, :] cyarr_view = cyarr # Show the sum of all the arrays before altering it print("NumPy sum of the NumPy array before assignments: %s" % narr.sum()) # We can copy the values from one memoryview into another using a single # statement, by either indexing with ... or (NumPy-style) with a colon. carr_view[...] = narr_view cyarr_view[:] = narr_view # NumPy-style syntax for assigning a single value to all elements. narr_view[:, :, :] = 3 # Just to distinguish the arrays carr_view[0, 0, 0] = 100 cyarr_view[0, 0, 0] = 1000 # Assigning into the memoryview on the NumPy array alters the latter print("NumPy sum of NumPy array after assignments: %s" % narr.sum()) # A function using a memoryview does not usually need the GIL cpdef int sum3d(int[:, :, :] arr) nogil: cdef size_t i, j, k cdef int total = 0 I = arr.shape[0] J = arr.shape[1] K = arr.shape[2] for i in range(I): for j in range(J): for k in range(K): total += arr[i, j, k] return total # A function accepting a memoryview knows how to use a NumPy array, # a C array, a Cython array... print("Memoryview sum of NumPy array is %s" % sum3d(narr)) print("Memoryview sum of C array is %s" % sum3d(carr)) print("Memoryview sum of Cython array is %s" % sum3d(cyarr)) # ... and of course, a memoryview. print("Memoryview sum of C memoryview is %s" % sum3d(carr_view)) This code should give the following output:: NumPy sum of the NumPy array before assignments: 351 NumPy sum of NumPy array after assignments: 81 Memoryview sum of NumPy array is 81 Memoryview sum of C array is 451 Memoryview sum of Cython array is 1351 Memoryview sum of C memoryview is 451 Using memoryviews ================= Syntax ------ Memory views use Python slicing syntax in a similar way as NumPy. To create a complete view on a one-dimensional int buffer:: cdef int[:] view1D = exporting_object A complete 3D view:: cdef int[:,:,:] view3D = exporting_object A 2D view that restricts the first dimension of a buffer to 100 rows starting at the second (index 1) and then skips every second (odd) row:: cdef int[1:102:2,:] partial_view = exporting_object This also works conveniently as function arguments: .. code-block:: cython def process_3d_buffer(int[1:102:2,:] view not None): ... The ``not None`` declaration for the argument automatically rejects None values as input, which would otherwise be allowed. The reason why None is allowed by default is that it is conveniently used for return arguments:: def process_buffer(int[:,:] input not None, int[:,:] output = None): if output is None: output = ... # e.g. numpy.empty_like(input) # process 'input' into 'output' return output Cython will reject incompatible buffers automatically, e.g. passing a three dimensional buffer into a function that requires a two dimensional buffer will raise a ``ValueError``. 
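As an illustration (assuming NumPy is available and the ``process_buffer``
function above has been compiled), the dimensionality check happens when the
buffer is passed in::

    import numpy as np

    arr_2d = np.zeros((3, 4), dtype=np.intc)
    arr_3d = np.zeros((3, 4, 5), dtype=np.intc)

    process_buffer(arr_2d)   # accepted: a two dimensional buffer of C ints
    process_buffer(arr_3d)   # rejected: raises ValueError (wrong number of dimensions)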
Indexing -------- In Cython, index access on memory views is automatically translated into memory addresses. The following code requests a two-dimensional memory view of C ``int`` typed items and indexes into it:: cdef int[:,:] buf = exporting_object print(buf[1,2]) Negative indices work as well, counting from the end of the respective dimension:: print(buf[-1,-2]) The following function loops over each dimension of a 2D array and adds 1 to each item:: def add_one(int[:,:] buf): for x in xrange(buf.shape[0]): for y in xrange(buf.shape[1]): buf[x,y] += 1 Indexing and slicing can be done with or without the GIL. It basically works like NumPy. If indices are specified for every dimension you will get an element of the base type (e.g. `int`). Otherwise, you will get a new view. An Ellipsis means you get consecutive slices for every unspecified dimension:: cdef int[:, :, :] my_view = exporting_object # These are all equivalent my_view[10] my_view[10, :, :] my_view[10, ...] Copying ------- Memory views can be copied in place:: cdef int[:, :, :] to_view, from_view ... # copy the elements in from_view to to_view to_view[...] = from_view # or to_view[:] = from_view # or to_view[:, :, :] = from_view They can also be copied with the ``copy()`` and ``copy_fortran()`` methods; see :ref:`view_copy_c_fortran`. .. _view_transposing: Transposing ----------- In most cases (see below), the memoryview can be transposed in the same way that NumPy slices can be transposed:: cdef int[:, ::1] c_contig = ... cdef int[::1, :] f_contig = c_contig.T This gives a new, transposed, view on the data. Transposing requires that all dimensions of the memoryview have a direct access memory layout (i.e., there are no indirections through pointers). See :ref:`view_general_layouts` for details. Newaxis ------- As for NumPy, new axes can be introduced by indexing an array with ``None`` :: cdef double[:] myslice = np.linspace(0, 10, num=50) # 2D array with shape (1, 50) myslice[None] # or myslice[None, :] # 2D array with shape (50, 1) myslice[:, None] One may mix new axis indexing with all other forms of indexing and slicing. See also an example_. Comparison to the old buffer support ==================================== You will probably prefer memoryviews to the older syntax because: * The syntax is cleaner * Memoryviews do not usually need the GIL (see :ref:`view_needs_gil`) * Memoryviews are considerably faster For example, this is the old syntax equivalent of the ``sum3d`` function above:: cpdef int old_sum3d(object[int, ndim=3, mode='strided'] arr): cdef int I, J, K, total = 0 I = arr.shape[0] J = arr.shape[1] K = arr.shape[2] for i in range(I): for j in range(J): for k in range(K): total += arr[i, j, k] return total Note that we can't use ``nogil`` for the buffer version of the function as we could for the memoryview version of ``sum3d`` above, because buffer objects are Python objects. However, even if we don't use ``nogil`` with the memoryview, it is significantly faster. This is a output from an IPython session after importing both versions:: In [2]: import numpy as np In [3]: arr = np.zeros((40, 40, 40), dtype=int) In [4]: timeit -r15 old_sum3d(arr) 1000 loops, best of 15: 298 us per loop In [5]: timeit -r15 sum3d(arr) 1000 loops, best of 15: 219 us per loop Python buffer support ===================== Cython memoryviews support nearly all objects exporting the interface of Python `new style buffers`_. This is the buffer interface described in `PEP 3118`_. 
NumPy arrays support this interface, as do :ref:`view_cython_arrays`. The "nearly all" is because the Python buffer interface allows the *elements* in the data array to themselves be pointers; Cython memoryviews do not yet support this. .. _view_memory_layout: Memory layout ============= The buffer interface allows objects to identify the underlying memory in a variety of ways. With the exception of pointers for data elements, Cython memoryviews support all Python new-type buffer layouts. It can be useful to know or specify memory layout if the memory has to be in a particular format for an external routine, or for code optimization. Background ---------- The concepts are as follows: there is data access and data packing. Data access means either direct (no pointer) or indirect (pointer). Data packing means your data may be contiguous or not contiguous in memory, and may use *strides* to identify the jumps in memory consecutive indices need to take for each dimension. NumPy arrays provide a good model of strided direct data access, so we'll use them for a refresher on the concepts of C and Fortran contiguous arrays, and data strides. Brief recap on C, Fortran and strided memory layouts ---------------------------------------------------- The simplest data layout might be a C contiguous array. This is the default layout in NumPy and Cython arrays. C contiguous means that the array data is continuous in memory (see below) and that neighboring elements in the first dimension of the array are furthest apart in memory, whereas neighboring elements in the last dimension are closest together. For example, in NumPy:: In [2]: arr = np.array([['0', '1', '2'], ['3', '4', '5']], dtype='S1') Here, ``arr[0, 0]`` and ``arr[0, 1]`` are one byte apart in memory, whereas ``arr[0, 0]`` and ``arr[1, 0]`` are 3 bytes apart. This leads us to the idea of *strides*. Each axis of the array has a stride length, which is the number of bytes needed to go from one element on this axis to the next element. In the case above, the strides for axes 0 and 1 will obviously be:: In [3]: arr.strides Out[4]: (3, 1) For a 3D C contiguous array:: In [5]: c_contig = np.arange(24, dtype=np.int8).reshape((2,3,4)) In [6] c_contig.strides Out[6]: (12, 4, 1) A Fortran contiguous array has the opposite memory ordering, with the elements on the first axis closest togther in memory:: In [7]: f_contig = np.array(c_contig, order='F') In [8]: np.all(f_contig == c_contig) Out[8]: True In [9]: f_contig.strides Out[9]: (1, 2, 6) A contiguous array is one for which a single continuous block of memory contains all the data for the elements of the array, and therefore the memory block length is the product of number of elements in the array and the size of the elements in bytes. In the example above, the memory block is 2 * 3 * 4 * 1 bytes long, where 1 is the length of an int8. An array can be contiguous without being C or Fortran order:: In [10]: c_contig.transpose((1, 0, 2)).strides Out[10]: (4, 12, 1) Slicing an NumPy array can easily make it not contiguous:: In [11]: sliced = c_contig[:,1,:] In [12]: sliced.strides Out[12]: (12, 1) In [13]: sliced.flags Out[13]: C_CONTIGUOUS : False F_CONTIGUOUS : False OWNDATA : False WRITEABLE : True ALIGNED : True UPDATEIFCOPY : False Default behavior for memoryview layouts --------------------------------------- As you'll see in :ref:`view_general_layouts`, you can specify memory layout for any dimension of an memoryview. 
For any dimension for which you don't specify a layout, then the data access is assumed to be direct, and the data packing assumed to be strided. For example, that will be the assumption for memoryviews like:: int [:, :, :] my_memoryview = obj C and Fortran contiguous memoryviews ------------------------------------ You can specify C and Fortran contiguous layouts for the memoryview by using the ``::1`` step syntax at definition. For example, if you know for sure your memoryview will be on top of a 3D C contiguous layout, you could write:: cdef int[:, :, ::1] c_contiguous = c_contig where ``c_contig`` could be a C contiguous NumPy array. The ``::1`` at the 3rd position means that the elements in this 3rd dimension will be one element apart in memory. If you know you will have a 3D Fortran contiguous array:: cdef int[::1, :, :] f_contiguous = f_contig If you pass a non-contiguous buffer, for example :: # This array is C contiguous c_contig = np.arange(24).reshape((2,3,4)) cdef int[:, :, ::1] c_contiguous = c_contig # But this isn't c_contiguous = np.array(c_contig, order='F') you will get a ``ValueError`` at runtime:: /Users/mb312/dev_trees/minimal-cython/mincy.pyx in init mincy (mincy.c:17267)() 69 70 # But this isn't ---> 71 c_contiguous = np.array(c_contig, order='F') 72 73 # Show the sum of all the arrays before altering it /Users/mb312/dev_trees/minimal-cython/stringsource in View.MemoryView.memoryview_cwrapper (mincy.c:9995)() /Users/mb312/dev_trees/minimal-cython/stringsource in View.MemoryView.memoryview.__cinit__ (mincy.c:6799)() ValueError: ndarray is not C-contiguous Thus the `::1` in the slice type specification indicates in which dimension the data is contiguous. It can only be used to specify full C or Fortran contiguity. .. _view_copy_c_fortran: C and Fortran contiguous copies ------------------------------- .. Mark : I could not make this work - should it? # This slice is C contiguous c_contig = np.arange(24).reshape((2,3,4)) f_contig = np.array(c_contig, order='F') cdef int [:, :, ::1] c_contig_view = c_contig cdef int [::1, :, :] f_contig_view = f_contig cdef int[:, :, ::1] f2c = f_contig_view.copy() cdef int[::1, :, :] c2f = c_contig_view.copy_fortran() Copies can be made C or Fortran contiguous using the ``.copy()`` and ``.copy_fortran()`` methods:: # This view is C contiguous cdef int[:, :, ::1] c_contiguous = myview.copy() # This view is Fortran contiguous cdef int[::1, :] f_contiguous_slice = myview.copy_fortran() .. _view_general_layouts: Specifying more general memory layouts -------------------------------------- Data layout can be specified using the previously seen ``::1`` slice syntax, or by using any of the constants in ``cython.view``. If no specifier is given in any dimension, then the data access is assumed to be direct, and the data packing assumed to be strided. If you don't know whether a dimension will be direct or indirect (because you're getting an object with a buffer interface from some library perhaps), then you can specify the `generic` flag, in which case it will be determined at runtime. 
The flags are as follows: * generic - strided and direct or indirect * strided - strided and direct (this is the default) * indirect - strided and indirect * contiguous - contiguous and direct * indirect_contiguous - the list of pointers is contiguous and they can be used like this:: from cython cimport view # direct access in both dimensions, strided in the first dimension, contiguous in the last cdef int[:, ::view.contiguous] a # contiguous list of pointers to contiguous lists of ints cdef int[::view.indirect_contiguous, ::1] b # direct or indirect in the first dimension, direct in the second dimension # strided in both dimensions cdef int[::view.generic, :] c Only the first, last or the dimension following an indirect dimension may be specified contiguous:: # INVALID cdef int[::view.contiguous, ::view.indirect, :] a cdef int[::1, ::view.indirect, :] b # VALID cdef int[::view.indirect, ::1, :] a cdef int[::view.indirect, :, ::1] b cdef int[::view.indirect_contiguous, ::1, :] The difference between the `contiguous` flag and the `::1` specifier is that the former specifies contiguity for only one dimension, whereas the latter specifies contiguity for all following (Fortran) or preceding (C) dimensions:: cdef int[:, ::1] c_contig = ... # VALID cdef int[:, ::view.contiguous] myslice = c_contig[::2] # INVALID cdef int[:, ::1] myslice = c_contig[::2] The former case is valid because the last dimension remains contiguous, but the first dimension does not "follow" the last one anymore (meaning, it was strided already, but it is not C or Fortran contiguous any longer), since it was sliced. .. _view_needs_gil: Memoryviews and the GIL ======================= As you will see from the :ref:`view_quickstart` section, memoryviews often do not need the GIL:: cpdef int sum3d(int[:, :, :] arr) nogil: ... In particular, you do not need the GIL for memoryview indexing, slicing or transposing. Memoryviews require the GIL for the copy methods (:ref:`view_copy_c_fortran`), or when the dtype is object and an object element is read or written. Memoryview Objects and Cython Arrays ==================================== These typed memoryviews can be converted to Python memoryview objects (`cython.view.memoryview`). These Python objects are indexable, slicable and transposable in the same way that the original memoryviews are. They can also be converted back to Cython-space memoryviews at any time. They have the following attributes: * ``shape``: size in each dimension, as a tuple. * ``strides``: stride along each dimension, in bytes. * ``suboffsets`` * ``ndim``: number of dimensions. * ``size``: total number of items in the view (product of the shape). * ``itemsize``: size, in bytes, of the items in the view. * ``nbytes``: equal to ``size`` times ``itemsize``. * ``base`` And of course the aforementioned ``T`` attribute (:ref:`view_transposing`). These attributes have the same semantics as in NumPy_. For instance, to retrieve the original object:: import numpy cimport numpy as cnp cdef cnp.int32_t[:] a = numpy.arange(10, dtype=numpy.int32) a = a[::2] print(a) print(numpy.asarray(a)) print(a.base) # this prints: # # [0 2 4 6 8] # [0 1 2 3 4 5 6 7 8 9] Note that this example returns the original object from which the view was obtained, and that the view was resliced in the meantime. .. _view_cython_arrays: Cython arrays ============= Whenever a Cython memoryview is copied (using any of the `copy` or `copy_fortran` methods), you get a new memoryview slice of a newly created ``cython.view.array`` object. 
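
For instance (a hedged sketch; ``arr`` stands for any C contiguous 2D ``int``
buffer, such as a NumPy array)::

    cdef int[:, ::1] view = arr
    cdef int[:, ::1] duplicate = view.copy()

    duplicate[0, 0] = 42     # modifies only the freshly allocated copy
    print(view[0, 0])        # the original data is untouched
    print(duplicate.base)    # the new slice is backed by a cython.view.array
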
This array can also be used manually, and will automatically allocate a block of data. It can later be assigned to a C or Fortran contiguous slice (or a strided slice). It can be used like:: from cython cimport view my_array = view.array(shape=(10, 2), itemsize=sizeof(int), format="i") cdef int[:, :] my_slice = my_array It also takes an optional argument `mode` ('c' or 'fortran') and a boolean `allocate_buffer`, that indicates whether a buffer should be allocated and freed when it goes out of scope:: cdef view.array my_array = view.array(..., mode="fortran", allocate_buffer=False) my_array.data = my_data_pointer # define a function that can deallocate the data (if needed) my_array.callback_free_data = free You can also cast pointers to array, or C arrays to arrays:: cdef view.array my_array = my_data_pointer cdef view.array my_array = my_c_array Of course, you can also immediately assign a cython.view.array to a typed memoryview slice. A C array may be assigned directly to a memoryview slice:: cdef int[:, ::1] myslice = my_2d_c_array The arrays are indexable and slicable from Python space just like memoryview objects, and have the same attributes as memoryview objects. CPython array module ==================== An alternative to ``cython.view.array`` is the ``array`` module in the Python standard library. In Python 3, the ``array.array`` type supports the buffer interface natively, so memoryviews work on top of it without additional setup. Starting with Cython 0.17, however, it is possible to use these arrays as buffer providers also in Python 2. This is done through explicitly cimporting the ``cpython.array`` module as follows:: cimport cpython.array def sum_array(int[:] view): """ >>> from array import array >>> sum_array( array('i', [1,2,3]) ) 6 """ cdef int total for i in range(view.shape[0]): total += view[i] return total Note that the cimport also enables the old buffer syntax for the array type. Therefore, the following also works:: from cpython cimport array def sum_array(array.array[int] arr): # using old buffer syntax ... Coercion to NumPy ================= Memoryview (and array) objects can be coerced to a NumPy ndarray, without having to copy the data. You can e.g. do:: cimport numpy as np import numpy as np numpy_array = np.asarray( my_pointer) Of course, you are not restricted to using NumPy's type (such as ``np.int32_t`` here), you can use any usable type. None Slices =========== Although memoryview slices are not objects they can be set to None and they can be checked for being None as well:: def func(double[:] myarray = None): print(myarray is None) If the function requires real memory views as input, it is therefore best to reject None input straight away in the signature, which is supported in Cython 0.17 and later as follows:: def func(double[:] myarray not None): ... Unlike object attributes of extension classes, memoryview slices are not initialized to None. .. _GIL: http://docs.python.org/dev/glossary.html#term-global-interpreter-lock .. _new style buffers: http://docs.python.org/c-api/buffer.html .. _pep 3118: http://www.python.org/peps/pep-3118.html .. _NumPy: http://docs.scipy.org/doc/numpy/reference/arrays.ndarray.html#memory-layout .. _example: http://www.scipy.org/Numpy_Example_List#newaxis Cython-0.26.1/docs/src/userguide/special_methods.rst0000664000175000017500000007475013143605603023256 0ustar stefanstefan00000000000000.. 
_special-methods: Special Methods of Extension Types =================================== This page describes the special methods currently supported by Cython extension types. A complete list of all the special methods appears in the table at the bottom. Some of these methods behave differently from their Python counterparts or have no direct Python counterparts, and require special mention. .. Note: Everything said on this page applies only to extension types, defined with the :keyword:`cdef class` statement. It doesn't apply to classes defined with the Python :keyword:`class` statement, where the normal Python rules apply. Declaration ------------ Special methods of extension types must be declared with :keyword:`def`, not :keyword:`cdef`. This does not impact their performance--Python uses different calling conventions to invoke these special methods. Docstrings ----------- Currently, docstrings are not fully supported in some special methods of extension types. You can place a docstring in the source to serve as a comment, but it won't show up in the corresponding :attr:`__doc__` attribute at run time. (This seems to be is a Python limitation -- there's nowhere in the `PyTypeObject` data structure to put such docstrings.) Initialisation methods: :meth:`__cinit__` and :meth:`__init__` --------------------------------------------------------------- There are two methods concerned with initialising the object. The :meth:`__cinit__` method is where you should perform basic C-level initialisation of the object, including allocation of any C data structures that your object will own. You need to be careful what you do in the :meth:`__cinit__` method, because the object may not yet be fully valid Python object when it is called. Therefore, you should be careful invoking any Python operations which might touch the object; in particular, its methods. By the time your :meth:`__cinit__` method is called, memory has been allocated for the object and any C attributes it has have been initialised to 0 or null. (Any Python attributes have also been initialised to None, but you probably shouldn't rely on that.) Your :meth:`__cinit__` method is guaranteed to be called exactly once. If your extension type has a base type, the :meth:`__cinit__` method of the base type is automatically called before your :meth:`__cinit__` method is called; you cannot explicitly call the inherited :meth:`__cinit__` method. If you need to pass a modified argument list to the base type, you will have to do the relevant part of the initialisation in the :meth:`__init__` method instead (where the normal rules for calling inherited methods apply). Any initialisation which cannot safely be done in the :meth:`__cinit__` method should be done in the :meth:`__init__` method. By the time :meth:`__init__` is called, the object is a fully valid Python object and all operations are safe. Under some circumstances it is possible for :meth:`__init__` to be called more than once or not to be called at all, so your other methods should be designed to be robust in such situations. Any arguments passed to the constructor will be passed to both the :meth:`__cinit__` method and the :meth:`__init__` method. If you anticipate subclassing your extension type in Python, you may find it useful to give the :meth:`__cinit__` method `*` and `**` arguments so that it can accept and ignore extra arguments. 
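
A sketch of what this can look like (the class, attribute names and the
allocation are made up for illustration)::

    from libc.stdlib cimport malloc, free

    cdef class Matrix:
        cdef double* data

        def __cinit__(self, int rows, int cols, *args, **kwargs):
            # C-level setup only; any extra constructor arguments are simply
            # ignored here and left for __init__ (of this class or a subclass).
            self.data = <double*> malloc(rows * cols * sizeof(double))
            if self.data == NULL:
                raise MemoryError()

        def __dealloc__(self):
            free(self.data)
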
Otherwise, any Python subclass which has an :meth:`__init__` with a different signature will have to override :meth:`__new__` [#]_ as well as :meth:`__init__`, which the writer of a Python class wouldn't expect to have to do. Alternatively, as a convenience, if you declare your :meth:`__cinit__`` method to take no arguments (other than self) it will simply ignore any extra arguments passed to the constructor without complaining about the signature mismatch. .. Note: Older Cython files may use :meth:`__new__` rather than :meth:`__cinit__`. The two are synonyms. The name change from :meth:`__new__` to :meth:`__cinit__` was to avoid confusion with Python :meth:`__new__` (which is an entirely different concept) and eventually the use of :meth:`__new__` in Cython will be disallowed to pave the way for supporting Python-style :meth:`__new__` .. [#] http://docs.python.org/reference/datamodel.html#object.__new__ Finalization method: :meth:`__dealloc__` ---------------------------------------- The counterpart to the :meth:`__cinit__` method is the :meth:`__dealloc__` method, which should perform the inverse of the :meth:`__cinit__` method. Any C data that you explicitly allocated (e.g. via malloc) in your :meth:`__cinit__` method should be freed in your :meth:`__dealloc__` method. You need to be careful what you do in a :meth:`__dealloc__` method. By the time your :meth:`__dealloc__` method is called, the object may already have been partially destroyed and may not be in a valid state as far as Python is concerned, so you should avoid invoking any Python operations which might touch the object. In particular, don't call any other methods of the object or do anything which might cause the object to be resurrected. It's best if you stick to just deallocating C data. You don't need to worry about deallocating Python attributes of your object, because that will be done for you by Cython after your :meth:`__dealloc__` method returns. When subclassing extension types, be aware that the :meth:`__dealloc__` method of the superclass will always be called, even if it is overridden. This is in contrast to typical Python behavior where superclass methods will not be executed unless they are explicitly called by the subclass. .. Note:: There is no :meth:`__del__` method for extension types. Arithmetic methods ------------------- Arithmetic operator methods, such as :meth:`__add__`, behave differently from their Python counterparts. There are no separate "reversed" versions of these methods (:meth:`__radd__`, etc.) Instead, if the first operand cannot perform the operation, the same method of the second operand is called, with the operands in the same order. This means that you can't rely on the first parameter of these methods being "self" or being the right type, and you should test the types of both operands before deciding what to do. If you can't handle the combination of types you've been given, you should return `NotImplemented`. This also applies to the in-place arithmetic method :meth:`__ipow__`. It doesn't apply to any of the other in-place methods (:meth:`__iadd__`, etc.) which always take `self` as the first argument. Rich comparisons ----------------- There are no separate methods for the individual rich comparison operations (:meth:`__eq__`, :meth:`__le__`, etc.) 
Instead there is a single method :meth:`__richcmp__` which takes an integer indicating which operation is to be performed, as follows: +-----+-----+ | < | 0 | +-----+-----+ | == | 2 | +-----+-----+ | > | 4 | +-----+-----+ | <= | 1 | +-----+-----+ | != | 3 | +-----+-----+ | >= | 5 | +-----+-----+ The :meth:`__next__` method ---------------------------- Extension types wishing to implement the iterator interface should define a method called :meth:`__next__`, not next. The Python system will automatically supply a next method which calls your :meth:`__next__`. Do *NOT* explicitly give your type a :meth:`next` method, or bad things could happen. Special Method Table --------------------- This table lists all of the special methods together with their parameter and return types. In the table below, a parameter name of self is used to indicate that the parameter has the type that the method belongs to. Other parameters with no type specified in the table are generic Python objects. You don't have to declare your method as taking these parameter types. If you declare different types, conversions will be performed as necessary. General ^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __cinit__ |self, ... | | Basic initialisation (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __init__ |self, ... | | Further initialisation | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __dealloc__ |self | | Basic deallocation (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __cmp__ |x, y | int | 3-way comparison | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __richcmp__ |x, y, int op | object | Rich comparison (no direct Python equivalent) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __str__ |self | object | str(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __repr__ |self | object | repr(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __hash__ |self | int | Hash function | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __call__ |self, ... | object | self(...) 
| +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __iter__ |self | object | Return iterator for sequence | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getattr__ |self, name | object | Get attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getattribute__ |self, name | object | Get attribute, unconditionally | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setattr__ |self, name, val | | Set attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delattr__ |self, name | | Delete attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Arithmetic operators ^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __add__ | x, y | object | binary `+` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __sub__ | x, y | object | binary `-` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __mul__ | x, y | object | `*` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __div__ | x, y | object | `/` operator for old-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __floordiv__ | x, y | object | `//` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __truediv__ | x, y | object | `/` operator for new-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __mod__ | x, y | object | `%` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __divmod__ | x, y | object | combined div and mod | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __pow__ | x, y, z | object | `**` operator or pow(x, y, z) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __neg__ | self | object | unary `-` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __pos__ | self | object | unary `+` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __abs__ | self | object | absolute 
value | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __nonzero__ | self | int | convert to boolean | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __invert__ | self | object | `~` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __lshift__ | x, y | object | `<<` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __rshift__ | x, y | object | `>>` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __and__ | x, y | object | `&` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __or__ | x, y | object | `|` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __xor__ | x, y | object | `^` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Numeric conversions ^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __int__ | self | object | Convert to integer | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __long__ | self | object | Convert to long integer | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __float__ | self | object | Convert to float | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __oct__ | self | object | Convert to octal | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __hex__ | self | object | Convert to hexadecimal | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __index__ (2.5+ only) | self | object | Convert to sequence index | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ In-place arithmetic operators ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __iadd__ | self, x | object | `+=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __isub__ | self, x | object | `-=` operator | 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __imul__ | self, x | object | `*=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __idiv__ | self, x | object | `/=` operator for old-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ifloordiv__ | self, x | object | `//=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __itruediv__ | self, x | object | `/=` operator for new-style division | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __imod__ | self, x | object | `%=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ipow__ | x, y, z | object | `**=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ilshift__ | self, x | object | `<<=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __irshift__ | self, x | object | `>>=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __iand__ | self, x | object | `&=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ior__ | self, x | object | `|=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __ixor__ | self, x | object | `^=` operator | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Sequences and mappings ^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __len__ | self int | | len(self) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getitem__ | self, x | object | self[x] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setitem__ | self, x, y | | self[x] = y | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delitem__ | self, x | | del self[x] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getslice__ | self, Py_ssize_t i, Py_ssize_t j | object | self[i:j] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __setslice__ | self, Py_ssize_t i, Py_ssize_t j, x | | self[i:j] = x | 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delslice__ | self, Py_ssize_t i, Py_ssize_t j | | del self[i:j] | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __contains__ | self, x | int | x in self | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Iterators ^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __next__ | self | object | Get next item (called next in Python) | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Buffer interface [:PEP:`3118`] (no Python equivalents - see note 1) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __getbuffer__ | self, Py_buffer `*view`, int flags | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __releasebuffer__ | self, Py_buffer `*view` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Buffer interface [legacy] (no Python equivalents - see note 1) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __getreadbuffer__ | self, Py_ssize_t i, void `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getwritebuffer__ | self, Py_ssize_t i, void `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getsegcount__ | self, Py_ssize_t `*p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __getcharbuffer__ | self, Py_ssize_t i, char `**p` | | | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ Descriptor objects (see note 2) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | Name | Parameters | Return type | Description | +=======================+=======================================+=============+=====================================================+ | __get__ | self, instance, class | object | Get value of attribute | 
+-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __set__ | self, instance, value | | Set value of attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ | __delete__ | self, instance | | Delete attribute | +-----------------------+---------------------------------------+-------------+-----------------------------------------------------+ .. note:: (1) The buffer interface was intended for use by C code and is not directly accessible from Python. It is described in the Python/C API Reference Manual of Python 2.x under sections 6.6 and 10.6. It was superseded by the new :PEP:`3118` buffer protocol in Python 2.6 and is no longer available in Python 3. For a how-to guide to the new API, see :ref:`buffer`. .. note:: (2) Descriptor objects are part of the support mechanism for new-style Python classes. See the discussion of descriptors in the Python documentation. See also :PEP:`252`, "Making Types Look More Like Classes", and :PEP:`253`, "Subtyping Built-In Types". Cython-0.26.1/docs/src/userguide/sharing_declarations.rst0000664000175000017500000002154213143605603024265 0ustar stefanstefan00000000000000.. highlight:: cython .. _sharing-declarations: ******************************************** Sharing Declarations Between Cython Modules ******************************************** This section describes how to make C declarations, functions and extension types in one Cython module available for use in another Cython module. These facilities are closely modeled on the Python import mechanism, and can be thought of as a compile-time version of it. Definition and Implementation files ==================================== A Cython module can be split into two parts: a definition file with a ``.pxd`` suffix, containing C declarations that are to be available to other Cython modules, and an implementation file with a ``.pyx`` suffix, containing everything else. When a module wants to use something declared in another module's definition file, it imports it using the :keyword:`cimport` statement. A ``.pxd`` file that consists solely of extern declarations does not need to correspond to an actual ``.pyx`` file or Python module. This can make it a convenient place to put common declarations, for example declarations of functions from an :ref:`external library ` that one wants to use in several modules. What a Definition File contains ================================ A definition file can contain: * Any kind of C type declaration. * extern C function or variable declarations. * Declarations of C functions defined in the module. * The definition part of an extension type (see below). It cannot contain the implementations of any C or Python functions, or any Python class definitions, or any executable statements. It is needed when one wants to access :keyword:`cdef` attributes and methods, or to inherit from :keyword:`cdef` classes defined in this module. .. note:: You don't need to (and shouldn't) declare anything in a declaration file public in order to make it available to other Cython modules; its mere presence in a definition file does that. You only need a public declaration if you want to make something available to external C code. 
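
To illustrate, a small definition file exercising each of these kinds of
declaration might look like this (all names here are hypothetical)::

    # spamlib.pxd -- a hypothetical definition file

    ctypedef double real_t                      # a C type declaration

    cdef extern from "spamlib.h":
        real_t spam_density(real_t volume)      # an extern C function declaration

    cdef real_t cube(real_t x)                  # header of a C function defined in spamlib.pyx

    cdef class SpamTin:                         # definition part of an extension type
        cdef real_t capacity
        cdef real_t refill(self, real_t amount)
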
What an Implementation File contains ====================================== An implementation file can contain any kind of Cython statement, although there are some restrictions on the implementation part of an extension type if the corresponding definition file also defines that type (see below). If one doesn't need to :keyword:`cimport` anything from this module, then this is the only file one needs. .. _cimport: The cimport statement ======================= The :keyword:`cimport` statement is used in a definition or implementation file to gain access to names declared in another definition file. Its syntax exactly parallels that of the normal Python import statement:: cimport module [, module...] from module cimport name [as name] [, name [as name] ...] Here is an example. :file:`dishes.pxd` is a definition file which exports a C data type. :file:`restaurant.pyx` is an implementation file which imports and uses it. :file:`dishes.pxd`:: cdef enum otherstuff: sausage, eggs, lettuce cdef struct spamdish: int oz_of_spam otherstuff filler :file:`restaurant.pyx`:: cimport dishes from dishes cimport spamdish cdef void prepare(spamdish *d): d.oz_of_spam = 42 d.filler = dishes.sausage def serve(): cdef spamdish d prepare(&d) print "%d oz spam, filler no. %d" % (d.oz_of_spam, d.filler) It is important to understand that the :keyword:`cimport` statement can only be used to import C data types, C functions and variables, and extension types. It cannot be used to import any Python objects, and (with one exception) it doesn't imply any Python import at run time. If you want to refer to any Python names from a module that you have cimported, you will have to include a regular import statement for it as well. The exception is that when you use :keyword:`cimport` to import an extension type, its type object is imported at run time and made available by the name under which you imported it. Using :keyword:`cimport` to import extension types is covered in more detail below. If a ``.pxd`` file changes, any modules that :keyword:`cimport` from it may need to be recompiled. The ``Cython.Build.cythonize`` utility can take care of this for you. Search paths for definition files ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ When you :keyword:`cimport` a module called ``modulename``, the Cython compiler searches for a file called :file:`modulename.pxd`. It searches for this file along the path for include files (as specified by ``-I`` command line options or the ``include_path`` option to ``cythonize()``), as well as ``sys.path``. Using ``package_data`` to install ``.pxd`` files in your ``setup.py`` script allows other packages to cimport items from your module as a dependency. Also, whenever you compile a file :file:`modulename.pyx`, the corresponding definition file :file:`modulename.pxd` is first searched for along the include path (but not ``sys.path``), and if found, it is processed before processing the ``.pyx`` file. Using cimport to resolve naming conflicts ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The :keyword:`cimport` mechanism provides a clean and simple way to solve the problem of wrapping external C functions with Python functions of the same name. All you need to do is put the extern C declarations into a ``.pxd`` file for an imaginary module, and :keyword:`cimport` that module. You can then refer to the C functions by qualifying them with the name of the module. 
Here's an example: :file:`c_lunch.pxd`:: cdef extern from "lunch.h": void eject_tomato(float) :file:`lunch.pyx`:: cimport c_lunch def eject_tomato(float speed): c_lunch.eject_tomato(speed) You don't need any :file:`c_lunch.pyx` file, because the only things defined in :file:`c_lunch.pxd` are extern C entities. There won't be any actual ``c_lunch`` module at run time, but that doesn't matter; the :file:`c_lunch.pxd` file has done its job of providing an additional namespace at compile time. Sharing C Functions =================== C functions defined at the top level of a module can be made available via :keyword:`cimport` by putting headers for them in the ``.pxd`` file, for example: :file:`volume.pxd`:: cdef float cube(float) :file:`volume.pyx`:: cdef float cube(float x): return x * x * x :file:`spammery.pyx`:: from volume cimport cube def menu(description, size): print description, ":", cube(size), \ "cubic metres of spam" menu("Entree", 1) menu("Main course", 3) menu("Dessert", 2) .. note:: When a module exports a C function in this way, an object appears in the module dictionary under the function's name. However, you can't make use of this object from Python, nor can you use it from Cython using a normal import statement; you have to use :keyword:`cimport`. Sharing Extension Types ======================= An extension type can be made available via :keyword:`cimport` by splitting its definition into two parts, one in a definition file and the other in the corresponding implementation file. The definition part of the extension type can only declare C attributes and C methods, not Python methods, and it must declare all of that type's C attributes and C methods. The implementation part must implement all of the C methods declared in the definition part, and may not add any further C attributes. It may also define Python methods. Here is an example of a module which defines and exports an extension type, and another module which uses it: :file:`Shrubbing.pxd`:: cdef class Shrubbery: cdef int width cdef int length :file:`Shrubbing.pyx`:: cdef class Shrubbery: def __cinit__(self, int w, int l): self.width = w self.length = l def standard_shrubbery(): return Shrubbery(3, 7) :file:`Landscaping.pyx`:: cimport Shrubbing import Shrubbing cdef Shrubbing.Shrubbery sh sh = Shrubbing.standard_shrubbery() print "Shrubbery size is %d x %d" % (sh.width, sh.length) One would then need to compile both of these modules, e.g. using :file:`setup.py`:: from distutils.core import setup from Cython.Build import cythonize setup(ext_modules = cythonize(["Landscaping.pyx", "Shrubbing.pyx"])) Some things to note about this example: * There is a :keyword:`cdef` class Shrubbery declaration in both :file:`Shrubbing.pxd` and :file:`Shrubbing.pyx`. When the Shrubbing module is compiled, these two declarations are combined into one. * In Landscaping.pyx, the :keyword:`cimport` Shrubbing declaration allows us to refer to the Shrubbery type as :class:`Shrubbing.Shrubbery`. But it doesn't bind the name Shrubbing in Landscaping's module namespace at run time, so to access :func:`Shrubbing.standard_shrubbery` we also need to ``import Shrubbing``. Cython-0.26.1/docs/src/userguide/numpy_pythran.rst0000664000175000017500000000336513143605603023022 0ustar stefanstefan00000000000000.. highlight:: python .. _numpy-pythran: ************************** Pythran as a Numpy backend ************************** Using the flag ``--np-pythran``, it is possible to use the `Pythran`_ numpy implementation for numpy related operations. 
One advantage to use this backend is that the Pythran implementation uses C++ expression templates to save memory transfers and can benefit from SIMD instructions of modern CPU. This can lead to really interesting speedup in some cases, going from 2 up to 16, depending on the targeted CPU architecture and the original algorithm. Please note that this feature is experimental. Usage example with distutils ---------------------------- You first need to install Pythran. See its `documentation `_ for more information. Then, simply add a ``cython: np_pythran=True`` directive at the top of the Python files that needs to be compiled using Pythran numpy support. Here is an example of a simple ``setup.py`` file using distutils: .. code:: from distutils.core import setup from Cython.Build import cythonize setup( name = "My hello app", ext_modules = cythonize('hello_pythran.pyx') ) Then, with the following header in ``hello_pythran.pyx``: .. code:: # cython: np_pythran=True ``hello_pythran.pyx`` will be compiled using Pythran numpy support. Please note that Pythran can further be tweaked by adding settings in the ``$HOME/.pythranrc`` file. For instance, this can be used to enable `Boost.SIMD`_ support. See the `Pythran user manual `_ for more information. .. _Pythran: https://github.com/serge-sans-paille/pythran .. _Boost.SIMD: https://github.com/NumScale/boost.simd Cython-0.26.1/docs/src/userguide/pyrex_differences.rst0000664000175000017500000002641513143605603023612 0ustar stefanstefan00000000000000.. highlight:: cython .. _pyrex-differences: ************************************** Differences between Cython and Pyrex ************************************** .. warning:: Both Cython and Pyrex are moving targets. It has come to the point that an explicit list of all the differences between the two projects would be laborious to list and track, but hopefully this high-level list gives an idea of the differences that are present. It should be noted that both projects make an effort at mutual compatibility, but Cython's goal is to be as close to and complete as Python as reasonable. Python 3 Support ================ Cython creates ``.c`` files that can be built and used with both Python 2.x and Python 3.x. In fact, compiling your module with Cython may very well be the easiest way to port code to Python 3. We are also working to make the compiler run in both Python 2.x and 3.x. Many Python 3 constructs are already supported by Cython. List/Set/Dict Comprehensions ---------------------------- Cython supports the different comprehensions defined by Python 3 for lists, sets and dicts:: [expr(x) for x in A] # list {expr(x) for x in A} # set {key(x) : value(x) for x in A} # dict Looping is optimized if ``A`` is a list, tuple or dict. You can use the :keyword:`for` ... :keyword:`from` syntax, too, but it is generally preferred to use the usual :keyword:`for` ... :keyword:`in` ``range(...)`` syntax with a C run variable (e.g. ``cdef int i``). .. note:: see :ref:`automatic-range-conversion` Note that Cython also supports set literals starting from Python 2.4. Keyword-only arguments ---------------------- Python functions can have keyword-only arguments listed after the ``*`` parameter and before the ``**`` parameter if any, e.g.:: def f(a, b, *args, c, d = 42, e, **kwds): ... Here ``c``, ``d`` and ``e`` cannot be passed as position arguments and must be passed as keyword arguments. Furthermore, ``c`` and ``e`` are required keyword arguments, since they do not have a default value. 
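
Calls to such a function then look like this (an illustrative sketch)::

    f(1, 2, 3, c=10, e=20)   # fine: c and e given by keyword, d keeps its default
    f(1, 2, 3, 10, 20)       # TypeError: c and e must be passed as keyword arguments
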
If the parameter name after the ``*`` is omitted, the function will not accept any extra positional arguments, e.g.:: def g(a, b, *, c, d): ... takes exactly two positional parameters and has two required keyword parameters. Conditional expressions "x if b else y" ========================================= Conditional expressions as described in http://www.python.org/dev/peps/pep-0308/:: X if C else Y Only one of ``X`` and ``Y`` is evaluated (depending on the value of C). .. _inline: cdef inline ============= Module level functions can now be declared inline, with the :keyword:`inline` keyword passed on to the C compiler. These can be as fast as macros.:: cdef inline int something_fast(int a, int b): return a*a + b Note that class-level :keyword:`cdef` functions are handled via a virtual function table, so the compiler won't be able to inline them in almost all cases. Assignment on declaration (e.g. "cdef int spam = 5") ====================================================== In Pyrex, one must write:: cdef int i, j, k i = 2 j = 5 k = 7 Now, with cython, one can write:: cdef int i = 2, j = 5, k = 7 The expression on the right hand side can be arbitrarily complicated, e.g.:: cdef int n = python_call(foo(x,y), a + b + c) - 32 'by' expression in for loop (e.g. "for i from 0 <= i < 10 by 2") ================================================================== :: for i from 0 <= i < 10 by 2: print i yields:: 0 2 4 6 8 .. note:: Usage of this syntax is discouraged as it is redundant with the normal Python :keyword:`for` loop. See :ref:`automatic-range-conversion`. Boolean int type (e.g. it acts like a c int, but coerces to/from python as a boolean) ====================================================================================== In C, ints are used for truth values. In python, any object can be used as a truth value (using the :meth:`__nonzero__` method), but the canonical choices are the two boolean objects ``True`` and ``False``. The :c:type:`bint` (for "boolean int") type is compiled to a C int, but coerces to and from Python as booleans. The return type of comparisons and several builtins is a :c:type:`bint` as well. This reduces the need for wrapping things in :func:`bool()`. For example, one can write:: def is_equal(x): return x == y which would return ``1`` or ``0`` in Pyrex, but returns ``True`` or ``False`` in Cython. One can declare variables and return values for functions to be of the :c:type:`bint` type. For example:: cdef int i = x cdef bint b = x The first conversion would happen via ``x.__int__()`` whereas the second would happen via ``x.__bool__()`` (a.k.a. ``__nonzero__()``), with appropriate optimisations for known builtin types. Executable class bodies ======================= Including a working :func:`classmethod`:: cdef class Blah: def some_method(self): print self some_method = classmethod(some_method) a = 2*3 print "hi", a cpdef functions ================= Cython adds a third function type on top of the usual :keyword:`def` and :keyword:`cdef`. If a function is declared :keyword:`cpdef` it can be called from and overridden by both extension and normal python subclasses. You can essentially think of a :keyword:`cpdef` method as a :keyword:`cdef` method + some extras. (That's how it's implemented at least.) First, it creates a :keyword:`def` method that does nothing but call the underlying :keyword:`cdef` method (and does argument unpacking/coercion if needed). 
At the top of the :keyword:`cdef` method a little bit of code is added to see if it's overridden, similar to the following pseudocode:: if hasattr(type(self), '__dict__'): foo = self.foo if foo is not wrapper_foo: return foo(args) [cdef method body] To detect whether or not a type has a dictionary, it just checks the ``tp_dictoffset`` slot, which is ``NULL`` (by default) for extension types, but non- null for instance classes. If the dictionary exists, it does a single attribute lookup and can tell (by comparing pointers) whether or not the returned result is actually a new function. If, and only if, it is a new function, then the arguments packed into a tuple and the method called. This is all very fast. A flag is set so this lookup does not occur if one calls the method on the class directly, e.g.:: cdef class A: cpdef foo(self): pass x = A() x.foo() # will check to see if overridden A.foo(x) # will call A's implementation whether overridden or not See :ref:`early-binding-for-speed` for explanation and usage tips. .. _automatic-range-conversion: Automatic range conversion ============================ This will convert statements of the form ``for i in range(...)`` to ``for i from ...`` when ``i`` is any cdef'd integer type, and the direction (i.e. sign of step) can be determined. .. warning:: This may change the semantics if the range causes assignment to ``i`` to overflow. Specifically, if this option is set, an error will be raised before the loop is entered, whereas without this option the loop will execute until a overflowing value is encountered. If this affects you, change ``Cython/Compiler/Options.py`` (eventually there will be a better way to set this). More friendly type casting =========================== In Pyrex, if one types ``x`` where ``x`` is a Python object, one will get the memory address of ``x``. Likewise, if one types ``i`` where ``i`` is a C int, one will get an "object" at location ``i`` in memory. This leads to confusing results and segfaults. In Cython ``x`` will try and do a coercion (as would happen on assignment of ``x`` to a variable of type type) if exactly one of the types is a python object. It does not stop one from casting where there is no conversion (though it will emit a warning). If one really wants the address, cast to a ``void *`` first. As in Pyrex ``x`` will cast ``x`` to type :c:type:`MyExtensionType` without any type checking. Cython supports the syntax ```` to do the cast with type checking (i.e. it will throw an error if ``x`` is not a (subclass of) :c:type:`MyExtensionType`. Optional arguments in cdef/cpdef functions ============================================ Cython now supports optional arguments for :keyword:`cdef` and :keyword:`cpdef` functions. The syntax in the ``.pyx`` file remains as in Python, but one declares such functions in the ``.pxd`` file by writing ``cdef foo(x=*)``. The number of arguments may increase on subclassing, but the argument types and order must remain the same. There is a slight performance penalty in some cases when a cdef/cpdef function without any optional is overridden with one that does have default argument values. For example, one can have the ``.pxd`` file:: cdef class A: cdef foo(self) cdef class B(A) cdef foo(self, x=*) cdef class C(B): cpdef foo(self, x=*, int k=*) with corresponding ``.pyx`` file:: cdef class A: cdef foo(self): print "A" cdef class B(A) cdef foo(self, x=None) print "B", x cdef class C(B): cpdef foo(self, x=True, int k=3) print "C", x, k .. 
note:: this also demonstrates how :keyword:`cpdef` functions can override :keyword:`cdef` functions. Function pointers in structs ============================= Functions declared in :keyword:`struct` are automatically converted to function pointers for convenience. C++ Exception handling ========================= :keyword:`cdef` functions can now be declared as:: cdef int foo(...) except + cdef int foo(...) except +TypeError cdef int foo(...) except +python_error_raising_function in which case a Python exception will be raised when a C++ error is caught. See :ref:`wrapping-cplusplus` for more details. Synonyms ========= ``cdef import from`` means the same thing as ``cdef extern from`` Source code encoding ====================== Cython supports :PEP:`3120` and :PEP:`263`, i.e. you can start your Cython source file with an encoding comment and generally write your source code in UTF-8. This impacts the encoding of byte strings and the conversion of unicode string literals like ``u'abcd'`` to unicode objects. Automatic ``typecheck`` ======================== Rather than introducing a new keyword ``typecheck`` as explained in the `Pyrex docs `_, Cython emits a (non-spoofable and faster) typecheck whenever :func:`isinstance` is used with an extension type as the second parameter. From __future__ directives ========================== Cython supports several ``from __future__ import ...`` directives, namely ``absolute_import``, ``unicode_literals``, ``print_function`` and ``division``. With statements are always enabled. Pure Python mode ================ Cython has support for compiling ``.py`` files, and accepting type annotations using decorators and other valid Python syntax. This allows the same source to be interpreted as straight Python, or compiled for optimized results. See :ref:`pure-mode` for more details. Cython-0.26.1/docs/src/userguide/numpy_tutorial.rst0000664000175000017500000005314013143605603023174 0ustar stefanstefan00000000000000.. highlight:: cython .. _numpy_tutorial: ************************** Cython for NumPy users ************************** This tutorial is aimed at NumPy users who have no experience with Cython at all. If you have some knowledge of Cython you may want to skip to the ''Efficient indexing'' section which explains the new improvements made in summer 2008. The main scenario considered is NumPy end-use rather than NumPy/SciPy development. The reason is that Cython is not (yet) able to support functions that are generic with respect to datatype and the number of dimensions in a high-level fashion. This restriction is much more severe for SciPy development than more specific, "end-user" functions. See the last section for more information on this. The style of this tutorial will not fit everybody, so you can also consider: * Robert Bradshaw's `slides on cython for SciPy2008 `_ (a higher-level and quicker introduction) * Basic Cython documentation (see `Cython front page `_). * ``[:enhancements/buffer:Spec for the efficient indexing]`` .. Note:: The fast array access documented below is a completely new feature, and there may be bugs waiting to be discovered. It might be a good idea to do a manual sanity check on the C code Cython generates before using this for serious purposes, at least until some months have passed. Cython at a glance ==================== Cython is a compiler which compiles Python-like code files to C code. Still, ''Cython is not a Python to C translator''. 
That is, it doesn't take your full program and "turns it into C" -- rather, the result makes full use of the Python runtime environment. A way of looking at it may be that your code is still Python in that it runs within the Python runtime environment, but rather than compiling to interpreted Python bytecode one compiles to native machine code (but with the addition of extra syntax for easy embedding of faster C-like code). This has two important consequences: * Speed. How much depends very much on the program involved though. Typical Python numerical programs would tend to gain very little as most time is spent in lower-level C that is used in a high-level fashion. However for-loop-style programs can gain many orders of magnitude, when typing information is added (and is so made possible as a realistic alternative). * Easy calling into C code. One of Cython's purposes is to allow easy wrapping of C libraries. When writing code in Cython you can call into C code as easily as into Python code. Some Python constructs are not yet supported, though making Cython compile all Python code is a stated goal (among the more important omissions are inner functions and generator functions). Your Cython environment ======================== Using Cython consists of these steps: 1. Write a :file:`.pyx` source file 2. Run the Cython compiler to generate a C file 3. Run a C compiler to generate a compiled library 4. Run the Python interpreter and ask it to import the module However there are several options to automate these steps: 1. The `SAGE `_ mathematics software system provides excellent support for using Cython and NumPy from an interactive command line (like IPython) or through a notebook interface (like Maple/Mathematica). See `this documentation `_. 2. A version of `pyximport `_ is shipped with Cython, so that you can import pyx-files dynamically into Python and have them compiled automatically (See :ref:`pyximport`). 3. Cython supports distutils so that you can very easily create build scripts which automate the process, this is the preferred method for full programs. 4. Manual compilation (see below) .. Note:: If using another interactive command line environment than SAGE, like IPython or Python itself, it is important that you restart the process when you recompile the module. It is not enough to issue an "import" statement again. Installation ============= Unless you are used to some other automatic method: `download Cython `_ (0.9.8.1.1 or later), unpack it, and run the usual ```python setup.py install``. This will install a ``cython`` executable on your system. It is also possible to use Cython from the source directory without installing (simply launch :file:`cython.py` in the root directory). As of this writing SAGE comes with an older release of Cython than required for this tutorial. So if using SAGE you should download the newest Cython and then execute :: $ cd path/to/cython-distro $ path-to-sage/sage -python setup.py install This will install the newest Cython into SAGE. Manual compilation ==================== As it is always important to know what is going on, I'll describe the manual method here. First Cython is run:: $ cython yourmod.pyx This creates :file:`yourmod.c` which is the C source for a Python extension module. A useful additional switch is ``-a`` which will generate a document :file:`yourmod.html`) that shows which Cython code translates to which C code line by line. Then we compile the C file. 
This may vary according to your system, but the C file should be built like Python was built. Python documentation for writing extensions should have some details. On Linux this often means something like:: $ gcc -shared -pthread -fPIC -fwrapv -O2 -Wall -fno-strict-aliasing -I/usr/include/python2.7 -o yourmod.so yourmod.c ``gcc`` should have access to the NumPy C header files so if they are not installed at :file:`/usr/include/numpy` or similar you may need to pass another option for those. This creates :file:`yourmod.so` in the same directory, which is importable by Python by using a normal ``import yourmod`` statement. The first Cython program ========================== The code below does 2D discrete convolution of an image with a filter (and I'm sure you can do better!, let it serve for demonstration purposes). It is both valid Python and valid Cython code. I'll refer to it as both :file:`convolve_py.py` for the Python version and :file:`convolve1.pyx` for the Cython version -- Cython uses ".pyx" as its file suffix. .. code-block:: python from __future__ import division import numpy as np def naive_convolve(f, g): # f is an image and is indexed by (v, w) # g is a filter kernel and is indexed by (s, t), # it needs odd dimensions # h is the output image and is indexed by (x, y), # it is not cropped if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: raise ValueError("Only odd dimensions on filter supported") # smid and tmid are number of pixels between the center pixel # and the edge, ie for a 5x5 filter they will be 2. # # The output size is calculated by adding smid, tmid to each # side of the dimensions of the input image. vmax = f.shape[0] wmax = f.shape[1] smax = g.shape[0] tmax = g.shape[1] smid = smax // 2 tmid = tmax // 2 xmax = vmax + 2*smid ymax = wmax + 2*tmid # Allocate result image. h = np.zeros([xmax, ymax], dtype=f.dtype) # Do convolution for x in range(xmax): for y in range(ymax): # Calculate pixel value for h at (x,y). Sum one component # for each pixel (s, t) of the filter g. s_from = max(smid - x, -smid) s_to = min((xmax - x) - smid, smid + 1) t_from = max(tmid - y, -tmid) t_to = min((ymax - y) - tmid, tmid + 1) value = 0 for s in range(s_from, s_to): for t in range(t_from, t_to): v = x - smid + s w = y - tmid + t value += g[smid - s, tmid - t] * f[v, w] h[x, y] = value return h This should be compiled to produce :file:`yourmod.so` (for Linux systems). We run a Python session to test both the Python version (imported from ``.py``-file) and the compiled Cython module. .. sourcecode:: ipython In [1]: import numpy as np In [2]: import convolve_py In [3]: convolve_py.naive_convolve(np.array([[1, 1, 1]], dtype=np.int), ... np.array([[1],[2],[1]], dtype=np.int)) Out [3]: array([[1, 1, 1], [2, 2, 2], [1, 1, 1]]) In [4]: import convolve1 In [4]: convolve1.naive_convolve(np.array([[1, 1, 1]], dtype=np.int), ... np.array([[1],[2],[1]], dtype=np.int)) Out [4]: array([[1, 1, 1], [2, 2, 2], [1, 1, 1]]) In [11]: N = 100 In [12]: f = np.arange(N*N, dtype=np.int).reshape((N,N)) In [13]: g = np.arange(81, dtype=np.int).reshape((9, 9)) In [19]: %timeit -n2 -r3 convolve_py.naive_convolve(f, g) 2 loops, best of 3: 1.86 s per loop In [20]: %timeit -n2 -r3 convolve1.naive_convolve(f, g) 2 loops, best of 3: 1.41 s per loop There's not such a huge difference yet; because the C code still does exactly what the Python interpreter does (meaning, for instance, that a new object is allocated for each number used). 
Look at the generated html file and see what is needed for even the simplest statements you get the point quickly. We need to give Cython more information; we need to add types. Adding types ============= To add types we use custom Cython syntax, so we are now breaking Python source compatibility. Here's :file:`convolve2.pyx`. *Read the comments!* :: from __future__ import division import numpy as np # "cimport" is used to import special compile-time information # about the numpy module (this is stored in a file numpy.pxd which is # currently part of the Cython distribution). cimport numpy as np # We now need to fix a datatype for our arrays. I've used the variable # DTYPE for this, which is assigned to the usual NumPy runtime # type info object. DTYPE = np.int # "ctypedef" assigns a corresponding compile-time type to DTYPE_t. For # every type in the numpy module there's a corresponding compile-time # type with a _t-suffix. ctypedef np.int_t DTYPE_t # The builtin min and max functions works with Python objects, and are # so very slow. So we create our own. # - "cdef" declares a function which has much less overhead than a normal # def function (but it is not Python-callable) # - "inline" is passed on to the C compiler which may inline the functions # - The C type "int" is chosen as return type and argument types # - Cython allows some newer Python constructs like "a if x else b", but # the resulting C file compiles with Python 2.3 through to Python 3.0 beta. cdef inline int int_max(int a, int b): return a if a >= b else b cdef inline int int_min(int a, int b): return a if a <= b else b # "def" can type its arguments but not have a return type. The type of the # arguments for a "def" function is checked at run-time when entering the # function. # # The arrays f, g and h is typed as "np.ndarray" instances. The only effect # this has is to a) insert checks that the function arguments really are # NumPy arrays, and b) make some attribute access like f.shape[0] much # more efficient. (In this example this doesn't matter though.) def naive_convolve(np.ndarray f, np.ndarray g): if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: raise ValueError("Only odd dimensions on filter supported") assert f.dtype == DTYPE and g.dtype == DTYPE # The "cdef" keyword is also used within functions to type variables. It # can only be used at the top indentation level (there are non-trivial # problems with allowing them in other places, though we'd love to see # good and thought out proposals for it). # # For the indices, the "int" type is used. This corresponds to a C int, # other C types (like "unsigned int") could have been used instead. # Purists could use "Py_ssize_t" which is the proper Python type for # array indices. cdef int vmax = f.shape[0] cdef int wmax = f.shape[1] cdef int smax = g.shape[0] cdef int tmax = g.shape[1] cdef int smid = smax // 2 cdef int tmid = tmax // 2 cdef int xmax = vmax + 2*smid cdef int ymax = wmax + 2*tmid cdef np.ndarray h = np.zeros([xmax, ymax], dtype=DTYPE) cdef int x, y, s, t, v, w # It is very important to type ALL your variables. You do not get any # warnings if not, only much slower code (they are implicitly typed as # Python objects). cdef int s_from, s_to, t_from, t_to # For the value variable, we want to use the same data type as is # stored in the array, so we use "DTYPE_t" as defined above. # NB! An important side-effect of this is that if "value" overflows its # datatype size, it will simply wrap around like in C, rather than raise # an error like in Python. 
cdef DTYPE_t value for x in range(xmax): for y in range(ymax): s_from = int_max(smid - x, -smid) s_to = int_min((xmax - x) - smid, smid + 1) t_from = int_max(tmid - y, -tmid) t_to = int_min((ymax - y) - tmid, tmid + 1) value = 0 for s in range(s_from, s_to): for t in range(t_from, t_to): v = x - smid + s w = y - tmid + t value += g[smid - s, tmid - t] * f[v, w] h[x, y] = value return h At this point, have a look at the generated C code for :file:`convolve1.pyx` and :file:`convolve2.pyx`. Click on the lines to expand them and see corresponding C. (Note that this code annotation is currently experimental and especially "trailing" cleanup code for a block may stick to the last expression in the block and make it look worse than it is -- use some common sense). * .. literalinclude: convolve1.html * .. literalinclude: convolve2.html Especially have a look at the for loops: In :file:`convolve1.c`, these are ~20 lines of C code to set up while in :file:`convolve2.c` a normal C for loop is used. After building this and continuing my (very informal) benchmarks, I get: .. sourcecode:: ipython In [21]: import convolve2 In [22]: %timeit -n2 -r3 convolve2.naive_convolve(f, g) 2 loops, best of 3: 828 ms per loop Efficient indexing ==================== There's still a bottleneck killing performance, and that is the array lookups and assignments. The ``[]``-operator still uses full Python operations -- what we would like to do instead is to access the data buffer directly at C speed. What we need to do then is to type the contents of the :obj:`ndarray` objects. We do this with a special "buffer" syntax which must be told the datatype (first argument) and number of dimensions ("ndim" keyword-only argument, if not provided then one-dimensional is assumed). More information on this syntax [:enhancements/buffer:can be found here]. Showing the changes needed to produce :file:`convolve3.pyx` only:: ... def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g): ... cdef np.ndarray[DTYPE_t, ndim=2] h = ... Usage: .. sourcecode:: ipython In [18]: import convolve3 In [19]: %timeit -n3 -r100 convolve3.naive_convolve(f, g) 3 loops, best of 100: 11.6 ms per loop Note the importance of this change. *Gotcha*: This efficient indexing only affects certain index operations, namely those with exactly ``ndim`` number of typed integer indices. So if ``v`` for instance isn't typed, then the lookup ``f[v, w]`` isn't optimized. On the other hand this means that you can continue using Python objects for sophisticated dynamic slicing etc. just as when the array is not typed. Tuning indexing further ======================== The array lookups are still slowed down by two factors: 1. Bounds checking is performed. 2. Negative indices are checked for and handled correctly. The code above is explicitly coded so that it doesn't use negative indices, and it (hopefully) always access within bounds. We can add a decorator to disable bounds checking:: ... cimport cython @cython.boundscheck(False) # turn off bounds-checking for entire function def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g): ... Now bounds checking is not performed (and, as a side-effect, if you ''do'' happen to access out of bounds you will in the best case crash your program and in the worst case corrupt data). It is possible to switch bounds-checking mode in many ways, see :ref:`compiler-directives` for more information. 
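For instance, rather than decorating the whole function, a directive can also
be applied to a single block by using it in a ``with`` statement. The
following stand-alone sketch is only illustrative (the ``row_sum`` function
is invented for this example and reuses the ``DTYPE_t`` ctypedef from
:file:`convolve2.pyx` above)::

    cimport cython
    cimport numpy as np

    def row_sum(np.ndarray[DTYPE_t, ndim=1] data):
        cdef int i
        cdef DTYPE_t total = 0
        with cython.boundscheck(False):   # bounds checking disabled only inside this block
            for i in range(data.shape[0]):
                total += data[i]
        return total
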
Negative indices are dealt with by assuring Cython that the indices will be
positive, by casting the variables to unsigned integer types (if you do have
negative values, then this casting will create a very large positive value
instead and you will attempt to access out-of-bounds values). Casting is done
with a special ``<>``-syntax. The code below is changed to use either
unsigned ints or casting as appropriate::

    ...
    cdef int s, t                     # changed
    cdef unsigned int x, y, v, w      # changed
    cdef int s_from, s_to, t_from, t_to
    cdef DTYPE_t value
    for x in range(xmax):
        for y in range(ymax):
            s_from = max(smid - x, -smid)
            s_to = min((xmax - x) - smid, smid + 1)
            t_from = max(tmid - y, -tmid)
            t_to = min((ymax - y) - tmid, tmid + 1)
            value = 0
            for s in range(s_from, s_to):
                for t in range(t_from, t_to):
                    v = <unsigned int>(x - smid + s)    # changed
                    w = <unsigned int>(y - tmid + t)    # changed
                    value += g[<unsigned int>(smid - s), <unsigned int>(tmid - t)] * f[v, w]   # changed
            h[x, y] = value
    ...

(In the next Cython release we will likely add a compiler directive or
argument to the ``np.ndarray[]``-type specifier to disable negative indexing
so that casting so much isn't necessary; feedback on this is welcome.)

The function call overhead now starts to play a role, so we compare the latter
two examples with larger N:

.. sourcecode:: ipython

    In [11]: %timeit -n3 -r100 convolve4.naive_convolve(f, g)
    3 loops, best of 100: 5.97 ms per loop
    In [12]: N = 1000
    In [13]: f = np.arange(N*N, dtype=np.int).reshape((N,N))
    In [14]: g = np.arange(81, dtype=np.int).reshape((9, 9))
    In [17]: %timeit -n1 -r10 convolve3.naive_convolve(f, g)
    1 loops, best of 10: 1.16 s per loop
    In [18]: %timeit -n1 -r10 convolve4.naive_convolve(f, g)
    1 loops, best of 10: 597 ms per loop

(Also this is a mixed benchmark as the result array is allocated within the
function call.)

.. Warning::

    Speed comes with some cost. Especially it can be dangerous to set typed
    objects (like ``f``, ``g`` and ``h`` in our sample code) to ``None``.
    Setting such objects to ``None`` is entirely legal, but all you can do
    with them is check whether they are None. All other use (attribute lookup
    or indexing) can potentially segfault or corrupt data (rather than raising
    exceptions as they would in Python).

    The actual rules are a bit more complicated but the main message is clear:
    Do not use typed objects without knowing that they are not set to ``None``.

More generic code
==================

It would be possible to do::

    def naive_convolve(object[DTYPE_t, ndim=2] f, ...):

i.e. use :obj:`object` rather than :obj:`np.ndarray`. Under Python 3.0 this
can allow your algorithm to work with any libraries supporting the buffer
interface; and support for e.g. the Python Imaging Library may easily be added
if someone is interested also under Python 2.x.

There is some speed penalty to this though (as one makes more assumptions
compile-time if the type is set to :obj:`np.ndarray`, specifically it is
assumed that the data is stored in pure strided mode and not in indirect
mode). [:enhancements/buffer:More information]

The future
============

These are some points to consider for further development. All points listed
here have gone through a lot of thinking and planning already; still they may
or may not happen depending on available developer time and resources for
Cython.

1. Support for efficient access to structs/records stored in arrays; currently
   only primitive types are allowed.
2. Support for efficient access to complex floating point types in arrays. The
   main obstacle here is getting support for efficient complex datatypes in
   Cython.
3.
Calling NumPy/SciPy functions currently has a Python call overhead; it would be possible to take a short-cut from Cython directly to C. (This does however require some isolated and incremental changes to those libraries; mail the Cython mailing list for details). 4. Efficient code that is generic with respect to the number of dimensions. This can probably be done today by calling the NumPy C multi-dimensional iterator API directly; however it would be nice to have for-loops over :func:`enumerate` and :func:`ndenumerate` on NumPy arrays create efficient code. 5. A high-level construct for writing type-generic code, so that one can write functions that work simultaneously with many datatypes. Note however that a macro preprocessor language can help with doing this for now. Cython-0.26.1/docs/src/userguide/early_binding_for_speed.rst0000664000175000017500000001144713143605603024741 0ustar stefanstefan00000000000000.. highlight:: cython .. _early-binding-for-speed: ************************** Early Binding for Speed ************************** As a dynamic language, Python encourages a programming style of considering classes and objects in terms of their methods and attributes, more than where they fit into the class hierarchy. This can make Python a very relaxed and comfortable language for rapid development, but with a price - the 'red tape' of managing data types is dumped onto the interpreter. At run time, the interpreter does a lot of work searching namespaces, fetching attributes and parsing argument and keyword tuples. This run-time 'late binding' is a major cause of Python's relative slowness compared to 'early binding' languages such as C++. However with Cython it is possible to gain significant speed-ups through the use of 'early binding' programming techniques. For example, consider the following (silly) code example: .. sourcecode:: cython cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0; self.y0 = y0; self.x1 = x1; self.y1 = y1 def area(self): area = (self.x1 - self.x0) * (self.y1 - self.y0) if area < 0: area = -area return area def rectArea(x0, y0, x1, y1): rect = Rectangle(x0, y0, x1, y1) return rect.area() In the :func:`rectArea` method, the call to :meth:`rect.area` and the :meth:`.area` method contain a lot of Python overhead. However, in Cython, it is possible to eliminate a lot of this overhead in cases where calls occur within Cython code. For example: .. sourcecode:: cython cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0; self.y0 = y0; self.x1 = x1; self.y1 = y1 cdef int _area(self): cdef int area area = (self.x1 - self.x0) * (self.y1 - self.y0) if area < 0: area = -area return area def area(self): return self._area() def rectArea(x0, y0, x1, y1): cdef Rectangle rect rect = Rectangle(x0, y0, x1, y1) return rect._area() Here, in the Rectangle extension class, we have defined two different area calculation methods, the efficient :meth:`_area` C method, and the Python-callable :meth:`area` method which serves as a thin wrapper around :meth:`_area`. Note also in the function :func:`rectArea` how we 'early bind' by declaring the local variable ``rect`` which is explicitly given the type Rectangle. By using this declaration, instead of just dynamically assigning to ``rect``, we gain the ability to access the much more efficient C-callable :meth:`_area` method. 
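The difference is visible from the interpreter as well: the ``cdef``
:meth:`_area` method is not exposed to Python at all, only the ``def``
:meth:`area` wrapper is. Assuming the module above were compiled under the
name ``rect`` (the module name here is purely illustrative), a session would
look roughly like this::

    >>> import rect
    >>> r = rect.Rectangle(0, 0, 3, 4)
    >>> r.area()          # the def wrapper is callable from Python
    12
    >>> r._area()         # the cdef method is invisible to Python code
    Traceback (most recent call last):
      ...
    AttributeError: 'rect.Rectangle' object has no attribute '_area'
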
But Cython offers us more simplicity again, by allowing us to declare dual-access methods - methods that can be efficiently called at C level, but can also be accessed from pure Python code at the cost of the Python access overheads. Consider this code: .. sourcecode:: cython cdef class Rectangle: cdef int x0, y0 cdef int x1, y1 def __init__(self, int x0, int y0, int x1, int y1): self.x0 = x0; self.y0 = y0; self.x1 = x1; self.y1 = y1 cpdef int area(self): cdef int area area = (self.x1 - self.x0) * (self.y1 - self.y0) if area < 0: area = -area return area def rectArea(x0, y0, x1, y1): cdef Rectangle rect rect = Rectangle(x0, y0, x1, y1) return rect.area() .. note:: in earlier versions of Cython, the :keyword:`cpdef` keyword is ``rdef`` - but has the same effect). Here, we just have a single area method, declared as :keyword:`cpdef` to make it efficiently callable as a C function, but still accessible from pure Python (or late-binding Cython) code. If within Cython code, we have a variable already 'early-bound' (ie, declared explicitly as type Rectangle, (or cast to type Rectangle), then invoking its area method will use the efficient C code path and skip the Python overhead. But if in Pyrex or regular Python code we have a regular object variable storing a Rectangle object, then invoking the area method will require: * an attribute lookup for the area method * packing a tuple for arguments and a dict for keywords (both empty in this case) * using the Python API to call the method and within the area method itself: * parsing the tuple and keywords * executing the calculation code * converting the result to a python object and returning it So within Cython, it is possible to achieve massive optimisations by using strong typing in declaration and casting of variables. For tight loops which use method calls, and where these methods are pure C, the difference can be huge. Cython-0.26.1/docs/src/userguide/fusedtypes.rst0000664000175000017500000002150612542002467022276 0ustar stefanstefan00000000000000.. highlight:: cython .. _fusedtypes: *********************** Fused Types (Templates) *********************** Fused types allow you to have one type definition that can refer to multiple types. This allows you to write a single static-typed cython algorithm that can operate on values of multiple types. Thus fused types allow `generic programming`_ and are akin to templates in C++ or generics in languages like Java / C#. .. _generic programming: http://en.wikipedia.org/wiki/Generic_programming .. Note:: Support is still somewhat experimental, there may be bugs! .. Note:: Fused types are not currently supported as attributes of extension types. Only variables and function/method arguments can be declared with fused types. Quickstart ========== :: cimport cython ctypedef fused char_or_float: cython.char cython.float cpdef char_or_float plus_one(char_or_float var): return var + 1 def show_me(): cdef: cython.char a = 127 cython.float b = 127 print 'char', plus_one(a) print 'float', plus_one(b) This gives:: >>> show_me() char -128 float 128.0 ``plus_one(a)`` "specializes" the fused type ``char_or_float`` as a ``char``, whereas ``plus_one(b)`` specializes ``char_or_float`` as a ``float``. Declaring Fused Types ===================== Fused types may be declared as follows:: cimport cython ctypedef fused my_fused_type: cython.int cython.double This declares a new type called ``my_fused_type`` which can be *either* an ``int`` *or* a ``double``. 
Alternatively, the declaration may be written as:: my_fused_type = cython.fused_type(cython.int, cython.float) Only names may be used for the constituent types, but they may be any (non-fused) type, including a typedef. i.e. one may write:: ctypedef double my_double my_fused_type = cython.fused_type(cython.int, my_double) Using Fused Types ================= Fused types can be used to declare parameters of functions or methods:: cdef cfunc(my_fused_type arg): return arg + 1 If the you use the same fused type more than once in an argument list, then each specialization of the fused type must be the same:: cdef cfunc(my_fused_type arg1, my_fused_type arg2): return cython.typeof(arg1) == cython.typeof(arg2) In this case, the type of both parameters is either an int, or a double (according to the previous examples). However, because these arguments use the same fused type ``my_fused_type``, both ``arg1`` and ``arg2`` are specialized to the same type. Therefore this function returns True for every possible valid invocation. You are allowed to mix fused types however:: def func(A x, B y): ... where ``A`` and ``B`` are different fused types. This will result in specialized code paths for all combinations of types contained in ``A`` and ``B``. Fused types and arrays ---------------------- Note that specializations of only numeric types may not be very useful, as one can usually rely on promotion of types. This is not true for arrays, pointers and typed views of memory however. Indeed, one may write:: def myfunc(A[:, :] x): ... # and cdef otherfunc(A *x): ... Note that in Cython 0.20.x and earlier, the compiler generated the full cross product of all type combinations when a fused type was used by more than one memory view in a type signature, e.g. :: def myfunc(A[:] a, A[:] b): # a and b had independent item types in Cython 0.20.x and earlier. ... This was unexpected for most users, unlikely to be desired, and also inconsistent with other structured type declarations like C arrays of fused types, which were considered the same type. It was thus changed in Cython 0.21 to use the same type for all memory views of a fused type. In order to get the original behaviour, it suffices to declare the same fused type under different names, and then use these in the declarations:: ctypedef fused A: int long ctypedef fused B: int long def myfunc(A[:] a, B[:] b): # a and b are independent types here and may have different item types ... To get only identical types also in older Cython versions (pre-0.21), a ``ctypedef`` can be used:: ctypedef A[:] A_1d def myfunc(A_1d a, A_1d b): # a and b have identical item types here, also in older Cython versions ... Selecting Specializations ========================= You can select a specialization (an instance of the function with specific or specialized (i.e., non-fused) argument types) in two ways: either by indexing or by calling. Indexing -------- You can index functions with types to get certain specializations, i.e.:: cfunc[cython.p_double](p1, p2) # From Cython space func[float, double](myfloat, mydouble) # From Python space func[cython.float, cython.double](myfloat, mydouble) If a fused type is used as a base type, this will mean that the base type is the fused type, so the base type is what needs to be specialized:: cdef myfunc(A *x): ... 
# Specialize using int, not int * myfunc[int](myint) Calling ------- A fused function can also be called with arguments, where the dispatch is figured out automatically:: cfunc(p1, p2) func(myfloat, mydouble) For a ``cdef`` or ``cpdef`` function called from Cython this means that the specialization is figured out at compile time. For ``def`` functions the arguments are typechecked at runtime, and a best-effort approach is performed to figure out which specialization is needed. This means that this may result in a runtime ``TypeError`` if no specialization was found. A ``cpdef`` function is treated the same way as a ``def`` function if the type of the function is unknown (e.g. if it is external and there is no cimport for it). The automatic dispatching rules are typically as follows, in order of preference: * try to find an exact match * choose the biggest corresponding numerical type (biggest float, biggest complex, biggest int) Built-in Fused Types ==================== There are some built-in fused types available for convenience, these are:: cython.integral # short, int, long cython.floating # float, double cython.numeric # short, int, long, float, double, float complex, double complex Casting Fused Functions ======================= Fused ``cdef`` and ``cpdef`` functions may be cast or assigned to C function pointers as follows:: cdef myfunc(cython.floating, cython.integral): ... # assign directly cdef object (*funcp)(float, int) funcp = myfunc funcp(f, i) # alternatively, cast it ( myfunc)(f, i) # This is also valid funcp = myfunc[float, int] funcp(f, i) Type Checking Specializations ============================= Decisions can be made based on the specializations of the fused parameters. False conditions are pruned to avoid invalid code. One may check with ``is``, ``is not`` and ``==`` and ``!=`` to see if a fused type is equal to a certain other non-fused type (to check the specialization), or use ``in`` and ``not in`` to figure out whether a specialization is part of another set of types (specified as a fused type). In example:: ctypedef fused bunch_of_types: ... ctypedef fused string_t: cython.p_char bytes unicode cdef cython.integral myfunc(cython.integral i, bunch_of_types s): cdef int *int_pointer cdef long *long_pointer # Only one of these branches will be compiled for each specialization! if cython.integral is int: int_pointer = &i else: long_pointer = &i if bunch_of_types in string_t: print "s is a string!" __signatures__ ============== Finally, function objects from ``def`` or ``cpdef`` functions have an attribute __signatures__, which maps the signature strings to the actual specialized functions. This may be useful for inspection. Listed signature strings may also be used as indices to the fused function, but the index format may change between Cython versions:: specialized_function = fused_function["MyExtensionClass|int|float"] It would usually be preferred to index like this, however:: specialized_function = fused_function[MyExtensionClass, int, float] Although the latter will select the biggest types for ``int`` and ``float`` from Python space, as they are not type identifiers but builtin types there. Passing ``cython.int`` and ``cython.float`` would resolve that, however. For memoryview indexing from python space we can do the following:: ctypedef fused my_fused_type: int[:, ::1] float[:, ::1] def func(my_fused_type array): ... my_fused_type[cython.int[:, ::1]](myarray) The same goes for when using e.g. ``cython.numeric[:, :]``. 
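To pull the above together, here is a small self-contained sketch (the module,
type and function names are invented for illustration) of a fused ``def``
function over one-dimensional typed memoryviews; calling it from Python
dispatches automatically on the item type of the buffer that is passed in::

    # fused_total.pyx -- illustrative sketch only
    ctypedef fused number_t:
        int
        double

    def total(number_t[:] values):
        cdef Py_ssize_t i
        cdef number_t acc = 0
        for i in range(values.shape[0]):
            acc += values[i]
        return acc

From Python, passing e.g. an ``array.array('i', ...)`` selects the ``int``
specialization and an ``array.array('d', ...)`` the ``double`` one, without
any explicit indexing of the function.
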
Cython-0.26.1/docs/src/userguide/buffer.rst0000664000175000017500000001453313023021033021337 0ustar stefanstefan00000000000000.. _buffer: Implementing the buffer protocol ================================ Cython objects can expose memory buffers to Python code by implementing the "buffer protocol". This chapter shows how to implement the protocol and make use of the memory managed by an extension type from NumPy. A matrix class -------------- The following Cython/C++ code implements a matrix of floats, where the number of columns is fixed at construction time but rows can be added dynamically. :: # matrix.pyx from libcpp.vector cimport vector cdef class Matrix: cdef unsigned ncols cdef vector[float] v def __cinit__(self, unsigned ncols): self.ncols = ncols def add_row(self): """Adds a row, initially zero-filled.""" self.v.extend(self.ncols) There are no methods to do anything productive with the matrices' contents. We could implement custom ``__getitem__``, ``__setitem__``, etc. for this, but instead we'll use the buffer protocol to expose the matrix's data to Python so we can use NumPy to do useful work. Implementing the buffer protocol requires adding two methods, ``__getbuffer__`` and ``__releasebuffer__``, which Cython handles specially. :: from cpython cimport Py_buffer from libcpp.vector cimport vector cdef class Matrix: cdef Py_ssize_t ncols cdef Py_ssize_t shape[2] cdef Py_ssize_t strides[2] cdef vector[float] v def __cinit__(self, Py_ssize_t ncols): self.ncols = ncols def add_row(self): """Adds a row, initially zero-filled.""" self.v.extend(self.ncols) def __getbuffer__(self, Py_buffer *buffer, int flags): cdef Py_ssize_t itemsize = sizeof(self.v[0]) self.shape[0] = self.v.size() / self.ncols self.shape[1] = self.ncols # Stride 1 is the distance, in bytes, between two items in a row; # this is the distance between two adjacent items in the vector. # Stride 0 is the distance between the first elements of adjacent rows. self.strides[1] = ( &(self.v[1]) - &(self.v[0])) self.strides[0] = self.ncols * self.strides[1] buffer.buf = &(self.v[0]) buffer.format = 'f' # float buffer.internal = NULL # see References buffer.itemsize = itemsize buffer.len = self.v.size() * itemsize # product(shape) * itemsize buffer.ndim = 2 buffer.obj = self buffer.readonly = 0 buffer.shape = self.shape buffer.strides = self.strides buffer.suboffsets = NULL # for pointer arrays only def __releasebuffer__(self, Py_buffer *buffer): pass The method ``Matrix.__getbuffer__`` fills a descriptor structure, called a ``Py_buffer``, that is defined by the Python C-API. It contains a pointer to the actual buffer in memory, as well as metadata about the shape of the array and the strides (step sizes to get from one element or row to the next). Its ``shape`` and ``strides`` members are pointers that must point to arrays of type and size ``Py_ssize_t[ndim]``. These arrays have to stay alive as long as any buffer views the data, so we store them on the ``Matrix`` object as members. The code is not yet complete, but we can already compile it and test the basic functionality. :: >>> from matrix import Matrix >>> import numpy as np >>> m = Matrix(10) >>> np.asarray(m) array([], shape=(0, 10), dtype=float32) >>> m.add_row() >>> a = np.asarray(m) >>> a[:] = 1 >>> m.add_row() >>> a = np.asarray(m) >>> a array([[ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.], [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]], dtype=float32) Now we can view the ``Matrix`` as a NumPy ``ndarray``, and modify its contents using standard NumPy operations. 
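For completeness: since the class uses ``libcpp.vector``, the extension has to
be built in C++ mode. A minimal build script for the listing above could look
like this (assuming the code is saved as :file:`matrix.pyx`, as in the comment
at the top of the listing; see :ref:`wrapping-cplusplus` for details)::

    # setup.py -- minimal sketch for building the matrix example
    from distutils.core import setup
    from Cython.Build import cythonize

    setup(ext_modules=cythonize("matrix.pyx"))

with C++ mode requested in the first comment block of :file:`matrix.pyx`
itself::

    # distutils: language = c++
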
Memory safety and reference counting ------------------------------------ The ``Matrix`` class as implemented so far is unsafe. The ``add_row`` operation can move the underlying buffer, which invalidates any NumPy (or other) view on the data. If you try to access values after an ``add_row`` call, you'll get outdated values or a segfault. This is where ``__releasebuffer__`` comes in. We can add a reference count to each matrix, and lock it for mutation whenever a view exists. :: cdef class Matrix: # ... cdef int view_count def __cinit__(self, Py_ssize_t ncols): self.ncols = ncols self.view_count = 0 def add_row(self): if self.view_count > 0: raise ValueError("can't add row while being viewed") self.v.resize(self.v.size() + self.ncols) def __getbuffer__(self, Py_buffer *buffer, int flags): # ... as before self.view_count += 1 def __releasebuffer__(self, Py_buffer *buffer): self.view_count -= 1 Flags ----- We skipped some input validation in the code. The ``flags`` argument to ``__getbuffer__`` comes from ``np.asarray`` (and other clients) and is an OR of boolean flags that describe the kind of array that is requested. Strictly speaking, if the flags contain ``PyBUF_ND``, ``PyBUF_SIMPLE``, or ``PyBUF_F_CONTIGUOUS``, ``__getbuffer__`` must raise a ``BufferError``. These macros can be ``cimport``'d from ``cpython.buffer``. (The matrix-in-vector structure actually conforms to ``PyBUF_ND``, but that would prohibit ``__getbuffer__`` from filling in the strides. A single-row matrix is F-contiguous, but a larger matrix is not.) References ---------- The buffer interface used here is set out in :PEP:`3118`, Revising the buffer protocol. A tutorial for using this API from C is on Jake Vanderplas's blog, `An Introduction to the Python Buffer Protocol `_. Reference documentation is available for `Python 3 `_ and `Python 2 `_. The Py2 documentation also describes an older buffer protocol that is no longer in use; since Python 2.6, the :PEP:`3118` protocol has been implemented, and the older protocol is only relevant for legacy code. Cython-0.26.1/docs/src/userguide/index.rst0000664000175000017500000000076413143605603021214 0ustar stefanstefan00000000000000Users Guide =========== Contents: .. toctree:: :maxdepth: 2 language_basics extension_types special_methods sharing_declarations external_C_code source_files_and_compilation early_binding_for_speed wrapping_CPlusPlus fusedtypes pypy limitations pyrex_differences memoryviews buffer parallelism debugging numpy_tutorial numpy_pythran Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` .. toctree:: Cython-0.26.1/docs/src/userguide/wrapping_CPlusPlus.rst0000664000175000017500000005600513023021033023667 0ustar stefanstefan00000000000000.. highlight:: cython .. _wrapping-cplusplus: ******************************** Using C++ in Cython ******************************** Overview ========= Cython has native support for most of the C++ language. Specifically: * C++ objects can now be dynamically allocated with ``new`` and ``del`` keywords. * C++ objects can be stack-allocated. * C++ classes can be declared with the new keyword ``cppclass``. * Templated classes and functions are supported. * Overloaded functions are supported. * Overloading of C++ operators (such as operator+, operator[],...) is supported. Procedure Overview ------------------- The general procedure for wrapping a C++ file can now be described as follows: * Specify C++ language in :file:`setup.py` script or locally in a source file. 
* Create one or more .pxd files with ``cdef extern from`` blocks and (if existing) the C++ namespace name. In these blocks, * declare classes as ``cdef cppclass`` blocks * declare public names (variables, methods and constructors) * Write an extension modules, ``cimport`` from the .pxd file and use the declarations. A simple Tutorial ================== An example C++ API ------------------- Here is a tiny C++ API which we will use as an example throughout this document. Let's assume it will be in a header file called :file:`Rectangle.h`: .. sourcecode:: c++ namespace shapes { class Rectangle { public: int x0, y0, x1, y1; Rectangle(); Rectangle(int x0, int y0, int x1, int y1); ~Rectangle(); int getArea(); void getSize(int* width, int* height); void move(int dx, int dy); }; } and the implementation in the file called :file:`Rectangle.cpp`: .. sourcecode:: c++ #include "Rectangle.h" namespace shapes { Rectangle::Rectangle() { } Rectangle::Rectangle(int X0, int Y0, int X1, int Y1) { x0 = X0; y0 = Y0; x1 = X1; y1 = Y1; } Rectangle::~Rectangle() { } int Rectangle::getArea() { return (x1 - x0) * (y1 - y0); } void Rectangle::getSize(int *width, int *height) { (*width) = x1 - x0; (*height) = y1 - y0; } void Rectangle::move(int dx, int dy) { x0 += dx; y0 += dy; x1 += dx; y1 += dy; } } This is pretty dumb, but should suffice to demonstrate the steps involved. Specify C++ language in setup.py --------------------------------- The best way to build Cython code from :file:`setup.py` scripts is the ``cythonize()`` function. To make Cython generate and compile C++ code with distutils, you just need to pass the option ``language="c++"``:: from distutils.core import setup from Cython.Build import cythonize setup(ext_modules = cythonize( "rect.pyx", # our Cython source sources=["Rectangle.cpp"], # additional source file(s) language="c++", # generate C++ code )) Cython will generate and compile the :file:`rect.cpp` file (from the :file:`rect.pyx`), then it will compile :file:`Rectangle.cpp` (implementation of the ``Rectangle`` class) and link both objects files together into :file:`rect.so`, which you can then import in Python using ``import rect`` (if you forget to link the :file:`Rectangle.o`, you will get missing symbols while importing the library in Python). Note that the ``language`` option has no effect on user provided Extension objects that are passed into ``cythonize()``. It is only used for modules found by file name (as in the example above). The ``cythonize()`` function in Cython versions up to 0.21 does not recognize the ``language`` option and it needs to be specified as an option to an :class:`Extension` that describes your extension and that is then handled by ``cythonize()`` as follows:: from distutils.core import setup, Extension from Cython.Build import cythonize setup(ext_modules = cythonize(Extension( "rect", # the extension name sources=["rect.pyx", "Rectangle.cpp"], # the Cython source and # additional C++ source files language="c++", # generate and compile C++ code ))) The options can also be passed directly from the source file, which is often preferable (and overrides any global option). Starting with version 0.17, Cython also allows to pass external source files into the ``cythonize()`` command this way. 
Here is a simplified setup.py file:: from distutils.core import setup from Cython.Build import cythonize setup( name = "rectangleapp", ext_modules = cythonize('*.pyx'), ) And in the .pyx source file, write this into the first comment block, before any source code, to compile it in C++ mode and link it statically against the :file:`Rectangle.cpp` code file:: # distutils: language = c++ # distutils: sources = Rectangle.cpp To compile manually (e.g. using ``make``), the ``cython`` command-line utility can be used to generate a C++ ``.cpp`` file, and then compile it into a python extension. C++ mode for the ``cython`` command is turned on with the ``--cplus`` option. Declaring a C++ class interface -------------------------------- The procedure for wrapping a C++ class is quite similar to that for wrapping normal C structs, with a couple of additions. Let's start here by creating the basic ``cdef extern from`` block:: cdef extern from "Rectangle.h" namespace "shapes": This will make the C++ class def for Rectangle available. Note the namespace declaration. Namespaces are simply used to make the fully qualified name of the object, and can be nested (e.g. ``"outer::inner"``) or even refer to classes (e.g. ``"namespace::MyClass`` to declare static members on MyClass). Declare class with cdef cppclass ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Now, let's add the Rectangle class to this extern from block - just copy the class name from Rectangle.h and adjust for Cython syntax, so now it becomes:: cdef extern from "Rectangle.h" namespace "shapes": cdef cppclass Rectangle: Add public attributes ^^^^^^^^^^^^^^^^^^^^^^ We now need to declare the attributes and methods for use on Cython:: cdef extern from "Rectangle.h" namespace "shapes": cdef cppclass Rectangle: Rectangle() except + Rectangle(int, int, int, int) except + int x0, y0, x1, y1 int getArea() void getSize(int* width, int* height) void move(int, int) Note that the constructor is declared as "except +". If the C++ code or the initial memory allocation raises an exception due to a failure, this will let Cython safely raise an appropriate Python exception instead (see below). Without this declaration, C++ exceptions originating from the constructor will not be handled by Cython. Declare a var with the wrapped C++ class ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Now, we use cdef to declare a var of the class with the C++ ``new`` statement:: rec_ptr = new Rectangle(1, 2, 3, 4) try: recArea = rec_ptr.getArea() ... finally: del rec_ptr # delete heap allocated object It's also possible to declare a stack allocated object, as long as it has a "default" constructor:: cdef extern from "Foo.h": cdef cppclass Foo: Foo() def func(): cdef Foo foo ... Note that, like C++, if the class has only one constructor and it is a nullary one, it's not necessary to declare it. Create Cython wrapper class ---------------------------- At this point, we have exposed into our pyx file's namespace the interface of the C++ Rectangle type. Now, we need to make this accessible from external Python code (which is our whole point). Common programming practice is to create a Cython extension type which holds a C++ instance as an attribute and create a bunch of forwarding methods. 
So we can implement the Python extension type as::

    cdef class PyRectangle:
        cdef Rectangle c_rect      # hold a C++ instance which we're wrapping

        def __cinit__(self, int x0, int y0, int x1, int y1):
            self.c_rect = Rectangle(x0, y0, x1, y1)

        def get_area(self):
            return self.c_rect.getArea()

        def get_size(self):
            cdef int width, height
            self.c_rect.getSize(&width, &height)
            return width, height

        def move(self, dx, dy):
            self.c_rect.move(dx, dy)

And there we have it. From a Python perspective, this extension type will look
and feel just like a natively defined Rectangle class.

It should be noted that if you want to give attribute access, you could just
implement some properties::

    @property
    def x0(self):
        return self.c_rect.x0

    @x0.setter
    def x0(self, x0):
        self.c_rect.x0 = x0

    ...

Cython initializes C++ class attributes of a cdef class using the nullary
constructor. If the class you're wrapping does not have a nullary constructor,
you must store a pointer to the wrapped class and manually allocate and
deallocate it. A convenient and safe place to do so is in the `__cinit__` and
`__dealloc__` methods which are guaranteed to be called exactly once upon
creation and deletion of the Python instance. ::

    cdef class PyRectangle:
        cdef Rectangle* c_rect   # hold a pointer to the C++ instance which we're wrapping

        def __cinit__(self, int x0, int y0, int x1, int y1):
            self.c_rect = new Rectangle(x0, y0, x1, y1)

        def __dealloc__(self):
            del self.c_rect

    ...

If you prefer giving the same name to the wrapper as the C++ class, see the
section on :ref:`resolving naming conflicts `.

Advanced C++ features
======================

We describe here all the C++ features that were not discussed in the above
tutorial.

Overloading
------------

Overloading is very simple. Just declare the method with different parameters
and use any of them::

    cdef extern from "Foo.h":
        cdef cppclass Foo:
            Foo(int)
            Foo(bool)
            Foo(int, bool)
            Foo(int, int)

Overloading operators
----------------------

Cython uses C++ naming for overloading operators::

    cdef extern from "foo.h":
        cdef cppclass Foo:
            Foo()
            Foo operator+(Foo)
            Foo operator-(Foo)
            int operator*(Foo)
            int operator/(int)

    cdef Foo foo = new Foo()
    foo2 = foo + foo
    foo2 = foo - foo

    x = foo * foo2
    x = foo / 1

Note that if one has *pointers* to C++ objects, dereferencing must be done to
avoid doing pointer arithmetic rather than arithmetic on the objects
themselves::

    cdef Foo* foo_ptr = new Foo()
    foo = foo_ptr[0] + foo_ptr[0]
    x = foo_ptr[0] / 2
    del foo_ptr

Nested class declarations
--------------------------

C++ allows nested class declaration. Class declarations can also be nested in
Cython::

    cdef extern from "<vector>" namespace "std":
        cdef cppclass vector[T]:
            cppclass iterator:
                T operator*()
                iterator operator++()
                bint operator==(iterator)
                bint operator!=(iterator)
            vector()
            void push_back(T&)
            T& operator[](int)
            T& at(int)
            iterator begin()
            iterator end()

    cdef vector[int].iterator iter  # iter is declared as being of type vector<int>::iterator

Note that the nested class is declared with a ``cppclass`` but without a
``cdef``.

C++ operators not compatible with Python syntax
------------------------------------------------

Cython tries to keep a syntax as close as possible to standard Python. Because
of this, certain C++ operators, like the preincrement ``++foo`` or the
dereferencing operator ``*foo`` cannot be used with the same syntax as C++.
Cython provides functions replacing these operators in a special module
``cython.operator``. The functions provided are:

* ``cython.operator.dereference`` for dereferencing.
``dereference(foo)`` will produce the C++ code ``*(foo)`` * ``cython.operator.preincrement`` for pre-incrementation. ``preincrement(foo)`` will produce the C++ code ``++(foo)``. Similarly for ``predecrement``, ``postincrement`` and ``postdecrement``. * ``cython.operator.comma`` for the comma operator. ``comma(a, b)`` will produce the C++ code ``((a), (b))``. These functions need to be cimported. Of course, one can use a ``from ... cimport ... as`` to have shorter and more readable functions. For example: ``from cython.operator cimport dereference as deref``. For completeness, it's also worth mentioning ``cython.operator.address`` which can also be written ``&foo``. Templates ---------- Cython uses a bracket syntax for templating. A simple example for wrapping C++ vector:: # import dereference and increment operators from cython.operator cimport dereference as deref, preincrement as inc cdef extern from "" namespace "std": cdef cppclass vector[T]: cppclass iterator: T operator*() iterator operator++() bint operator==(iterator) bint operator!=(iterator) vector() void push_back(T&) T& operator[](int) T& at(int) iterator begin() iterator end() cdef vector[int] *v = new vector[int]() cdef int i for i in range(10): v.push_back(i) cdef vector[int].iterator it = v.begin() while it != v.end(): print deref(it) inc(it) del v Multiple template parameters can be defined as a list, such as ``[T, U, V]`` or ``[int, bool, char]``. Optional template parameters can be indicated by writing ``[T, U, V=*]``. In the event that Cython needs to explicitly reference the type of a default template parameter for an incomplete template instantiation, it will write ``MyClass::V``, so if the class provides a typedef for its template parameters it is preferable to use that name here. Template functions are defined similarly to class templates, with the template parameter list following the function name:: cdef extern from "" namespace "std": T max[T](T a, T b) print max[long](3, 4) print max(1.5, 2.5) # simple template argument deduction Standard library ----------------- Most of the containers of the C++ Standard Library have been declared in pxd files located in ``/Cython/Includes/libcpp``. These containers are: deque, list, map, pair, queue, set, stack, vector. For example:: from libcpp.vector cimport vector cdef vector[int] vect cdef int i for i in range(10): vect.push_back(i) for i in range(10): print vect[i] The pxd files in ``/Cython/Includes/libcpp`` also work as good examples on how to declare C++ classes. Since Cython 0.17, the STL containers coerce from and to the corresponding Python builtin types. 
The conversion is triggered either by an assignment to a typed variable (including typed function arguments) or by an explicit cast, e.g.:: from libcpp.string cimport string from libcpp.vector cimport vector cdef string s = py_bytes_object print(s) cpp_string = py_unicode_object.encode('utf-8') cdef vector[int] vect = xrange(1, 10, 2) print(vect) # [1, 3, 5, 7, 9] cdef vector[string] cpp_strings = b'ab cd ef gh'.split() print(cpp_strings[1]) # b'cd' The following coercions are available: +------------------+----------------+-----------------+ | Python type => | *C++ type* | => Python type | +==================+================+=================+ | bytes | std::string | bytes | +------------------+----------------+-----------------+ | iterable | std::vector | list | +------------------+----------------+-----------------+ | iterable | std::list | list | +------------------+----------------+-----------------+ | iterable | std::set | set | +------------------+----------------+-----------------+ | iterable (len 2) | std::pair | tuple (len 2) | +------------------+----------------+-----------------+ All conversions create a new container and copy the data into it. The items in the containers are converted to a corresponding type automatically, which includes recursively converting containers inside of containers, e.g. a C++ vector of maps of strings. Iteration over stl containers (or indeed any class with ``begin()`` and ``end()`` methods returning an object supporting incrementing, dereferencing, and comparison) is supported via the ``for .. in`` syntax (including in list comprehensions). For example, one can write:: cdef vector[int] v = ... for value in v: f(value) return [x*x for x in v if x % 2 == 0] If the loop target variable is unspecified, an assignment from type ``*container.begin()`` is used for :ref:`type inference `. Simplified wrapping with default constructor -------------------------------------------- If your extension type instantiates a wrapped C++ class using the default constructor (not passing any arguments), you may be able to simplify the lifecycle handling by tying it directly to the lifetime of the Python wrapper object. Instead of a pointer attribute, you can declare an instance:: cdef class VectorStack: cdef vector[int] v def push(self, x): self.v.push_back(x) def pop(self): if self.v.empty(): raise IndexError() x = self.v.back() self.v.pop_back() return x Cython will automatically generate code that instantiates the C++ object instance when the Python object is created and deletes it when the Python object is garbage collected. Exceptions ----------- Cython cannot throw C++ exceptions, or catch them with a try-except statement, but it is possible to declare a function as potentially raising an C++ exception and converting it into a Python exception. For example, :: cdef extern from "some_file.h": cdef int foo() except + This will translate try and the C++ error into an appropriate Python exception. 
The translation is performed according to the following table (the ``std::`` prefix is omitted from the C++ identifiers): +-----------------------+---------------------+ | C++ | Python | +=======================+=====================+ | ``bad_alloc`` | ``MemoryError`` | +-----------------------+---------------------+ | ``bad_cast`` | ``TypeError`` | +-----------------------+---------------------+ | ``bad_typeid`` | ``TypeError`` | +-----------------------+---------------------+ | ``domain_error`` | ``ValueError`` | +-----------------------+---------------------+ | ``invalid_argument`` | ``ValueError`` | +-----------------------+---------------------+ | ``ios_base::failure`` | ``IOError`` | +-----------------------+---------------------+ | ``out_of_range`` | ``IndexError`` | +-----------------------+---------------------+ | ``overflow_error`` | ``OverflowError`` | +-----------------------+---------------------+ | ``range_error`` | ``ArithmeticError`` | +-----------------------+---------------------+ | ``underflow_error`` | ``ArithmeticError`` | +-----------------------+---------------------+ | (all others) | ``RuntimeError`` | +-----------------------+---------------------+ The ``what()`` message, if any, is preserved. Note that a C++ ``ios_base_failure`` can denote EOF, but does not carry enough information for Cython to discern that, so watch out with exception masks on IO streams. :: cdef int bar() except +MemoryError This will catch any C++ error and raise a Python MemoryError in its place. (Any Python exception is valid here.) :: cdef int raise_py_error() cdef int something_dangerous() except +raise_py_error If something_dangerous raises a C++ exception then raise_py_error will be called, which allows one to do custom C++ to Python error "translations." If raise_py_error does not actually raise an exception a RuntimeError will be raised. Static member method -------------------- If the Rectangle class has a static member: .. sourcecode:: c++ namespace shapes { class Rectangle { ... public: static void do_something(); }; } you can declare it using the Python @staticmethod decorator, i.e.:: cdef extern from "Rectangle.h" namespace "shapes": cdef cppclass Rectangle: ... @staticmethod void do_something() Declaring/Using References --------------------------- Cython supports declaring lvalue references using the standard ``Type&`` syntax. Note, however, that it is unnecessary to declare the arguments of extern functions as references (const or otherwise) as it has no impact on the caller's syntax. ``auto`` Keyword ---------------- Though Cython does not have an ``auto`` keyword, Cython local variables not explicitly typed with ``cdef`` are deduced from the types of the right hand side of *all* their assignments (see the ``infer_types`` :ref:`compiler directive `). This is particularly handy when dealing with functions that return complicated, nested, templated types, e.g.:: cdef vector[int] v = ... it = v.begin() (Though of course the ``for .. in`` syntax is prefered for objects supporting the iteration protocol.) RTTI and typeid() ================= Cython has support for the ``typeid(...)`` operator. from cython.operator cimport typeid The ``typeid(...)`` operator returns an object of the type ``const type_info &``. 
If you want to store a type_info value in a C variable, you will need to store it as a pointer rather than a reference: from libcpp.typeinfo cimport type_info cdef const type_info* info = &typeid(MyClass) If an invalid type is passed to ``typeid``, it will throw an ``std::bad_typeid`` exception which is converted into a ``TypeError`` exception in Python. An additional C++11-only RTTI-related class, ``std::type_index``, is available in ``libcpp.typeindex``. Caveats and Limitations ======================== Access to C-only functions --------------------------- Whenever generating C++ code, Cython generates declarations of and calls to functions assuming these functions are C++ (ie, not declared as ``extern "C" {...}``. This is ok if the C functions have C++ entry points, but if they're C only, you will hit a roadblock. If you have a C++ Cython module needing to make calls to pure-C functions, you will need to write a small C++ shim module which: * includes the needed C headers in an extern "C" block * contains minimal forwarding functions in C++, each of which calls the respective pure-C function C++ left-values ---------------- C++ allows functions returning a reference to be left-values. This is currently not supported in Cython. ``cython.operator.dereference(foo)`` is also not considered a left-value. Cython-0.26.1/docs/src/userguide/pypy.rst0000664000175000017500000001734512542002467021112 0ustar stefanstefan00000000000000Porting Cython code to PyPy =========================== Since version 0.17, Cython has basic support for cpyext, the layer in `PyPy `_ that emulates CPython's C-API. This is achieved by making the generated C code adapt at C compile time, so the generated code will compile in both CPython and PyPy unchanged. However, beyond what Cython can cover and adapt internally, the cpyext C-API emulation involves some differences to the real C-API in CPython that have a visible impact on user code. This page lists major differences and ways to deal with them in order to write Cython code that works in both CPython and PyPy. Reference counts ---------------- A general design difference in PyPy is that the runtime does not use reference counting internally but always a garbage collector. Reference counting is only emulated at the cpyext layer by counting references being held in C space. This implies that the reference count in PyPy is generally different from that in CPython because it does not count any references held in Python space. Object lifetime --------------- As a direct consequence of the different garbage collection characteristics, objects may see the end of their lifetime at other points than in CPython. Special care therefore has to be taken when objects are expected to have died in CPython but may not in PyPy. Specifically, a deallocator method of an extension type (``__dealloc__()``) may get called at a much later point than in CPython, triggered rather by memory getting tighter than by objects dying. If the point in the code is known when an object is supposed to die (e.g. when it is tied to another object or to the execution time of a function), it is worth considering if it can be invalidated and cleaned up manually at that point, rather than relying on a deallocator. As a side effect, this can sometimes even lead to a better code design, e.g. when context managers can be used together with the ``with`` statement. Borrowed references and data pointers ------------------------------------- The memory management in PyPy is allowed to move objects around in memory. 
The C-API layer is only an indirect view on PyPy objects and often replicates data or state into C space that is then tied to the lifetime of a C-API object rather then the underlying PyPy object. It is important to understand that these two objects are separate things in cpyext. The effect can be that when data pointers or borrowed references are used, and the owning object is no longer directly referenced from C space, the reference or data pointer may become invalid at some point, even if the object itself is still alive. As opposed to CPython, it is not enough to keep the reference to the object alive in a list (or other Python container), because the contents of those is only managed in Python space and thus only references the PyPy object. A reference in a Python container will not keep the C-API view on it alive. Entries in a Python class dict will obviously not work either. One of the more visible places where this may happen is when accessing the :c:type:`char*` buffer of a byte string. In PyPy, this will only work as long as the Cython code holds a direct reference to the byte string object itself. Another point is when CPython C-API functions are used directly that return borrowed references, e.g. :c:func:`PyTuple_GET_ITEM()` and similar functions, but also some functions that return borrowed references to built-in modules or low-level objects of the runtime environment. The GIL in PyPy only guarantees that the borrowed reference stays valid up to the next call into PyPy (or its C-API), but not necessarily longer. When accessing the internals of Python objects or using borrowed references longer than up to the next call into PyPy, including reference counting or anything that frees the GIL, it is therefore required to additionally keep direct owned references to these objects alive in C space, e.g. in local variables in a function or in the attributes of an extension type. When in doubt, avoid using C-API functions that return borrowed references, or surround the usage of a borrowed reference explicitly by a pair of calls to :c:func:`Py_INCREF()` when getting the reference and :c:func:`Py_DECREF()` when done with it to convert it into an owned reference. Builtin types, slots and fields ------------------------------- The following builtin types are not currently available in cpyext in form of their C level representation: :c:type:`PyComplexObject`, :c:type:`PyFloatObject` and :c:type:`PyBoolObject`. Many of the type slot functions of builtin types are not initialised in cpyext and can therefore not be used directly. Similarly, almost none of the (implementation) specific struct fields of builtin types is exposed at the C level, such as the ``ob_digit`` field of :c:type:`PyLongObject` or the ``allocated`` field of the :c:type:`PyListObject` struct etc. Although the ``ob_size`` field of containers (used by the :c:func:`Py_SIZE()` macro) is available, it is not guaranteed to be accurate. It is best not to access any of these struct fields and slots and to use the normal Python types instead as well as the normal Python protocols for object operations. Cython will map them to an appropriate usage of the C-API in both CPython and cpyext. GIL handling ------------ Currently, the GIL handling function :c:func:`PyGILState_Ensure` is not re-entrant in PyPy and deadlocks when called twice. This means that code that tries to acquire the GIL "just in case", because it might be called with or without the GIL, will not work as expected in PyPy. 
See `PyGILState_Ensure should not deadlock if GIL already held `_. Efficiency ---------- Simple functions and especially macros that are used for speed in CPython may exhibit substantially different performance characteristics in cpyext. Functions returning borrowed references were already mentioned as requiring special care, but they also induce substantially more runtime overhead because they often create weak references in PyPy where they only return a plain pointer in CPython. A visible example is :c:func:`PyTuple_GET_ITEM()`. Some more high-level functions may also show entirely different performance characteristics, e.g. :c:func:`PyDict_Next()` for dict iteration. While being the fastest way to iterate over a dict in CPython, having linear time complexity and a low overhead, it currently has quadratic runtime in PyPy because it maps to normal dict iteration, which cannot keep track of the current position between two calls and thus needs to restart the iteration on each call. The general advice applies here even more than in CPython, that it is always best to rely on Cython generating appropriately adapted C-API handling code for you than to use the C-API directly - unless you really know what you are doing. And if you find a better way of doing something in PyPy and cpyext than Cython currently does, it's best to fix Cython for everyone's benefit. Known problems -------------- * As of PyPy 1.9, subtyping builtin types can result in infinite recursion on method calls in some rare cases. * Docstrings of special methods are not propagated to Python space. * The Python 3.x adaptations in pypy3 only slowly start to include the C-API, so more incompatibilities can be expected there. Bugs and crashes ---------------- The cpyext implementation in PyPy is much younger and substantially less mature than the well tested C-API and its underlying native implementation in CPython. This should be remembered when running into crashes, as the problem may not always be in your code or in Cython. Also, PyPy and its cpyext implementation are less easy to debug at the C level than CPython and Cython, simply because they were not designed for it. Cython-0.26.1/docs/src/userguide/external_C_code.rst0000664000175000017500000006211613143605603023162 0ustar stefanstefan00000000000000.. highlight:: cython .. _external-C-code: ********************************** Interfacing with External C Code ********************************** One of the main uses of Cython is wrapping existing libraries of C code. This is achieved by using external declarations to declare the C functions and variables from the library that you want to use. You can also use public declarations to make C functions and variables defined in a Cython module available to external C code. The need for this is expected to be less frequent, but you might want to do it, for example, if you are `embedding Python`_ in another application as a scripting language. Just as a Cython module can be used as a bridge to allow Python code to call C code, it can also be used to allow C code to call Python code. .. _embedding Python: http://www.freenet.org.nz/python/embeddingpyrex/ External declarations ======================= By default, C functions and variables declared at the module level are local to the module (i.e. they have the C static storage class). 
They can also be declared extern to specify that they are defined elsewhere, for example,:: cdef extern int spam_counter cdef extern void order_spam(int tons) Referencing C header files --------------------------- When you use an extern definition on its own as in the examples above, Cython includes a declaration for it in the generated C file. This can cause problems if the declaration doesn't exactly match the declaration that will be seen by other C code. If you're wrapping an existing C library, for example, it's important that the generated C code is compiled with exactly the same declarations as the rest of the library. To achieve this, you can tell Cython that the declarations are to be found in a C header file, like this:: cdef extern from "spam.h": int spam_counter void order_spam(int tons) The ``cdef extern`` from clause does three things: 1. It directs Cython to place a ``#include`` statement for the named header file in the generated C code. 2. It prevents Cython from generating any C code for the declarations found in the associated block. 3. It treats all declarations within the block as though they started with ``cdef extern``. It's important to understand that Cython does not itself read the C header file, so you still need to provide Cython versions of any declarations from it that you use. However, the Cython declarations don't always have to exactly match the C ones, and in some cases they shouldn't or can't. In particular: #. Leave out any platform-specific extensions to C declarations such as ``__declspec()``. #. If the header file declares a big struct and you only want to use a few members, you only need to declare the members you're interested in. Leaving the rest out doesn't do any harm, because the C compiler will use the full definition from the header file. In some cases, you might not need any of the struct's members, in which case you can just put pass in the body of the struct declaration, e.g.:: cdef extern from "foo.h": struct spam: pass .. note:: you can only do this inside a ``cdef extern from`` block; struct declarations anywhere else must be non-empty. #. If the header file uses ``typedef`` names such as :c:type:`word` to refer to platform-dependent flavours of numeric types, you will need a corresponding :keyword:`ctypedef` statement, but you don't need to match the type exactly, just use something of the right general kind (int, float, etc). For example,:: ctypedef int word will work okay whatever the actual size of a :c:type:`word` is (provided the header file defines it correctly). Conversion to and from Python types, if any, will also be used for this new type. #. If the header file uses macros to define constants, translate them into a normal external variable declaration. You can also declare them as an :keyword:`enum` if they contain normal :c:type:`int` values. Note that Cython considers :keyword:`enum` to be equivalent to :c:type:`int`, so do not do this for non-int values. #. If the header file defines a function using a macro, declare it as though it were an ordinary function, with appropriate argument and result types. #. For archaic reasons C uses the keyword ``void`` to declare a function taking no parameters. In Cython as in Python, simply declare such functions as :meth:`foo()`. 
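Putting several of the points above together, a hedged sketch of such an extern block might look like the following (the struct member, typedef, macro and function names are made up for illustration and are not part of any real header)::

    cdef extern from "spam.h":
        # big C struct: declare only the members actually used
        struct spam_config:
            int tons

        # platform-dependent typedef: only the general kind matters
        ctypedef int word

        # integer macro constant, declared via an anonymous enum
        enum: SPAM_MAX_TONS

        # function-like macro, declared as an ordinary function
        word spam_round(double tons)

        # C function taking no parameters: declared without "void"
        void reset_spam_counter()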
A few more tricks and tips: * If you want to include a C header because it's needed by another header, but don't want to use any declarations from it, put pass in the extern-from block:: cdef extern from "spam.h": pass * If you want to include a system header, put angle brackets inside the quotes:: cdef extern from "": ... * If you want to include some external declarations, but don't want to specify a header file (because it's included by some other header that you've already included) you can put ``*`` in place of the header file name:: cdef extern from *: ... Implementing functions in C --------------------------- When you want to call C code from a Cython module, usually that code will be in some external library that you link your extension against. However, you can also directly compile C (or C++) code as part of your Cython module. In the ``.pyx`` file, you can put something like:: cdef extern from "spam.c": void order_spam(int tons) Cython will assume that the function ``order_spam()`` is defined in the file ``spam.c``. If you also want to cimport this function from another module, it must be declared (not extern!) in the ``.pxd`` file:: cdef void order_spam(int tons) For this to work, the signature of ``order_spam()`` in ``spam.c`` must match the signature that Cython uses, in particular the function must be static: .. code-block:: c static void order_spam(int tons) { printf("Ordered %i tons of spam!\n", tons); } .. _struct-union-enum-styles: Styles of struct, union and enum declaration ---------------------------------------------- There are two main ways that structs, unions and enums can be declared in C header files: using a tag name, or using a typedef. There are also some variations based on various combinations of these. It's important to make the Cython declarations match the style used in the header file, so that Cython can emit the right sort of references to the type in the code it generates. To make this possible, Cython provides two different syntaxes for declaring a struct, union or enum type. The style introduced above corresponds to the use of a tag name. To get the other style, you prefix the declaration with :keyword:`ctypedef`, as illustrated below. The following table shows the various possible styles that can be found in a header file, and the corresponding Cython declaration that you should put in the ``cdef extern`` from block. Struct declarations are used as an example; the same applies equally to union and enum declarations. +-------------------------+---------------------------------------------+-----------------------------------------------------------------------+ | C code | Possibilities for corresponding Cython Code | Comments | +=========================+=============================================+=======================================================================+ | .. sourcecode:: c | :: | Cython will refer to the as ``struct Foo`` in the generated C code. | | | | | | struct Foo { | cdef struct Foo: | | | ... | ... | | | }; | | | +-------------------------+---------------------------------------------+-----------------------------------------------------------------------+ | .. sourcecode:: c | :: | Cython will refer to the type simply as ``Foo`` in | | | | the generated C code. | | typedef struct { | ctypedef struct Foo: | | | ... | ... | | | } Foo; | | | +-------------------------+---------------------------------------------+-----------------------------------------------------------------------+ | .. 
sourcecode:: c | :: | If the C header uses both a tag and a typedef with *different* | | | | names, you can use either form of declaration in Cython | | typedef struct foo { | cdef struct foo: | (although if you need to forward reference the type, | | ... | ... | you'll have to use the first form). | | } Foo; | ctypedef foo Foo #optional | | | | | | | | or:: | | | | | | | | ctypedef struct Foo: | | | | ... | | +-------------------------+---------------------------------------------+-----------------------------------------------------------------------+ | .. sourcecode:: c | :: | If the header uses the *same* name for the tag and typedef, you | | | | won't be able to include a :keyword:`ctypedef` for it -- but then, | | typedef struct Foo { | cdef struct Foo: | it's not necessary. | | ... | ... | | | } Foo; | | | +-------------------------+---------------------------------------------+-----------------------------------------------------------------------+ Note that in all the cases below, you refer to the type in Cython code simply as :c:type:`Foo`, not ``struct Foo``. Accessing Python/C API routines --------------------------------- One particular use of the ``cdef extern from`` statement is for gaining access to routines in the Python/C API. For example,:: cdef extern from "Python.h": object PyString_FromStringAndSize(char *s, Py_ssize_t len) will allow you to create Python strings containing null bytes. Special Types -------------- Cython predefines the name ``Py_ssize_t`` for use with Python/C API routines. To make your extensions compatible with 64-bit systems, you should always use this type where it is specified in the documentation of Python/C API routines. Windows Calling Conventions ---------------------------- The ``__stdcall`` and ``__cdecl`` calling convention specifiers can be used in Cython, with the same syntax as used by C compilers on Windows, for example,:: cdef extern int __stdcall FrobnicateWindow(long handle) cdef void (__stdcall *callback)(void *) If ``__stdcall`` is used, the function is only considered compatible with other ``__stdcall`` functions of the same signature. .. _resolve-conflicts: Resolving naming conflicts - C name specifications -------------------------------------------------- Each Cython module has a single module-level namespace for both Python and C names. This can be inconvenient if you want to wrap some external C functions and provide the Python user with Python functions of the same names. Cython provides a couple of different ways of solving this problem. The best way, especially if you have many C functions to wrap, is to put the extern C function declarations into a ``.pxd`` file and thus a different namespace, using the facilities described in :ref:`sharing declarations between Cython modules `. Writing them into a ``.pxd`` file allows their reuse across modules, avoids naming collisions in the normal Python way and even makes it easy to rename them on cimport. For example, if your ``decl.pxd`` file declared a C function ``eject_tomato``:: cdef extern from "myheader.h": void eject_tomato(float speed) then you can cimport and wrap it in a ``.pyx`` file as follows:: from decl cimport eject_tomato as c_eject_tomato def eject_tomato(speed): c_eject_tomato(speed) or simply cimport the ``.pxd`` file and use it as prefix:: cimport decl def eject_tomato(speed): decl.eject_tomato(speed) Note that this has no runtime lookup overhead, as it would in Python. Cython resolves the names in the ``.pxd`` file at compile time. 
For special cases where namespacing or renaming on import is not enough, e.g. when a name in C conflicts with a Python keyword, you can use a C name specification to give different Cython and C names to the C function at declaration time. Suppose, for example, that you want to wrap an external C function called :func:`yield`. If you declare it as:: cdef extern from "myheader.h": void c_yield "yield" (float speed) then its Cython visible name will be ``c_yield``, whereas its name in C will be ``yield``. You can then wrap it with:: def call_yield(speed): c_yield(speed) As for functions, C names can be specified for variables, structs, unions, enums, struct and union members, and enum values. For example:: cdef extern int one "eins", two "zwei" cdef extern float three "drei" cdef struct spam "SPAM": int i "eye" cdef enum surprise "inquisition": first "alpha" second "beta" = 3 Note that Cython will not do any validation or name mangling on the string you provide. It will inject the bare text into the C code unmodified, so you are entirely on your own with this feature. If you want to declare a name ``xyz`` and have Cython inject the text "make the C compiler fail here" into the C file for it, you can do this using a C name declaration. Consider this an advanced feature, only for the rare cases where everything else fails. Using Cython Declarations from C ================================ Cython provides two methods for making C declarations from a Cython module available for use by external C code---public declarations and C API declarations. .. note:: You do not need to use either of these to make declarations from one Cython module available to another Cython module – you should use the :keyword:`cimport` statement for that. Sharing Declarations Between Cython Modules. Public Declarations --------------------- You can make C types, variables and functions defined in a Cython module accessible to C code that is linked together with the Cython-generated C file, by declaring them with the public keyword:: cdef public struct Bunny: # public type declaration int vorpalness cdef public int spam # public variable declaration cdef public void grail(Bunny *) # public function declaration If there are any public declarations in a Cython module, a header file called :file:`modulename.h` file is generated containing equivalent C declarations for inclusion in other C code. A typical use case for this is building an extension module from multiple C sources, one of them being Cython generated (i.e. with something like ``Extension("grail", sources=["grail.pyx", "grail_helper.c"])`` in ``setup.py``. In this case, the file ``grail_helper.c`` just needs to add ``#include "grail.h"`` in order to access the public Cython variables. A more advanced use case is embedding Python in C using Cython. In this case, make sure to call Py_Initialize() and Py_Finalize(). For example, in the following snippet that includes :file:`grail.h`: .. code-block:: c #include #include "grail.h" int main() { Py_Initialize(); initgrail(); Bunny b; grail(b); Py_Finalize(); } This C code can then be built together with the Cython-generated C code in a single program (or library). If the Cython module resides within a package, then the name of the ``.h`` file consists of the full dotted name of the module, e.g. a module called :mod:`foo.spam` would have a header file called :file:`foo.spam.h`. .. 
NOTE:: On some operating systems like Linux, it is also possible to first build the Cython extension in the usual way and then link against the resulting ``.so`` file like a dynamic library. Beware that this is not portable, so it should be avoided. .. _api: C API Declarations ------------------- The other way of making declarations available to C code is to declare them with the :keyword:`api` keyword. You can use this keyword with C functions and extension types. A header file called :file:`modulename_api.h` is produced containing declarations of the functions and extension types, and a function called :func:`import_modulename`. C code wanting to use these functions or extension types needs to include the header and call the :func:`import_modulename` function. The other functions can then be called and the extension types used as usual. If the C code wanting to use these functions is part of more than one shared library or executable, then :func:`import_modulename` function needs to be called in each of the shared libraries which use these functions. If you crash with a segmentation fault (SIGSEGV on linux) when calling into one of these api calls, this is likely an indication that the shared library which contains the api call which is generating the segmentation fault does not call the :func:`import_modulename` function before the api call which crashes. Any public C type or extension type declarations in the Cython module are also made available when you include :file:`modulename_api.h`.:: # delorean.pyx cdef public struct Vehicle: int speed float power cdef api void activate(Vehicle *v): if v.speed >= 88 and v.power >= 1.21: print "Time travel achieved" .. sourcecode:: c # marty.c #include "delorean_api.h" Vehicle car; int main(int argc, char *argv[]) { Py_Initialize(); import_delorean(); car.speed = atoi(argv[1]); car.power = atof(argv[2]); activate(&car); Py_Finalize(); } .. note:: Any types defined in the Cython module that are used as argument or return types of the exported functions will need to be declared public, otherwise they won't be included in the generated header file, and you will get errors when you try to compile a C file that uses the header. Using the :keyword:`api` method does not require the C code using the declarations to be linked with the extension module in any way, as the Python import machinery is used to make the connection dynamically. However, only functions can be accessed this way, not variables. Note also that for the module import mechanism to be set up correctly, the user must call Py_Initialize() and Py_Finalize(); if you experience a segmentation fault in the call to :func:`import_modulename`, it is likely that this wasn't done. You can use both :keyword:`public` and :keyword:`api` on the same function to make it available by both methods, e.g.:: cdef public api void belt_and_braces(): ... However, note that you should include either :file:`modulename.h` or :file:`modulename_api.h` in a given C file, not both, otherwise you may get conflicting dual definitions. If the Cython module resides within a package, then: * The name of the header file contains of the full dotted name of the module. * The name of the importing function contains the full name with dots replaced by double underscores. E.g. a module called :mod:`foo.spam` would have an API header file called :file:`foo.spam_api.h` and an importing function called :func:`import_foo__spam`. 
Multiple public and API declarations -------------------------------------- You can declare a whole group of items as :keyword:`public` and/or :keyword:`api` all at once by enclosing them in a :keyword:`cdef` block, for example,:: cdef public api: void order_spam(int tons) char *get_lunch(float tomato_size) This can be a useful thing to do in a ``.pxd`` file (see :ref:`sharing-declarations`) to make the module's public interface available by all three methods. Acquiring and Releasing the GIL --------------------------------- Cython provides facilities for acquiring and releasing the `Global Interpreter Lock (GIL) `_. This may be useful when calling from multi-threaded code into (external C) code that may block, or when wanting to use Python from a (native) C thread callback. Releasing the GIL should obviously only be done for thread-safe code or for code that uses other means of protection against race conditions and concurrency issues. Note that acquiring the GIL is a blocking thread-synchronising operation, and therefore potentially costly. It might not be worth releasing the GIL for minor calculations. Usually, I/O operations and substantial computations in parallel code will benefit from it. .. _nogil: Releasing the GIL ^^^^^^^^^^^^^^^^^ You can release the GIL around a section of code using the ``with nogil`` statement:: with nogil: Code in the body of the with-statement must not raise exceptions or manipulate Python objects in any way, and must not call anything that manipulates Python objects without first re-acquiring the GIL. Cython validates these operations at compile time, but cannot look into external C functions, for example. They must be correctly declared as requiring or not requiring the GIL (see below) in order to make Cython's checks effective. .. _gil: Acquiring the GIL ^^^^^^^^^^^^^^^^^ A C function that is to be used as a callback from C code that is executed without the GIL needs to acquire the GIL before it can manipulate Python objects. This can be done by specifying ``with gil`` in the function header:: cdef void my_callback(void *data) with gil: ... If the callback may be called from another non-Python thread, care must be taken to initialize the GIL first, through a call to `PyEval_InitThreads() `_. If you're already using :ref:`cython.parallel ` in your module, this will already have been taken care of. The GIL may also be acquired through the ``with gil`` statement:: with gil: Declaring a function as callable without the GIL -------------------------------------------------- You can specify :keyword:`nogil` in a C function header or function type to declare that it is safe to call without the GIL.:: cdef void my_gil_free_func(int spam) nogil: ... When you implement such a function in Cython, it cannot have any Python arguments or Python object return type. Furthermore, any operation that involves Python objects (including calling Python functions) must explicitly acquire the GIL first, e.g. by using a ``with gil`` block or by calling a function that has been defined ``with gil``. These restrictions are checked by Cython and you will get a compile error if it finds any Python interaction inside of a ``nogil`` code section. .. NOTE:: The ``nogil`` function annotation declares that it is safe to call the function without the GIL. It is perfectly allowed to execute it while holding the GIL. The function does not in itself release the GIL if it is held by the caller. Declaring a function ``with gil`` (i.e. 
as acquiring the GIL on entry) also implicitly makes its signature :keyword:`nogil`. Cython-0.26.1/docs/index.rst0000664000175000017500000000037713023021033016413 0ustar stefanstefan00000000000000 Welcome to Cython's Documentation ================================= Also see the `Cython project homepage `_. .. toctree:: :maxdepth: 2 src/quickstart/index src/tutorial/index src/userguide/index src/reference/index Cython-0.26.1/runtests.py0000775000175000017500000025057513150050502016101 0ustar stefanstefan00000000000000#!/usr/bin/env python import atexit import os import sys import re import gc import locale import shutil import time import unittest import doctest import operator import subprocess import tempfile import traceback import warnings try: import platform IS_PYPY = platform.python_implementation() == 'PyPy' IS_CPYTHON = platform.python_implementation() == 'CPython' except (ImportError, AttributeError): IS_CPYTHON = True IS_PYPY = False from io import open as io_open try: from StringIO import StringIO except ImportError: from io import StringIO # doesn't accept 'str' in Py2 try: import cPickle as pickle except ImportError: import pickle try: import threading except ImportError: # No threads, no problems threading = None try: from collections import defaultdict except ImportError: class defaultdict(object): def __init__(self, default_factory=lambda : None): self._dict = {} self.default_factory = default_factory def __getitem__(self, key): if key not in self._dict: self._dict[key] = self.default_factory() return self._dict[key] def __setitem__(self, key, value): self._dict[key] = value def __contains__(self, key): return key in self._dict def __repr__(self): return repr(self._dict) def __nonzero__(self): return bool(self._dict) try: basestring except NameError: basestring = str WITH_CYTHON = True CY3_DIR = None from distutils.command.build_ext import build_ext as _build_ext from distutils import sysconfig _to_clean = [] @atexit.register def _cleanup_files(): """ This is only used on Cygwin to clean up shared libraries that are unsafe to delete while the test suite is running. """ for filename in _to_clean: if os.path.isdir(filename): shutil.rmtree(filename, ignore_errors=True) else: try: os.remove(filename) except OSError: pass def get_distutils_distro(_cache=[]): if _cache: return _cache[0] # late import to accomodate for setuptools override from distutils.dist import Distribution distutils_distro = Distribution() if sys.platform == 'win32': # TODO: Figure out why this hackery (see http://thread.gmane.org/gmane.comp.python.cython.devel/8280/). 
config_files = distutils_distro.find_config_files() try: config_files.remove('setup.cfg') except ValueError: pass distutils_distro.parse_config_files(config_files) cfgfiles = distutils_distro.find_config_files() try: cfgfiles.remove('setup.cfg') except ValueError: pass distutils_distro.parse_config_files(cfgfiles) _cache.append(distutils_distro) return distutils_distro EXT_DEP_MODULES = { 'tag:numpy': 'numpy', 'tag:asyncio': 'asyncio', 'tag:pstats': 'pstats', 'tag:posix': 'posix', 'tag:array': 'array', 'tag:coverage': 'Cython.Coverage', 'Coverage': 'Cython.Coverage', 'tag:ipython': 'IPython', 'tag:jedi': 'jedi', } def patch_inspect_isfunction(): import inspect orig_isfunction = inspect.isfunction def isfunction(obj): return orig_isfunction(obj) or type(obj).__name__ == 'cython_function_or_method' isfunction._orig_isfunction = orig_isfunction inspect.isfunction = isfunction def unpatch_inspect_isfunction(): import inspect try: orig_isfunction = inspect.isfunction._orig_isfunction except AttributeError: pass else: inspect.isfunction = orig_isfunction def def_to_cdef(source): ''' Converts the module-level def methods into cdef methods, i.e. @decorator def foo([args]): """ [tests] """ [body] becomes def foo([args]): """ [tests] """ return foo_c([args]) cdef foo_c([args]): [body] ''' output = [] skip = False def_node = re.compile(r'def (\w+)\(([^()*]*)\):').match lines = iter(source.split('\n')) for line in lines: if not line.strip(): output.append(line) continue if skip: if line[0] != ' ': skip = False else: continue if line[0] == '@': skip = True continue m = def_node(line) if m: name = m.group(1) args = m.group(2) if args: args_no_types = ", ".join(arg.split()[-1] for arg in args.split(',')) else: args_no_types = "" output.append("def %s(%s):" % (name, args_no_types)) line = next(lines) if '"""' in line: has_docstring = True output.append(line) for line in lines: output.append(line) if '"""' in line: break else: has_docstring = False output.append(" return %s_c(%s)" % (name, args_no_types)) output.append('') output.append("cdef %s_c(%s):" % (name, args)) if not has_docstring: output.append(line) else: output.append(line) return '\n'.join(output) def update_linetrace_extension(ext): ext.define_macros.append(('CYTHON_TRACE', 1)) return ext def update_numpy_extension(ext): import numpy from numpy.distutils.misc_util import get_info ext.include_dirs.append(numpy.get_include()) # We need the npymath library for numpy.math. # This is typically a static-only library. for attr, value in get_info('npymath').items(): getattr(ext, attr).extend(value) def update_openmp_extension(ext): ext.openmp = True language = ext.language if language == 'cpp': flags = OPENMP_CPP_COMPILER_FLAGS else: flags = OPENMP_C_COMPILER_FLAGS if flags: compile_flags, link_flags = flags ext.extra_compile_args.extend(compile_flags.split()) ext.extra_link_args.extend(link_flags.split()) return ext elif sys.platform == 'win32': return ext return EXCLUDE_EXT def get_openmp_compiler_flags(language): """ As of gcc 4.2, it supports OpenMP 2.5. Gcc 4.4 implements 3.0. We don't (currently) check for other compilers. returns a two-tuple of (CFLAGS, LDFLAGS) to build the OpenMP extension """ if language == 'cpp': cc = sysconfig.get_config_var('CXX') else: cc = sysconfig.get_config_var('CC') if not cc: if sys.platform == 'win32': return '/openmp', '' return None # For some reason, cc can be e.g. 
'gcc -pthread' cc = cc.split()[0] # Force english output env = os.environ.copy() env['LC_MESSAGES'] = 'C' matcher = re.compile(r"gcc version (\d+\.\d+)").search try: p = subprocess.Popen([cc, "-v"], stderr=subprocess.PIPE, env=env) except EnvironmentError: # Be compatible with Python 3 warnings.warn("Unable to find the %s compiler: %s: %s" % (language, os.strerror(sys.exc_info()[1].errno), cc)) return None _, output = p.communicate() output = output.decode(locale.getpreferredencoding() or 'ASCII', 'replace') gcc_version = matcher(output) if not gcc_version: return None # not gcc - FIXME: do something about other compilers # gcc defines "__int128_t", assume that at least all 64 bit architectures have it global COMPILER_HAS_INT128 COMPILER_HAS_INT128 = getattr(sys, 'maxsize', getattr(sys, 'maxint', 0)) > 2**60 compiler_version = gcc_version.group(1) if compiler_version and compiler_version.split('.') >= ['4', '2']: return '-fopenmp', '-fopenmp' try: locale.setlocale(locale.LC_ALL, '') except locale.Error: pass COMPILER = None COMPILER_HAS_INT128 = False OPENMP_C_COMPILER_FLAGS = get_openmp_compiler_flags('c') OPENMP_CPP_COMPILER_FLAGS = get_openmp_compiler_flags('cpp') # Return this from the EXT_EXTRAS matcher callback to exclude the extension EXCLUDE_EXT = object() EXT_EXTRAS = { 'tag:numpy' : update_numpy_extension, 'tag:openmp': update_openmp_extension, 'tag:trace' : update_linetrace_extension, } def _is_py3_before_32(excluded, version): return version[0] >= 3 and version < (3,2) # TODO: use tags VER_DEP_MODULES = { # tests are excluded if 'CurrentPythonVersion OP VersionTuple', i.e. # (2,4) : (operator.lt, ...) excludes ... when PyVer < 2.4.x (2,7) : (operator.lt, lambda x: x in ['run.withstat_py27', # multi context with statement 'run.yield_inside_lambda', 'run.test_dictviews', 'run.pyclass_special_methods', 'run.set_literals', ]), # The next line should start (3,); but this is a dictionary, so # we can only have one (3,) key. Since 2.7 is supposed to be the # last 2.x release, things would have to change drastically for this # to be unsafe... 
(2,999): (operator.lt, lambda x: x in ['run.special_methods_T561_py3', 'run.test_raisefrom', ]), (3,): (operator.ge, lambda x: x in ['run.non_future_division', 'compile.extsetslice', 'compile.extdelslice', 'run.special_methods_T561_py2' ]), (3,1): (_is_py3_before_32, lambda x: x in ['run.pyclass_special_methods', ]), (3,3) : (operator.lt, lambda x: x in ['build.package_compilation', 'run.yield_from_py33', ]), (3,4): (operator.lt, lambda x: x in ['run.py34_signature', ]), (3,5): (operator.lt, lambda x: x in ['run.py35_pep492_interop', ]), } INCLUDE_DIRS = [ d for d in os.getenv('INCLUDE', '').split(os.pathsep) if d ] CFLAGS = os.getenv('CFLAGS', '').split() CCACHE = os.getenv('CYTHON_RUNTESTS_CCACHE', '').split() TEST_SUPPORT_DIR = 'testsupport' BACKENDS = ['c', 'cpp'] UTF8_BOM_BYTES = r'\xef\xbb\xbf'.encode('ISO-8859-1').decode('unicode_escape') def memoize(f): uncomputed = object() f._cache = {} def func(*args): res = f._cache.get(args, uncomputed) if res is uncomputed: res = f._cache[args] = f(*args) return res return func @memoize def parse_tags(filepath): tags = defaultdict(list) parse_tag = re.compile(r'#\s*(\w+)\s*:(.*)$').match f = io_open(filepath, encoding='ISO-8859-1', errors='ignore') try: for line in f: # ignore BOM-like bytes and whitespace line = line.lstrip(UTF8_BOM_BYTES).strip() if not line: if tags: break # assume all tags are in one block else: continue if line[0] != '#': break parsed = parse_tag(line) if parsed: tag, values = parsed.groups() if tag in ('coding', 'encoding'): continue if tag == 'tags': tag = 'tag' print("WARNING: test tags use the 'tag' directive, not 'tags' (%s)" % filepath) if tag not in ('mode', 'tag', 'ticket', 'cython', 'distutils', 'preparse'): print("WARNING: unknown test directive '%s' found (%s)" % (tag, filepath)) values = values.split(',') tags[tag].extend(filter(None, [value.strip() for value in values])) elif tags: break # assume all tags are in one block finally: f.close() return tags list_unchanging_dir = memoize(lambda x: os.listdir(x)) @memoize def _list_pyregr_data_files(test_directory): is_data_file = re.compile('(?:[.](txt|pem|db|html)|^bad.*[.]py)$').search return ['__init__.py'] + [ filename for filename in list_unchanging_dir(test_directory) if is_data_file(filename)] def import_ext(module_name, file_path=None): if file_path: import imp return imp.load_dynamic(module_name, file_path) else: try: from importlib import invalidate_caches except ImportError: pass else: invalidate_caches() return __import__(module_name, globals(), locals(), ['*']) class build_ext(_build_ext): def build_extension(self, ext): try: try: # Py2.7+ & Py3.2+ compiler_obj = self.compiler_obj except AttributeError: compiler_obj = self.compiler if ext.language == 'c++': compiler_obj.compiler_so.remove('-Wstrict-prototypes') if CCACHE: compiler_obj.compiler_so = CCACHE + compiler_obj.compiler_so if getattr(ext, 'openmp', None) and compiler_obj.compiler_type == 'msvc': ext.extra_compile_args.append('/openmp') except Exception: pass _build_ext.build_extension(self, ext) class ErrorWriter(object): match_error = re.compile('(warning:)?(?:.*:)?\s*([-0-9]+)\s*:\s*([-0-9]+)\s*:\s*(.*)').match def __init__(self): self.output = [] self.write = self.output.append def _collect(self): s = ''.join(self.output) results = {'errors': [], 'warnings': []} for line in s.splitlines(): match = self.match_error(line) if match: is_warning, line, column, message = match.groups() results['warnings' if is_warning else 'errors'].append((int(line), int(column), message.strip())) return 
[["%d:%d: %s" % values for values in sorted(results[key])] for key in ('errors', 'warnings')] def geterrors(self): return self._collect()[0] def getwarnings(self): return self._collect()[1] def getall(self): return self._collect() def close(self): pass # ignore, only to match file-like interface class TestBuilder(object): def __init__(self, rootdir, workdir, selectors, exclude_selectors, annotate, cleanup_workdir, cleanup_sharedlibs, cleanup_failures, with_pyregr, cython_only, languages, test_bugs, fork, language_level, test_determinism, common_utility_dir, pythran_dir=None): self.rootdir = rootdir self.workdir = workdir self.selectors = selectors self.exclude_selectors = exclude_selectors self.annotate = annotate self.cleanup_workdir = cleanup_workdir self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_failures = cleanup_failures self.with_pyregr = with_pyregr self.cython_only = cython_only self.languages = languages self.test_bugs = test_bugs self.fork = fork self.language_level = language_level self.test_determinism = test_determinism self.common_utility_dir = common_utility_dir self.pythran_dir = pythran_dir def build_suite(self): suite = unittest.TestSuite() filenames = os.listdir(self.rootdir) filenames.sort() for filename in filenames: path = os.path.join(self.rootdir, filename) if os.path.isdir(path) and filename != TEST_SUPPORT_DIR: if filename == 'pyregr' and not self.with_pyregr: continue if filename == 'broken' and not self.test_bugs: continue suite.addTest( self.handle_directory(path, filename)) if sys.platform not in ['win32']: # Non-Windows makefile. if [1 for selector in self.selectors if selector("embedded")] \ and not [1 for selector in self.exclude_selectors if selector("embedded")]: suite.addTest(unittest.makeSuite(EmbedTest)) return suite def handle_directory(self, path, context): workdir = os.path.join(self.workdir, context) if not os.path.exists(workdir): os.makedirs(workdir) suite = unittest.TestSuite() filenames = list_unchanging_dir(path) filenames.sort() for filename in filenames: filepath = os.path.join(path, filename) module, ext = os.path.splitext(filename) if ext not in ('.py', '.pyx', '.srctree'): continue if filename.startswith('.'): continue # certain emacs backup files if context == 'pyregr': tags = defaultdict(list) else: tags = parse_tags(filepath) fqmodule = "%s.%s" % (context, module) if not [ 1 for match in self.selectors if match(fqmodule, tags) ]: continue if self.exclude_selectors: if [1 for match in self.exclude_selectors if match(fqmodule, tags)]: continue mode = 'run' # default if tags['mode']: mode = tags['mode'][0] elif context == 'pyregr': mode = 'pyregr' if ext == '.srctree': if 'cpp' not in tags['tag'] or 'cpp' in self.languages: suite.addTest(EndToEndTest(filepath, workdir, self.cleanup_workdir)) continue # Choose the test suite. 
if mode == 'pyregr': if not filename.startswith('test_'): continue test_class = CythonPyregrTestCase elif mode == 'run': if module.startswith("test_"): test_class = CythonUnitTestCase else: test_class = CythonRunTestCase else: test_class = CythonCompileTestCase for test in self.build_tests(test_class, path, workdir, module, mode == 'error', tags): suite.addTest(test) if mode == 'run' and ext == '.py' and not self.cython_only and not filename.startswith('test_'): # additionally test file in real Python suite.addTest(PureDoctestTestCase(module, os.path.join(path, filename))) return suite def build_tests(self, test_class, path, workdir, module, expect_errors, tags): warning_errors = 'werror' in tags['tag'] expect_warnings = 'warnings' in tags['tag'] if expect_errors: if 'cpp' in tags['tag'] and 'cpp' in self.languages: languages = ['cpp'] else: languages = self.languages[:1] else: languages = self.languages if 'cpp' in tags['tag'] and 'c' in languages: languages = list(languages) languages.remove('c') elif 'no-cpp' in tags['tag'] and 'cpp' in self.languages: languages = list(languages) languages.remove('cpp') preparse_list = tags.get('preparse', ['id']) tests = [ self.build_test(test_class, path, workdir, module, tags, language, expect_errors, expect_warnings, warning_errors, preparse, self.pythran_dir if language == "cpp" else None) for language in languages for preparse in preparse_list ] return tests def build_test(self, test_class, path, workdir, module, tags, language, expect_errors, expect_warnings, warning_errors, preparse, pythran_dir): language_workdir = os.path.join(workdir, language) if not os.path.exists(language_workdir): os.makedirs(language_workdir) workdir = os.path.join(language_workdir, module) if preparse != 'id': workdir += '_%s' % str(preparse) return test_class(path, workdir, module, tags, language=language, preparse=preparse, expect_errors=expect_errors, expect_warnings=expect_warnings, annotate=self.annotate, cleanup_workdir=self.cleanup_workdir, cleanup_sharedlibs=self.cleanup_sharedlibs, cleanup_failures=self.cleanup_failures, cython_only=self.cython_only, fork=self.fork, language_level=self.language_level, warning_errors=warning_errors, test_determinism=self.test_determinism, common_utility_dir=self.common_utility_dir, pythran_dir=pythran_dir) class CythonCompileTestCase(unittest.TestCase): def __init__(self, test_directory, workdir, module, tags, language='c', preparse='id', expect_errors=False, expect_warnings=False, annotate=False, cleanup_workdir=True, cleanup_sharedlibs=True, cleanup_failures=True, cython_only=False, fork=True, language_level=2, warning_errors=False, test_determinism=False, common_utility_dir=None, pythran_dir=None): self.test_directory = test_directory self.tags = tags self.workdir = workdir self.module = module self.language = language self.preparse = preparse self.name = module if self.preparse == "id" else "%s_%s" % (module, preparse) self.expect_errors = expect_errors self.expect_warnings = expect_warnings self.annotate = annotate self.cleanup_workdir = cleanup_workdir self.cleanup_sharedlibs = cleanup_sharedlibs self.cleanup_failures = cleanup_failures self.cython_only = cython_only self.fork = fork self.language_level = language_level self.warning_errors = warning_errors self.test_determinism = test_determinism self.common_utility_dir = common_utility_dir self.pythran_dir = pythran_dir unittest.TestCase.__init__(self) def shortDescription(self): return "compiling (%s%s) %s" % (self.language, "/pythran" if self.pythran_dir is not None 
else "", self.name) def setUp(self): from Cython.Compiler import Options self._saved_options = [ (name, getattr(Options, name)) for name in ('warning_errors', 'clear_to_none', 'error_on_unknown_names', 'error_on_uninitialized') ] self._saved_default_directives = list(Options.get_directive_defaults().items()) Options.warning_errors = self.warning_errors if sys.version_info >= (3, 4): Options._directive_defaults['autotestdict'] = False if not os.path.exists(self.workdir): os.makedirs(self.workdir) if self.workdir not in sys.path: sys.path.insert(0, self.workdir) def tearDown(self): from Cython.Compiler import Options for name, value in self._saved_options: setattr(Options, name, value) Options._directive_defaults = dict(self._saved_default_directives) unpatch_inspect_isfunction() try: sys.path.remove(self.workdir) except ValueError: pass try: del sys.modules[self.module] except KeyError: pass cleanup = self.cleanup_failures or self.success cleanup_c_files = WITH_CYTHON and self.cleanup_workdir and cleanup cleanup_lib_files = self.cleanup_sharedlibs and cleanup is_cygwin = sys.platform == 'cygwin' if os.path.exists(self.workdir): if cleanup_c_files and cleanup_lib_files and not is_cygwin: shutil.rmtree(self.workdir, ignore_errors=True) else: for rmfile in os.listdir(self.workdir): if not cleanup_c_files: if (rmfile[-2:] in (".c", ".h") or rmfile[-4:] == ".cpp" or rmfile.endswith(".html") and rmfile.startswith(self.module)): continue is_shared_obj = rmfile.endswith(".so") or rmfile.endswith(".dll") if not cleanup_lib_files and is_shared_obj: continue try: rmfile = os.path.join(self.workdir, rmfile) if os.path.isdir(rmfile): shutil.rmtree(rmfile, ignore_errors=True) elif is_cygwin and is_shared_obj: # Delete later _to_clean.append(rmfile) else: os.remove(rmfile) except IOError: pass if cleanup_c_files and cleanup_lib_files and is_cygwin: # Finally, remove the work dir itself _to_clean.append(self.workdir) if cleanup_c_files and os.path.exists(self.workdir + '-again'): shutil.rmtree(self.workdir + '-again', ignore_errors=True) def runTest(self): self.success = False self.runCompileTest() self.success = True def runCompileTest(self): return self.compile( self.test_directory, self.module, self.workdir, self.test_directory, self.expect_errors, self.expect_warnings, self.annotate) def find_module_source_file(self, source_file): if not os.path.exists(source_file): source_file = source_file[:-1] return source_file def build_target_filename(self, module_name): target = '%s.%s' % (module_name, self.language) return target def related_files(self, test_directory, module_name): is_related = re.compile('%s_.*[.].*' % module_name).match return [filename for filename in list_unchanging_dir(test_directory) if is_related(filename)] def copy_files(self, test_directory, target_directory, file_list): if self.preparse and self.preparse != 'id': preparse_func = globals()[self.preparse] def copy(src, dest): open(dest, 'w').write(preparse_func(open(src).read())) else: # use symlink on Unix, copy on Windows try: copy = os.symlink except AttributeError: copy = shutil.copy join = os.path.join for filename in file_list: file_path = join(test_directory, filename) if os.path.exists(file_path): copy(file_path, join(target_directory, filename)) def source_files(self, workdir, module_name, file_list): return ([self.build_target_filename(module_name)] + [filename for filename in file_list if not os.path.isfile(os.path.join(workdir, filename))]) def split_source_and_output(self, test_directory, module, workdir): source_file = 
self.find_module_source_file(os.path.join(test_directory, module) + '.pyx') source_and_output = io_open(source_file, 'rU', encoding='ISO-8859-1') error_writer = warnings_writer = None try: out = io_open(os.path.join(workdir, module + os.path.splitext(source_file)[1]), 'w', encoding='ISO-8859-1') for line in source_and_output: if line.startswith("_ERRORS"): out.close() out = error_writer = ErrorWriter() elif line.startswith("_WARNINGS"): out.close() out = warnings_writer = ErrorWriter() else: out.write(line) finally: source_and_output.close() return (error_writer.geterrors() if error_writer else [], warnings_writer.geterrors() if warnings_writer else []) def run_cython(self, test_directory, module, targetdir, incdir, annotate, extra_compile_options=None): include_dirs = INCLUDE_DIRS + [os.path.join(test_directory, '..', TEST_SUPPORT_DIR)] if incdir: include_dirs.append(incdir) source = self.find_module_source_file( os.path.join(test_directory, module + '.pyx')) if self.preparse == 'id': source = self.find_module_source_file( os.path.join(test_directory, module + '.pyx')) else: self.copy_files(test_directory, targetdir, [module + '.pyx']) source = os.path.join(targetdir, module + '.pyx') target = os.path.join(targetdir, self.build_target_filename(module)) if extra_compile_options is None: extra_compile_options = {} if 'allow_unknown_names' in self.tags['tag']: from Cython.Compiler import Options Options.error_on_unknown_names = False try: CompilationOptions except NameError: from Cython.Compiler.Main import CompilationOptions from Cython.Compiler.Main import compile as cython_compile from Cython.Compiler.Main import default_options common_utility_include_dir = self.common_utility_dir options = CompilationOptions( default_options, include_path = include_dirs, output_file = target, annotate = annotate, use_listing_file = False, cplus = self.language == 'cpp', np_pythran = self.pythran_dir is not None, language_level = self.language_level, generate_pxi = False, evaluate_tree_assertions = True, common_utility_include_dir = common_utility_include_dir, **extra_compile_options ) cython_compile(source, options=options, full_module_name=module) def run_distutils(self, test_directory, module, workdir, incdir, extra_extension_args=None): cwd = os.getcwd() os.chdir(workdir) try: build_extension = build_ext(get_distutils_distro()) build_extension.include_dirs = INCLUDE_DIRS[:] if incdir: build_extension.include_dirs.append(incdir) build_extension.finalize_options() if COMPILER: build_extension.compiler = COMPILER ext_compile_flags = CFLAGS[:] if build_extension.compiler == 'mingw32': ext_compile_flags.append('-Wno-format') if extra_extension_args is None: extra_extension_args = {} if self.pythran_dir is not None: ext_compile_flags.extend([ '-I', self.pythran_dir, '-DENABLE_PYTHON_MODULE', '-std=c++11', '-D__PYTHRAN__=%d' % sys.version_info.major, '-Wno-cpp', ]) related_files = self.related_files(test_directory, module) self.copy_files(test_directory, workdir, related_files) from distutils.core import Extension extension = Extension( module, sources=self.source_files(workdir, module, related_files), extra_compile_args=ext_compile_flags, **extra_extension_args ) if self.language == 'cpp': # Set the language now as the fixer might need it extension.language = 'c++' if 'distutils' in self.tags: from Cython.Build.Dependencies import DistutilsInfo from Cython.Utils import open_source_file pyx_path = os.path.join(self.test_directory, self.module + ".pyx") with open_source_file(pyx_path) as f: 
DistutilsInfo(f).apply(extension) for matcher, fixer in list(EXT_EXTRAS.items()): if isinstance(matcher, str): # lazy init del EXT_EXTRAS[matcher] matcher = string_selector(matcher) EXT_EXTRAS[matcher] = fixer if matcher(module, self.tags): newext = fixer(extension) if newext is EXCLUDE_EXT: return extension = newext or extension if self.language == 'cpp': extension.language = 'c++' build_extension.extensions = [extension] build_extension.build_temp = workdir build_extension.build_lib = workdir build_extension.run() finally: os.chdir(cwd) try: get_ext_fullpath = build_extension.get_ext_fullpath except AttributeError: def get_ext_fullpath(ext_name, self=build_extension): # copied from distutils.command.build_ext (missing in Py2.[45]) fullname = self.get_ext_fullname(ext_name) modpath = fullname.split('.') filename = self.get_ext_filename(modpath[-1]) if not self.inplace: filename = os.path.join(*modpath[:-1]+[filename]) return os.path.join(self.build_lib, filename) package = '.'.join(modpath[0:-1]) build_py = self.get_finalized_command('build_py') package_dir = os.path.abspath(build_py.get_package_dir(package)) return os.path.join(package_dir, filename) return get_ext_fullpath(module) def compile(self, test_directory, module, workdir, incdir, expect_errors, expect_warnings, annotate): expected_errors = expected_warnings = errors = warnings = () if expect_errors or expect_warnings: expected_errors, expected_warnings = self.split_source_and_output( test_directory, module, workdir) test_directory = workdir if WITH_CYTHON: old_stderr = sys.stderr try: sys.stderr = ErrorWriter() self.run_cython(test_directory, module, workdir, incdir, annotate) errors, warnings = sys.stderr.getall() finally: sys.stderr = old_stderr if self.test_determinism and not expect_errors: workdir2 = workdir + '-again' os.mkdir(workdir2) self.run_cython(test_directory, module, workdir2, incdir, annotate) diffs = [] for file in os.listdir(workdir2): if (open(os.path.join(workdir, file)).read() != open(os.path.join(workdir2, file)).read()): diffs.append(file) os.system('diff -u %s/%s %s/%s > %s/%s.diff' % ( workdir, file, workdir2, file, workdir2, file)) if diffs: self.fail('Nondeterministic file generation: %s' % ', '.join(diffs)) tostderr = sys.__stderr__.write if 'cerror' in self.tags['tag']: if errors: tostderr("\n=== Expected C compile error ===\n") tostderr("\n=== Got Cython errors: ===\n") tostderr('\n'.join(errors)) tostderr('\n\n') raise RuntimeError('should have generated extension code') elif errors or expected_errors: self._match_output(expected_errors, errors, tostderr) return None if expected_warnings or (expect_warnings and warnings): self._match_output(expected_warnings, warnings, tostderr) so_path = None if not self.cython_only: from Cython.Utils import captured_fd, print_bytes from distutils.errors import CompileError, LinkError show_output = True get_stderr = get_stdout = None try: with captured_fd(1) as get_stdout: with captured_fd(2) as get_stderr: so_path = self.run_distutils(test_directory, module, workdir, incdir) except Exception as exc: if ('cerror' in self.tags['tag'] and ((get_stderr and get_stderr()) or isinstance(exc, (CompileError, LinkError)))): show_output = False # expected C compiler failure else: raise else: if 'cerror' in self.tags['tag']: raise RuntimeError('should have failed C compile') finally: if show_output: stdout = get_stdout and get_stdout().strip() if stdout: tostderr("\n=== C/C++ compiler output: ===\n") print_bytes(stdout, end=None, file=sys.__stderr__) stderr = get_stderr and 
get_stderr().strip() if stderr: tostderr("\n=== C/C++ compiler error output: ===\n") print_bytes(stderr, end=None, file=sys.__stderr__) if stdout or stderr: tostderr("\n==============================\n") return so_path def _match_output(self, expected_output, actual_output, write): try: for expected, actual in zip(expected_output, actual_output): self.assertEquals(expected, actual) if len(actual_output) < len(expected_output): expected = expected_output[len(actual_output)] self.assertEquals(expected, None) elif len(actual_output) > len(expected_output): unexpected = actual_output[len(expected_output)] self.assertEquals(None, unexpected) except AssertionError: write("\n=== Expected: ===\n") write('\n'.join(expected_output)) write("\n\n=== Got: ===\n") write('\n'.join(actual_output)) write('\n\n') raise class CythonRunTestCase(CythonCompileTestCase): def setUp(self): CythonCompileTestCase.setUp(self) from Cython.Compiler import Options Options.clear_to_none = False def shortDescription(self): if self.cython_only: return CythonCompileTestCase.shortDescription(self) else: return "compiling (%s%s) and running %s" % (self.language, "/pythran" if self.pythran_dir is not None else "", self.name) def run(self, result=None): if result is None: result = self.defaultTestResult() result.startTest(self) try: self.setUp() try: self.success = False ext_so_path = self.runCompileTest() failures, errors = len(result.failures), len(result.errors) if not self.cython_only and ext_so_path is not None: self.run_tests(result, ext_so_path) if failures == len(result.failures) and errors == len(result.errors): # No new errors... self.success = True finally: check_thread_termination() except Exception: result.addError(self, sys.exc_info()) result.stopTest(self) try: self.tearDown() except Exception: pass def run_tests(self, result, ext_so_path): self.run_doctests(self.module, result, ext_so_path) def run_doctests(self, module_or_name, result, ext_so_path): def run_test(result): if isinstance(module_or_name, basestring): module = import_ext(module_or_name, ext_so_path) else: module = module_or_name tests = doctest.DocTestSuite(module) tests.run(result) run_forked_test(result, run_test, self.shortDescription(), self.fork) def run_forked_test(result, run_func, test_name, fork=True): if not fork or sys.version_info[0] >= 3 or not hasattr(os, 'fork'): run_func(result) sys.stdout.flush() sys.stderr.flush() gc.collect() return # fork to make sure we do not keep the tested module loaded result_handle, result_file = tempfile.mkstemp() os.close(result_handle) child_id = os.fork() if not child_id: result_code = 0 output = None try: try: tests = partial_result = None try: partial_result = PartialTestResult(result) run_func(partial_result) sys.stdout.flush() sys.stderr.flush() gc.collect() except Exception: result_code = 1 if partial_result is not None: if tests is None: # importing failed, try to fake a test class tests = _FakeClass( failureException=sys.exc_info()[1], _shortDescription=test_name, module_name=None) partial_result.addError(tests, sys.exc_info()) output = open(result_file, 'wb') pickle.dump(partial_result.data(), output) except: traceback.print_exc() finally: try: sys.stderr.flush() except: pass try: sys.stdout.flush() except: pass try: if output is not None: output.close() except: pass os._exit(result_code) try: cid, result_code = os.waitpid(child_id, 0) module_name = test_name.split()[-1] # os.waitpid returns the child's result code in the # upper byte of result_code, and the signal it was # killed by in the 
lower byte if result_code & 255: raise Exception("Tests in module '%s' were unexpectedly killed by signal %d"% (module_name, result_code & 255)) result_code >>= 8 if result_code in (0,1): input = open(result_file, 'rb') try: PartialTestResult.join_results(result, pickle.load(input)) finally: input.close() if result_code: raise Exception("Tests in module '%s' exited with status %d" % (module_name, result_code)) finally: try: os.unlink(result_file) except: pass class PureDoctestTestCase(unittest.TestCase): def __init__(self, module_name, module_path): self.module_name = module_name self.module_path = module_path unittest.TestCase.__init__(self, 'run') def shortDescription(self): return "running pure doctests in %s" % self.module_name def run(self, result=None): if result is None: result = self.defaultTestResult() loaded_module_name = 'pure_doctest__' + self.module_name result.startTest(self) try: self.setUp() import imp m = imp.load_source(loaded_module_name, self.module_path) try: doctest.DocTestSuite(m).run(result) finally: del m if loaded_module_name in sys.modules: del sys.modules[loaded_module_name] check_thread_termination() except Exception: result.addError(self, sys.exc_info()) result.stopTest(self) try: self.tearDown() except Exception: pass is_private_field = re.compile('^_[^_]').match class _FakeClass(object): def __init__(self, **kwargs): self._shortDescription = kwargs.get('module_name') self.__dict__.update(kwargs) def shortDescription(self): return self._shortDescription try: # Py2.7+ and Py3.2+ from unittest.runner import _TextTestResult except ImportError: from unittest import _TextTestResult class PartialTestResult(_TextTestResult): def __init__(self, base_result): _TextTestResult.__init__( self, self._StringIO(), True, base_result.dots + base_result.showAll*2) def strip_error_results(self, results): for test_case, error in results: for attr_name in filter(is_private_field, dir(test_case)): if attr_name == '_dt_test': test_case._dt_test = _FakeClass( name=test_case._dt_test.name) elif attr_name != '_shortDescription': setattr(test_case, attr_name, None) def data(self): self.strip_error_results(self.failures) self.strip_error_results(self.errors) return (self.failures, self.errors, self.testsRun, self.stream.getvalue()) def join_results(result, data): """Static method for merging the result back into the main result object. 
""" failures, errors, tests_run, output = data if output: result.stream.write(output) result.errors.extend(errors) result.failures.extend(failures) result.testsRun += tests_run join_results = staticmethod(join_results) class _StringIO(StringIO): def writeln(self, line): self.write("%s\n" % line) class CythonUnitTestCase(CythonRunTestCase): def shortDescription(self): return "compiling (%s) tests in %s" % (self.language, self.name) def run_tests(self, result, ext_so_path): module = import_ext(self.module, ext_so_path) unittest.defaultTestLoader.loadTestsFromModule(module).run(result) class CythonPyregrTestCase(CythonRunTestCase): def setUp(self): CythonRunTestCase.setUp(self) from Cython.Compiler import Options Options.error_on_unknown_names = False Options.error_on_uninitialized = False Options._directive_defaults.update(dict( binding=True, always_allow_keywords=True, set_initial_path="SOURCEFILE")) patch_inspect_isfunction() def related_files(self, test_directory, module_name): return _list_pyregr_data_files(test_directory) def _run_unittest(self, result, *classes): """Run tests from unittest.TestCase-derived classes.""" valid_types = (unittest.TestSuite, unittest.TestCase) suite = unittest.TestSuite() for cls in classes: if isinstance(cls, str): if cls in sys.modules: suite.addTest(unittest.findTestCases(sys.modules[cls])) else: raise ValueError("str arguments must be keys in sys.modules") elif isinstance(cls, valid_types): suite.addTest(cls) else: suite.addTest(unittest.makeSuite(cls)) suite.run(result) def _run_doctest(self, result, module): self.run_doctests(module, result, None) def run_tests(self, result, ext_so_path): try: from test import support except ImportError: # Python2.x from test import test_support as support def run_test(result): def run_unittest(*classes): return self._run_unittest(result, *classes) def run_doctest(module, verbosity=None): return self._run_doctest(result, module) backup = (support.run_unittest, support.run_doctest) support.run_unittest = run_unittest support.run_doctest = run_doctest try: try: sys.stdout.flush() # helps in case of crashes module = import_ext(self.module, ext_so_path) sys.stdout.flush() # helps in case of crashes if hasattr(module, 'test_main'): # help 'doctest.DocFileTest' find the module path through frame inspection fake_caller_module_globals = { 'module': module, '__name__': module.__name__, } call_tests = eval( 'lambda: module.test_main()', fake_caller_module_globals, fake_caller_module_globals) call_tests() sys.stdout.flush() # helps in case of crashes except (unittest.SkipTest, support.ResourceDenied): result.addSkip(self, 'ok') finally: support.run_unittest, support.run_doctest = backup run_forked_test(result, run_test, self.shortDescription(), self.fork) include_debugger = IS_CPYTHON and sys.version_info[:2] > (2, 5) def collect_unittests(path, module_prefix, suite, selectors, exclude_selectors): def file_matches(filename): return filename.startswith("Test") and filename.endswith(".py") def package_matches(dirname): return dirname == "Tests" loader = unittest.TestLoader() if include_debugger: skipped_dirs = [] else: skipped_dirs = ['Cython' + os.path.sep + 'Debugger' + os.path.sep] for dirpath, dirnames, filenames in os.walk(path): if dirpath != path and "__init__.py" not in filenames: skipped_dirs.append(dirpath + os.path.sep) continue skip = False for dir in skipped_dirs: if dirpath.startswith(dir): skip = True if skip: continue parentname = os.path.split(dirpath)[-1] if package_matches(parentname): for f in filenames: if 
file_matches(f): filepath = os.path.join(dirpath, f)[:-len(".py")] modulename = module_prefix + filepath[len(path)+1:].replace(os.path.sep, '.') if not any(1 for match in selectors if match(modulename)): continue if any(1 for match in exclude_selectors if match(modulename)): continue module = __import__(modulename) for x in modulename.split('.')[1:]: module = getattr(module, x) suite.addTests([loader.loadTestsFromModule(module)]) def collect_doctests(path, module_prefix, suite, selectors, exclude_selectors): def package_matches(dirname): if dirname == 'Debugger' and not include_debugger: return False return dirname not in ("Mac", "Distutils", "Plex", "Tempita") def file_matches(filename): filename, ext = os.path.splitext(filename) blacklist = ['libcython', 'libpython', 'test_libcython_in_gdb', 'TestLibCython'] return (ext == '.py' and not '~' in filename and not '#' in filename and not filename.startswith('.') and not filename in blacklist) import doctest for dirpath, dirnames, filenames in os.walk(path): for dir in list(dirnames): if not package_matches(dir): dirnames.remove(dir) for f in filenames: if file_matches(f): if not f.endswith('.py'): continue filepath = os.path.join(dirpath, f) if os.path.getsize(filepath) == 0: continue filepath = filepath[:-len(".py")] modulename = module_prefix + filepath[len(path)+1:].replace(os.path.sep, '.') if not [ 1 for match in selectors if match(modulename) ]: continue if [ 1 for match in exclude_selectors if match(modulename) ]: continue if 'in_gdb' in modulename: # These should only be imported from gdb. continue module = __import__(modulename) for x in modulename.split('.')[1:]: module = getattr(module, x) if hasattr(module, "__doc__") or hasattr(module, "__test__"): try: suite.addTest(doctest.DocTestSuite(module)) except ValueError: # no tests pass class EndToEndTest(unittest.TestCase): """ This is a test of build/*.srctree files, where srctree defines a full directory structure and its header gives a list of commands to run. 
""" cython_root = os.path.dirname(os.path.abspath(__file__)) def __init__(self, treefile, workdir, cleanup_workdir=True): self.name = os.path.splitext(os.path.basename(treefile))[0] self.treefile = treefile self.workdir = os.path.join(workdir, self.name) self.cleanup_workdir = cleanup_workdir cython_syspath = [self.cython_root] for path in sys.path: if path.startswith(self.cython_root) and path not in cython_syspath: # Py3 installation and refnanny build prepend their # fixed paths to sys.path => prefer that over the # generic one (cython_root itself goes last) cython_syspath.append(path) self.cython_syspath = os.pathsep.join(cython_syspath[::-1]) unittest.TestCase.__init__(self) def shortDescription(self): return "End-to-end %s" % self.name def setUp(self): from Cython.TestUtils import unpack_source_tree _, self.commands = unpack_source_tree(self.treefile, self.workdir) self.old_dir = os.getcwd() os.chdir(self.workdir) if self.workdir not in sys.path: sys.path.insert(0, self.workdir) def tearDown(self): if self.cleanup_workdir: for trial in range(5): try: shutil.rmtree(self.workdir) except OSError: time.sleep(0.1) else: break os.chdir(self.old_dir) def _try_decode(self, content): try: return content.decode() except UnicodeDecodeError: return content.decode('iso-8859-1') def runTest(self): self.success = False commands = (self.commands .replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py')) .replace("PYTHON", sys.executable)) old_path = os.environ.get('PYTHONPATH') os.environ['PYTHONPATH'] = self.cython_syspath + os.pathsep + (old_path or '') try: for command in filter(None, commands.splitlines()): p = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True) out, err = p.communicate() res = p.returncode if res != 0: print(command) print(self._try_decode(out)) print(self._try_decode(err)) self.assertEqual(0, res, "non-zero exit status") finally: if old_path: os.environ['PYTHONPATH'] = old_path else: del os.environ['PYTHONPATH'] self.success = True # TODO: Support cython_freeze needed here as well. # TODO: Windows support. 
class EmbedTest(unittest.TestCase): working_dir = "Demos/embed" def setUp(self): self.old_dir = os.getcwd() os.chdir(self.working_dir) os.system( "make PYTHON='%s' clean > /dev/null" % sys.executable) def tearDown(self): try: os.system( "make PYTHON='%s' clean > /dev/null" % sys.executable) except: pass os.chdir(self.old_dir) def test_embed(self): libname = sysconfig.get_config_var('LIBRARY') libdir = sysconfig.get_config_var('LIBDIR') if not os.path.isdir(libdir) or libname not in os.listdir(libdir): libdir = os.path.join(os.path.dirname(sys.executable), '..', 'lib') if not os.path.isdir(libdir) or libname not in os.listdir(libdir): libdir = os.path.join(libdir, 'python%d.%d' % sys.version_info[:2], 'config') if not os.path.isdir(libdir) or libname not in os.listdir(libdir): # report the error for the original directory libdir = sysconfig.get_config_var('LIBDIR') cython = 'cython.py' if sys.version_info[0] >=3 and CY3_DIR: cython = os.path.join(CY3_DIR, cython) cython = os.path.abspath(os.path.join('..', '..', cython)) self.assert_(os.system( "make PYTHON='%s' CYTHON='%s' LIBDIR1='%s' test > make.output" % (sys.executable, cython, libdir)) == 0) try: os.remove('make.output') except OSError: pass class MissingDependencyExcluder: def __init__(self, deps): # deps: { matcher func : module name } self.exclude_matchers = [] for matcher, mod in deps.items(): try: __import__(mod) except ImportError: self.exclude_matchers.append(string_selector(matcher)) self.tests_missing_deps = [] def __call__(self, testname, tags=None): for matcher in self.exclude_matchers: if matcher(testname, tags): self.tests_missing_deps.append(testname) return True return False class VersionDependencyExcluder: def __init__(self, deps): # deps: { version : matcher func } from sys import version_info self.exclude_matchers = [] for ver, (compare, matcher) in deps.items(): if compare(version_info, ver): self.exclude_matchers.append(matcher) self.tests_missing_deps = [] def __call__(self, testname, tags=None): for matcher in self.exclude_matchers: if matcher(testname): self.tests_missing_deps.append(testname) return True return False class FileListExcluder: def __init__(self, list_file, verbose=False): self.verbose = verbose self.excludes = {} self._list_file = os.path.relpath(list_file) with open(list_file) as f: for line in f: line = line.strip() if line and line[0] != '#': self.excludes[line.split()[0]] = True def __call__(self, testname, tags=None): exclude = (testname in self.excludes or testname.split('.')[-1] in self.excludes) if exclude and self.verbose: print("Excluding %s because it's listed in %s" % (testname, self._list_file)) return exclude class TagsSelector: def __init__(self, tag, value): self.tag = tag self.value = value def __call__(self, testname, tags=None): if tags is None: return False else: return self.value in tags[self.tag] class RegExSelector: def __init__(self, pattern_string): try: self.pattern = re.compile(pattern_string, re.I|re.U) except re.error: print('Invalid pattern: %r' % pattern_string) raise def __call__(self, testname, tags=None): return self.pattern.search(testname) def string_selector(s): ix = s.find(':') if ix == -1: return RegExSelector(s) else: return TagsSelector(s[:ix], s[ix+1:]) class ShardExcludeSelector: # This is an exclude selector so it can override the (include) selectors. # It may not provide uniform distribution (in time or count), but is a # deterministic partition of the tests which is important. 
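    # Example: with shard_count=3, a test whose name hashes so that abs(hash(name)) == 17
    # belongs to shard 17 % 3 == 2; __call__ then returns True ("exclude") when shard_num
    # is 0 or 1 and False for shard 2, so every test is run by exactly one shard.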
def __init__(self, shard_num, shard_count): self.shard_num = shard_num self.shard_count = shard_count def __call__(self, testname, tags=None): return abs(hash(testname)) % self.shard_count != self.shard_num def refactor_for_py3(distdir, cy3_dir): # need to convert Cython sources first import lib2to3.refactor from distutils.util import copydir_run_2to3 with open('2to3-fixers.txt') as f: fixers = [line.strip() for line in f if line.strip()] if not os.path.exists(cy3_dir): os.makedirs(cy3_dir) import distutils.log as dlog dlog.set_threshold(dlog.INFO) copydir_run_2to3(distdir, cy3_dir, fixer_names=fixers, template = ''' global-exclude * graft Cython recursive-exclude Cython * recursive-include Cython *.py *.pyx *.pxd recursive-include Cython/Debugger/Tests * recursive-include Cython/Utility * recursive-exclude pyximport test include Tools/*.py include pyximport/*.py include runtests.py include cython.py include cythonize.py ''') sys.path.insert(0, cy3_dir) class PendingThreadsError(RuntimeError): pass threads_seen = [] def check_thread_termination(ignore_seen=True): if threading is None: # no threading enabled in CPython return current = threading.currentThread() blocking_threads = [] for t in threading.enumerate(): if not t.isAlive() or t == current: continue t.join(timeout=2) if t.isAlive(): if not ignore_seen: blocking_threads.append(t) continue for seen in threads_seen: if t is seen: break else: threads_seen.append(t) blocking_threads.append(t) if not blocking_threads: return sys.stderr.write("warning: left-over threads found after running test:\n") for t in blocking_threads: sys.stderr.write('...%s\n' % repr(t)) raise PendingThreadsError("left-over threads found after running test") def subprocess_output(cmd): try: p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return p.communicate()[0].decode('UTF-8') except OSError: return '' def get_version(): from Cython.Compiler.Version import version as cython_version full_version = cython_version top = os.path.dirname(os.path.abspath(__file__)) if os.path.exists(os.path.join(top, '.git')): old_dir = os.getcwd() try: os.chdir(top) head_commit = subprocess_output(['git', 'rev-parse', 'HEAD']).strip() version_commit = subprocess_output(['git', 'rev-parse', cython_version]).strip() diff = subprocess_output(['git', 'diff', '--stat']).strip() if head_commit != version_commit: full_version += " " + head_commit if diff: full_version += ' + uncommitted changes' finally: os.chdir(old_dir) return full_version _orig_stdout, _orig_stderr = sys.stdout, sys.stderr def flush_and_terminate(status): try: _orig_stdout.flush() _orig_stderr.flush() finally: os._exit(status) def main(): global DISTDIR, WITH_CYTHON DISTDIR = os.path.join(os.getcwd(), os.path.dirname(sys.argv[0])) from Cython.Compiler import DebugFlags args = [] for arg in sys.argv[1:]: if arg.startswith('--debug') and arg[2:].replace('-', '_') in dir(DebugFlags): setattr(DebugFlags, arg[2:].replace('-', '_'), True) else: args.append(arg) from optparse import OptionParser parser = OptionParser() parser.add_option("--no-cleanup", dest="cleanup_workdir", action="store_false", default=True, help="do not delete the generated C files (allows passing --no-cython on next run)") parser.add_option("--no-cleanup-sharedlibs", dest="cleanup_sharedlibs", action="store_false", default=True, help="do not delete the generated shared library files (allows manual module experimentation)") parser.add_option("--no-cleanup-failures", dest="cleanup_failures", action="store_false", default=True, 
help="enable --no-cleanup and --no-cleanup-sharedlibs for failed tests only") parser.add_option("--no-cython", dest="with_cython", action="store_false", default=True, help="do not run the Cython compiler, only the C compiler") parser.add_option("--compiler", dest="compiler", default=None, help="C compiler type") backend_list = ','.join(BACKENDS) parser.add_option("--backends", dest="backends", default=backend_list, help="select backends to test (default: %s)" % backend_list) parser.add_option("--no-c", dest="use_c", action="store_false", default=True, help="do not test C compilation backend") parser.add_option("--no-cpp", dest="use_cpp", action="store_false", default=True, help="do not test C++ compilation backend") parser.add_option("--no-unit", dest="unittests", action="store_false", default=True, help="do not run the unit tests") parser.add_option("--no-doctest", dest="doctests", action="store_false", default=True, help="do not run the doctests") parser.add_option("--no-file", dest="filetests", action="store_false", default=True, help="do not run the file based tests") parser.add_option("--no-pyregr", dest="pyregr", action="store_false", default=True, help="do not run the regression tests of CPython in tests/pyregr/") parser.add_option("--cython-only", dest="cython_only", action="store_true", default=False, help="only compile pyx to c, do not run C compiler or run the tests") parser.add_option("--no-refnanny", dest="with_refnanny", action="store_false", default=True, help="do not regression test reference counting") parser.add_option("--no-fork", dest="fork", action="store_false", default=True, help="do not fork to run tests") parser.add_option("--sys-pyregr", dest="system_pyregr", action="store_true", default=False, help="run the regression tests of the CPython installation") parser.add_option("-x", "--exclude", dest="exclude", action="append", metavar="PATTERN", help="exclude tests matching the PATTERN") parser.add_option("-j", "--shard_count", dest="shard_count", metavar="N", type=int, default=1, help="shard this run into several parallel runs") parser.add_option("--shard_num", dest="shard_num", metavar="K", type=int, default=-1, help="test only this single shard") parser.add_option("-C", "--coverage", dest="coverage", action="store_true", default=False, help="collect source coverage data for the Compiler") parser.add_option("--coverage-xml", dest="coverage_xml", action="store_true", default=False, help="collect source coverage data for the Compiler in XML format") parser.add_option("--coverage-html", dest="coverage_html", action="store_true", default=False, help="collect source coverage data for the Compiler in HTML format") parser.add_option("-A", "--annotate", dest="annotate_source", action="store_true", default=True, help="generate annotated HTML versions of the test source files") parser.add_option("--no-annotate", dest="annotate_source", action="store_false", help="do not generate annotated HTML versions of the test source files") parser.add_option("-v", "--verbose", dest="verbosity", action="count", default=0, help="display test progress, pass twice to print test names") parser.add_option("-T", "--ticket", dest="tickets", action="append", help="a bug ticket number to run the respective test in 'tests/*'") parser.add_option("-3", dest="language_level", action="store_const", const=3, default=2, help="set language level to Python 3 (useful for running the CPython regression tests)'") parser.add_option("--xml-output", dest="xml_output_dir", metavar="DIR", help="write test results 
in XML to directory DIR") parser.add_option("--exit-ok", dest="exit_ok", default=False, action="store_true", help="exit without error code even on test failures") parser.add_option("--root-dir", dest="root_dir", default=os.path.join(DISTDIR, 'tests'), help="working directory") parser.add_option("--work-dir", dest="work_dir", default=os.path.join(os.getcwd(), 'TEST_TMP'), help="working directory") parser.add_option("--cython-dir", dest="cython_dir", default=os.getcwd(), help="Cython installation directory (default: use local source version)") parser.add_option("--debug", dest="for_debugging", default=False, action="store_true", help="configure for easier use with a debugger (e.g. gdb)") parser.add_option("--pyximport-py", dest="pyximport_py", default=False, action="store_true", help="use pyximport to automatically compile imported .pyx and .py files") parser.add_option("--watermark", dest="watermark", default=None, help="deterministic generated by string") parser.add_option("--use_common_utility_dir", default=False, action="store_true") parser.add_option("--use_formal_grammar", default=False, action="store_true") parser.add_option("--test_determinism", default=False, action="store_true", help="test whether Cython's output is deterministic") parser.add_option("--pythran-dir", dest="pythran_dir", default=None, help="specify Pythran include directory. This will run the C++ tests using Pythran backend for Numpy") options, cmd_args = parser.parse_args(args) WORKDIR = os.path.abspath(options.work_dir) if options.with_cython and sys.version_info[0] >= 3: sys.path.insert(0, options.cython_dir) if sys.version_info[:2] == (3, 2): try: # try if Cython is installed in a Py3 version import Cython.Compiler.Main except Exception: # back out anything the import process loaded, then # 2to3 the Cython sources to make them re-importable cy_modules = [ name for name in sys.modules if name == 'Cython' or name.startswith('Cython.') ] for name in cy_modules: del sys.modules[name] # hasn't been refactored yet - do it now global CY3_DIR CY3_DIR = cy3_dir = os.path.join(WORKDIR, 'Cy3') refactor_for_py3(DISTDIR, cy3_dir) if options.watermark: import Cython.Compiler.Version Cython.Compiler.Version.watermark = options.watermark WITH_CYTHON = options.with_cython coverage = None if options.coverage or options.coverage_xml or options.coverage_html: if options.shard_count <= 1 and options.shard_num < 0: if not WITH_CYTHON: options.coverage = options.coverage_xml = options.coverage_html = False else: print("Enabling coverage analysis") from coverage import coverage as _coverage coverage = _coverage(branch=True, omit=['Test*']) coverage.erase() coverage.start() if options.xml_output_dir: shutil.rmtree(options.xml_output_dir, ignore_errors=True) if WITH_CYTHON: global CompilationOptions, pyrex_default_options, cython_compile from Cython.Compiler.Main import \ CompilationOptions, \ default_options as pyrex_default_options, \ compile as cython_compile from Cython.Compiler import Errors Errors.LEVEL = 0 # show all warnings from Cython.Compiler import Options Options.generate_cleanup_code = 3 # complete cleanup code from Cython.Compiler import DebugFlags DebugFlags.debug_temp_code_comments = 1 pyrex_default_options['formal_grammar'] = options.use_formal_grammar if options.shard_count > 1 and options.shard_num == -1: import multiprocessing pool = multiprocessing.Pool(options.shard_count) tasks = [(options, cmd_args, shard_num) for shard_num in range(options.shard_count)] errors = [] for shard_num, return_code in 
pool.imap_unordered(runtests_callback, tasks): if return_code != 0: errors.append(shard_num) print("FAILED (%s/%s)" % (shard_num, options.shard_count)) print("ALL DONE (%s/%s)" % (shard_num, options.shard_count)) pool.close() pool.join() if errors: print("Errors for shards %s" % ", ".join([str(e) for e in errors])) return_code = 1 else: return_code = 0 else: _, return_code = runtests(options, cmd_args, coverage) print("ALL DONE") try: check_thread_termination(ignore_seen=False) except PendingThreadsError: # normal program exit won't kill the threads, do it the hard way here flush_and_terminate(return_code) else: sys.exit(return_code) def runtests_callback(args): options, cmd_args, shard_num = args options.shard_num = shard_num return runtests(options, cmd_args) def runtests(options, cmd_args, coverage=None): WITH_CYTHON = options.with_cython ROOTDIR = os.path.abspath(options.root_dir) WORKDIR = os.path.abspath(options.work_dir) xml_output_dir = options.xml_output_dir if options.shard_num > -1: WORKDIR = os.path.join(WORKDIR, str(options.shard_num)) if xml_output_dir: xml_output_dir = os.path.join(xml_output_dir, 'shard-%03d' % options.shard_num) # RUN ALL TESTS! UNITTEST_MODULE = "Cython" UNITTEST_ROOT = os.path.join(os.path.dirname(__file__), UNITTEST_MODULE) if WITH_CYTHON: if os.path.exists(WORKDIR): for path in os.listdir(WORKDIR): if path in ("support", "Cy3"): continue shutil.rmtree(os.path.join(WORKDIR, path), ignore_errors=True) if not os.path.exists(WORKDIR): os.makedirs(WORKDIR) if options.shard_num <= 0: sys.stderr.write("Python %s\n" % sys.version) sys.stderr.write("\n") if WITH_CYTHON: sys.stderr.write("Running tests against Cython %s\n" % get_version()) else: sys.stderr.write("Running tests without Cython.\n") if options.for_debugging: options.cleanup_workdir = False options.cleanup_sharedlibs = False options.fork = False if WITH_CYTHON and include_debugger: from Cython.Compiler.Main import default_options as compiler_default_options compiler_default_options['gdb_debug'] = True compiler_default_options['output_dir'] = os.getcwd() if IS_PYPY: if options.with_refnanny: sys.stderr.write("Disabling refnanny in PyPy\n") options.with_refnanny = False if options.with_refnanny: from pyximport.pyxbuild import pyx_to_dll libpath = pyx_to_dll(os.path.join("Cython", "Runtime", "refnanny.pyx"), build_in_temp=True, pyxbuild_dir=os.path.join(WORKDIR, "support")) sys.path.insert(0, os.path.split(libpath)[0]) CFLAGS.append("-DCYTHON_REFNANNY=1") if xml_output_dir and options.fork: # doesn't currently work together sys.stderr.write("Disabling forked testing to support XML test output\n") options.fork = False if WITH_CYTHON and options.language_level == 3: sys.stderr.write("Using Cython language level 3.\n") test_bugs = False if options.tickets: for ticket_number in options.tickets: test_bugs = True cmd_args.append('ticket:%s' % ticket_number) if not test_bugs: for selector in cmd_args: if selector.startswith('bugs'): test_bugs = True selectors = [ string_selector(r) for r in cmd_args ] verbose_excludes = selectors or options.verbosity >= 2 if not selectors: selectors = [ lambda x, tags=None: True ] # Check which external modules are not present and exclude tests # which depends on them (by prefix) missing_dep_excluder = MissingDependencyExcluder(EXT_DEP_MODULES) version_dep_excluder = VersionDependencyExcluder(VER_DEP_MODULES) exclude_selectors = [missing_dep_excluder, version_dep_excluder] # want to print msg at exit try: import IPython.core.release if list(IPython.core.release._ver) < [1, 0, 
0]: raise ImportError except (ImportError, AttributeError, TypeError): exclude_selectors.append(RegExSelector('IPython')) try: import jedi if not ([0, 9] <= list(map(int, re.findall('[0-9]+', jedi.__version__ or '0')))): raise ImportError except (ImportError, AttributeError, TypeError): exclude_selectors.append(RegExSelector('Jedi')) if options.exclude: exclude_selectors += [ string_selector(r) for r in options.exclude ] if not COMPILER_HAS_INT128 or not IS_CPYTHON: exclude_selectors += [RegExSelector('int128')] if options.shard_num > -1: exclude_selectors.append(ShardExcludeSelector(options.shard_num, options.shard_count)) if not test_bugs: bug_files = [ ('bugs.txt', True), ('pypy_bugs.txt', IS_PYPY), ('windows_bugs.txt', sys.platform == 'win32'), ('cygwin_bugs.txt', sys.platform == 'cygwin') ] exclude_selectors += [ FileListExcluder(os.path.join(ROOTDIR, bugs_file_name), verbose=verbose_excludes) for bugs_file_name, condition in bug_files if condition ] if sys.platform in ['win32', 'cygwin'] and sys.version_info < (2,6): exclude_selectors += [ lambda x: x == "run.specialfloat" ] global COMPILER if options.compiler: COMPILER = options.compiler selected_backends = [ name.strip() for name in options.backends.split(',') if name.strip() ] backends = [] for backend in selected_backends: if backend == 'c' and not options.use_c: continue elif backend == 'cpp' and not options.use_cpp: continue elif backend not in BACKENDS: sys.stderr.write("Unknown backend requested: '%s' not one of [%s]\n" % ( backend, ','.join(BACKENDS))) sys.exit(1) backends.append(backend) if options.shard_num <= 0: sys.stderr.write("Backends: %s\n" % ','.join(backends)) languages = backends if options.use_common_utility_dir: common_utility_dir = os.path.join(WORKDIR, 'utility_code') if not os.path.exists(common_utility_dir): os.makedirs(common_utility_dir) else: common_utility_dir = None sys.stderr.write("\n") test_suite = unittest.TestSuite() if options.unittests: collect_unittests(UNITTEST_ROOT, UNITTEST_MODULE + ".", test_suite, selectors, exclude_selectors) if options.doctests: collect_doctests(UNITTEST_ROOT, UNITTEST_MODULE + ".", test_suite, selectors, exclude_selectors) if options.filetests and languages: filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, options.annotate_source, options.cleanup_workdir, options.cleanup_sharedlibs, options.cleanup_failures, options.pyregr, options.cython_only, languages, test_bugs, options.fork, options.language_level, options.test_determinism, common_utility_dir, options.pythran_dir) test_suite.addTest(filetests.build_suite()) if options.system_pyregr and languages: sys_pyregr_dir = os.path.join(sys.prefix, 'lib', 'python'+sys.version[:3], 'test') if not os.path.isdir(sys_pyregr_dir): sys_pyregr_dir = os.path.join(os.path.dirname(sys.executable), 'Lib', 'test') # source build if os.path.isdir(sys_pyregr_dir): filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors, options.annotate_source, options.cleanup_workdir, options.cleanup_sharedlibs, options.cleanup_failures, True, options.cython_only, languages, test_bugs, options.fork, sys.version_info[0], options.test_determinism, common_utility_dir, options.pythran_dir) sys.stderr.write("Including CPython regression tests in %s\n" % sys_pyregr_dir) test_suite.addTest(filetests.handle_directory(sys_pyregr_dir, 'pyregr')) if xml_output_dir: from Cython.Tests.xmlrunner import XMLTestRunner if not os.path.exists(xml_output_dir): try: os.makedirs(xml_output_dir) except OSError: pass # concurrency issue? 
test_runner = XMLTestRunner(output=xml_output_dir, verbose=options.verbosity > 0) else: test_runner = unittest.TextTestRunner(verbosity=options.verbosity) if options.pyximport_py: from pyximport import pyximport pyximport.install(pyimport=True, build_dir=os.path.join(WORKDIR, '_pyximport'), load_py_module_on_import_failure=True, inplace=True) result = test_runner.run(test_suite) if common_utility_dir and options.shard_num < 0 and options.cleanup_workdir: shutil.rmtree(common_utility_dir) if coverage is not None: coverage.stop() ignored_modules = set( 'Cython.Compiler.' + name for name in ('Version', 'DebugFlags', 'CmdLine')) | set( 'Cython.' + name for name in ('Debugging',)) ignored_packages = ['Cython.Runtime', 'Cython.Tempita'] modules = [ module for name, module in sys.modules.items() if module is not None and name.startswith('Cython.') and '.Tests' not in name and name not in ignored_modules and not any(name.startswith(package) for package in ignored_packages) ] if options.coverage: coverage.report(modules, show_missing=0) if options.coverage_xml: coverage.xml_report(modules, outfile="coverage-report.xml") if options.coverage_html: coverage.html_report(modules, directory="coverage-report-html") if missing_dep_excluder.tests_missing_deps: sys.stderr.write("Following tests excluded because of missing dependencies on your system:\n") for test in missing_dep_excluder.tests_missing_deps: sys.stderr.write(" %s\n" % test) if options.with_refnanny: import refnanny sys.stderr.write("\n".join([repr(x) for x in refnanny.reflog])) if options.exit_ok: return options.shard_num, 0 else: return options.shard_num, not result.wasSuccessful() if __name__ == '__main__': try: main() except Exception: traceback.print_exc() try: check_thread_termination(ignore_seen=False) except PendingThreadsError: # normal program exit won't kill the threads, do it the hard way here flush_and_terminate(1) sys.exit(1) Cython-0.26.1/pylintrc0000664000175000017500000001721513023021033015410 0ustar stefanstefan00000000000000[MASTER] # Specify a configuration file. #rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore=.git,.gitmarker # Pickle collected data for later comparisons. persistent=yes # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= [MESSAGES CONTROL] # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time. #enable= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). ## R0201: Method could be a function ## R0904: Too many public methods ## W0201: Attribute defined outside __init__() ## W0141: Used builtin function 'map' disable=E1101,C0111,R0201,R0904,W0201,W0141 [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. 
output-format=parseable # Include message's id in output include-ids=yes # Include symbolic ids of messages in output symbols=no # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". files-output=no # Tells whether to display a full report or only the messages reports=no # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). comment=no [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [BASIC] # Required attributes for module, separated by a comma required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,apply,input # Regular expression which should only match correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression which should only match correct module level names const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$ # Regular expression which should only match correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Regular expression which should only match correct function names function-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct method names method-rgx=[a-z_][a-z0-9_]{2,30}|visit_[A-Za-z]+$ # Regular expression which should only match correct instance attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}|sy$ # Regular expression which should only match correct argument names argument-rgx=[a-z_][a-z0-9_]{0,30}$ # Regular expression which should only match correct variable names variable-rgx=[a-z_][a-z0-9_]{0,30}$ # Regular expression which should only match correct list comprehension / # generator expression variable names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Regular expression which should only match functions or classes name which do # not require a docstring no-docstring-rgx=__.*__ [FORMAT] # Maximum number of characters on a single line. max-line-length=120 # Maximum number of lines in a module max-module-lines=15000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). 
ignored-classes= # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. generated-members=REQUEST,acl_users,aq_parent [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the beginning of the name of dummy variables # (i.e. not used). dummy-variables-rgx=_|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,string,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [DESIGN] # Maximum number of arguments for function / method max-args=12 # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branchs=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [CLASSES] # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception
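# Worked example for the "evaluation" formula above (illustrative numbers only):
# checking 200 statements that produce 2 errors, 10 warnings, 5 refactor and
# 3 convention messages yields 10.0 - ((5*2 + 10 + 5 + 3) / 200) * 10 = 8.6 / 10.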