pax_global_header00006660000000000000000000000064144763767410014535gustar00rootroot0000000000000052 comment=639877c67957c1b606d21e2c57b0c8d0b157993b compreffor-0.5.5/000077500000000000000000000000001447637674100137065ustar00rootroot00000000000000compreffor-0.5.5/.github/000077500000000000000000000000001447637674100152465ustar00rootroot00000000000000compreffor-0.5.5/.github/workflows/000077500000000000000000000000001447637674100173035ustar00rootroot00000000000000compreffor-0.5.5/.github/workflows/ci.yml000066400000000000000000000136401447637674100204250ustar00rootroot00000000000000name: Build + Deploy on: push: branches: [main] tags: ["*.*.*"] pull_request: branches: [main] env: CIBW_TEST_REQUIRES: "pytest -rrequirements.txt" CIBW_TEST_COMMAND: "pytest --pyargs compreffor" jobs: build_sdist: name: Build Source Distribution runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodules: recursive # setuptools_scm won't work with shallow clone; fetch all history fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v2 with: python-version: "3.x" - name: Build sdist run: pipx run build --sdist - name: Check metadata run: pipx run twine check dist/*.tar.gz - uses: actions/upload-artifact@v2 with: path: dist/*.tar.gz build_wheels: name: ${{ matrix.type }} ${{ matrix.arch }} on ${{ matrix.os }} runs-on: ${{ matrix.os }} defaults: run: shell: bash strategy: fail-fast: false matrix: os: [macos-latest, windows-latest] arch: [auto64] build: ["*"] skip: ["cp36-* cp37-* pp*"] include: # the manylinux1 docker images only contain python3.8 and 3.9 - os: ubuntu-latest type: manylinux1 arch: auto64 build: "cp{38,39}-manylinux*" CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 CIBW_MANYLINUX_I686_IMAGE: manylinux1 # the manylinux2010 image also contains CPython 3.10, and pypy-3.8 - os: ubuntu-latest arch: auto64 type: manylinux2010 build: "pp38-manylinux* cp310-manylinux*" CIBW_MANYLINUX_X86_64_IMAGE: manylinux2010 CIBW_MANYLINUX_I686_IMAGE: manylinux2010 # the manylinux2014 image contains pypy-3.9 and CPython 3.11 and 3.12 - os: ubuntu-latest arch: auto64 type: manylinux2014 build: "pp39-manylinux* cp311-manylinux* cp312-manylinux*" CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014 CIBW_MANYLINUX_I686_IMAGE: manylinux2014 - os: macos-latest arch: universal2 build: "*" skip: "cp36-* cp37-* pp*" - os: windows-latest arch: auto32 build: "*" skip: "cp36-* cp37-* pp*" steps: - uses: actions/checkout@v2 with: submodules: recursive fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v2 with: python-version: "3.x" - name: Install dependencies run: pip install cibuildwheel - name: Build Wheels run: python -m cibuildwheel --output-dir wheelhouse . env: CIBW_BUILD: ${{ matrix.build }} CIBW_MANYLINUX_I686_IMAGE: ${{ matrix.CIBW_MANYLINUX_I686_IMAGE }} CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.CIBW_MANYLINUX_X86_64_IMAGE }} CIBW_ARCHS: ${{ matrix.arch }} CIBW_SKIP: ${{ matrix.skip }} - uses: actions/upload-artifact@v2 with: path: wheelhouse/*.whl build_arch_wheels: name: py${{ matrix.python }} on ${{ matrix.arch }} runs-on: ubuntu-latest strategy: matrix: # aarch64 uses qemu so it's slow, build each py version in parallel jobs python: [38, 39, 310, 311, 312] arch: [aarch64] steps: - uses: actions/checkout@v2 with: submodules: recursive fetch-depth: 0 - uses: docker/setup-qemu-action@v1.2.0 with: platforms: all - name: Install dependencies run: pip install cibuildwheel - name: Build Wheels run: python -m cibuildwheel --output-dir wheelhouse . env: CIBW_BUILD: cp${{ matrix.python }}-manylinux* CIBW_ARCHS: ${{ matrix.arch }} - uses: actions/upload-artifact@v2 with: path: wheelhouse/*.whl deploy: name: Upload if tagged commit if: startsWith(github.ref, 'refs/tags/') # but only if all build jobs completed successfully needs: [build_wheels, build_arch_wheels, build_sdist] runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: name: artifact path: dist - name: Extract release notes from annotated tag message id: release_notes env: # e.g. 0.1.0a1, 1.2.0b2 or 2.3.0rc3, but not 1.0.0 PRERELEASE_TAG_PATTERN: "[[:digit:]]+\\.[[:digit:]]+\\.[[:digit:]]+([ab]|rc)[[:digit:]]+" run: | # GH checkout action doesn't preserve tag annotations, we must fetch them # https://github.com/actions/checkout/issues/290 git fetch --tags --force # strip leading 'refs/tags/' to get the tag name TAG_NAME="${GITHUB_REF##*/}" # Dump tag message to temporary .md file (excluding the PGP signature at the bottom) TAG_MESSAGE=$(git tag -l --format='%(contents)' $TAG_NAME | sed -n '/-----BEGIN PGP SIGNATURE-----/q;p') echo "$TAG_MESSAGE" > "${{ runner.temp }}/release_notes.md" # if the tag has a pre-release suffix mark the Github Release accordingly if egrep -q "$PRERELEASE_TAG_PATTERN" <<< "$TAG_NAME"; then echo "Tag contains a pre-release suffix" echo "IS_PRERELEASE=true" >> "$GITHUB_ENV" else echo "Tag does not contain pre-release suffix" echo "IS_PRERELEASE=false" >> "$GITHUB_ENV" fi - name: Create GitHub release id: create_release uses: actions/create-release@v1 env: # This token is provided by Actions, you do not need to create your own token GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: tag_name: ${{ github.ref }} release_name: ${{ github.ref }} body_path: "${{ runner.temp }}/release_notes.md" draft: false prerelease: ${{ env.IS_PRERELEASE }} - uses: pypa/gh-action-pypi-publish@v1.4.2 with: user: __token__ password: ${{ secrets.PYPI_PASSWORD }} compreffor-0.5.5/.gitignore000066400000000000000000000003671447637674100157040ustar00rootroot00000000000000__pycache__/ *.py[cod] *.DS_Store .Python *.egg-info/ .installed.cfg *.egg pip-log.txt pip-delete-this-directory.txt *.o *.so build/ dist/ .eggs/ .tox/ .cache/ # autogenerated from git tag by setuptools_scm src/python/compreffor/_version.py compreffor-0.5.5/.gitmodules000066400000000000000000000001751447637674100160660ustar00rootroot00000000000000[submodule "cxx-src/mingw-std-threads"] path = src/cxx/mingw-std-threads url = https://github.com/meganz/mingw-std-threads compreffor-0.5.5/.pyup.yml000066400000000000000000000002501447637674100155010ustar00rootroot00000000000000# controls the frequency of updates (undocumented beta feature) schedule: every week # do not pin dependencies unless they have explicit version specifiers pin: False compreffor-0.5.5/LICENSE000066400000000000000000000261361447637674100147230ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. compreffor-0.5.5/README.rst000066400000000000000000000004211447637674100153720ustar00rootroot00000000000000|CI Build Status| A CFF table subroutinizer for FontTools. .. |CI Build Status| image:: https://github.com/googlefonts/compreffor/workflows/Build%20+%20Deploy/badge.svg :target: https://github.com/googlefonts/compreffor/actions?query=workflow%3A%22Build+%2B+Deploy%22 compreffor-0.5.5/pyproject.toml000066400000000000000000000002511447637674100166200ustar00rootroot00000000000000[build-system] requires = [ "setuptools", "wheel", "setuptools_scm", "setuptools_git_ls_files", "cython", ] build-backend = "setuptools.build_meta" compreffor-0.5.5/requirements.txt000066400000000000000000000000221447637674100171640ustar00rootroot00000000000000fonttools==4.42.1 compreffor-0.5.5/setup.cfg000066400000000000000000000014321447637674100155270ustar00rootroot00000000000000[bumpversion] current_version = 0.5.2.dev0 commit = True tag = False tag_name = {new_version} parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\.(?P[a-z]+)(?P\d+))? serialize = {major}.{minor}.{patch}.{release}{dev} {major}.{minor}.{patch} [bumpversion:part:release] optional_value = final values = dev final [bumpversion:file:src/python/compreffor/__init__.py] search = __version__ = "{current_version}" replace = __version__ = "{new_version}" [bumpversion:file:setup.py] search = version="{current_version}" replace = version="{new_version}" [aliases] test = pytest [metadata] license_files = LICENSE [tool:pytest] minversion = 2.8 testpaths = src/python/compreffor python_files = *_test.py python_classes = *Test addopts = -v -r a --doctest-modules compreffor-0.5.5/setup.py000066400000000000000000000146031447637674100154240ustar00rootroot00000000000000#!/usr/bin/env python from setuptools import setup, find_packages, Extension import os from distutils.errors import DistutilsSetupError from distutils import log from distutils.dep_util import newer_group import pkg_resources import platform import sys needs_pytest = {'pytest', 'test'}.intersection(sys.argv) pytest_runner = ['pytest_runner'] if needs_pytest else [] needs_wheel = {'bdist_wheel'}.intersection(sys.argv) wheel = ['wheel'] if needs_wheel else [] # use Cython if available, else try use pre-generated .cpp sources cython_min_version = '3.0.2' try: pkg_resources.require("cython >= %s" % cython_min_version) except pkg_resources.ResolutionError: with_cython = False print('Distribution mode: Compiling from Cython-generated .cpp sources.') from setuptools.command.build_ext import build_ext else: with_cython = True print('Development mode: Compiling Cython modules from .pyx sources.') from Cython.Distutils.old_build_ext import old_build_ext as build_ext class custom_build_ext(build_ext): """ Custom 'build_ext' command which allows to pass compiler-specific 'extra_compile_args', 'define_macros' and 'undef_macros' options. """ def build_extension(self, ext): sources = ext.sources if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( "in 'ext_modules' option (extension '%s'), " "'sources' must be present and must be " "a list of source filenames" % ext.name) sources = list(sources) ext_path = self.get_ext_fullpath(ext.name) depends = sources + ext.depends if not (self.force or newer_group(depends, ext_path, 'newer')): log.debug("skipping '%s' extension (up-to-date)", ext.name) return else: log.info("building '%s' extension", ext.name) # do compiler specific customizations compiler_type = self.compiler.compiler_type if isinstance(ext.extra_compile_args, dict): extra_args_dict = ext.extra_compile_args or {} if compiler_type in extra_args_dict: extra_args = extra_args_dict[compiler_type] else: extra_args = extra_args_dict.get("default", []) else: extra_args = ext.extra_compile_args or [] if isinstance(ext.define_macros, dict): macros_dict = ext.define_macros or {} if compiler_type in macros_dict: macros = macros_dict[compiler_type] else: macros = macros_dict.get("default", []) else: macros = ext.define_macros or [] if isinstance(ext.undef_macros, dict): undef_macros_dict = ext.undef_macros for tp, undef in undef_macros_dict.items(): if tp == compiler_type: macros.append((undef,)) else: for undef in ext.undef_macros: macros.append((undef,)) # compile the source code to object files. objects = self.compiler.compile(sources, output_dir=self.build_temp, macros=macros, include_dirs=ext.include_dirs, debug=self.debug, extra_postargs=extra_args, depends=ext.depends) # Now link the object files together into a "shared object" if ext.extra_objects: objects.extend(ext.extra_objects) # TODO: do compiler-specific extra link args? extra_args = ext.extra_link_args or [] # Detect target language, if not provided language = ext.language or self.compiler.detect_language(sources) self.compiler.link_shared_object( objects, ext_path, libraries=self.get_libraries(ext), library_dirs=ext.library_dirs, runtime_library_dirs=ext.runtime_library_dirs, extra_postargs=extra_args, export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, target_lang=language) extensions = [ Extension( "compreffor._compreffor", sources=[ os.path.join('src', 'cython', ( '_compreffor' + ('.pyx' if with_cython else '.cpp'))), os.path.join('src', 'cxx', "cffCompressor.cc"), ], depends=[os.path.join('src', 'cxx', 'cffCompressor.h')], extra_compile_args={ "default": [ "-std=c++0x", "-pthread", "-Wextra", "-Wno-unused", "-Wno-unused-parameter", # pass extra compiler flags on OS X to enable support for C++11 ] + (["-stdlib=libc++", "-mmacosx-version-min=10.7"] if platform.system() == "Darwin" else []), "msvc": ["/EHsc", "/Zi"], }, language="c++", ), ] with open('README.rst', 'r') as f: long_description = f.read() setup_params = dict( name="compreffor", use_scm_version={"write_to": "src/python/compreffor/_version.py"}, description="A CFF subroutinizer for fontTools.", long_description=long_description, long_description_content_type="text/x-rst", author="Sam Fishman", license="Apache 2.0", package_dir={'': 'src/python'}, packages=find_packages('src/python'), ext_modules=extensions, cmdclass={ 'build_ext': custom_build_ext, }, setup_requires=( ["setuptools_scm", "setuptools_git_ls_files"] + pytest_runner + wheel ), tests_require=[ 'pytest>=2.8', ], install_requires=[ "fonttools>=4", ], python_requires=">=3.8", entry_points={ 'console_scripts': [ "compreffor = compreffor.__main__:main", ] }, zip_safe=False, classifiers=[ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Topic :: Multimedia :: Graphics", "Topic :: Multimedia :: Graphics :: Graphics Conversion", ], ) if __name__ == "__main__": setup(**setup_params) compreffor-0.5.5/src/000077500000000000000000000000001447637674100144755ustar00rootroot00000000000000compreffor-0.5.5/src/cxx/000077500000000000000000000000001447637674100152775ustar00rootroot00000000000000compreffor-0.5.5/src/cxx/Makefile000066400000000000000000000016461447637674100167460ustar00rootroot00000000000000O ?= 2 CXXFLAGS = -std=c++0x -pthread -W -Wall -Wno-unused -Wno-unused-parameter CXXFLAGS += -DDEBUG -g CXXSOFLAGS = $(CXXFLAGS) -shared CC = g++ DEPS = cffCompressor.h BIN = ../python/compreffor ifeq ($(OS),Windows_NT) uname_S := Windows else uname_S := $(shell uname -s) endif ifeq ($(uname_S), Windows) executable = cffCompressor.exe sharedlib = compreff.dll # statically link libgcc, libstdc++ and libwinpthread on mingw-w64 LDFLAGS ?= -static else executable = cffCompressor sharedlib = libcompreff.so endif all: $(executable) lib $(executable): cffCompressor.o $(CC) $(CXXFLAGS) -o $(BIN)/$@ $(LDFLAGS) $^ %.o: %.cc $(DEPS) $(CC) $(CXXFLAGS) -o $@ -c -O$(O) $< lib: $(sharedlib) $(sharedlib): cffCompressor.cc $(DEPS) $(CC) $(CXXSOFLAGS) -o $(BIN)/$@ -O$(O) -fPIC $(LDFLAGS) $< clean: rm -rf cffCompressor.o $(BIN)/$(executable) $(BIN)/$(sharedlib) $(BIN)/$(sharedlib).dSYM ALL: clean all compreffor-0.5.5/src/cxx/Makefile.vc000066400000000000000000000021311447637674100173430ustar00rootroot00000000000000CXXFLAGS = /nologo /O2 /EHsc /Zi BIN = ..\python\compreffor SRC_NAME = cffCompressor LIB_NAME = compreff TARGET_EXE = $(BIN)\$(SRC_NAME).exe TARGET_DLL = $(BIN)\$(LIB_NAME).dll all: exe dll exe: $(TARGET_EXE) $(TARGET_EXE): $(SRC_NAME).obj $(CXX) $(CXXFLAGS) /Fe$@ $(SRC_NAME).obj $(SRC_NAME).obj: $(SRC_NAME).cc $(SRC_NAME).h $(CXX) $(CXXFLAGS) /c $(SRC_NAME).cc dll: $(TARGET_DLL) $(TARGET_DLL): $(SRC_NAME).cc $(SRC_NAME).h $(CXX) $(CXXFLAGS) /Fe$@ $(SRC_NAME).cc /LD /link /DEF:$(LIB_NAME).def clean: @-if exist $(SRC_NAME).obj del /f /q $(SRC_NAME).obj @-if exist $(TARGET_EXE) del /f /q $(TARGET_EXE) @-if exist $(BIN)\$(SRC_NAME).ilk del /f /q $(BIN)\$(SRC_NAME).ilk @-if exist $(BIN)\$(SRC_NAME).pdb del /f /q $(BIN)\$(SRC_NAME).pdb @-if exist $(TARGET_DLL) del /f /q $(TARGET_DLL) @-if exist $(BIN)\$(LIB_NAME).exp del /f /q $(BIN)\$(LIB_NAME).exp @-if exist $(BIN)\$(LIB_NAME).lib del /f /q $(BIN)\$(LIB_NAME).lib @-if exist $(BIN)\$(LIB_NAME).ilk del /f /q $(BIN)\$(LIB_NAME).ilk @-if exist $(BIN)\$(LIB_NAME).pdb del /f /q $(BIN)\$(LIB_NAME).pdb compreffor-0.5.5/src/cxx/cffCompressor.cc000066400000000000000000000762531447637674100204360ustar00rootroot00000000000000/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "cffCompressor.h" // needed for Windows's "_setmode" to enable binary mode for stdin/stdout #ifdef _WIN32 #include #include #endif const unsigned int_size = sizeof(int_type); const float K = 0.1; const float ALPHA = 0.1; const unsigned hardware_threads = std::thread::hardware_concurrency(); const unsigned NUM_THREADS = hardware_threads ? hardware_threads : 1; const unsigned DEFAULT_NUM_ROUNDS = 4; // token_t ============ token_t::token_t(int_type value_) : value(value_) {} token_t::token_t(const token_t &other) : value(other.value) {} int_type token_t::getValue() const { return value; } inline unsigned token_t::size() const { return part(0); } inline unsigned token_t::part(unsigned idx) const { assert(idx < 4); char shift = (int_size - idx - 1) * 8; return (value & (0xff << shift)) >> shift; } std::string token_t::toString() const { std::ostringstream os; os << "token_t(" << part(0) << ", " << part(1) << ", " << part(2) << ", " << part(3) << ")"; return os.str(); } bool token_t::operator<(const token_t &other) const { return value < other.value; } bool token_t::operator!=(const token_t &other) const { return value != other.value; } bool token_t::operator==(const token_t &other) const { return value == other.value; } std::ostream& operator<<(std::ostream &stream, const token_t &tok) { return stream << tok.toString(); } // end token_t =============== // light_substring_t ========= bool light_substring_t::operator<(const light_substring_t &other) const { /// compares actual tokens // optimization if they are literally pointing to the same thing if (begin == other.begin && end == other.end) return false; // they are equal unsigned thisLen = end - begin; unsigned otherLen = other.end - other.begin; if (thisLen < otherLen) { auto p = std::mismatch(begin, end, other.begin); if (p.first == end) return true; else return *p.first < *p.second; } else { // thisLen >= otherLen auto p = std::mismatch(other.begin, other.end, begin); if (p.first == other.end) return false; else return *p.second < *p.first; } } light_substring_t::light_substring_t(uint32_t start, uint32_t len, charstring_pool_t* pool) { begin = pool->get(start); end = begin + len; } // end light_substring_t ===== // substring_t =============== substring_t::substring_t(unsigned _len, unsigned _start, unsigned _freq) : pos(0), flatten(true), start(_start), len(_len), freq(_freq), _cost(0) {} substring_t::substring_t(const substring_t &other) : pos(0), flatten(other.flatten), start(other.start), len(other.len), freq(other.freq), _cost(0) {} const_tokiter_t substring_t::begin(const charstring_pool_t &chPool) const { return chPool.get(start); } const_tokiter_t substring_t::end(const charstring_pool_t &chPool) const { return begin(chPool) + len; } std::string substring_t::toString(const charstring_pool_t &chPool) { std::ostringstream os; os << "["; auto it = begin(chPool); for (; it != end(chPool) - 1; ++it) { os << *it << ", "; } ++it; os << *it << "]"; return os.str(); } uint16_t substring_t::cost(const charstring_pool_t &chPool) { if (_cost != 0) { return _cost; } else { // call other cost int sum = doCost(chPool); _cost = sum; return _cost; } } uint16_t substring_t::cost(const charstring_pool_t &chPool) const { if (_cost != 0) { return _cost; } else { return doCost(chPool); } } uint16_t substring_t::doCost(const charstring_pool_t &chPool) const { int sum = 0; for (auto it = begin(chPool); it != end(chPool); ++it) { sum += it->size(); } return sum; } int substring_t::subrSaving(const charstring_pool_t &chPool) { // XXX needs use_usages and true_cost, (and call_cost and subr_overhead params) return doSubrSaving(cost(chPool)); } int substring_t::subrSaving(const charstring_pool_t &chPool) const { // XXX needs use_usages and true_cost, (and call_cost and subr_overhead params) return doSubrSaving(cost(chPool)); } int substring_t::doSubrSaving(int subCost) const { int amt = freq; int callCost = 5; int subrOverhead = 3; return subCost * amt - subCost - callCost * amt - subrOverhead; } std::vector substring_t::getTranslatedValue( const charstring_pool_t& chPool) const { std::vector ans; for (auto it = begin(chPool); it != end(chPool); ++it) { std::vector transTok = chPool.translateToken(*it); ans.insert(ans.end(), transTok.begin(), transTok.end()); } return ans; } substring_t& substring_t::operator=(const substring_t &other) { if (*this != other) { start = other.start; len = other.len; freq = other.freq; _cost = other._cost; } return *this; } bool substring_t::operator<(const substring_t &other) const { // ordering is by start pos, then len if (start == other.start) return len < other.len; else return start < other.start; } bool substring_t::operator==(const substring_t &other) const { return start == other.start && len == other.len; } bool substring_t::operator!=(const substring_t &other) const { return !(*this == other); } inline uint32_t substring_t::size() const { return len; } inline uint32_t substring_t::getStart() const { return start; } inline void substring_t::setAdjCost(float val) { assert(val > 0); adjCost = val; } inline void substring_t::syncPrice() { price = adjCost; } void substring_t::updatePrice() { float margCost = static_cast(adjCost) / (freq + K); price = margCost * ALPHA + price * (1 - ALPHA); } inline uint32_t substring_t::getFreq() const { return freq; } inline void substring_t::resetFreq() { freq = 0; } inline void substring_t::incrementFreq() { ++freq; } inline void substring_t::increaseFreq(unsigned amt) { freq += amt; } inline void substring_t::decrementFreq() { assert(freq != 0); --freq; } inline float substring_t::getPrice() const { return price; } inline void substring_t::setPrice(float newPrice) { price = newPrice; } // end substring_t ============ // charstring_pool_t ========== charstring_pool_t::charstring_pool_t(unsigned nCharstrings) : nextQuark(0), count(nCharstrings), finalized(false), numRounds(DEFAULT_NUM_ROUNDS) { pool.reserve(nCharstrings); offset.reserve(nCharstrings + 1); offset.push_back(0); } charstring_pool_t::charstring_pool_t(unsigned nCharstrings, int _nrounds) : nextQuark(0), count(nCharstrings), finalized(false), numRounds(_nrounds) { pool.reserve(nCharstrings); offset.reserve(nCharstrings + 1); offset.push_back(0); } void charstring_pool_t::writeEncoding( const encoding_list& enc, const std::map& index, std::ostream& outFile) { // write the number of subrs called assert(enc.size() < 128); outFile.put(enc.size()); // write each call for (const encoding_item& enc_item : enc) { outFile.write( reinterpret_cast(&enc_item.pos), sizeof(enc_item.pos)); // 4 bytes auto it = index.find(enc_item.substr); assert(it != index.end()); uint32_t subrIndex = it->second; outFile.write(reinterpret_cast(&subrIndex), 4); } } void charstring_pool_t::writeSubrs( std::list& subrs, std::vector& glyphEncodings, std::ostream& outFile) { /// write subrs // write number of subrs uint32_t numSubrs = (uint32_t) subrs.size(); outFile.write(reinterpret_cast(&numSubrs), 4); // number subrs std::map index; // write each subr's representative glyph and offset in that charstring uint32_t curIndex = 0; for (const substring_t& subr : subrs) { index[&subr] = curIndex++; uint32_t glyphIdx = rev[subr.getStart()]; uint32_t glyphOffset = subr.getStart() - offset[glyphIdx]; uint32_t subrLength = subr.size(); outFile.write(reinterpret_cast(&glyphIdx), 4); outFile.write(reinterpret_cast(&glyphOffset), 4); outFile.write(reinterpret_cast(&subrLength), 4); } // after producing `index`, write subr encodings for (const substring_t& subr : subrs) { writeEncoding(subr.encoding, index, outFile); } /// write glyph encoding instructions for (const encoding_list& glyphEnc : glyphEncodings) { writeEncoding(glyphEnc, index, outFile); } } unsigned charstring_pool_t::packEncoding( const encoding_list& enc, const std::map& index, uint32_t* buffer) { unsigned pos = 0; // write the number of subrs called buffer[pos++] = enc.size(); // write each call for (const encoding_item& enc_item : enc) { buffer[pos++] = enc_item.pos; auto it = index.find(enc_item.substr); assert(it != index.end()); uint32_t subrIndex = it->second; buffer[pos++] = subrIndex; } return pos; } uint32_t* charstring_pool_t::getResponse( std::list& subrs, std::vector& glyphEncodings, unsigned& outputLength) { unsigned length = 1 + subrs.size() * 3; for (const substring_t& subr : subrs) { length += 1 + subr.encoding.size() * 2; } for (const encoding_list& glyphEnc : glyphEncodings) { length += 1 + glyphEnc.size() * 2; } outputLength = length; uint32_t* buffer = new uint32_t[length]; unsigned pos = 0; /// write subrs // write number of subrs uint32_t numSubrs = (uint32_t) subrs.size(); buffer[pos++] = numSubrs; // number subrs std::map index; // write each subr's representative glyph and offset in that charstring uint32_t curIndex = 0; for (const substring_t& subr : subrs) { index[&subr] = curIndex++; uint32_t glyphIdx = rev[subr.getStart()]; uint32_t glyphOffset = subr.getStart() - offset[glyphIdx]; uint32_t subrLength = subr.size(); buffer[pos++] = glyphIdx; buffer[pos++] = glyphOffset; buffer[pos++] = subrLength; } // after producing `index`, write subr encodings for (const substring_t& subr : subrs) { pos += packEncoding(subr.encoding, index, buffer + pos); } /// write glyph encoding instructions for (const encoding_list& glyphEnc : glyphEncodings) { pos += packEncoding(glyphEnc, index, buffer + pos); } return buffer; } std::vector charstring_pool_t::formatInt(int num) { std::vector ret; if (num >= -107 && num <= 107) { ret.push_back((unsigned char) num + 139); } else if (num >= 108 && num <= 1131) { unsigned char first = (num - 108) / 256; unsigned char second = num - 108 - first * 256; assert((static_cast(first)) * 256 + static_cast(second) + 108); ret.push_back(first + 247); ret.push_back(second); } else if (num >= -1131 && num <= -108) { unsigned char first = (num + 108) / 256; unsigned char second = -num - 108 - first * 256; assert(-(static_cast(first)) * 256 - static_cast(second) - 108); ret.push_back(first + 251); ret.push_back(second); } else { assert(num >= -32768 && num <= 32767); ret.push_back((unsigned char) 28); ret.push_back((unsigned char) ((num & 0xff00) >> 8)); ret.push_back((unsigned char) (num & 0xff)); } return ret; } void charstring_pool_t::subroutinize( std::list& substrings, std::vector& glyphEncodings) { // TODO: testMode std::map substrMap; /// set up map with initial values for (substring_t &substr : substrings) { substr.setAdjCost(substr.cost(*this)); substr.syncPrice(); light_substring_t key(substr.begin(*this), substr.end(*this)); substrMap[key] = &substr; } unsigned substringChunkSize = substrings.size() / NUM_THREADS + 1; unsigned glyphChunkSize = count / NUM_THREADS + 1; std::vector threads; std::vector > results((count+glyphChunkSize-1)/glyphChunkSize); for (int runCount = 0; runCount < numRounds; ++runCount) { /// update market for (substring_t& substr : substrings) { substr.updatePrice(); } /// minimize cost of substrings // XXX consider redoing substringChunkSize threads.clear(); auto curSubstr = substrings.begin(); for (unsigned i = 0; i < NUM_THREADS; ++i) { if (i * substringChunkSize >= substrings.size()) break; unsigned step = substringChunkSize; if ((i + 1) * substringChunkSize > substrings.size()) step = substrings.size() - i * substringChunkSize; auto start = curSubstr; std::advance(curSubstr, step); threads.push_back(std::thread(optimizeSubstrings, std::ref(substrMap), std::ref(*this), start, curSubstr)); } for (auto threadIt = threads.begin(); threadIt != threads.end(); ++threadIt) { threadIt->join(); } // minimize cost of glyphstrings threads.clear(); glyphEncodings.clear(); for (unsigned i = 0; i < NUM_THREADS; ++i) { if (i * glyphChunkSize >= count) break; unsigned stop = (i + 1) * glyphChunkSize; if (stop > count) stop = count; results[i].clear(); threads.push_back(std::thread(optimizeGlyphstrings, std::ref(substrMap), std::ref(*this), i * glyphChunkSize, stop, std::ref(results[i]))); } for (auto threadIt = threads.begin(); threadIt != threads.end(); ++threadIt) { threadIt->join(); } for (std::vector &res : results) { glyphEncodings.insert(glyphEncodings.end(), res.begin(), res.end()); } // update usages for (substring_t& substr : substrings) { substr.resetFreq(); } for (substring_t& substr : substrings) { for (encoding_item& enc : substr.encoding) { enc.substr->incrementFreq(); } } for (encoding_list& encList : glyphEncodings) { for (encoding_item& enc : encList) { enc.substr->incrementFreq(); } } /// cutdown if (runCount <= numRounds - 2) { // NOTE: python checks for testMode auto substrIt = substrings.begin(); for (; substrIt != substrings.end();) { if (substrIt->subrSaving(*this) <= 0) { light_substring_t key(substrIt->begin(*this), substrIt->end(*this)); size_t response = substrMap.erase(key); // heuristic: for (encoding_list::iterator encItem = substrIt->encoding.begin(); encItem != substrIt->encoding.end(); ++encItem) { encItem->substr->increaseFreq(substrIt->getFreq() - 1); } substrIt = substrings.erase(substrIt); } else { ++substrIt; } } } } } void optimizeSubstrings(std::map &substrMap, charstring_pool_t &csPool, std::list::iterator begin, std::list::iterator end) { for (auto it = begin; it != end; ++it) { auto ans = optimizeCharstring( it->begin(csPool), it->size(), substrMap, csPool, true); it->encoding = ans.first; it->setAdjCost(ans.second); } } void optimizeGlyphstrings( std::map &substrMap, charstring_pool_t &csPool, unsigned start, unsigned stop, std::vector& result) { for (unsigned i = start; i < stop; ++i) { charstring_t cs = csPool.getCharstring(i); result.push_back(optimizeCharstring( cs.begin, cs.len, substrMap, csPool, false) .first); } } std::pair optimizeCharstring( const_tokiter_t begin, uint32_t len, std::map &substrMap, charstring_pool_t& csPool, bool isSubstring) { std::vector results(len + 1); std::vector nextEncIdx(len, -1); std::vector nextEncSubstr(len, NULL); for (int i = len - 1; i >= 0; --i) { float minOption = -1; int minEncIdx = len; substring_t* minEncSubstr = NULL; int curCost = 0; const_tokiter_t curToken = begin + i; for (unsigned j = i + 1; j <= len; ++j, ++curToken) { curCost += curToken->size(); light_substring_t key(begin + i, begin + j); auto entryIt = substrMap.find(key); substring_t* substr; float option; if (!(i == 0 && j == len) && entryIt != substrMap.end()) { // TODO: check to not subroutinize with yourself substr = entryIt->second; option = substr->getPrice() + results[j]; } else { substr = NULL; option = curCost + results[j]; } if (option < minOption || minOption == -1) { minOption = option; minEncIdx = j; minEncSubstr = substr; } } results[i] = minOption; nextEncIdx[i] = minEncIdx; nextEncSubstr[i] = minEncSubstr; } encoding_list ans; unsigned curEncIdx = 0; while (curEncIdx < len) { uint16_t lastIdx = curEncIdx; substring_t* curEncSubstr = nextEncSubstr[curEncIdx]; curEncIdx = nextEncIdx[curEncIdx]; if (curEncSubstr != NULL) { encoding_item item; item.pos = lastIdx; item.substr = curEncSubstr; ans.push_back(item); } } return std::pair(ans, results[0]); } std::list charstring_pool_t::getSubstrings() { if (!finalized) finalize(); std::vector suffixes = generateSuffixes(); std::vector lcp = generateLCP(suffixes); std::list substrings = generateSubstrings(suffixes, lcp); return substrings; } charstring_t charstring_pool_t::getCharstring(unsigned idx) { charstring_t cs; cs.begin = pool.begin() + offset[idx]; cs.len = offset[idx + 1] - offset[idx]; return cs; } void charstring_pool_t::addRawCharstring(unsigned char* data, unsigned len) { assert(!finalized); uint32_t numHints = 0; uint32_t stackSize = 0; unsigned nToks = 0; for (unsigned csPos = 0; csPos < len; ++csPos) { unsigned char first = data[csPos]; unsigned tokSize; if (first < 28 || (first >= 29 && first < 32)) { if (first < 12) { // operators 1-11 if (first == 1 || first == 3) { // hstem/vstem numHints += stackSize / 2; } tokSize = 1; } else if (first == 12) { // escape (12) + addl operator code tokSize = 2; } else if (first < 19) { // operators 13-18 if (first == 18) { // hstemhm numHints += stackSize / 2; } tokSize = 1; } else if (first < 21) { // hintmask/cntrmask (19/20) if (stackSize != 0) { // account for additonal vhints on stack (assuming legal program) numHints += stackSize / 2; } tokSize = 1 + numHints / 8 + ((numHints % 8 != 0) ? 1 : 0); } else if (first < 28) { // operators 21-27 if (first == 23) { // vstemhm numHints += stackSize / 2; } tokSize = 1; } else { // operators 29-31 tokSize = 1; } stackSize = 0; } else { stackSize += 1; if (first == 28) { // 16-bit signed tokSize = 3; } else if (first < 247) { // -107 to 107 tokSize = 1; } else if (first < 251) { // +108 to +1131 tokSize = 2; } else if (first < 255) { // -108 to -1131 tokSize = 2; } else { // 4-byte floating point tokSize = 5; } } unsigned char* rawTok = new unsigned char[tokSize]; rawTok[0] = first; memcpy(rawTok + 1, data + csPos + 1, tokSize - 1); csPos += (tokSize - 1); addRawToken(rawTok, tokSize); delete[] rawTok; ++nToks; } offset.push_back(offset.back() + nToks); } void charstring_pool_t::finalize() { rev.reserve(pool.size()); int cur = 0; for (unsigned i = 0; i < pool.size(); ++i) { if (i >= offset[cur + 1]) ++cur; rev.push_back(cur); } finalized = true; } const_tokiter_t charstring_pool_t::get(unsigned idx) const { const_tokiter_t x = pool.begin() + idx; return x; } inline uint16_t charstring_pool_t::quarkFor(unsigned char* data, unsigned len) { // TODO: verify using a string key isn't a time problem std::string key((const char*) data, (size_t) len); auto it = quarkMap.find(key); if (it == quarkMap.end()) { assert(nextQuark < 65536); assert(revQuark.size() == nextQuark); unsigned q = nextQuark++; quarkMap[key] = q; revQuark.push_back(key); return (uint16_t) q; } else { return (uint16_t) it->second; } } void charstring_pool_t::addRawToken(unsigned char* data, unsigned len) { assert(len > 0); assert(len < 256); int_type v = generateValue(data, len); pool.push_back(token_t(v)); } int_type charstring_pool_t::generateValue(unsigned char* data, unsigned len) { int_type v; if (len < int_size) { v = len; for (unsigned i = 0; i < len; ++i) { v <<= 8; v |= data[i]; } v <<= 8 * (int_size - len - 1); } else { uint16_t q = quarkFor(data, len); v = len; v <<= 8; v |= data[0]; v <<= 16; v |= q; // std::cout << "QUARK: " << q << std::endl; } return v; } struct charstring_pool_t::suffixSortFunctor { const std::vector &pool; const std::vector &offset; const std::vector &rev; suffixSortFunctor(const std::vector &_pool, const std::vector &_offset, const std::vector &_rev) : pool(_pool), offset(_offset), rev(_rev) {} bool operator()(unsigned a, unsigned b) { int aLen = offset[rev[a] + 1] - a; int bLen = offset[rev[b] + 1] - b; auto aFirst = pool.begin() + a; auto bFirst = pool.begin() + b; if (aLen < bLen) { auto aLast = pool.begin() + offset[rev[a] + 1]; auto p = std::mismatch(aFirst, aLast, bFirst); if (p.first == aLast) return true; else return *p.first < *p.second; } else { // aLen >= bLen auto bLast = pool.begin() + offset[rev[b] + 1]; auto p = std::mismatch(bFirst, bLast, aFirst); if (p.first == bLast) return false; else return *p.second < *p.first; } } }; std::vector charstring_pool_t::generateSuffixes() { assert(finalized); std::vector suffixes; suffixes.reserve(pool.size()); for (unsigned i = 0; i < pool.size(); ++i) suffixes.push_back(i); std::stable_sort( suffixes.begin(), suffixes.end(), suffixSortFunctor(pool, offset, rev)); return suffixes; } std::vector charstring_pool_t::generateLCP( const std::vector &suffixes) { assert(finalized); assert(suffixes.size() == pool.size()); std::vector lcp(pool.size(), 0); std::vector rank(pool.size(), 0); for (unsigned i = 0; i < pool.size(); ++i) { unsigned idx = suffixes[i]; rank[idx] = i; } for (std::vector::iterator ch = offset.begin(); ch != offset.end() - 1; ++ch) { unsigned start = *ch; unsigned end = *(ch + 1); unsigned curH = 0; for (unsigned tokIdx = start; tokIdx < end; ++tokIdx) { unsigned curRank = rank[tokIdx]; if (curRank > 0) { unsigned befInSuffixes = suffixes[curRank - 1]; unsigned befEnd = offset[rev[befInSuffixes] + 1]; while (befInSuffixes + curH < befEnd && tokIdx + curH < end && pool[befInSuffixes + curH] == pool[tokIdx + curH]) ++curH; lcp[curRank] = curH; if (curH > 0) --curH; } } } return lcp; } bool charstring_pool_t::verify_lcp( std::vector& lcp, std::vector& suffixes) { for (unsigned i = 1; i < pool.size(); ++i) { auto thisCur = pool.begin() + suffixes[i]; auto befCur = pool.begin() + suffixes[i - 1]; auto thisEnd = pool.begin() + offset[rev[suffixes[i]] + 1]; auto befEnd = pool.begin() + offset[rev[suffixes[i - 1]] + 1]; for (unsigned j = 0; j < lcp[i]; ++j) { assert(*thisCur == *befCur); ++thisCur; ++befCur; } assert(*thisCur != *befCur || thisCur == thisEnd || befCur == befEnd); } return true; } std::list charstring_pool_t::generateSubstrings( std::vector &suffixes, std::vector &lcp) { assert(finalized); assert(suffixes.size() == lcp.size()); assert(lcp.size() == pool.size()); std::list substrings; std::list> startIndices; for (unsigned i = 0; i < suffixes.size(); ++i) { while (!startIndices.empty() && startIndices.back().first > lcp[i]) { unsigned len = startIndices.back().first; unsigned startIdx = startIndices.back().second; startIndices.pop_back(); unsigned freq = i - startIdx; assert(freq >= 2); // NOTE: python allows different min_freq substring_t subr(len, suffixes[startIdx], freq); // NOTE: python allows turning this check off -- if (len > 1 && subr.subrSaving(*this) > 0) { substrings.push_back(subr); } } if (startIndices.empty() || lcp[i] > startIndices.back().first) { startIndices.push_back(std::make_pair(lcp[i], i - 1)); } } // NOTE: python sorts by length or saving return substrings; } std::vector charstring_pool_t::translateToken(const token_t& tok) const { size_t tokLen = tok.size(); if (tokLen < int_size) { std::vector ans; for (unsigned i = 0; i < tokLen; ++i) ans.push_back(tok.part(i + 1)); return ans; } else { uint16_t q = (tok.part(2) << 8) + tok.part(3); std::string orig = revQuark.at(q); std::vector ans(orig.begin(), orig.end()); return ans; } } // end charstring_pool_t ========= charstring_pool_t CharstringPoolFactory( std::istream &instream, int numRounds) { uint16_t count; unsigned char countBuffer[2]; instream.read(reinterpret_cast(countBuffer), 2); count = (countBuffer[0] << 8) | (countBuffer[1]); unsigned char offSize; instream.read(reinterpret_cast(&offSize), 1); uint32_t* offset = new uint32_t[count + 1]; unsigned char* offsetBuffer = new unsigned char[(count + 1) * offSize]; instream.read(reinterpret_cast(offsetBuffer), (count + 1) * offSize); for (int i = 0; i < count + 1; ++i) { offset[i] = 0; for (int j = 0; j < offSize; ++j) { offset[i] += offsetBuffer[i * offSize + j] << ((offSize - j - 1) * 8); } offset[i] -= 1; // CFF is 1-indexed(-ish) } delete[] offsetBuffer; assert(offset[0] == 0); charstring_pool_t csPool(count, numRounds); unsigned len; for (int i = 0; i < count; ++i) { unsigned len = offset[i + 1] - offset[i]; char* data = new char[len]; instream.read(data, len); csPool.addRawCharstring(reinterpret_cast(data), len); delete[] data; } delete[] offset; csPool.finalize(); return csPool; } void charstring_pool_t::printSuffix(unsigned idx, bool printVal) { std::cerr << "["; auto start = pool.begin() + idx; auto end = pool.begin() + offset[rev[idx] + 1]; for (auto it = start; it != end; ++it) { if (printVal) std::cerr << it->getValue(); else std::cerr << *it; if (it + 1 != end) std::cerr << ", "; } std::cerr << "]" << std::endl; } charstring_pool_t CharstringPoolFactoryFromString( unsigned char* buffer, int numRounds) { unsigned pos = 0; uint16_t count; count = (buffer[pos] << 8) | (buffer[pos + 1]); pos += 2; unsigned char offSize = buffer[pos++]; uint32_t* offset = new uint32_t[count + 1]; unsigned char* offsetBuffer = &buffer[pos]; pos += (count + 1) * offSize; for (int i = 0; i < count + 1; ++i) { offset[i] = 0; for (int j = 0; j < offSize; ++j) { offset[i] += offsetBuffer[i * offSize + j] << ((offSize - j - 1) * 8); } offset[i] -= 1; // CFF is 1-indexed(-ish) } assert(offset[0] == 0); charstring_pool_t csPool(count, numRounds); unsigned len; for (int i = 0; i < count; ++i) { unsigned len = offset[i + 1] - offset[i]; csPool.addRawCharstring(buffer + pos, len); pos += len; } delete[] offset; csPool.finalize(); return csPool; } extern "C" uint32_t* compreff(unsigned char* dataStream, int numRounds, unsigned& outputLength) { charstring_pool_t csPool = CharstringPoolFactoryFromString(dataStream, numRounds); std::list subrs = csPool.getSubstrings(); std::vector glyphEncodings; csPool.subroutinize(subrs, glyphEncodings); return csPool.getResponse(subrs, glyphEncodings, outputLength); } extern "C" void unload(uint32_t* response) { free(response); } int main(int argc, const char* argv[]) { int numRounds = DEFAULT_NUM_ROUNDS; unsigned argIdx = 1; while (argIdx < static_cast(argc)) { if (strcmp(argv[argIdx], "--nrounds") == 0) { numRounds = atoi(argv[argIdx + 1]); argIdx += 2; } else { std::cerr << "Unrecognized argument: " << argv[argIdx] << std::endl; return 1; } } #ifdef _WIN32 if (_setmode(_fileno(stdin), _O_BINARY) == -1) { std::cerr << "Cannot set stdin to binary mode" << std::endl; return 1; } if (_setmode(_fileno(stdout), _O_BINARY) == -1) { std::cerr << "Cannot set stdout to binary mode" << std::endl; return 1; } #endif charstring_pool_t csPool = CharstringPoolFactory( std::cin, numRounds); std::list subrs = csPool.getSubstrings(); std::vector glyphEncodings; csPool.subroutinize(subrs, glyphEncodings); csPool.writeSubrs(subrs, glyphEncodings, std::cout); return 0; } compreffor-0.5.5/src/cxx/cffCompressor.h000066400000000000000000000174671447637674100203020ustar00rootroot00000000000000/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef CFFCOMPRESSOR_H_ #define CFFCOMPRESSOR_H_ #include #include #include #include #include /* If MinGW GCC is compiled with "win32" threads instead of "posix" * it lacks the C++11 standard threading classes, so we need to include * the "mingw-std-threads" header-only library from: * * https://github.com/meganz/mingw-std-threads */ #if defined(__MINGW32__) && !defined(__WINPTHREADS_VERSION) #include "mingw-std-threads/mingw.thread.h" #endif #include #include #include #include #include #include #include #include #include #include #include class token_t; struct charstring_t; class substring_t; class charstring_pool_t; typedef uint32_t int_type; typedef std::map tokmap_t; typedef std::vector::iterator tokiter_t; typedef std::vector::const_iterator const_tokiter_t; class token_t { public: explicit token_t(int_type value_ = 0); token_t(const token_t &other); inline int_type getValue() const; inline unsigned size() const; inline unsigned part(unsigned idx) const; std::string toString() const; bool operator<(const token_t &other) const; bool operator!=(const token_t &other) const; bool operator==(const token_t &other) const; private: int_type value; }; typedef struct charstring_t { tokiter_t begin; uint32_t len; } charstring_t; class light_substring_t { public: light_substring_t(const_tokiter_t _begin, const_tokiter_t _end) : begin(_begin), end(_end) {} light_substring_t(uint32_t start, uint32_t len, charstring_pool_t* pool); light_substring_t& operator=(const light_substring_t &other) { begin = other.begin; end = other.end; return *this; }; bool operator<(const light_substring_t &other) const; const_tokiter_t begin; const_tokiter_t end; }; typedef struct encoding_item { uint32_t pos; substring_t* substr; } encoding_item; typedef std::vector encoding_list; class substring_t { public: substring_t(unsigned _len, unsigned _start, unsigned _freq); substring_t(const substring_t &other); const_tokiter_t begin(const charstring_pool_t &chPool) const; const_tokiter_t end(const charstring_pool_t &chPool) const; uint16_t cost(const charstring_pool_t &chPool); int subrSaving(const charstring_pool_t &chPool); uint16_t cost(const charstring_pool_t &chPool) const; int subrSaving(const charstring_pool_t &chPool) const; std::string toString(const charstring_pool_t &chPool); bool operator<(const substring_t &other) const; bool operator==(const substring_t &other) const; bool operator!=(const substring_t &other) const; substring_t& operator=(const substring_t &other); inline uint32_t size() const; inline uint32_t getStart() const; void updatePrice(); uint32_t getFreq() const; void resetFreq(); void incrementFreq(); void increaseFreq(unsigned amt); void decrementFreq(); float getPrice() const; void setPrice(float newPrice); void setAdjCost(float value); void syncPrice(); std::vector getTranslatedValue( const charstring_pool_t& chPool) const; uint16_t pos; bool flatten; encoding_list encoding; private: uint32_t start; uint32_t len; uint32_t freq; uint16_t _cost; float adjCost; float price; int doSubrSaving(int subCost) const; uint16_t doCost(const charstring_pool_t &chPool) const; }; typedef std::pair, std::vector > subr_pair; void optimizeSubstrings( std::map &substrMap, charstring_pool_t &csPool, std::list::iterator begin, std::list::iterator end); void optimizeGlyphstrings( std::map &substrMap, charstring_pool_t &csPool, unsigned start, unsigned stop, std::vector& result); std::pair optimizeCharstring( const_tokiter_t begin, uint32_t len, std::map &substrMap, charstring_pool_t& csPool, bool isSubstring); class charstring_pool_t { public: explicit charstring_pool_t(unsigned nCharstrings); charstring_pool_t(unsigned nCharstrings, int numRounds); void writeSubrs( std::list& substrings, std::vector& glyphEncodings, std::ostream& outFile); uint32_t* getResponse( std::list& substrings, std::vector& glyphEncodings, unsigned& outputLength); std::vector formatInt(int num); void subroutinize( std::list& substrings, std::vector& glyphEncodings); std::list getSubstrings(); charstring_t getCharstring(unsigned idx); void addRawCharstring(unsigned char* data, unsigned len); void setFDSelect(uint8_t* rawFD); void finalize(); const_tokiter_t get(unsigned idx) const; std::vector translateToken(const token_t& tok) const; void printSuffix(unsigned idx, bool printVal = false); bool verify_lcp(std::vector& lcp, std::vector& suffixes); private: tokmap_t quarkMap; unsigned nextQuark; std::vector revQuark; std::vector pool; std::vector offset; std::vector rev; unsigned count; bool finalized; int numRounds; inline uint16_t quarkFor(unsigned char* data, unsigned len); void addRawToken(unsigned char* data, unsigned len); int_type generateValue(unsigned char* data, unsigned len); std::vector generateSuffixes(); struct suffixSortFunctor; std::vector generateLCP(const std::vector& suffixes); std::list generateSubstrings( std::vector &suffixes, std::vector &lcp); encoding_list getUpdatedEncoding(substring_t* subr); void writeEncoding( const encoding_list& enc, const std::map& index, std::ostream& outFile); unsigned packEncoding( const encoding_list& enc, const std::map& index, uint32_t* buffer); }; charstring_pool_t CharstringPoolFactory( std::istream& instream, int numRounds); charstring_pool_t CharstringPoolFactoryFromString( unsigned char* buffer, int numRounds); extern "C" uint32_t* compreff(unsigned char* dataStream, int numRounds, unsigned& outputLength); extern "C" void unload(uint32_t* response); #endif compreffor-0.5.5/src/cxx/compreff.def000066400000000000000000000001011447637674100175500ustar00rootroot00000000000000LIBRARY compreff EXPORTS compreff @1 unload @2 compreffor-0.5.5/src/cxx/mingw-std-threads/000077500000000000000000000000001447637674100206405ustar00rootroot00000000000000compreffor-0.5.5/src/cython/000077500000000000000000000000001447637674100160015ustar00rootroot00000000000000compreffor-0.5.5/src/cython/_compreffor.cpp000066400000000000000000007721171447637674100210250ustar00rootroot00000000000000/* Generated by Cython 3.0.2 */ #ifndef PY_SSIZE_T_CLEAN #define PY_SSIZE_T_CLEAN #endif /* PY_SSIZE_T_CLEAN */ #if defined(CYTHON_LIMITED_API) && 0 #ifndef Py_LIMITED_API #if CYTHON_LIMITED_API+0 > 0x03030000 #define Py_LIMITED_API CYTHON_LIMITED_API #else #define Py_LIMITED_API 0x03030000 #endif #endif #endif #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.7+ or Python 3.3+. #else #if CYTHON_LIMITED_API #define __PYX_EXTRA_ABI_MODULE_NAME "limited" #else #define __PYX_EXTRA_ABI_MODULE_NAME "" #endif #define CYTHON_ABI "3_0_2" __PYX_EXTRA_ABI_MODULE_NAME #define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI #define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." #define CYTHON_HEX_VERSION 0x030002F0 #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #define HAVE_LONG_LONG #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX #if defined(GRAALVM_PYTHON) /* For very preliminary testing purposes. Most variables are set the same as PyPy. The existence of this section does not imply that anything works or is even tested */ #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 1 #define CYTHON_COMPILING_IN_NOGIL 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) #endif #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #undef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #elif defined(PYPY_VERSION) #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_NOGIL 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #ifndef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 0 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) #endif #if PY_VERSION_HEX < 0x03090000 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #undef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #elif defined(CYTHON_LIMITED_API) #ifdef Py_LIMITED_API #undef __PYX_LIMITED_VERSION_HEX #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API #endif #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 1 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_NOGIL 0 #undef CYTHON_CLINE_IN_TRACEBACK #define CYTHON_CLINE_IN_TRACEBACK 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 1 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #endif #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_GIL #define CYTHON_FAST_GIL 0 #undef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 1 #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #endif #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 #endif #elif defined(PY_NOGIL) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_NOGIL 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #ifndef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 1 #endif #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #define CYTHON_COMPILING_IN_LIMITED_API 0 #define CYTHON_COMPILING_IN_GRAAL 0 #define CYTHON_COMPILING_IN_NOGIL 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #ifndef CYTHON_USE_TYPE_SPECS #define CYTHON_USE_TYPE_SPECS 0 #endif #ifndef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #ifndef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_GIL #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) #endif #ifndef CYTHON_METH_FASTCALL #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #ifndef CYTHON_PEP487_INIT_SUBCLASS #define CYTHON_PEP487_INIT_SUBCLASS 1 #endif #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) #define CYTHON_PEP489_MULTI_PHASE_INIT 1 #endif #ifndef CYTHON_USE_MODULE_STATE #define CYTHON_USE_MODULE_STATE 0 #endif #if PY_VERSION_HEX < 0x030400a1 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #elif !defined(CYTHON_USE_TP_FINALIZE) #define CYTHON_USE_TP_FINALIZE 1 #endif #if PY_VERSION_HEX < 0x030600B1 #undef CYTHON_USE_DICT_VERSIONS #define CYTHON_USE_DICT_VERSIONS 0 #elif !defined(CYTHON_USE_DICT_VERSIONS) #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) #endif #if PY_VERSION_HEX < 0x030700A3 #undef CYTHON_USE_EXC_INFO_STACK #define CYTHON_USE_EXC_INFO_STACK 0 #elif !defined(CYTHON_USE_EXC_INFO_STACK) #define CYTHON_USE_EXC_INFO_STACK 1 #endif #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if !defined(CYTHON_VECTORCALL) #define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) #endif #define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) #if CYTHON_USE_PYLONG_INTERNALS #if PY_MAJOR_VERSION < 3 #include "longintrepr.h" #endif #undef SHIFT #undef BASE #undef MASK #ifdef SIZEOF_VOID_P enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; #endif #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED #if defined(__cplusplus) /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 * but leads to warnings with -pedantic, since it is a C++17 feature */ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) #if __has_cpp_attribute(maybe_unused) #define CYTHON_UNUSED [[maybe_unused]] #endif #endif #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_UNUSED_VAR( const T& ) { } # else # define CYTHON_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) #endif #ifndef CYTHON_NCP_UNUSED # if CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #ifndef CYTHON_USE_CPP_STD_MOVE #if defined(__cplusplus) && (\ __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) #define CYTHON_USE_CPP_STD_MOVE 1 #else #define CYTHON_USE_CPP_STD_MOVE 0 #endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int16 uint16_t; typedef unsigned __int32 uint32_t; #endif #endif #if _MSC_VER < 1300 #ifdef _WIN64 typedef unsigned long long __pyx_uintptr_t; #else typedef unsigned int __pyx_uintptr_t; #endif #else #ifdef _WIN64 typedef unsigned __int64 __pyx_uintptr_t; #else typedef unsigned __int32 __pyx_uintptr_t; #endif #endif #else #include typedef uintptr_t __pyx_uintptr_t; #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 * but leads to warnings with -pedantic, since it is a C++17 feature */ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifdef __cplusplus template struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) #else #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) #endif #if CYTHON_COMPILING_IN_PYPY == 1 #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) #else #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) #endif #define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) #ifndef __cplusplus #error "Cython files generated with the C++ option must be compiled with a C++ compiler." #endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #else #define CYTHON_INLINE inline #endif #endif template void __Pyx_call_destructor(T& x) { x.~T(); } template class __Pyx_FakeReference { public: __Pyx_FakeReference() : ptr(NULL) { } __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } T *operator->() { return ptr; } T *operator&() { return ptr; } operator T&() { return *ptr; } template bool operator ==(const U& other) const { return *ptr == other; } template bool operator !=(const U& other) const { return *ptr != other; } template bool operator==(const __Pyx_FakeReference& other) const { return *ptr == *other.ptr; } template bool operator!=(const __Pyx_FakeReference& other) const { return *ptr != *other.ptr; } private: T *ptr; }; #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_DefaultClassType PyClass_Type #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_DefaultClassType PyType_Type #if CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, PyObject *code, PyObject *c, PyObject* n, PyObject *v, PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyObject *exception_table = NULL; PyObject *types_module=NULL, *code_type=NULL, *result=NULL; PyObject *version_info; // borrowed PyObject *py_minor_version = NULL; long minor_version = 0; PyObject *type, *value, *traceback; PyErr_Fetch(&type, &value, &traceback); #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 minor_version = 11; // we don't yet need to distinguish between versions > 11 #else if (!(version_info = PySys_GetObject("version_info"))) goto end; if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; minor_version = PyLong_AsLong(py_minor_version); if (minor_version == -1 && PyErr_Occurred()) goto end; #endif if (!(types_module = PyImport_ImportModule("types"))) goto end; if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; if (minor_version <= 7) { (void)p; result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, c, n, v, fn, name, fline, lnos, fv, cell); } else if (minor_version <= 10) { result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, c, n, v, fn, name, fline, lnos, fv, cell); } else { if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); } end: Py_XDECREF(code_type); Py_XDECREF(exception_table); Py_XDECREF(types_module); Py_XDECREF(py_minor_version); if (type) { PyErr_Restore(type, value, traceback); } return result; } #ifndef CO_OPTIMIZED #define CO_OPTIMIZED 0x0001 #endif #ifndef CO_NEWLOCALS #define CO_NEWLOCALS 0x0002 #endif #ifndef CO_VARARGS #define CO_VARARGS 0x0004 #endif #ifndef CO_VARKEYWORDS #define CO_VARKEYWORDS 0x0008 #endif #ifndef CO_ASYNC_GENERATOR #define CO_ASYNC_GENERATOR 0x0200 #endif #ifndef CO_GENERATOR #define CO_GENERATOR 0x0020 #endif #ifndef CO_COROUTINE #define CO_COROUTINE 0x0080 #endif #elif PY_VERSION_HEX >= 0x030B0000 static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, PyObject *code, PyObject *c, PyObject* n, PyObject *v, PyObject *fv, PyObject *cell, PyObject* fn, PyObject *name, int fline, PyObject *lnos) { PyCodeObject *result; PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); // we don't have access to __pyx_empty_bytes here if (!empty_bytes) return NULL; result = #if PY_VERSION_HEX >= 0x030C0000 PyUnstable_Code_NewWithPosOnlyArgs #else PyCode_NewWithPosOnlyArgs #endif (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); Py_DECREF(empty_bytes); return result; } #elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #else #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #endif #endif #if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) #else #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) #define __Pyx_Py_Is(x, y) Py_Is(x, y) #else #define __Pyx_Py_Is(x, y) ((x) == (y)) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) #else #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) #else #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) #endif #if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) #else #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) #endif #define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) #if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) #else #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) #endif #ifndef CO_COROUTINE #define CO_COROUTINE 0x80 #endif #ifndef CO_ASYNC_GENERATOR #define CO_ASYNC_GENERATOR 0x200 #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #ifndef Py_TPFLAGS_SEQUENCE #define Py_TPFLAGS_SEQUENCE 0 #endif #ifndef Py_TPFLAGS_MAPPING #define Py_TPFLAGS_MAPPING 0 #endif #ifndef METH_STACKLESS #define METH_STACKLESS 0 #endif #if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_METH_FASTCALL #define __Pyx_METH_FASTCALL METH_FASTCALL #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords #else #define __Pyx_METH_FASTCALL METH_VARARGS #define __Pyx_PyCFunction_FastCall PyCFunction #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords #endif #if CYTHON_VECTORCALL #define __pyx_vectorcallfunc vectorcallfunc #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) #elif CYTHON_BACKPORT_VECTORCALL typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwnames); #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) #else #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) #endif #if __PYX_LIMITED_VERSION_HEX < 0x030900B1 #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); #else #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) #define __Pyx_PyCMethod PyCMethod #endif #ifndef METH_METHOD #define METH_METHOD 0x200 #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyThreadState_Current PyThreadState_Get() #elif !CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x03060000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #if CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) { void *result; result = PyModule_GetState(op); if (!result) Py_FatalError("Couldn't find the module state"); return result; } #endif #define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) #else #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) #endif #if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) #include "pythread.h" #define Py_tss_NEEDS_INIT 0 typedef int Py_tss_t; static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { *key = PyThread_create_key(); return 0; } static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); *key = Py_tss_NEEDS_INIT; return key; } static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { PyObject_Free(key); } static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { return *key != Py_tss_NEEDS_INIT; } static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { PyThread_delete_key(*key); *key = Py_tss_NEEDS_INIT; } static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { return PyThread_set_key_value(*key, value); } static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { return PyThread_get_key_value(*key); } #endif #if PY_MAJOR_VERSION < 3 #if CYTHON_COMPILING_IN_PYPY #if PYPY_VERSION_NUM < 0x07030600 #if defined(__cplusplus) && __cplusplus >= 201402L [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] #elif defined(__GNUC__) || defined(__clang__) __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) #elif defined(_MSC_VER) __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) #endif static CYTHON_INLINE int PyGILState_Check(void) { return 0; } #else // PYPY_VERSION_NUM < 0x07030600 #endif // PYPY_VERSION_NUM < 0x07030600 #else static CYTHON_INLINE int PyGILState_Check(void) { PyThreadState * tstate = _PyThreadState_Current; return tstate && (tstate == PyGILState_GetThisThreadState()); } #endif #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #else #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && CYTHON_USE_UNICODE_INTERNALS #define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); if (res == NULL) PyErr_Clear(); return res; } #elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) #define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError #define __Pyx_PyDict_GetItemStr PyDict_GetItem #else static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { #if CYTHON_COMPILING_IN_PYPY return PyDict_GetItem(dict, name); #else PyDictEntry *ep; PyDictObject *mp = (PyDictObject*) dict; long hash = ((PyStringObject *) name)->ob_shash; assert(hash != -1); ep = (mp->ma_lookup)(mp, name, hash); if (ep == NULL) { return NULL; } return ep->me_value; #endif } #define __Pyx_PyDict_GetItemStr PyDict_GetItem #endif #if CYTHON_USE_TYPE_SLOTS #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) #else #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next #endif #if CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) #else #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) #endif #if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 #define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ PyTypeObject *type = Py_TYPE(obj);\ assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ PyObject_GC_Del(obj);\ Py_DECREF(type);\ } #else #define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) #endif #if CYTHON_COMPILING_IN_LIMITED_API #define CYTHON_PEP393_ENABLED 1 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) #define __Pyx_PyUnicode_DATA(u) ((void*)u) #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) #elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #if PY_VERSION_HEX >= 0x030C0000 #define __Pyx_PyUnicode_READY(op) (0) #else #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #endif #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) #if PY_VERSION_HEX >= 0x030C0000 #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) #else #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) #else #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #endif #endif #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY #if !defined(PyUnicode_DecodeUnicodeEscape) #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) #endif #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) #undef PyUnicode_Contains #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #ifndef PyObject_Unicode #define PyObject_Unicode PyObject_Str #endif #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PySequence_ListKeepNew(obj)\ (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) #else #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) #endif #if PY_VERSION_HEX >= 0x030900A4 #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) #else #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) #endif #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) #else #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define __Pyx_Py3Int_Check(op) PyLong_Check(op) #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #else #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t #endif #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif #ifndef __Pyx_PyAsyncMethodsStruct typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #endif #if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) #if !defined(_USE_MATH_DEFINES) #define _USE_MATH_DEFINES #endif #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #define __PYX_MARK_ERR_POS(f_index, lineno) \ { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } #define __PYX_ERR(f_index, lineno, Ln_error) \ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } #ifdef CYTHON_EXTERN_C #undef __PYX_EXTERN_C #define __PYX_EXTERN_C CYTHON_EXTERN_C #elif defined(__PYX_EXTERN_C) #ifdef _MSC_VER #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") #else #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. #endif #else #define __PYX_EXTERN_C extern "C++" #endif #define __PYX_HAVE__compreffor___compreffor #define __PYX_HAVE_API__compreffor___compreffor /* Early includes */ #include #include "ios" #include "new" #include "stdexcept" #include "typeinfo" #include "../cxx/cffCompressor.h" #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { return (size_t) i < (size_t) limit; } #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? -value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) #if CYTHON_COMPILING_IN_LIMITED_API static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const wchar_t *u) { const wchar_t *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #else static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #endif #define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #if CYTHON_USE_PYLONG_INTERNALS #if PY_VERSION_HEX >= 0x030C00A7 #ifndef _PyLong_SIGN_MASK #define _PyLong_SIGN_MASK 3 #endif #ifndef _PyLong_NON_SIZE_BITS #define _PyLong_NON_SIZE_BITS 3 #endif #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) #define __Pyx_PyLong_SignedDigitCount(x)\ ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) #else #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) #endif typedef Py_ssize_t __Pyx_compact_pylong; typedef size_t __Pyx_compact_upylong; #else // Py < 3.12 #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) #define __Pyx_PyLong_CompactValue(x)\ ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? -(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) typedef sdigit __Pyx_compact_pylong; typedef digit __Pyx_compact_upylong; #endif #if PY_VERSION_HEX >= 0x030C00A5 #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) #else #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) #endif #endif #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = (char) c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } #if !CYTHON_USE_MODULE_STATE static PyObject *__pyx_m = NULL; #endif static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm = __FILE__; static const char *__pyx_filename; /* #### Code section: filename_table ### */ static const char *__pyx_f[] = { "src/cython/_compreffor.pyx", }; /* #### Code section: utility_code_proto_before_types ### */ /* #### Code section: numeric_typedefs ### */ /* #### Code section: complex_type_declarations ### */ /* #### Code section: type_declarations ### */ /*--- Type declarations ---*/ /* #### Code section: utility_code_proto ### */ /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, Py_ssize_t); void (*DECREF)(void*, PyObject*, Py_ssize_t); void (*GOTREF)(void*, PyObject*, Py_ssize_t); void (*GIVEREF)(void*, PyObject*, Py_ssize_t); void* (*SetupContext)(const char*, Py_ssize_t, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #ifdef WITH_THREAD #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ } #define __Pyx_RefNannyFinishContextNogil() {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __Pyx_RefNannyFinishContext();\ PyGILState_Release(__pyx_gilstate_save);\ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil)\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() #endif #define __Pyx_RefNannyFinishContextNogil() {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __Pyx_RefNannyFinishContext();\ PyGILState_Release(__pyx_gilstate_save);\ } #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContextNogil() #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_Py_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; Py_XDECREF(tmp);\ } while (0) #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyErrExceptionMatches.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); #else #define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) #endif /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #if PY_VERSION_HEX >= 0x030C00A6 #define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) #define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? (PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) #else #define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) #define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) #endif #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) #define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* PyObjectGetAttrStrNoError.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* TupleAndListFromArray.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); #endif /* IncludeStringH.proto */ #include /* BytesEquals.proto */ static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); /* UnicodeEquals.proto */ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /* fastcall.proto */ #if CYTHON_AVOID_BORROWED_REFS #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) #elif CYTHON_ASSUME_SAFE_MACROS #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) #else #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) #endif #if CYTHON_AVOID_BORROWED_REFS #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) #else #define __Pyx_Arg_NewRef_VARARGS(arg) arg // no-op #define __Pyx_Arg_XDECREF_VARARGS(arg) // no-op - arg is borrowed #endif #define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) #define __Pyx_KwValues_VARARGS(args, nargs) NULL #define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) #define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) #if CYTHON_METH_FASTCALL #define __Pyx_Arg_FASTCALL(args, i) args[i] #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) #define __Pyx_Arg_NewRef_FASTCALL(arg) arg // no-op, __Pyx_Arg_FASTCALL is direct and this needs #define __Pyx_Arg_XDECREF_FASTCALL(arg) // no-op - arg was returned from array #else #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) #endif #if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS #define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) #define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) #else #define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) #define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) #endif /* RaiseArgTupleInvalid.proto */ static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /* RaiseDoubleKeywords.proto */ static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /* ParseKeywords.proto */ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name); /* ArgTypeTest.proto */ #define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ __Pyx__ArgTypeTest(obj, type, name, exact)) static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); /* ListAppend.proto */ #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { PyListObject* L = (PyListObject*) list; Py_ssize_t len = Py_SIZE(list); if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); __Pyx_SET_SIZE(list, len + 1); return 0; } return PyList_Append(list, x); } #else #define __Pyx_PyList_Append(L,x) PyList_Append(L,x) #endif /* IncludeStructmemberH.proto */ #include /* FixUpExtensionType.proto */ #if CYTHON_USE_TYPE_SPECS static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); #endif /* FetchSharedCythonModule.proto */ static PyObject *__Pyx_FetchSharedCythonABIModule(void); /* FetchCommonType.proto */ #if !CYTHON_USE_TYPE_SPECS static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); #else static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); #endif /* PyMethodNew.proto */ #if CYTHON_COMPILING_IN_LIMITED_API static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; CYTHON_UNUSED_VAR(typ); if (!self) return __Pyx_NewRef(func); typesModule = PyImport_ImportModule("types"); if (!typesModule) return NULL; methodType = PyObject_GetAttrString(typesModule, "MethodType"); Py_DECREF(typesModule); if (!methodType) return NULL; result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); Py_DECREF(methodType); return result; } #elif PY_MAJOR_VERSION >= 3 static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { CYTHON_UNUSED_VAR(typ); if (!self) return __Pyx_NewRef(func); return PyMethod_New(func, self); } #else #define __Pyx_PyMethod_New PyMethod_New #endif /* PyVectorcallFastCallDict.proto */ #if CYTHON_METH_FASTCALL static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); #endif /* CythonFunctionShared.proto */ #define __Pyx_CyFunction_USED #define __Pyx_CYFUNCTION_STATICMETHOD 0x01 #define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 #define __Pyx_CYFUNCTION_CCLASS 0x04 #define __Pyx_CYFUNCTION_COROUTINE 0x08 #define __Pyx_CyFunction_GetClosure(f)\ (((__pyx_CyFunctionObject *) (f))->func_closure) #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_CyFunction_GetClassObj(f)\ (((__pyx_CyFunctionObject *) (f))->func_classobj) #else #define __Pyx_CyFunction_GetClassObj(f)\ ((PyObject*) ((PyCMethodObject *) (f))->mm_class) #endif #define __Pyx_CyFunction_SetClassObj(f, classobj)\ __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) #define __Pyx_CyFunction_Defaults(type, f)\ ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) #define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) typedef struct { #if CYTHON_COMPILING_IN_LIMITED_API PyObject_HEAD PyObject *func; #elif PY_VERSION_HEX < 0x030900B1 PyCFunctionObject func; #else PyCMethodObject func; #endif #if CYTHON_BACKPORT_VECTORCALL __pyx_vectorcallfunc func_vectorcall; #endif #if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API PyObject *func_weakreflist; #endif PyObject *func_dict; PyObject *func_name; PyObject *func_qualname; PyObject *func_doc; PyObject *func_globals; PyObject *func_code; PyObject *func_closure; #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API PyObject *func_classobj; #endif void *defaults; int defaults_pyobjects; size_t defaults_size; // used by FusedFunction for copying defaults int flags; PyObject *defaults_tuple; PyObject *defaults_kwdict; PyObject *(*defaults_getter)(PyObject *); PyObject *func_annotations; PyObject *func_is_coroutine; } __pyx_CyFunctionObject; #define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) #define __Pyx_IsCyOrPyCFunction(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) #define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, int flags, PyObject* qualname, PyObject *closure, PyObject *module, PyObject *globals, PyObject* code); static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, size_t size, int pyobjects); static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, PyObject *tuple); static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, PyObject *dict); static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, PyObject *dict); static int __pyx_CyFunction_init(PyObject *module); #if CYTHON_METH_FASTCALL static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); #if CYTHON_BACKPORT_VECTORCALL #define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) #else #define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) #endif #endif /* CythonFunction.proto */ static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, PyObject *closure, PyObject *module, PyObject *globals, PyObject* code); /* PyDictVersioning.proto */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS #define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) #define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ (version_var) = __PYX_GET_DICT_VERSION(dict);\ (cache_var) = (value); #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ static PY_UINT64_T __pyx_dict_version = 0;\ static PyObject *__pyx_dict_cached_value = NULL;\ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ (VAR) = __pyx_dict_cached_value;\ } else {\ (VAR) = __pyx_dict_cached_value = (LOOKUP);\ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ }\ } static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); #else #define __PYX_GET_DICT_VERSION(dict) (0) #define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) #define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); #endif /* CLineInTraceback.proto */ #ifdef CYTHON_CLINE_IN_TRACEBACK #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) #else static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #endif /* CodeObjectCache.proto */ #if !CYTHON_COMPILING_IN_LIMITED_API typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); #endif /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* GCCDiagnostics.proto */ #if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) #define __Pyx_HAS_GCC_DIAGNOSTIC #endif /* CppExceptionConversion.proto */ #ifndef __Pyx_CppExn2PyErr #include #include #include #include static void __Pyx_CppExn2PyErr() { try { if (PyErr_Occurred()) ; // let the latest Python exn pass through and ignore the current one else throw; } catch (const std::bad_alloc& exn) { PyErr_SetString(PyExc_MemoryError, exn.what()); } catch (const std::bad_cast& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::bad_typeid& exn) { PyErr_SetString(PyExc_TypeError, exn.what()); } catch (const std::domain_error& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::invalid_argument& exn) { PyErr_SetString(PyExc_ValueError, exn.what()); } catch (const std::ios_base::failure& exn) { PyErr_SetString(PyExc_IOError, exn.what()); } catch (const std::out_of_range& exn) { PyErr_SetString(PyExc_IndexError, exn.what()); } catch (const std::overflow_error& exn) { PyErr_SetString(PyExc_OverflowError, exn.what()); } catch (const std::range_error& exn) { PyErr_SetString(PyExc_ArithmeticError, exn.what()); } catch (const std::underflow_error& exn) { PyErr_SetString(PyExc_ArithmeticError, exn.what()); } catch (const std::exception& exn) { PyErr_SetString(PyExc_RuntimeError, exn.what()); } catch (...) { PyErr_SetString(PyExc_RuntimeError, "Unknown exception"); } } #endif /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); /* CIntFromPy.proto */ static CYTHON_INLINE unsigned int __Pyx_PyInt_As_unsigned_int(PyObject *); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint32_t(uint32_t value); /* FormatTypeName.proto */ #if CYTHON_COMPILING_IN_LIMITED_API typedef PyObject *__Pyx_TypeName; #define __Pyx_FMT_TYPENAME "%U" static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); #define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) #else typedef const char *__Pyx_TypeName; #define __Pyx_FMT_TYPENAME "%.200s" #define __Pyx_PyType_GetName(tp) ((tp)->tp_name) #define __Pyx_DECREF_TypeName(obj) #endif /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) #define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) #endif #define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /* #### Code section: module_declarations ### */ /* Module declarations from "libc.stdint" */ /* Module declarations from "compreffor._compreffor" */ /* #### Code section: typeinfo ### */ /* #### Code section: before_global_var ### */ #define __Pyx_MODULE_NAME "compreffor._compreffor" extern int __pyx_module_is_main_compreffor___compreffor; int __pyx_module_is_main_compreffor___compreffor = 0; /* Implementation of "compreffor._compreffor" */ /* #### Code section: global_var ### */ static PyObject *__pyx_builtin_range; /* #### Code section: string_decls ### */ static const char __pyx_k_i[] = "i"; static const char __pyx_k__3[] = "?"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_name[] = "__name__"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_range[] = "range"; static const char __pyx_k_output[] = "output"; static const char __pyx_k_compreff[] = "compreff"; static const char __pyx_k_numRounds[] = "numRounds"; static const char __pyx_k_dataStream[] = "dataStream"; static const char __pyx_k_raw_output[] = "raw_output"; static const char __pyx_k_is_coroutine[] = "_is_coroutine"; static const char __pyx_k_outputLength[] = "outputLength"; static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_compreffor__compreffor[] = "compreffor._compreffor"; static const char __pyx_k_src_cython__compreffor_pyx[] = "src/cython/_compreffor.pyx"; /* #### Code section: decls ### */ static PyObject *__pyx_pf_10compreffor_11_compreffor_compreff(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_dataStream, int __pyx_v_numRounds); /* proto */ /* #### Code section: late_includes ### */ /* #### Code section: module_state ### */ typedef struct { PyObject *__pyx_d; PyObject *__pyx_b; PyObject *__pyx_cython_runtime; PyObject *__pyx_empty_tuple; PyObject *__pyx_empty_bytes; PyObject *__pyx_empty_unicode; #ifdef __Pyx_CyFunction_USED PyTypeObject *__pyx_CyFunctionType; #endif #ifdef __Pyx_FusedFunction_USED PyTypeObject *__pyx_FusedFunctionType; #endif #ifdef __Pyx_Generator_USED PyTypeObject *__pyx_GeneratorType; #endif #ifdef __Pyx_IterableCoroutine_USED PyTypeObject *__pyx_IterableCoroutineType; #endif #ifdef __Pyx_Coroutine_USED PyTypeObject *__pyx_CoroutineAwaitType; #endif #ifdef __Pyx_Coroutine_USED PyTypeObject *__pyx_CoroutineType; #endif #if CYTHON_USE_MODULE_STATE #endif #if CYTHON_USE_MODULE_STATE #endif PyObject *__pyx_n_s__3; PyObject *__pyx_n_s_asyncio_coroutines; PyObject *__pyx_n_s_cline_in_traceback; PyObject *__pyx_n_s_compreff; PyObject *__pyx_n_s_compreffor__compreffor; PyObject *__pyx_n_s_dataStream; PyObject *__pyx_n_s_i; PyObject *__pyx_n_s_is_coroutine; PyObject *__pyx_n_s_main; PyObject *__pyx_n_s_name; PyObject *__pyx_n_s_numRounds; PyObject *__pyx_n_s_output; PyObject *__pyx_n_s_outputLength; PyObject *__pyx_n_s_range; PyObject *__pyx_n_s_raw_output; PyObject *__pyx_kp_s_src_cython__compreffor_pyx; PyObject *__pyx_n_s_test; PyObject *__pyx_tuple_; PyObject *__pyx_codeobj__2; } __pyx_mstate; #if CYTHON_USE_MODULE_STATE #ifdef __cplusplus namespace { extern struct PyModuleDef __pyx_moduledef; } /* anonymous namespace */ #else static struct PyModuleDef __pyx_moduledef; #endif #define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) #define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) #define __pyx_m (PyState_FindModule(&__pyx_moduledef)) #else static __pyx_mstate __pyx_mstate_global_static = #ifdef __cplusplus {}; #else {0}; #endif static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; #endif /* #### Code section: module_state_clear ### */ #if CYTHON_USE_MODULE_STATE static int __pyx_m_clear(PyObject *m) { __pyx_mstate *clear_module_state = __pyx_mstate(m); if (!clear_module_state) return 0; Py_CLEAR(clear_module_state->__pyx_d); Py_CLEAR(clear_module_state->__pyx_b); Py_CLEAR(clear_module_state->__pyx_cython_runtime); Py_CLEAR(clear_module_state->__pyx_empty_tuple); Py_CLEAR(clear_module_state->__pyx_empty_bytes); Py_CLEAR(clear_module_state->__pyx_empty_unicode); #ifdef __Pyx_CyFunction_USED Py_CLEAR(clear_module_state->__pyx_CyFunctionType); #endif #ifdef __Pyx_FusedFunction_USED Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); #endif Py_CLEAR(clear_module_state->__pyx_n_s__3); Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); Py_CLEAR(clear_module_state->__pyx_n_s_compreff); Py_CLEAR(clear_module_state->__pyx_n_s_compreffor__compreffor); Py_CLEAR(clear_module_state->__pyx_n_s_dataStream); Py_CLEAR(clear_module_state->__pyx_n_s_i); Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); Py_CLEAR(clear_module_state->__pyx_n_s_main); Py_CLEAR(clear_module_state->__pyx_n_s_name); Py_CLEAR(clear_module_state->__pyx_n_s_numRounds); Py_CLEAR(clear_module_state->__pyx_n_s_output); Py_CLEAR(clear_module_state->__pyx_n_s_outputLength); Py_CLEAR(clear_module_state->__pyx_n_s_range); Py_CLEAR(clear_module_state->__pyx_n_s_raw_output); Py_CLEAR(clear_module_state->__pyx_kp_s_src_cython__compreffor_pyx); Py_CLEAR(clear_module_state->__pyx_n_s_test); Py_CLEAR(clear_module_state->__pyx_tuple_); Py_CLEAR(clear_module_state->__pyx_codeobj__2); return 0; } #endif /* #### Code section: module_state_traverse ### */ #if CYTHON_USE_MODULE_STATE static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { __pyx_mstate *traverse_module_state = __pyx_mstate(m); if (!traverse_module_state) return 0; Py_VISIT(traverse_module_state->__pyx_d); Py_VISIT(traverse_module_state->__pyx_b); Py_VISIT(traverse_module_state->__pyx_cython_runtime); Py_VISIT(traverse_module_state->__pyx_empty_tuple); Py_VISIT(traverse_module_state->__pyx_empty_bytes); Py_VISIT(traverse_module_state->__pyx_empty_unicode); #ifdef __Pyx_CyFunction_USED Py_VISIT(traverse_module_state->__pyx_CyFunctionType); #endif #ifdef __Pyx_FusedFunction_USED Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); #endif Py_VISIT(traverse_module_state->__pyx_n_s__3); Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); Py_VISIT(traverse_module_state->__pyx_n_s_compreff); Py_VISIT(traverse_module_state->__pyx_n_s_compreffor__compreffor); Py_VISIT(traverse_module_state->__pyx_n_s_dataStream); Py_VISIT(traverse_module_state->__pyx_n_s_i); Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); Py_VISIT(traverse_module_state->__pyx_n_s_main); Py_VISIT(traverse_module_state->__pyx_n_s_name); Py_VISIT(traverse_module_state->__pyx_n_s_numRounds); Py_VISIT(traverse_module_state->__pyx_n_s_output); Py_VISIT(traverse_module_state->__pyx_n_s_outputLength); Py_VISIT(traverse_module_state->__pyx_n_s_range); Py_VISIT(traverse_module_state->__pyx_n_s_raw_output); Py_VISIT(traverse_module_state->__pyx_kp_s_src_cython__compreffor_pyx); Py_VISIT(traverse_module_state->__pyx_n_s_test); Py_VISIT(traverse_module_state->__pyx_tuple_); Py_VISIT(traverse_module_state->__pyx_codeobj__2); return 0; } #endif /* #### Code section: module_state_defines ### */ #define __pyx_d __pyx_mstate_global->__pyx_d #define __pyx_b __pyx_mstate_global->__pyx_b #define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime #define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple #define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes #define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode #ifdef __Pyx_CyFunction_USED #define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType #endif #ifdef __Pyx_FusedFunction_USED #define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType #endif #ifdef __Pyx_Generator_USED #define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType #endif #ifdef __Pyx_IterableCoroutine_USED #define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType #endif #ifdef __Pyx_Coroutine_USED #define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType #endif #ifdef __Pyx_Coroutine_USED #define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType #endif #if CYTHON_USE_MODULE_STATE #endif #if CYTHON_USE_MODULE_STATE #endif #define __pyx_n_s__3 __pyx_mstate_global->__pyx_n_s__3 #define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines #define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback #define __pyx_n_s_compreff __pyx_mstate_global->__pyx_n_s_compreff #define __pyx_n_s_compreffor__compreffor __pyx_mstate_global->__pyx_n_s_compreffor__compreffor #define __pyx_n_s_dataStream __pyx_mstate_global->__pyx_n_s_dataStream #define __pyx_n_s_i __pyx_mstate_global->__pyx_n_s_i #define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine #define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main #define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name #define __pyx_n_s_numRounds __pyx_mstate_global->__pyx_n_s_numRounds #define __pyx_n_s_output __pyx_mstate_global->__pyx_n_s_output #define __pyx_n_s_outputLength __pyx_mstate_global->__pyx_n_s_outputLength #define __pyx_n_s_range __pyx_mstate_global->__pyx_n_s_range #define __pyx_n_s_raw_output __pyx_mstate_global->__pyx_n_s_raw_output #define __pyx_kp_s_src_cython__compreffor_pyx __pyx_mstate_global->__pyx_kp_s_src_cython__compreffor_pyx #define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test #define __pyx_tuple_ __pyx_mstate_global->__pyx_tuple_ #define __pyx_codeobj__2 __pyx_mstate_global->__pyx_codeobj__2 /* #### Code section: module_code ### */ /* "src/cython/_compreffor.pyx":11 * * * def compreff(bytes dataStream, int numRounds): # <<<<<<<<<<<<<< * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) */ /* Python wrapper */ static PyObject *__pyx_pw_10compreffor_11_compreffor_1compreff(PyObject *__pyx_self, #if CYTHON_METH_FASTCALL PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds #else PyObject *__pyx_args, PyObject *__pyx_kwds #endif ); /*proto*/ static PyMethodDef __pyx_mdef_10compreffor_11_compreffor_1compreff = {"compreff", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10compreffor_11_compreffor_1compreff, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_10compreffor_11_compreffor_1compreff(PyObject *__pyx_self, #if CYTHON_METH_FASTCALL PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds #else PyObject *__pyx_args, PyObject *__pyx_kwds #endif ) { PyObject *__pyx_v_dataStream = 0; int __pyx_v_numRounds; #if !CYTHON_METH_FASTCALL CYTHON_UNUSED Py_ssize_t __pyx_nargs; #endif CYTHON_UNUSED PyObject *const *__pyx_kwvalues; PyObject* values[2] = {0,0}; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("compreff (wrapper)", 0); #if !CYTHON_METH_FASTCALL #if CYTHON_ASSUME_SAFE_MACROS __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); #else __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely((__pyx_nargs < 0))) __PYX_ERR(0, 11, __pyx_L3_error) #endif #endif __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); { PyObject **__pyx_pyargnames[] = {&__pyx_n_s_dataStream,&__pyx_n_s_numRounds,0}; if (__pyx_kwds) { Py_ssize_t kw_args; switch (__pyx_nargs) { case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); switch (__pyx_nargs) { case 0: if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_dataStream)) != 0)) { (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); kw_args--; } else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 11, __pyx_L3_error) else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_numRounds)) != 0)) { (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); kw_args--; } else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 11, __pyx_L3_error) else { __Pyx_RaiseArgtupleInvalid("compreff", 1, 2, 2, 1); __PYX_ERR(0, 11, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { const Py_ssize_t kwd_pos_args = __pyx_nargs; if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "compreff") < 0)) __PYX_ERR(0, 11, __pyx_L3_error) } } else if (unlikely(__pyx_nargs != 2)) { goto __pyx_L5_argtuple_error; } else { values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); } __pyx_v_dataStream = ((PyObject*)values[0]); __pyx_v_numRounds = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_numRounds == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 11, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("compreff", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 11, __pyx_L3_error) goto __pyx_L3_error; __pyx_L3_error:; { Py_ssize_t __pyx_temp; for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); } } __Pyx_AddTraceback("compreffor._compreffor.compreff", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_dataStream), (&PyBytes_Type), 1, "dataStream", 1))) __PYX_ERR(0, 11, __pyx_L1_error) __pyx_r = __pyx_pf_10compreffor_11_compreffor_compreff(__pyx_self, __pyx_v_dataStream, __pyx_v_numRounds); /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; { Py_ssize_t __pyx_temp; for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); } } __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10compreffor_11_compreffor_compreff(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_dataStream, int __pyx_v_numRounds) { unsigned int __pyx_v_outputLength; uint32_t *__pyx_v_raw_output; PyObject *__pyx_v_output = 0; unsigned int __pyx_v_i; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations unsigned char *__pyx_t_1; uint32_t *__pyx_t_2; PyObject *__pyx_t_3 = NULL; unsigned int __pyx_t_4; unsigned int __pyx_t_5; unsigned int __pyx_t_6; int __pyx_t_7; int __pyx_t_8; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannySetupContext("compreff", 0); /* "src/cython/_compreffor.pyx":12 * * def compreff(bytes dataStream, int numRounds): * cdef unsigned outputLength = 0 # <<<<<<<<<<<<<< * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) * cdef list output = [] */ __pyx_v_outputLength = 0; /* "src/cython/_compreffor.pyx":13 * def compreff(bytes dataStream, int numRounds): * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) # <<<<<<<<<<<<<< * cdef list output = [] * cdef unsigned i */ if (unlikely(__pyx_v_dataStream == Py_None)) { PyErr_SetString(PyExc_TypeError, "expected bytes, NoneType found"); __PYX_ERR(0, 13, __pyx_L1_error) } __pyx_t_1 = __Pyx_PyBytes_AsWritableUString(__pyx_v_dataStream); if (unlikely((!__pyx_t_1) && PyErr_Occurred())) __PYX_ERR(0, 13, __pyx_L1_error) try { __pyx_t_2 = compreff(__pyx_t_1, __pyx_v_numRounds, __pyx_v_outputLength); } catch(...) { __Pyx_CppExn2PyErr(); __PYX_ERR(0, 13, __pyx_L1_error) } __pyx_v_raw_output = __pyx_t_2; /* "src/cython/_compreffor.pyx":14 * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) * cdef list output = [] # <<<<<<<<<<<<<< * cdef unsigned i * for i in range(outputLength): */ __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_v_output = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "src/cython/_compreffor.pyx":16 * cdef list output = [] * cdef unsigned i * for i in range(outputLength): # <<<<<<<<<<<<<< * output.append(raw_output[i]) * if raw_output != NULL: */ __pyx_t_4 = __pyx_v_outputLength; __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; /* "src/cython/_compreffor.pyx":17 * cdef unsigned i * for i in range(outputLength): * output.append(raw_output[i]) # <<<<<<<<<<<<<< * if raw_output != NULL: * unload(raw_output) */ __pyx_t_3 = __Pyx_PyInt_From_uint32_t((__pyx_v_raw_output[__pyx_v_i])); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_output, __pyx_t_3); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } /* "src/cython/_compreffor.pyx":18 * for i in range(outputLength): * output.append(raw_output[i]) * if raw_output != NULL: # <<<<<<<<<<<<<< * unload(raw_output) * return output */ __pyx_t_8 = (__pyx_v_raw_output != NULL); if (__pyx_t_8) { /* "src/cython/_compreffor.pyx":19 * output.append(raw_output[i]) * if raw_output != NULL: * unload(raw_output) # <<<<<<<<<<<<<< * return output */ unload(__pyx_v_raw_output); /* "src/cython/_compreffor.pyx":18 * for i in range(outputLength): * output.append(raw_output[i]) * if raw_output != NULL: # <<<<<<<<<<<<<< * unload(raw_output) * return output */ } /* "src/cython/_compreffor.pyx":20 * if raw_output != NULL: * unload(raw_output) * return output # <<<<<<<<<<<<<< */ __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_output); __pyx_r = __pyx_v_output; goto __pyx_L0; /* "src/cython/_compreffor.pyx":11 * * * def compreff(bytes dataStream, int numRounds): # <<<<<<<<<<<<<< * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("compreffor._compreffor.compreff", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_output); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) #define CYTHON_SMALL_CODE __attribute__((cold)) #else #define CYTHON_SMALL_CODE #endif #endif /* #### Code section: pystring_table ### */ static int __Pyx_CreateStringTabAndInitStrings(void) { __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 1}, {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_compreff, __pyx_k_compreff, sizeof(__pyx_k_compreff), 0, 0, 1, 1}, {&__pyx_n_s_compreffor__compreffor, __pyx_k_compreffor__compreffor, sizeof(__pyx_k_compreffor__compreffor), 0, 0, 1, 1}, {&__pyx_n_s_dataStream, __pyx_k_dataStream, sizeof(__pyx_k_dataStream), 0, 0, 1, 1}, {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, {&__pyx_n_s_numRounds, __pyx_k_numRounds, sizeof(__pyx_k_numRounds), 0, 0, 1, 1}, {&__pyx_n_s_output, __pyx_k_output, sizeof(__pyx_k_output), 0, 0, 1, 1}, {&__pyx_n_s_outputLength, __pyx_k_outputLength, sizeof(__pyx_k_outputLength), 0, 0, 1, 1}, {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, {&__pyx_n_s_raw_output, __pyx_k_raw_output, sizeof(__pyx_k_raw_output), 0, 0, 1, 1}, {&__pyx_kp_s_src_cython__compreffor_pyx, __pyx_k_src_cython__compreffor_pyx, sizeof(__pyx_k_src_cython__compreffor_pyx), 0, 0, 1, 0}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; return __Pyx_InitStrings(__pyx_string_tab); } /* #### Code section: cached_builtins ### */ static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 16, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } /* #### Code section: cached_constants ### */ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "src/cython/_compreffor.pyx":11 * * * def compreff(bytes dataStream, int numRounds): # <<<<<<<<<<<<<< * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) */ __pyx_tuple_ = PyTuple_Pack(6, __pyx_n_s_dataStream, __pyx_n_s_numRounds, __pyx_n_s_outputLength, __pyx_n_s_raw_output, __pyx_n_s_output, __pyx_n_s_i); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cython__compreffor_pyx, __pyx_n_s_compreff, 11, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } /* #### Code section: init_constants ### */ static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } /* #### Code section: init_globals ### */ static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { return 0; } /* #### Code section: init_module ### */ static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ static int __Pyx_modinit_global_init_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); /*--- Global init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_variable_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); /*--- Variable export code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); /*--- Function export code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_type_init_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); /*--- Type init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_type_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); /*--- Type import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_variable_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); /*--- Variable import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } static int __Pyx_modinit_function_import_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); /*--- Function import code ---*/ __Pyx_RefNannyFinishContext(); return 0; } #if PY_MAJOR_VERSION >= 3 #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec__compreffor(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec__compreffor}, {0, NULL} }; #endif #ifdef __cplusplus namespace { struct PyModuleDef __pyx_moduledef = #else static struct PyModuleDef __pyx_moduledef = #endif { PyModuleDef_HEAD_INIT, "_compreffor", 0, /* m_doc */ #if CYTHON_PEP489_MULTI_PHASE_INIT 0, /* m_size */ #elif CYTHON_USE_MODULE_STATE sizeof(__pyx_mstate), /* m_size */ #else -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif #if CYTHON_USE_MODULE_STATE __pyx_m_traverse, /* m_traverse */ __pyx_m_clear, /* m_clear */ NULL /* m_free */ #else NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ #endif }; #ifdef __cplusplus } /* anonymous namespace */ #endif #endif #ifndef CYTHON_NO_PYINIT_EXPORT #define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC #elif PY_MAJOR_VERSION < 3 #ifdef __cplusplus #define __Pyx_PyMODINIT_FUNC extern "C" void #else #define __Pyx_PyMODINIT_FUNC void #endif #else #ifdef __cplusplus #define __Pyx_PyMODINIT_FUNC extern "C" PyObject * #else #define __Pyx_PyMODINIT_FUNC PyObject * #endif #endif #if PY_MAJOR_VERSION < 3 __Pyx_PyMODINIT_FUNC init_compreffor(void) CYTHON_SMALL_CODE; /*proto*/ __Pyx_PyMODINIT_FUNC init_compreffor(void) #else __Pyx_PyMODINIT_FUNC PyInit__compreffor(void) CYTHON_SMALL_CODE; /*proto*/ __Pyx_PyMODINIT_FUNC PyInit__compreffor(void) #if CYTHON_PEP489_MULTI_PHASE_INIT { return PyModuleDef_Init(&__pyx_moduledef); } static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { #if PY_VERSION_HEX >= 0x030700A1 static PY_INT64_T main_interpreter_id = -1; PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); if (main_interpreter_id == -1) { main_interpreter_id = current_id; return (unlikely(current_id == -1)) ? -1 : 0; } else if (unlikely(main_interpreter_id != current_id)) #else static PyInterpreterState *main_interpreter = NULL; PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; if (!main_interpreter) { main_interpreter = current_interpreter; } else if (unlikely(main_interpreter != current_interpreter)) #endif { PyErr_SetString( PyExc_ImportError, "Interpreter change detected - this module can only be loaded into one interpreter per process."); return -1; } return 0; } #if CYTHON_COMPILING_IN_LIMITED_API static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) #else static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) #endif { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { if (allow_none || value != Py_None) { #if CYTHON_COMPILING_IN_LIMITED_API result = PyModule_AddObject(module, to_name, value); #else result = PyDict_SetItemString(moddict, to_name, value); #endif } Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; CYTHON_UNUSED_VAR(def); if (__Pyx_check_single_interpreter()) return NULL; if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; #if CYTHON_COMPILING_IN_LIMITED_API moddict = module; #else moddict = PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; #endif if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static CYTHON_SMALL_CODE int __pyx_pymod_exec__compreffor(PyObject *__pyx_pyinit_module) #endif #endif { int stringtab_initialized = 0; #if CYTHON_USE_MODULE_STATE int pystate_addmodule_run = 0; #endif PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m) { if (__pyx_m == __pyx_pyinit_module) return 0; PyErr_SetString(PyExc_RuntimeError, "Module '_compreffor' has already been imported. Re-initialisation is not supported."); return -1; } #elif PY_MAJOR_VERSION >= 3 if (__pyx_m) return __Pyx_NewRef(__pyx_m); #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_m = __pyx_pyinit_module; Py_INCREF(__pyx_m); #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("_compreffor", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #elif CYTHON_USE_MODULE_STATE __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) { int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to _compreffor pseudovariable */ if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) pystate_addmodule_run = 1; } #else __pyx_m = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif #endif CYTHON_UNUSED_VAR(__pyx_t_1); __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_d); __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_b); __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_cython_runtime); if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__compreffor(void)", 0); if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pxy_PyFrame_Initialize_Offsets __Pxy_PyFrame_Initialize_Offsets(); #endif __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS PyEval_InitThreads(); #endif /*--- Initialize various global constants etc. ---*/ if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) stringtab_initialized = 1; if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif if (__pyx_module_is_main_compreffor___compreffor) { if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "compreffor._compreffor")) { if (unlikely((PyDict_SetItemString(modules, "compreffor._compreffor", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(); (void)__Pyx_modinit_variable_export_code(); (void)__Pyx_modinit_function_export_code(); (void)__Pyx_modinit_type_init_code(); (void)__Pyx_modinit_type_import_code(); (void)__Pyx_modinit_variable_import_code(); (void)__Pyx_modinit_function_import_code(); /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /* "src/cython/_compreffor.pyx":11 * * * def compreff(bytes dataStream, int numRounds): # <<<<<<<<<<<<<< * cdef unsigned outputLength = 0 * cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) */ __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10compreffor_11_compreffor_1compreff, 0, __pyx_n_s_compreff, NULL, __pyx_n_s_compreffor__compreffor, __pyx_d, ((PyObject *)__pyx_codeobj__2)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_compreff, __pyx_t_2) < 0) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "src/cython/_compreffor.pyx":1 * from libc.stdint cimport uint32_t # <<<<<<<<<<<<<< * * */ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*--- Wrapped vars code ---*/ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); if (__pyx_m) { if (__pyx_d && stringtab_initialized) { __Pyx_AddTraceback("init compreffor._compreffor", __pyx_clineno, __pyx_lineno, __pyx_filename); } #if !CYTHON_USE_MODULE_STATE Py_CLEAR(__pyx_m); #else Py_DECREF(__pyx_m); if (pystate_addmodule_run) { PyObject *tp, *value, *tb; PyErr_Fetch(&tp, &value, &tb); PyState_RemoveModule(&__pyx_moduledef); PyErr_Restore(tp, value, tb); } #endif } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init compreffor._compreffor"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 0 : -1; #elif PY_MAJOR_VERSION >= 3 return __pyx_m; #else return; #endif } /* #### Code section: cleanup_globals ### */ /* #### Code section: cleanup_module ### */ /* #### Code section: main_method ### */ /* #### Code section: utility_code_pragmas ### */ #ifdef _MSC_VER #pragma warning( push ) /* Warning 4127: conditional expression is constant * Cython uses constant conditional expressions to allow in inline functions to be optimized at * compile-time, so this warning is not useful */ #pragma warning( disable : 4127 ) #endif /* #### Code section: utility_code_def ### */ /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule(modname); if (!m) goto end; p = PyObject_GetAttrString(m, "RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* PyErrExceptionMatches */ #if CYTHON_FAST_THREAD_STATE static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(tuple); #if PY_MAJOR_VERSION >= 3 for (i=0; i= 0x030C00A6 PyObject *current_exception = tstate->current_exception; if (unlikely(!current_exception)) return 0; exc_type = (PyObject*) Py_TYPE(current_exception); if (exc_type == err) return 1; #else exc_type = tstate->curexc_type; if (exc_type == err) return 1; if (unlikely(!exc_type)) return 0; #endif #if CYTHON_AVOID_BORROWED_REFS Py_INCREF(exc_type); #endif if (unlikely(PyTuple_Check(err))) { result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); } else { result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); } #if CYTHON_AVOID_BORROWED_REFS Py_DECREF(exc_type); #endif return result; } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { #if PY_VERSION_HEX >= 0x030C00A6 PyObject *tmp_value; assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); if (value) { #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) #endif PyException_SetTraceback(value, tb); } tmp_value = tstate->current_exception; tstate->current_exception = value; Py_XDECREF(tmp_value); #else PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #endif } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #if PY_VERSION_HEX >= 0x030C00A6 PyObject* exc_value; exc_value = tstate->current_exception; tstate->current_exception = 0; *value = exc_value; *type = NULL; *tb = NULL; if (exc_value) { *type = (PyObject*) Py_TYPE(exc_value); Py_INCREF(*type); #if CYTHON_COMPILING_IN_CPYTHON *tb = ((PyBaseExceptionObject*) exc_value)->traceback; Py_XINCREF(*tb); #else *tb = PyException_GetTraceback(exc_value); #endif } #else *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; #endif } #endif /* PyObjectGetAttrStr */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #endif /* PyObjectGetAttrStrNoError */ static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) __Pyx_PyErr_Clear(); } static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { PyObject *result; #if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); } #endif result = __Pyx_PyObject_GetAttrStr(obj, attr_name); if (unlikely(!result)) { __Pyx_PyObject_GetAttrStr_ClearAttributeError(); } return result; } /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); if (unlikely(!result) && !PyErr_Occurred()) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /* TupleAndListFromArray */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { PyObject *v; Py_ssize_t i; for (i = 0; i < length; i++) { v = dest[i] = src[i]; Py_INCREF(v); } } static CYTHON_INLINE PyObject * __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) { PyObject *res; if (n <= 0) { Py_INCREF(__pyx_empty_tuple); return __pyx_empty_tuple; } res = PyTuple_New(n); if (unlikely(res == NULL)) return NULL; __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); return res; } static CYTHON_INLINE PyObject * __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) { PyObject *res; if (n <= 0) { return PyList_New(0); } res = PyList_New(n); if (unlikely(res == NULL)) return NULL; __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); return res; } #endif /* BytesEquals */ static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API return PyObject_RichCompareBool(s1, s2, equals); #else if (s1 == s2) { return (equals == Py_EQ); } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { const char *ps1, *ps2; Py_ssize_t length = PyBytes_GET_SIZE(s1); if (length != PyBytes_GET_SIZE(s2)) return (equals == Py_NE); ps1 = PyBytes_AS_STRING(s1); ps2 = PyBytes_AS_STRING(s2); if (ps1[0] != ps2[0]) { return (equals == Py_NE); } else if (length == 1) { return (equals == Py_EQ); } else { int result; #if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) Py_hash_t hash1, hash2; hash1 = ((PyBytesObject*)s1)->ob_shash; hash2 = ((PyBytesObject*)s2)->ob_shash; if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { return (equals == Py_NE); } #endif result = memcmp(ps1, ps2, (size_t)length); return (equals == Py_EQ) ? (result == 0) : (result != 0); } } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { return (equals == Py_NE); } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { return (equals == Py_NE); } else { int result; PyObject* py_result = PyObject_RichCompare(s1, s2, equals); if (!py_result) return -1; result = __Pyx_PyObject_IsTrue(py_result); Py_DECREF(py_result); return result; } #endif } /* UnicodeEquals */ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API return PyObject_RichCompareBool(s1, s2, equals); #else #if PY_MAJOR_VERSION < 3 PyObject* owned_ref = NULL; #endif int s1_is_unicode, s2_is_unicode; if (s1 == s2) { goto return_eq; } s1_is_unicode = PyUnicode_CheckExact(s1); s2_is_unicode = PyUnicode_CheckExact(s2); #if PY_MAJOR_VERSION < 3 if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { owned_ref = PyUnicode_FromObject(s2); if (unlikely(!owned_ref)) return -1; s2 = owned_ref; s2_is_unicode = 1; } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { owned_ref = PyUnicode_FromObject(s1); if (unlikely(!owned_ref)) return -1; s1 = owned_ref; s1_is_unicode = 1; } else if (((!s2_is_unicode) & (!s1_is_unicode))) { return __Pyx_PyBytes_Equals(s1, s2, equals); } #endif if (s1_is_unicode & s2_is_unicode) { Py_ssize_t length; int kind; void *data1, *data2; if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) return -1; length = __Pyx_PyUnicode_GET_LENGTH(s1); if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { goto return_ne; } #if CYTHON_USE_UNICODE_INTERNALS { Py_hash_t hash1, hash2; #if CYTHON_PEP393_ENABLED hash1 = ((PyASCIIObject*)s1)->hash; hash2 = ((PyASCIIObject*)s2)->hash; #else hash1 = ((PyUnicodeObject*)s1)->hash; hash2 = ((PyUnicodeObject*)s2)->hash; #endif if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { goto return_ne; } } #endif kind = __Pyx_PyUnicode_KIND(s1); if (kind != __Pyx_PyUnicode_KIND(s2)) { goto return_ne; } data1 = __Pyx_PyUnicode_DATA(s1); data2 = __Pyx_PyUnicode_DATA(s2); if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { goto return_ne; } else if (length == 1) { goto return_eq; } else { int result = memcmp(data1, data2, (size_t)(length * kind)); #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_EQ) ? (result == 0) : (result != 0); } } else if ((s1 == Py_None) & s2_is_unicode) { goto return_ne; } else if ((s2 == Py_None) & s1_is_unicode) { goto return_ne; } else { int result; PyObject* py_result = PyObject_RichCompare(s1, s2, equals); #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif if (!py_result) return -1; result = __Pyx_PyObject_IsTrue(py_result); Py_DECREF(py_result); return result; } return_eq: #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_EQ); return_ne: #if PY_MAJOR_VERSION < 3 Py_XDECREF(owned_ref); #endif return (equals == Py_NE); #endif } /* fastcall */ #if CYTHON_METH_FASTCALL static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) { Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); for (i = 0; i < n; i++) { if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; } for (i = 0; i < n; i++) { int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); if (unlikely(eq != 0)) { if (unlikely(eq < 0)) return NULL; // error return kwvalues[i]; } } return NULL; // not found (no exception set) } #endif /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found) { Py_ssize_t num_expected; const char *more_or_less; if (num_found < num_min) { num_expected = num_min; more_or_less = "at least"; } else { num_expected = num_max; more_or_less = "at most"; } if (exact) { more_or_less = "exactly"; } PyErr_Format(PyExc_TypeError, "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", func_name, more_or_less, num_expected, (num_expected == 1) ? "" : "s", num_found); } /* RaiseDoubleKeywords */ static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() got multiple values for keyword argument '%U'", func_name, kw_name); #else "%s() got multiple values for keyword argument '%s'", func_name, PyString_AsString(kw_name)); #endif } /* ParseKeywords */ static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject *const *kwvalues, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name) { PyObject *key = 0, *value = 0; Py_ssize_t pos = 0; PyObject*** name; PyObject*** first_kw_arg = argnames + num_pos_args; int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); while (1) { Py_XDECREF(key); key = NULL; Py_XDECREF(value); value = NULL; if (kwds_is_tuple) { Py_ssize_t size; #if CYTHON_ASSUME_SAFE_MACROS size = PyTuple_GET_SIZE(kwds); #else size = PyTuple_Size(kwds); if (size < 0) goto bad; #endif if (pos >= size) break; #if CYTHON_AVOID_BORROWED_REFS key = __Pyx_PySequence_ITEM(kwds, pos); if (!key) goto bad; #elif CYTHON_ASSUME_SAFE_MACROS key = PyTuple_GET_ITEM(kwds, pos); #else key = PyTuple_GetItem(kwds, pos); if (!key) goto bad; #endif value = kwvalues[pos]; pos++; } else { if (!PyDict_Next(kwds, &pos, &key, &value)) break; #if CYTHON_AVOID_BORROWED_REFS Py_INCREF(key); #endif } name = first_kw_arg; while (*name && (**name != key)) name++; if (*name) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS Py_INCREF(value); // transfer ownership of value to values Py_DECREF(key); #endif key = NULL; value = NULL; continue; } #if !CYTHON_AVOID_BORROWED_REFS Py_INCREF(key); #endif Py_INCREF(value); name = first_kw_arg; #if PY_MAJOR_VERSION < 3 if (likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS value = NULL; // ownership transferred to values #endif break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { if ((**argname == key) || ( (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) && _PyString_Eq(**argname, key))) { goto arg_passed_twice; } argname++; } } } else #endif if (likely(PyUnicode_Check(key))) { while (*name) { int cmp = ( #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : #endif PyUnicode_Compare(**name, key) ); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { values[name-argnames] = value; #if CYTHON_AVOID_BORROWED_REFS value = NULL; // ownership transferred to values #endif break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { int cmp = (**argname == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : #endif PyUnicode_Compare(**argname, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) goto arg_passed_twice; argname++; } } } else goto invalid_keyword_type; if (kwds2) { if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; } else { goto invalid_keyword; } } Py_XDECREF(key); Py_XDECREF(value); return 0; arg_passed_twice: __Pyx_RaiseDoubleKeywordsError(function_name, key); goto bad; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); goto bad; invalid_keyword: #if PY_MAJOR_VERSION < 3 PyErr_Format(PyExc_TypeError, "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else PyErr_Format(PyExc_TypeError, "%s() got an unexpected keyword argument '%U'", function_name, key); #endif bad: Py_XDECREF(key); Py_XDECREF(value); return -1; } /* ArgTypeTest */ static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) { __Pyx_TypeName type_name; __Pyx_TypeName obj_type_name; if (unlikely(!type)) { PyErr_SetString(PyExc_SystemError, "Missing type object"); return 0; } else if (exact) { #if PY_MAJOR_VERSION == 2 if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; #endif } else { if (likely(__Pyx_TypeCheck(obj, type))) return 1; } type_name = __Pyx_PyType_GetName(type); obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); PyErr_Format(PyExc_TypeError, "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); __Pyx_DECREF_TypeName(type_name); __Pyx_DECREF_TypeName(obj_type_name); return 0; } /* FixUpExtensionType */ #if CYTHON_USE_TYPE_SPECS static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { #if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API CYTHON_UNUSED_VAR(spec); CYTHON_UNUSED_VAR(type); #else const PyType_Slot *slot = spec->slots; while (slot && slot->slot && slot->slot != Py_tp_members) slot++; if (slot && slot->slot == Py_tp_members) { int changed = 0; #if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) const #endif PyMemberDef *memb = (PyMemberDef*) slot->pfunc; while (memb && memb->name) { if (memb->name[0] == '_' && memb->name[1] == '_') { #if PY_VERSION_HEX < 0x030900b1 if (strcmp(memb->name, "__weaklistoffset__") == 0) { assert(memb->type == T_PYSSIZET); assert(memb->flags == READONLY); type->tp_weaklistoffset = memb->offset; changed = 1; } else if (strcmp(memb->name, "__dictoffset__") == 0) { assert(memb->type == T_PYSSIZET); assert(memb->flags == READONLY); type->tp_dictoffset = memb->offset; changed = 1; } #if CYTHON_METH_FASTCALL else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { assert(memb->type == T_PYSSIZET); assert(memb->flags == READONLY); #if PY_VERSION_HEX >= 0x030800b4 type->tp_vectorcall_offset = memb->offset; #else type->tp_print = (printfunc) memb->offset; #endif changed = 1; } #endif #else if ((0)); #endif #if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON else if (strcmp(memb->name, "__module__") == 0) { PyObject *descr; assert(memb->type == T_OBJECT); assert(memb->flags == 0 || memb->flags == READONLY); descr = PyDescr_NewMember(type, memb); if (unlikely(!descr)) return -1; if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { Py_DECREF(descr); return -1; } Py_DECREF(descr); changed = 1; } #endif } memb++; } if (changed) PyType_Modified(type); } #endif return 0; } #endif /* FetchSharedCythonModule */ static PyObject *__Pyx_FetchSharedCythonABIModule(void) { PyObject *abi_module = PyImport_AddModule((char*) __PYX_ABI_MODULE_NAME); if (unlikely(!abi_module)) return NULL; Py_INCREF(abi_module); return abi_module; } /* FetchCommonType */ static int __Pyx_VerifyCachedType(PyObject *cached_type, const char *name, Py_ssize_t basicsize, Py_ssize_t expected_basicsize) { if (!PyType_Check(cached_type)) { PyErr_Format(PyExc_TypeError, "Shared Cython type %.200s is not a type object", name); return -1; } if (basicsize != expected_basicsize) { PyErr_Format(PyExc_TypeError, "Shared Cython type %.200s has the wrong size, try recompiling", name); return -1; } return 0; } #if !CYTHON_USE_TYPE_SPECS static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { PyObject* abi_module; const char* object_name; PyTypeObject *cached_type = NULL; abi_module = __Pyx_FetchSharedCythonABIModule(); if (!abi_module) return NULL; object_name = strrchr(type->tp_name, '.'); object_name = object_name ? object_name+1 : type->tp_name; cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); if (cached_type) { if (__Pyx_VerifyCachedType( (PyObject *)cached_type, object_name, cached_type->tp_basicsize, type->tp_basicsize) < 0) { goto bad; } goto done; } if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); if (PyType_Ready(type) < 0) goto bad; if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) goto bad; Py_INCREF(type); cached_type = type; done: Py_DECREF(abi_module); return cached_type; bad: Py_XDECREF(cached_type); cached_type = NULL; goto done; } #else static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { PyObject *abi_module, *cached_type = NULL; const char* object_name = strrchr(spec->name, '.'); object_name = object_name ? object_name+1 : spec->name; abi_module = __Pyx_FetchSharedCythonABIModule(); if (!abi_module) return NULL; cached_type = PyObject_GetAttrString(abi_module, object_name); if (cached_type) { Py_ssize_t basicsize; #if CYTHON_COMPILING_IN_LIMITED_API PyObject *py_basicsize; py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); if (unlikely(!py_basicsize)) goto bad; basicsize = PyLong_AsSsize_t(py_basicsize); Py_DECREF(py_basicsize); py_basicsize = 0; if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; #else basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; #endif if (__Pyx_VerifyCachedType( cached_type, object_name, basicsize, spec->basicsize) < 0) { goto bad; } goto done; } if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; PyErr_Clear(); CYTHON_UNUSED_VAR(module); cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); if (unlikely(!cached_type)) goto bad; if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; done: Py_DECREF(abi_module); assert(cached_type == NULL || PyType_Check(cached_type)); return (PyTypeObject *) cached_type; bad: Py_XDECREF(cached_type); cached_type = NULL; goto done; } #endif /* PyVectorcallFastCallDict */ #if CYTHON_METH_FASTCALL static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) { PyObject *res = NULL; PyObject *kwnames; PyObject **newargs; PyObject **kwvalues; Py_ssize_t i, pos; size_t j; PyObject *key, *value; unsigned long keys_are_strings; Py_ssize_t nkw = PyDict_GET_SIZE(kw); newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); if (unlikely(newargs == NULL)) { PyErr_NoMemory(); return NULL; } for (j = 0; j < nargs; j++) newargs[j] = args[j]; kwnames = PyTuple_New(nkw); if (unlikely(kwnames == NULL)) { PyMem_Free(newargs); return NULL; } kwvalues = newargs + nargs; pos = i = 0; keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; while (PyDict_Next(kw, &pos, &key, &value)) { keys_are_strings &= Py_TYPE(key)->tp_flags; Py_INCREF(key); Py_INCREF(value); PyTuple_SET_ITEM(kwnames, i, key); kwvalues[i] = value; i++; } if (unlikely(!keys_are_strings)) { PyErr_SetString(PyExc_TypeError, "keywords must be strings"); goto cleanup; } res = vc(func, newargs, nargs, kwnames); cleanup: Py_DECREF(kwnames); for (i = 0; i < nkw; i++) Py_DECREF(kwvalues[i]); PyMem_Free(newargs); return res; } static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) { if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { return vc(func, args, nargs, NULL); } return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); } #endif /* CythonFunctionShared */ static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API __Pyx_Py_XDECREF_SET( __Pyx_CyFunction_GetClassObj(f), ((classobj) ? __Pyx_NewRef(classobj) : NULL)); #else __Pyx_Py_XDECREF_SET( ((PyCMethodObject *) (f))->mm_class, (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); #endif } static PyObject * __Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) { CYTHON_UNUSED_VAR(closure); if (unlikely(op->func_doc == NULL)) { #if CYTHON_COMPILING_IN_LIMITED_API op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); if (unlikely(!op->func_doc)) return NULL; #else if (((PyCFunctionObject*)op)->m_ml->ml_doc) { #if PY_MAJOR_VERSION >= 3 op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); #else op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); #endif if (unlikely(op->func_doc == NULL)) return NULL; } else { Py_INCREF(Py_None); return Py_None; } #endif } Py_INCREF(op->func_doc); return op->func_doc; } static int __Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) { CYTHON_UNUSED_VAR(context); if (value == NULL) { value = Py_None; } Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->func_doc, value); return 0; } static PyObject * __Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(context); if (unlikely(op->func_name == NULL)) { #if CYTHON_COMPILING_IN_LIMITED_API op->func_name = PyObject_GetAttrString(op->func, "__name__"); #elif PY_MAJOR_VERSION >= 3 op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); #else op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); #endif if (unlikely(op->func_name == NULL)) return NULL; } Py_INCREF(op->func_name); return op->func_name; } static int __Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) { CYTHON_UNUSED_VAR(context); #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) #else if (unlikely(value == NULL || !PyString_Check(value))) #endif { PyErr_SetString(PyExc_TypeError, "__name__ must be set to a string object"); return -1; } Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->func_name, value); return 0; } static PyObject * __Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(context); Py_INCREF(op->func_qualname); return op->func_qualname; } static int __Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) { CYTHON_UNUSED_VAR(context); #if PY_MAJOR_VERSION >= 3 if (unlikely(value == NULL || !PyUnicode_Check(value))) #else if (unlikely(value == NULL || !PyString_Check(value))) #endif { PyErr_SetString(PyExc_TypeError, "__qualname__ must be set to a string object"); return -1; } Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->func_qualname, value); return 0; } static PyObject * __Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(context); if (unlikely(op->func_dict == NULL)) { op->func_dict = PyDict_New(); if (unlikely(op->func_dict == NULL)) return NULL; } Py_INCREF(op->func_dict); return op->func_dict; } static int __Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) { CYTHON_UNUSED_VAR(context); if (unlikely(value == NULL)) { PyErr_SetString(PyExc_TypeError, "function's dictionary may not be deleted"); return -1; } if (unlikely(!PyDict_Check(value))) { PyErr_SetString(PyExc_TypeError, "setting function's dictionary to a non-dict"); return -1; } Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->func_dict, value); return 0; } static PyObject * __Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(context); Py_INCREF(op->func_globals); return op->func_globals; } static PyObject * __Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(op); CYTHON_UNUSED_VAR(context); Py_INCREF(Py_None); return Py_None; } static PyObject * __Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) { PyObject* result = (op->func_code) ? op->func_code : Py_None; CYTHON_UNUSED_VAR(context); Py_INCREF(result); return result; } static int __Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { int result = 0; PyObject *res = op->defaults_getter((PyObject *) op); if (unlikely(!res)) return -1; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS op->defaults_tuple = PyTuple_GET_ITEM(res, 0); Py_INCREF(op->defaults_tuple); op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); Py_INCREF(op->defaults_kwdict); #else op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); if (unlikely(!op->defaults_tuple)) result = -1; else { op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); if (unlikely(!op->defaults_kwdict)) result = -1; } #endif Py_DECREF(res); return result; } static int __Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { CYTHON_UNUSED_VAR(context); if (!value) { value = Py_None; } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { PyErr_SetString(PyExc_TypeError, "__defaults__ must be set to a tuple object"); return -1; } PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " "currently affect the values used in function calls", 1); Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); return 0; } static PyObject * __Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { PyObject* result = op->defaults_tuple; CYTHON_UNUSED_VAR(context); if (unlikely(!result)) { if (op->defaults_getter) { if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; result = op->defaults_tuple; } else { result = Py_None; } } Py_INCREF(result); return result; } static int __Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { CYTHON_UNUSED_VAR(context); if (!value) { value = Py_None; } else if (unlikely(value != Py_None && !PyDict_Check(value))) { PyErr_SetString(PyExc_TypeError, "__kwdefaults__ must be set to a dict object"); return -1; } PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " "currently affect the values used in function calls", 1); Py_INCREF(value); __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); return 0; } static PyObject * __Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { PyObject* result = op->defaults_kwdict; CYTHON_UNUSED_VAR(context); if (unlikely(!result)) { if (op->defaults_getter) { if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; result = op->defaults_kwdict; } else { result = Py_None; } } Py_INCREF(result); return result; } static int __Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { CYTHON_UNUSED_VAR(context); if (!value || value == Py_None) { value = NULL; } else if (unlikely(!PyDict_Check(value))) { PyErr_SetString(PyExc_TypeError, "__annotations__ must be set to a dict object"); return -1; } Py_XINCREF(value); __Pyx_Py_XDECREF_SET(op->func_annotations, value); return 0; } static PyObject * __Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { PyObject* result = op->func_annotations; CYTHON_UNUSED_VAR(context); if (unlikely(!result)) { result = PyDict_New(); if (unlikely(!result)) return NULL; op->func_annotations = result; } Py_INCREF(result); return result; } static PyObject * __Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { int is_coroutine; CYTHON_UNUSED_VAR(context); if (op->func_is_coroutine) { return __Pyx_NewRef(op->func_is_coroutine); } is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; #if PY_VERSION_HEX >= 0x03050000 if (is_coroutine) { PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; fromlist = PyList_New(1); if (unlikely(!fromlist)) return NULL; Py_INCREF(marker); #if CYTHON_ASSUME_SAFE_MACROS PyList_SET_ITEM(fromlist, 0, marker); #else if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { Py_DECREF(marker); Py_DECREF(fromlist); return NULL; } #endif module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); Py_DECREF(fromlist); if (unlikely(!module)) goto ignore; op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); Py_DECREF(module); if (likely(op->func_is_coroutine)) { return __Pyx_NewRef(op->func_is_coroutine); } ignore: PyErr_Clear(); } #endif op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); return __Pyx_NewRef(op->func_is_coroutine); } #if CYTHON_COMPILING_IN_LIMITED_API static PyObject * __Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { CYTHON_UNUSED_VAR(context); return PyObject_GetAttrString(op->func, "__module__"); } static int __Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { CYTHON_UNUSED_VAR(context); return PyObject_SetAttrString(op->func, "__module__", value); } #endif static PyGetSetDef __pyx_CyFunction_getsets[] = { {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, #if CYTHON_COMPILING_IN_LIMITED_API {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, #endif {0, 0, 0, 0, 0} }; static PyMemberDef __pyx_CyFunction_members[] = { #if !CYTHON_COMPILING_IN_LIMITED_API {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, #endif #if CYTHON_USE_TYPE_SPECS {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, #if CYTHON_METH_FASTCALL #if CYTHON_BACKPORT_VECTORCALL {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, #else #if !CYTHON_COMPILING_IN_LIMITED_API {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, #endif #endif #endif #if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, #else {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, #endif #endif {0, 0, 0, 0, 0} }; static PyObject * __Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) { CYTHON_UNUSED_VAR(args); #if PY_MAJOR_VERSION >= 3 Py_INCREF(m->func_qualname); return m->func_qualname; #else return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); #endif } static PyMethodDef __pyx_CyFunction_methods[] = { {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, {0, 0, 0, 0} }; #if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API #define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) #else #define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) #endif static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { #if !CYTHON_COMPILING_IN_LIMITED_API PyCFunctionObject *cf = (PyCFunctionObject*) op; #endif if (unlikely(op == NULL)) return NULL; #if CYTHON_COMPILING_IN_LIMITED_API op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); if (unlikely(!op->func)) return NULL; #endif op->flags = flags; __Pyx_CyFunction_weakreflist(op) = NULL; #if !CYTHON_COMPILING_IN_LIMITED_API cf->m_ml = ml; cf->m_self = (PyObject *) op; #endif Py_XINCREF(closure); op->func_closure = closure; #if !CYTHON_COMPILING_IN_LIMITED_API Py_XINCREF(module); cf->m_module = module; #endif op->func_dict = NULL; op->func_name = NULL; Py_INCREF(qualname); op->func_qualname = qualname; op->func_doc = NULL; #if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API op->func_classobj = NULL; #else ((PyCMethodObject*)op)->mm_class = NULL; #endif op->func_globals = globals; Py_INCREF(op->func_globals); Py_XINCREF(code); op->func_code = code; op->defaults_pyobjects = 0; op->defaults_size = 0; op->defaults = NULL; op->defaults_tuple = NULL; op->defaults_kwdict = NULL; op->defaults_getter = NULL; op->func_annotations = NULL; op->func_is_coroutine = NULL; #if CYTHON_METH_FASTCALL switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_NOARGS: __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; break; case METH_O: __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; break; case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; break; case METH_FASTCALL | METH_KEYWORDS: __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; break; case METH_VARARGS | METH_KEYWORDS: __Pyx_CyFunction_func_vectorcall(op) = NULL; break; default: PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); Py_DECREF(op); return NULL; } #endif return (PyObject *) op; } static int __Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) { Py_CLEAR(m->func_closure); #if CYTHON_COMPILING_IN_LIMITED_API Py_CLEAR(m->func); #else Py_CLEAR(((PyCFunctionObject*)m)->m_module); #endif Py_CLEAR(m->func_dict); Py_CLEAR(m->func_name); Py_CLEAR(m->func_qualname); Py_CLEAR(m->func_doc); Py_CLEAR(m->func_globals); Py_CLEAR(m->func_code); #if !CYTHON_COMPILING_IN_LIMITED_API #if PY_VERSION_HEX < 0x030900B1 Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); #else { PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; ((PyCMethodObject *) (m))->mm_class = NULL; Py_XDECREF(cls); } #endif #endif Py_CLEAR(m->defaults_tuple); Py_CLEAR(m->defaults_kwdict); Py_CLEAR(m->func_annotations); Py_CLEAR(m->func_is_coroutine); if (m->defaults) { PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); int i; for (i = 0; i < m->defaults_pyobjects; i++) Py_XDECREF(pydefaults[i]); PyObject_Free(m->defaults); m->defaults = NULL; } return 0; } static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) { if (__Pyx_CyFunction_weakreflist(m) != NULL) PyObject_ClearWeakRefs((PyObject *) m); __Pyx_CyFunction_clear(m); __Pyx_PyHeapTypeObject_GC_Del(m); } static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) { PyObject_GC_UnTrack(m); __Pyx__CyFunction_dealloc(m); } static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) { Py_VISIT(m->func_closure); #if CYTHON_COMPILING_IN_LIMITED_API Py_VISIT(m->func); #else Py_VISIT(((PyCFunctionObject*)m)->m_module); #endif Py_VISIT(m->func_dict); Py_VISIT(m->func_name); Py_VISIT(m->func_qualname); Py_VISIT(m->func_doc); Py_VISIT(m->func_globals); Py_VISIT(m->func_code); #if !CYTHON_COMPILING_IN_LIMITED_API Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); #endif Py_VISIT(m->defaults_tuple); Py_VISIT(m->defaults_kwdict); Py_VISIT(m->func_is_coroutine); if (m->defaults) { PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); int i; for (i = 0; i < m->defaults_pyobjects; i++) Py_VISIT(pydefaults[i]); } return 0; } static PyObject* __Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) { #if PY_MAJOR_VERSION >= 3 return PyUnicode_FromFormat("", op->func_qualname, (void *)op); #else return PyString_FromFormat("", PyString_AsString(op->func_qualname), (void *)op); #endif } static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { #if CYTHON_COMPILING_IN_LIMITED_API PyObject *f = ((__pyx_CyFunctionObject*)func)->func; PyObject *py_name = NULL; PyCFunction meth; int flags; meth = PyCFunction_GetFunction(f); if (unlikely(!meth)) return NULL; flags = PyCFunction_GetFlags(f); if (unlikely(flags < 0)) return NULL; #else PyCFunctionObject* f = (PyCFunctionObject*)func; PyCFunction meth = f->m_ml->ml_meth; int flags = f->m_ml->ml_flags; #endif Py_ssize_t size; switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { case METH_VARARGS: if (likely(kw == NULL || PyDict_Size(kw) == 0)) return (*meth)(self, arg); break; case METH_VARARGS | METH_KEYWORDS: return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); case METH_NOARGS: if (likely(kw == NULL || PyDict_Size(kw) == 0)) { #if CYTHON_ASSUME_SAFE_MACROS size = PyTuple_GET_SIZE(arg); #else size = PyTuple_Size(arg); if (unlikely(size < 0)) return NULL; #endif if (likely(size == 0)) return (*meth)(self, NULL); #if CYTHON_COMPILING_IN_LIMITED_API py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); if (!py_name) return NULL; PyErr_Format(PyExc_TypeError, "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", py_name, size); Py_DECREF(py_name); #else PyErr_Format(PyExc_TypeError, "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", f->m_ml->ml_name, size); #endif return NULL; } break; case METH_O: if (likely(kw == NULL || PyDict_Size(kw) == 0)) { #if CYTHON_ASSUME_SAFE_MACROS size = PyTuple_GET_SIZE(arg); #else size = PyTuple_Size(arg); if (unlikely(size < 0)) return NULL; #endif if (likely(size == 1)) { PyObject *result, *arg0; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS arg0 = PyTuple_GET_ITEM(arg, 0); #else arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; #endif result = (*meth)(self, arg0); #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_DECREF(arg0); #endif return result; } #if CYTHON_COMPILING_IN_LIMITED_API py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); if (!py_name) return NULL; PyErr_Format(PyExc_TypeError, "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", py_name, size); Py_DECREF(py_name); #else PyErr_Format(PyExc_TypeError, "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", f->m_ml->ml_name, size); #endif return NULL; } break; default: PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); return NULL; } #if CYTHON_COMPILING_IN_LIMITED_API py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); if (!py_name) return NULL; PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", py_name); Py_DECREF(py_name); #else PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", f->m_ml->ml_name); #endif return NULL; } static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *self, *result; #if CYTHON_COMPILING_IN_LIMITED_API self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); if (unlikely(!self) && PyErr_Occurred()) return NULL; #else self = ((PyCFunctionObject*)func)->m_self; #endif result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); return result; } static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { PyObject *result; __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; #if CYTHON_METH_FASTCALL __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); if (vc) { #if CYTHON_ASSUME_SAFE_MACROS return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); #else (void) &__Pyx_PyVectorcall_FastCallDict; return PyVectorcall_Call(func, args, kw); #endif } #endif if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { Py_ssize_t argc; PyObject *new_args; PyObject *self; #if CYTHON_ASSUME_SAFE_MACROS argc = PyTuple_GET_SIZE(args); #else argc = PyTuple_Size(args); if (unlikely(!argc) < 0) return NULL; #endif new_args = PyTuple_GetSlice(args, 1, argc); if (unlikely(!new_args)) return NULL; self = PyTuple_GetItem(args, 0); if (unlikely(!self)) { Py_DECREF(new_args); #if PY_MAJOR_VERSION > 2 PyErr_Format(PyExc_TypeError, "unbound method %.200S() needs an argument", cyfunc->func_qualname); #else PyErr_SetString(PyExc_TypeError, "unbound method needs an argument"); #endif return NULL; } result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); Py_DECREF(new_args); } else { result = __Pyx_CyFunction_Call(func, args, kw); } return result; } #if CYTHON_METH_FASTCALL static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) { int ret = 0; if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { if (unlikely(nargs < 1)) { PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); return -1; } ret = 1; } if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); return -1; } return ret; } static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; #if CYTHON_BACKPORT_VECTORCALL Py_ssize_t nargs = (Py_ssize_t)nargsf; #else Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); #endif PyObject *self; switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { case 1: self = args[0]; args += 1; nargs -= 1; break; case 0: self = ((PyCFunctionObject*)cyfunc)->m_self; break; default: return NULL; } if (unlikely(nargs != 0)) { PyErr_Format(PyExc_TypeError, "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", def->ml_name, nargs); return NULL; } return def->ml_meth(self, NULL); } static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; #if CYTHON_BACKPORT_VECTORCALL Py_ssize_t nargs = (Py_ssize_t)nargsf; #else Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); #endif PyObject *self; switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { case 1: self = args[0]; args += 1; nargs -= 1; break; case 0: self = ((PyCFunctionObject*)cyfunc)->m_self; break; default: return NULL; } if (unlikely(nargs != 1)) { PyErr_Format(PyExc_TypeError, "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", def->ml_name, nargs); return NULL; } return def->ml_meth(self, args[0]); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; #if CYTHON_BACKPORT_VECTORCALL Py_ssize_t nargs = (Py_ssize_t)nargsf; #else Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); #endif PyObject *self; switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { case 1: self = args[0]; args += 1; nargs -= 1; break; case 0: self = ((PyCFunctionObject*)cyfunc)->m_self; break; default: return NULL; } return ((_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); } static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) { __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); #if CYTHON_BACKPORT_VECTORCALL Py_ssize_t nargs = (Py_ssize_t)nargsf; #else Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); #endif PyObject *self; switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { case 1: self = args[0]; args += 1; nargs -= 1; break; case 0: self = ((PyCFunctionObject*)cyfunc)->m_self; break; default: return NULL; } return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); } #endif #if CYTHON_USE_TYPE_SPECS static PyType_Slot __pyx_CyFunctionType_slots[] = { {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, {Py_tp_methods, (void *)__pyx_CyFunction_methods}, {Py_tp_members, (void *)__pyx_CyFunction_members}, {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, {0, 0}, }; static PyType_Spec __pyx_CyFunctionType_spec = { __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", sizeof(__pyx_CyFunctionObject), 0, #ifdef Py_TPFLAGS_METHOD_DESCRIPTOR Py_TPFLAGS_METHOD_DESCRIPTOR | #endif #if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) _Py_TPFLAGS_HAVE_VECTORCALL | #endif Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, __pyx_CyFunctionType_slots }; #else static PyTypeObject __pyx_CyFunctionType_type = { PyVarObject_HEAD_INIT(0, 0) __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", sizeof(__pyx_CyFunctionObject), 0, (destructor) __Pyx_CyFunction_dealloc, #if !CYTHON_METH_FASTCALL 0, #elif CYTHON_BACKPORT_VECTORCALL (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), #else offsetof(PyCFunctionObject, vectorcall), #endif 0, 0, #if PY_MAJOR_VERSION < 3 0, #else 0, #endif (reprfunc) __Pyx_CyFunction_repr, 0, 0, 0, 0, __Pyx_CyFunction_CallAsMethod, 0, 0, 0, 0, #ifdef Py_TPFLAGS_METHOD_DESCRIPTOR Py_TPFLAGS_METHOD_DESCRIPTOR | #endif #if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL _Py_TPFLAGS_HAVE_VECTORCALL | #endif Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, 0, (traverseproc) __Pyx_CyFunction_traverse, (inquiry) __Pyx_CyFunction_clear, 0, #if PY_VERSION_HEX < 0x030500A0 offsetof(__pyx_CyFunctionObject, func_weakreflist), #else offsetof(PyCFunctionObject, m_weakreflist), #endif 0, 0, __pyx_CyFunction_methods, __pyx_CyFunction_members, __pyx_CyFunction_getsets, 0, 0, __Pyx_PyMethod_New, 0, offsetof(__pyx_CyFunctionObject, func_dict), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, #if PY_VERSION_HEX >= 0x030400a1 0, #endif #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, #endif #if __PYX_NEED_TP_PRINT_SLOT 0, #endif #if PY_VERSION_HEX >= 0x030C0000 0, #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 0, #endif }; #endif static int __pyx_CyFunction_init(PyObject *module) { #if CYTHON_USE_TYPE_SPECS __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); #else CYTHON_UNUSED_VAR(module); __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); #endif if (unlikely(__pyx_CyFunctionType == NULL)) { return -1; } return 0; } static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->defaults = PyObject_Malloc(size); if (unlikely(!m->defaults)) return PyErr_NoMemory(); memset(m->defaults, 0, size); m->defaults_pyobjects = pyobjects; m->defaults_size = size; return m->defaults; } static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->defaults_tuple = tuple; Py_INCREF(tuple); } static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->defaults_kwdict = dict; Py_INCREF(dict); } static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; m->func_annotations = dict; Py_INCREF(dict); } /* CythonFunction */ static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { PyObject *op = __Pyx_CyFunction_Init( PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), ml, flags, qualname, closure, module, globals, code ); if (likely(op)) { PyObject_GC_Track(op); } return op; } /* PyDictVersioning */ #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { PyObject *dict = Py_TYPE(obj)->tp_dict; return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; } static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { PyObject **dictptr = NULL; Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; if (offset) { #if CYTHON_COMPILING_IN_CPYTHON dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); #else dictptr = _PyObject_GetDictPtr(obj); #endif } return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; } static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { PyObject *dict = Py_TYPE(obj)->tp_dict; if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) return 0; return obj_dict_version == __Pyx_get_object_dict_version(obj); } #endif /* CLineInTraceback */ #ifndef CYTHON_CLINE_IN_TRACEBACK static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif CYTHON_MAYBE_UNUSED_VAR(tstate); if (unlikely(!__pyx_cython_runtime)) { return c_line; } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); if (likely(cython_runtime_dict)) { __PYX_PY_DICT_LOOKUP_IF_MODIFIED( use_cline, *cython_runtime_dict, __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ #if !CYTHON_COMPILING_IN_LIMITED_API static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } #endif /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" #if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API #ifndef Py_BUILD_CORE #define Py_BUILD_CORE 1 #endif #include "internal/pycore_frame.h" #endif #if CYTHON_COMPILING_IN_LIMITED_API static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, PyObject *firstlineno, PyObject *name) { PyObject *replace = NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; replace = PyObject_GetAttrString(code, "replace"); if (likely(replace)) { PyObject *result; result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); Py_DECREF(replace); return result; } #if __PYX_LIMITED_VERSION_HEX < 0x030780000 PyErr_Clear(); { PyObject *compiled = NULL, *result = NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; compiled = Py_CompileString( "out = type(code)(\n" " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" " code.co_lnotab)\n", "", Py_file_input); if (!compiled) return NULL; result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); Py_DECREF(compiled); if (!result) PyErr_Print(); Py_DECREF(result); result = PyDict_GetItemString(scratch_dict, "out"); if (result) Py_INCREF(result); return result; } #endif } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; PyObject *replace = NULL, *getframe = NULL, *frame = NULL; PyObject *exc_type, *exc_value, *exc_traceback; int success = 0; if (c_line) { (void) __pyx_cfilenm; (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); } PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); code_object = Py_CompileString("_getframe()", filename, Py_eval_input); if (unlikely(!code_object)) goto bad; py_py_line = PyLong_FromLong(py_line); if (unlikely(!py_py_line)) goto bad; py_funcname = PyUnicode_FromString(funcname); if (unlikely(!py_funcname)) goto bad; dict = PyDict_New(); if (unlikely(!dict)) goto bad; { PyObject *old_code_object = code_object; code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); Py_DECREF(old_code_object); } if (unlikely(!code_object)) goto bad; getframe = PySys_GetObject("_getframe"); if (unlikely(!getframe)) goto bad; if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; frame = PyEval_EvalCode(code_object, dict, dict); if (unlikely(!frame) || frame == Py_None) goto bad; success = 1; bad: PyErr_Restore(exc_type, exc_value, exc_traceback); Py_XDECREF(code_object); Py_XDECREF(py_py_line); Py_XDECREF(py_funcname); Py_XDECREF(dict); Py_XDECREF(replace); if (success) { PyTraceBack_Here( (struct _frame*)frame); } Py_XDECREF(frame); } #else static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = NULL; PyObject *py_funcname = NULL; #if PY_MAJOR_VERSION < 3 PyObject *py_srcfile = NULL; py_srcfile = PyString_FromString(filename); if (!py_srcfile) goto bad; #endif if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); if (!py_funcname) goto bad; #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); if (!py_funcname) goto bad; funcname = PyUnicode_AsUTF8(py_funcname); if (!funcname) goto bad; #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); if (!py_funcname) goto bad; #endif } #if PY_MAJOR_VERSION < 3 py_code = __Pyx_PyCode_New( 0, 0, 0, 0, 0, 0, __pyx_empty_bytes, /*PyObject *code,*/ __pyx_empty_tuple, /*PyObject *consts,*/ __pyx_empty_tuple, /*PyObject *names,*/ __pyx_empty_tuple, /*PyObject *varnames,*/ __pyx_empty_tuple, /*PyObject *freevars,*/ __pyx_empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, __pyx_empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); #else py_code = PyCode_NewEmpty(filename, funcname, py_line); #endif Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline return py_code; bad: Py_XDECREF(py_funcname); #if PY_MAJOR_VERSION < 3 Py_XDECREF(py_srcfile); #endif return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject *ptype, *pvalue, *ptraceback; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) { /* If the code object creation fails, then we should clear the fetched exception references and propagate the new exception */ Py_XDECREF(ptype); Py_XDECREF(pvalue); Py_XDECREF(ptraceback); goto bad; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } #endif /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const int neg_one = (int) -1, const_zero = (int) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if ((sizeof(int) < sizeof(long))) { __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(int) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } } #endif if ((sizeof(int) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int ret = -1; #if !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; long idigit; int chunk_size = (sizeof(long) < 8) ? 30 : 62; if (unlikely(!PyLong_CheckExact(v))) { PyObject *tmp = v; v = PyNumber_Long(v); assert(PyLong_CheckExact(v)); Py_DECREF(tmp); if (unlikely(!v)) return (int) -1; } #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(x) == 0) return (int) 0; is_negative = Py_SIZE(x) < 0; #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; is_negative = result == 1; } #endif if (is_unsigned && unlikely(is_negative)) { goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); if (unlikely(!stepval)) return (int) -1; } else { stepval = __Pyx_NewRef(v); } val = (int) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; val |= ((int) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(stepval) == 0) goto unpacking_done; #endif } idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((int) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 unpacking_done: #endif if (!is_unsigned) { if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif Py_DECREF(v); if (likely(!ret)) return val; } return (int) -1; } } else { int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (int) -1; val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const unsigned int neg_one = (unsigned int) -1, const_zero = (unsigned int) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(unsigned int) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(unsigned int) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; #if !CYTHON_COMPILING_IN_LIMITED_API return _PyLong_FromByteArray(bytes, sizeof(unsigned int), little, !is_unsigned); #else PyObject *from_bytes, *result = NULL; PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; from_bytes = PyObject_GetAttrString((PyObject*)&PyInt_Type, "from_bytes"); if (!from_bytes) return NULL; py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned int)); if (!py_bytes) goto limited_bad; order_str = PyUnicode_FromString(little ? "little" : "big"); if (!order_str) goto limited_bad; arg_tuple = PyTuple_Pack(2, py_bytes, order_str); if (!arg_tuple) goto limited_bad; kwds = PyDict_New(); if (!kwds) goto limited_bad; if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(!is_unsigned ? Py_True : Py_False))) goto limited_bad; result = PyObject_Call(from_bytes, arg_tuple, kwds); limited_bad: Py_XDECREF(from_bytes); Py_XDECREF(py_bytes); Py_XDECREF(order_str); Py_XDECREF(arg_tuple); Py_XDECREF(kwds); return result; #endif } } /* CIntFromPy */ static CYTHON_INLINE unsigned int __Pyx_PyInt_As_unsigned_int(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const unsigned int neg_one = (unsigned int) -1, const_zero = (unsigned int) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if ((sizeof(unsigned int) < sizeof(long))) { __PYX_VERIFY_RETURN_INT(unsigned int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (unsigned int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(unsigned int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(unsigned int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) >= 2 * PyLong_SHIFT)) { return (unsigned int) (((((unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0])); } } break; case 3: if ((8 * sizeof(unsigned int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) >= 3 * PyLong_SHIFT)) { return (unsigned int) (((((((unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0])); } } break; case 4: if ((8 * sizeof(unsigned int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) >= 4 * PyLong_SHIFT)) { return (unsigned int) (((((((((unsigned int)digits[3]) << PyLong_SHIFT) | (unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (unsigned int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(unsigned int) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(unsigned int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(unsigned int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(unsigned int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(unsigned int) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 2 * PyLong_SHIFT)) { return (unsigned int) (((unsigned int)-1)*(((((unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; case 2: if ((8 * sizeof(unsigned int) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 2 * PyLong_SHIFT)) { return (unsigned int) ((((((unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; case -3: if ((8 * sizeof(unsigned int) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 3 * PyLong_SHIFT)) { return (unsigned int) (((unsigned int)-1)*(((((((unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; case 3: if ((8 * sizeof(unsigned int) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 3 * PyLong_SHIFT)) { return (unsigned int) ((((((((unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; case -4: if ((8 * sizeof(unsigned int) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 4 * PyLong_SHIFT)) { return (unsigned int) (((unsigned int)-1)*(((((((((unsigned int)digits[3]) << PyLong_SHIFT) | (unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; case 4: if ((8 * sizeof(unsigned int) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(unsigned int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(unsigned int) - 1 > 4 * PyLong_SHIFT)) { return (unsigned int) ((((((((((unsigned int)digits[3]) << PyLong_SHIFT) | (unsigned int)digits[2]) << PyLong_SHIFT) | (unsigned int)digits[1]) << PyLong_SHIFT) | (unsigned int)digits[0]))); } } break; } } #endif if ((sizeof(unsigned int) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(unsigned int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(unsigned int) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(unsigned int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { unsigned int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int ret = -1; #if !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; long idigit; int chunk_size = (sizeof(long) < 8) ? 30 : 62; if (unlikely(!PyLong_CheckExact(v))) { PyObject *tmp = v; v = PyNumber_Long(v); assert(PyLong_CheckExact(v)); Py_DECREF(tmp); if (unlikely(!v)) return (unsigned int) -1; } #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(x) == 0) return (unsigned int) 0; is_negative = Py_SIZE(x) < 0; #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (unsigned int) -1; is_negative = result == 1; } #endif if (is_unsigned && unlikely(is_negative)) { goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); if (unlikely(!stepval)) return (unsigned int) -1; } else { stepval = __Pyx_NewRef(v); } val = (unsigned int) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(unsigned int) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; val |= ((unsigned int) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(stepval) == 0) goto unpacking_done; #endif } idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(unsigned int) * 8) - bits - (is_unsigned ? 0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((unsigned int) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 unpacking_done: #endif if (!is_unsigned) { if (unlikely(val & (((unsigned int) 1) << (sizeof(unsigned int) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif Py_DECREF(v); if (likely(!ret)) return val; } return (unsigned int) -1; } } else { unsigned int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (unsigned int) -1; val = __Pyx_PyInt_As_unsigned_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to unsigned int"); return (unsigned int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to unsigned int"); return (unsigned int) -1; } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint32_t(uint32_t value) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const uint32_t neg_one = (uint32_t) -1, const_zero = (uint32_t) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(uint32_t) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(uint32_t) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(uint32_t) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(uint32_t) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(uint32_t) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; #if !CYTHON_COMPILING_IN_LIMITED_API return _PyLong_FromByteArray(bytes, sizeof(uint32_t), little, !is_unsigned); #else PyObject *from_bytes, *result = NULL; PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; from_bytes = PyObject_GetAttrString((PyObject*)&PyInt_Type, "from_bytes"); if (!from_bytes) return NULL; py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(uint32_t)); if (!py_bytes) goto limited_bad; order_str = PyUnicode_FromString(little ? "little" : "big"); if (!order_str) goto limited_bad; arg_tuple = PyTuple_Pack(2, py_bytes, order_str); if (!arg_tuple) goto limited_bad; kwds = PyDict_New(); if (!kwds) goto limited_bad; if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(!is_unsigned ? Py_True : Py_False))) goto limited_bad; result = PyObject_Call(from_bytes, arg_tuple, kwds); limited_bad: Py_XDECREF(from_bytes); Py_XDECREF(py_bytes); Py_XDECREF(order_str); Py_XDECREF(arg_tuple); Py_XDECREF(kwds); return result; #endif } } /* FormatTypeName */ #if CYTHON_COMPILING_IN_LIMITED_API static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp) { PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, __pyx_n_s_name); if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { PyErr_Clear(); Py_XDECREF(name); name = __Pyx_NewRef(__pyx_n_s__3); } return name; } #endif /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; #if !CYTHON_COMPILING_IN_LIMITED_API return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); #else PyObject *from_bytes, *result = NULL; PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; from_bytes = PyObject_GetAttrString((PyObject*)&PyInt_Type, "from_bytes"); if (!from_bytes) return NULL; py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); if (!py_bytes) goto limited_bad; order_str = PyUnicode_FromString(little ? "little" : "big"); if (!order_str) goto limited_bad; arg_tuple = PyTuple_Pack(2, py_bytes, order_str); if (!arg_tuple) goto limited_bad; kwds = PyDict_New(); if (!kwds) goto limited_bad; if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(!is_unsigned ? Py_True : Py_False))) goto limited_bad; result = PyObject_Call(from_bytes, arg_tuple, kwds); limited_bad: Py_XDECREF(from_bytes); Py_XDECREF(py_bytes); Py_XDECREF(order_str); Py_XDECREF(arg_tuple); Py_XDECREF(kwds); return result; #endif } } /* CIntFromPy */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wconversion" #endif const long neg_one = (long) -1, const_zero = (long) 0; #ifdef __Pyx_HAS_GCC_DIAGNOSTIC #pragma GCC diagnostic pop #endif const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if ((sizeof(long) < sizeof(long))) { __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (long) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS if (unlikely(__Pyx_PyLong_IsNeg(x))) { goto raise_neg_overflow; } else if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_DigitCount(x)) { case 2: if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } } #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if ((sizeof(long) <= sizeof(unsigned long))) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS if (__Pyx_PyLong_IsCompact(x)) { __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) } else { const digit* digits = __Pyx_PyLong_Digits(x); assert(__Pyx_PyLong_DigitCount(x) > 1); switch (__Pyx_PyLong_SignedDigitCount(x)) { case -2: if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } } #endif if ((sizeof(long) <= sizeof(long))) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int ret = -1; #if !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); #else PyObject *stepval = NULL, *mask = NULL, *shift = NULL; int bits, remaining_bits, is_negative = 0; long idigit; int chunk_size = (sizeof(long) < 8) ? 30 : 62; if (unlikely(!PyLong_CheckExact(v))) { PyObject *tmp = v; v = PyNumber_Long(v); assert(PyLong_CheckExact(v)); Py_DECREF(tmp); if (unlikely(!v)) return (long) -1; } #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(x) == 0) return (long) 0; is_negative = Py_SIZE(x) < 0; #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; is_negative = result == 1; } #endif if (is_unsigned && unlikely(is_negative)) { goto raise_neg_overflow; } else if (is_negative) { stepval = PyNumber_Invert(v); if (unlikely(!stepval)) return (long) -1; } else { stepval = __Pyx_NewRef(v); } val = (long) 0; mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { PyObject *tmp, *digit; digit = PyNumber_And(stepval, mask); if (unlikely(!digit)) goto done; idigit = PyLong_AsLong(digit); Py_DECREF(digit); if (unlikely(idigit < 0)) goto done; tmp = PyNumber_Rshift(stepval, shift); if (unlikely(!tmp)) goto done; Py_DECREF(stepval); stepval = tmp; val |= ((long) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 if (Py_SIZE(stepval) == 0) goto unpacking_done; #endif } idigit = PyLong_AsLong(stepval); if (unlikely(idigit < 0)) goto done; remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 0 : 1); if (unlikely(idigit >= (1L << remaining_bits))) goto raise_overflow; val |= ((long) idigit) << bits; #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000 unpacking_done: #endif if (!is_unsigned) { if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) goto raise_overflow; if (is_negative) val = ~val; } ret = 0; done: Py_XDECREF(shift); Py_XDECREF(mask); Py_XDECREF(stepval); #endif Py_DECREF(v); if (likely(!ret)) return val; } return (long) -1; } } else { long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (long) -1; val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* FastTypeChecks */ #if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (cls == a || cls == b) return 1; mro = cls->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { PyObject *base = PyTuple_GET_ITEM(mro, i); if (base == (PyObject *)a || base == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); } #if PY_MAJOR_VERSION == 2 static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { PyObject *exception, *value, *tb; int res; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&exception, &value, &tb); res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } if (!res) { res = PyObject_IsSubclass(err, exc_type2); if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } } __Pyx_ErrRestore(exception, value, tb); return res; } #else static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { if (exc_type1) { return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); } else { return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } } #endif static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; assert(PyExceptionClass_Check(exc_type)); n = PyTuple_GET_SIZE(tuple); #if PY_MAJOR_VERSION >= 3 for (i=0; i '9'); break; } if (rt_from_call[i] != ctversion[i]) { same = 0; break; } } if (!same) { char rtversion[5] = {'\0'}; char message[200]; for (i=0; i<4; ++i) { if (rt_from_call[i] == '.') { if (found_dot) break; found_dot = 1; } else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') { break; } rtversion[i] = rt_from_call[i]; } PyOS_snprintf(message, sizeof(message), "compile time version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); return PyErr_WarnEx(NULL, message, 1); } return 0; } /* InitStrings */ #if PY_MAJOR_VERSION >= 3 static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { if (t.is_unicode | t.is_str) { if (t.intern) { *str = PyUnicode_InternFromString(t.s); } else if (t.encoding) { *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); } else { *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); } } else { *str = PyBytes_FromStringAndSize(t.s, t.n - 1); } if (!*str) return -1; if (PyObject_Hash(*str) == -1) return -1; return 0; } #endif static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION >= 3 __Pyx_InitString(*t, t->p); #else if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } if (!*t->p) return -1; if (PyObject_Hash(*t->p) == -1) return -1; #endif ++t; } return 0; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT #if !CYTHON_PEP393_ENABLED static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { char* defenc_c; PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { PyUnicode_AsASCIIString(o); return NULL; } } } #endif *length = PyBytes_GET_SIZE(defenc); return defenc_c; } #else static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif } #endif #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII __Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif #if (!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { int retval; if (unlikely(!x)) return -1; retval = __Pyx_PyObject_IsTrue(x); Py_DECREF(x); return retval; } static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); #if PY_MAJOR_VERSION >= 3 if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " "The ability to return an instance of a strict subclass of int is deprecated, " "and may be removed in a future version of Python.", result_type_name)) { __Pyx_DECREF_TypeName(result_type_name); Py_DECREF(result); return NULL; } __Pyx_DECREF_TypeName(result_type_name); return result; } #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", type_name, type_name, result_type_name); __Pyx_DECREF_TypeName(result_type_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x) || PyLong_Check(x))) #else if (likely(PyLong_Check(x))) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; res = m->nb_long(x); } #else if (likely(m && m->nb_int)) { name = "int"; res = m->nb_int(x); } #endif #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Int(x); } #endif if (likely(res)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else if (unlikely(!PyLong_CheckExact(res))) { #endif return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(b); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS if (likely(__Pyx_PyLong_IsCompact(b))) { return __Pyx_PyLong_CompactValue(b); } else { const digit* digits = __Pyx_PyLong_Digits(b); const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); #if PY_MAJOR_VERSION < 3 } else if (likely(PyInt_CheckExact(o))) { return PyInt_AS_LONG(o); #endif } else { Py_ssize_t ival; PyObject *x; x = PyNumber_Index(o); if (!x) return -1; ival = PyInt_AsLong(x); Py_DECREF(x); return ival; } } static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } /* #### Code section: utility_code_pragmas_end ### */ #ifdef _MSC_VER #pragma warning( pop ) #endif /* #### Code section: end ### */ #endif /* Py_PYTHON_H */ compreffor-0.5.5/src/cython/_compreffor.pyx000066400000000000000000000011231447637674100210410ustar00rootroot00000000000000from libc.stdint cimport uint32_t cdef extern from "../cxx/cffCompressor.h": uint32_t* _compreff "compreff" ( unsigned char* dataStream, int numRounds, unsigned& outputLength) except + void unload(uint32_t* response) def compreff(bytes dataStream, int numRounds): cdef unsigned outputLength = 0 cdef uint32_t* raw_output = _compreff(dataStream, numRounds, outputLength) cdef list output = [] cdef unsigned i for i in range(outputLength): output.append(raw_output[i]) if raw_output != NULL: unload(raw_output) return output compreffor-0.5.5/src/python/000077500000000000000000000000001447637674100160165ustar00rootroot00000000000000compreffor-0.5.5/src/python/compreffor/000077500000000000000000000000001447637674100201605ustar00rootroot00000000000000compreffor-0.5.5/src/python/compreffor/__init__.py000066400000000000000000000150651447637674100223000ustar00rootroot00000000000000# # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ ==== TTX/FontTools Compreffor ==== This module automatically subroutines the CFF table in a TTFont object, for the purposes of compressing the outputted font file. In addition to providing a Python interface, this tool can be used on the command line. Usage (python): >> from fontTools.ttLib import TTFont >> import compreffor >> font = TTFont(filename) >> options = { ... } >> compreffor.compress(font, **options) >> font.save(filename) Compression Backends: There are 2 different ways the compreffor can be run. - The default method is backed by a C++ extension module. The logic is in cxxCompressor.py, cffCompressor.h, cffCompressor.cc and _compreffor.pyx. - The second is a pure Python approach, and can be selected from `compress` by passing `method_python=True`. This is significantly slower than the the other backend (~10-20x). The logic can be found in pyCompressor.py. Options: When running `compreffor.compress`, options can be set using keyword arguments: - nrounds (integer) -- the number of market iterations to run (default: 4) - max_subrs (integer) -- limit to number of subrs per INDEX (default: 65533) With `method_python=True`, the following additional options are available: - chunk_ratio (float) -- set the percentage of charstrings to be run by each process. The value must be a float between 0 < n <= 1 (default: 0.1) - processes (integer) -- the number of simultaneous processes to run. Use value 1 to perform operation serially. Usage (command line): From the command line, you can either run the package as a module, $ python -m compreffor --help Or call the `compreffor` console script installed with the package. Use -h/--help to list all the available options. """ import logging from fontTools.misc.loggingTools import Timer log = logging.getLogger(__name__) timer = Timer(logger=logging.getLogger(log.name + ".timer")) from compreffor import cxxCompressor, pyCompressor try: from ._version import version as __version__ # type: ignore except ImportError: __version__ = "0.0.0+unknown" def compress(ttFont, method_python=False, **options): """ Subroutinize TTFont instance in-place using the C++ Compreffor. If 'method_python' is True, use the slower, pure-Python Compreffor. If the font already contains subroutines, it is first decompressed. """ if has_subrs(ttFont): log.warning( "There are subroutines in font; must decompress it first") decompress(ttFont) if method_python: pyCompressor.compreff(ttFont, **options) else: cxxCompressor.compreff(ttFont, **options) def decompress(ttFont, **kwargs): """ Use the FontTools Subsetter to desubroutinize the font's CFF table. Any keyword arguments are passed on as options to the Subsetter. Skip if the font contains no subroutines. """ if not has_subrs(ttFont): log.debug('No subroutines found; skip decompress') return from fontTools import subset # The FontTools subsetter modifies many tables by default; here # we only want to desubroutinize, so we run the subsetter on a # temporary copy and extract the resulting CFF table from it make_temp = kwargs.pop('make_temp', True) if make_temp: from io import BytesIO from fontTools.ttLib import TTFont, newTable stream = BytesIO() ttFont.save(stream, reorderTables=None) stream.flush() stream.seek(0) tmpfont = TTFont(stream) else: tmpfont = ttFont # run subsetter on the original font options = subset.Options(**kwargs) options.desubroutinize = True options.notdef_outline = True subsetter = subset.Subsetter(options=options) subsetter.populate(glyphs=tmpfont.getGlyphOrder()) subsetter.subset(tmpfont) if make_temp: # copy modified CFF table to original font data = tmpfont['CFF '].compile(tmpfont) table = newTable('CFF ') table.decompile(data, ttFont) ttFont['CFF '] = table tmpfont.close() def has_subrs(ttFont): """ Return True if the font's CFF table contains any subroutines. """ if 'CFF ' not in ttFont: raise ValueError("Invalid font: no 'CFF ' table found") td = ttFont['CFF '].cff.topDictIndex[0] all_subrs = [td.GlobalSubrs] if hasattr(td, 'FDArray'): all_subrs.extend(fd.Private.Subrs for fd in td.FDArray if hasattr(fd.Private, 'Subrs')) elif hasattr(td.Private, 'Subrs'): all_subrs.append(td.Private.Subrs) return any(all_subrs) def check(original_file, compressed_file): """ Compare the original and compressed font files to confirm they are functionally equivalent. Also check that the Charstrings in the compressed font's CFFFontSet don't exceed the maximum subroutine nesting level. Return True if all checks pass, else return False. """ from compreffor.test.util import check_compression_integrity from compreffor.test.util import check_call_depth rv = check_compression_integrity(original_file, compressed_file) rv &= check_call_depth(compressed_file) return rv # The `Methods` and `Compreffor` classes are now deprecated, but we keep # them here for backward compatibility class Methods: Py, Cxx = range(2) class Compreffor(object): def __init__(self, font, method=Methods.Cxx, **options): import warnings warnings.warn("'Compreffor' class is deprecated; use 'compress' function " "instead", UserWarning) self.font = font self.method = method self.options = options def compress(self): if self.method == Methods.Py: compress(self.font, method_python=True, **self.options) elif self.method == Methods.Cxx: compress(self.font, method_python=False, **self.options) else: raise ValueError("Invalid method: %r" % self.method) compreffor-0.5.5/src/python/compreffor/__main__.py000066400000000000000000000125001447637674100222500ustar00rootroot00000000000000import os import argparse import logging import compreffor from compreffor.pyCompressor import human_size from fontTools.ttLib import TTFont from fontTools.misc.loggingTools import configLogger def parse_arguments(args=None): parser = argparse.ArgumentParser( prog='compreffor', description="FontTools Compreffor will take a CFF-flavored OpenType font " "and automatically detect repeated routines and generate " "subroutines to minimize the disk space needed to represent " "a font.") parser.add_argument("infile", metavar="INPUT", help="path to the input font file") parser.add_argument("outfile", nargs="?", metavar="OUTPUT", help="path to the compressed file (default: " "*.compressed.otf)") parser.add_argument('-v', '--verbose', action='count', default=0, help="print more messages to stdout; use it multiple " "times to increase the level of verbosity") parser.add_argument("-c", "--check", action='store_true', help="verify that the outputted font is valid and " "functionally equivalent to the input") parser.add_argument("-d", "--decompress", action="store_true", help="decompress source before compressing (necessary if " "there are subroutines in the source)") parser.add_argument("-n", "--nrounds", type=int, help="the number of iterations to run the algorithm" " (default: 4)") parser.add_argument("-m", "--max-subrs", type=int, help="limit to the number of subroutines per INDEX " " (default: 65533)") parser.add_argument('--generate-cff', action='store_true', help="Also save binary CFF table data as {INPUT}.cff") parser.add_argument('--py', dest="method_python", action='store_true', help="Use pure Python method, instead of C++ extension") py_meth_group = parser.add_argument_group("options for pure Python method") py_meth_group.add_argument("--chunk-ratio", type=float, help="specify the percentage size of the " "job chunks used for parallel processing " "(0 < float <= 1; default: 0.1)") py_meth_group.add_argument("-p", "--processes", type=int, help="specify number of concurrent processes to " "run. Use value 1 to perform operation serially " "(default: 12)") options = parser.parse_args(args) kwargs = vars(options) if options.method_python: if options.processes is not None and options.processes < 1: parser.error('argument --processes expects positive integer > 0') if (options.chunk_ratio is not None and not (0 < options.chunk_ratio <= 1)): parser.error('argument --chunk-ratio expects float number 0 < n <= 1') else: for attr in ('chunk_ratio', 'processes'): if getattr(options, attr): opt = attr.replace('_', '-') parser.error('argument --%s can only be used with --py (pure ' 'Python) method' % opt) else: del kwargs[attr] if options.outfile is None: options.outfile = "%s.compressed%s" % os.path.splitext(options.infile) return kwargs def main(args=None): log = compreffor.log timer = compreffor.timer timer.reset() options = parse_arguments(args) # consume kwargs that are not passed on to 'compress' function infile = options.pop('infile') outfile = options.pop('outfile') decompress = options.pop('decompress') generate_cff = options.pop('generate_cff') check = options.pop('check') verbose = options.pop('verbose') if verbose == 1: level = logging.INFO elif verbose > 1: level = logging.DEBUG else: level = logging.WARNING configLogger(logger=log, level=level) orig_size = os.path.getsize(infile) font = TTFont(infile) if decompress: log.info("Decompressing font with FontTools Subsetter") with timer("decompress the font"): compreffor.decompress(font) log.info("Compressing font through %s Compreffor", "pure-Python" if options['method_python'] else "C++") compreffor.compress(font, **options) with timer("compile and save compressed font"): font.save(outfile) if generate_cff: cff_file = os.path.splitext(outfile)[0] + ".cff" with open(cff_file, 'wb') as f: font['CFF '].cff.compile(f, None) log.info("Saved CFF data to '%s'" % os.path.basename(cff_file)) if check: log.info("Checking compression integrity and call depth") passed = compreffor.check(infile, outfile) if not passed: return 1 comp_size = os.path.getsize(outfile) log.info("Compressed to '%s' -- saved %s" % (os.path.basename(outfile), human_size(orig_size - comp_size))) log.debug("Total time: %gs", timer.time()) if __name__ == "__main__": main() compreffor-0.5.5/src/python/compreffor/cxxCompressor.py000077500000000000000000000153441447637674100234230ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tool to subroutinize a CFF OpenType font. Backed by a C++ binary. This file is a bootstrap for the C++ edition of the FontTools compreffor. It prepares the input data for the extension and reads back in the results, applying them to the input font. Usage (command line): >> ./cxxCompressor.py /path/to/font.otf # font written to /path/to/font.compressed.otf Usage (python): >> font = TTFont("/path/to/font.otf") >> cxxCompressor.compreff(font) >> font.save("/path/to/output.otf") """ import array from io import BytesIO import struct import logging from compreffor.pyCompressor import ( Compreffor, CandidateSubr, tokenCost) from compreffor import _compreffor as lib, timer log = logging.getLogger(__name__) __all__ = ["compreff"] class IdKeyMap(object): """A map that where every key's value is itself. Used as a map from simplified key space to actual key space in pyCompressor""" def __getitem__(self, tok): return tok class SimpleCandidateSubr(CandidateSubr): """A reimplimentation of CandidateSubr to be more compatible with results from C++""" def __init__(self, length, ref_loc): self.length = length self.location = ref_loc self.freq = 0 self._flatten = False self._global = False def usages(self): return self.freq frequency = usages def cost(self): try: return self.__cost except AttributeError: self.__cost = sum(map(tokenCost, self.value())) return self.__cost def encoding(self): return self._encoding @timer("produce data for C++ library") def write_data(td): """Writes CharStrings from the TopDict td into a string that is easily readable.""" out = BytesIO() td.CharStrings.charStringsIndex.getCompiler(td.strings, None).toFile(out) return out.getvalue() def get_encoding(data_buffer, subrs): """Read a charstring's encoding stream out of a string buffer response from cffCompressor.cc""" pos = 0 num_calls = data_buffer[pos] pos += 1 enc = [] for j in range(num_calls): insertion_pos = struct.unpack_from('> ./pyCompressor.py /path/to/font.otf # font written to /path/to/font.compressed.otf Usage (in Python): >> font = TTFont(path_to_font) >> compreffor = Compreffor(font) >> compreffor.compress() >> font.save(path_to_output) """ import itertools import functools import sys import multiprocessing import math from collections import deque import logging from fontTools import cffLib from fontTools.ttLib import TTFont from fontTools.misc import psCharStrings from compreffor import timer log = logging.getLogger(__name__) SINGLE_BYTE_OPS = set(['hstem', 'vstem', 'vmoveto', 'rlineto', 'hlineto', 'vlineto', 'rrcurveto', 'callsubr', 'return', 'endchar', 'blend', 'hstemhm', 'hintmask', 'cntrmask', 'rmoveto', 'hmoveto', 'vstemhm', 'rcurveline', 'rlinecurve', 'vvcurveto', 'hhcurveto', # 'shortint', # not really an operator 'callgsubr', 'vhcurveto', 'hvcurveto']) __all__ = ["CandidateSubr", "SubstringFinder", "Compreffor", "compreff"] def tokenCost(token): """Calculate the bytecode size of a T2 Charstring token""" tp = type(token) if issubclass(tp, str): if token[:8] in ("hintmask", "cntrmask"): return 1 + len(token[9:]) elif token in SINGLE_BYTE_OPS: return 1 else: return 2 elif tp == tuple: assert token[0] in ("hintmask", "cntrmask") return 1 + len(token[1]) elif tp == int: if -107 <= token <= 107: return 1 elif 108 <= token <= 1131 or -1131 <= token <= -108: return 2 else: return 3 elif tp == float: return 5 assert 0 class CandidateSubr(object): """ Records a substring of a charstring that is generally repeated throughout many glyphs. Instance variables: length -- length of substring location -- tuple of form (glyph_idx, start_pos) where a ref string starts freq -- number of times it appears chstrings -- chstrings from whence this substring came cost_map -- array from simple alphabet -> actual token """ __slots__ = ["length", "location", "freq", "chstrings", "cost_map", "_CandidateSubr__cost", "_adjusted_cost", "_price", "_usages", "_list_idx", "_position", "_encoding", "_program", "_flatten", "_max_call_depth", "_fdidx", "_global"] def __init__(self, length, ref_loc, freq=0, chstrings=None, cost_map=None): self.length = length self.location = ref_loc self.freq = freq self.chstrings = chstrings self.cost_map = cost_map self._global = False self._flatten = False self._fdidx = [] # indicates unreached subr def __len__(self): """Return the number of tokens in this substring""" return self.length def value(self): """Returns the actual substring value""" assert self.chstrings is not None return self.chstrings[self.location[0]][self.location[1]:(self.location[1] + self.length)] def subr_saving(self, use_usages=False, true_cost=False, call_cost=5, subr_overhead=3): """ Return the savings that will be realized by subroutinizing this substring. Arguments: use_usages -- indicate to use the value in `_usages` rather than `freq` true_cost -- take account of subroutine calls call_cost -- the cost to call a subroutine subr_overhead -- the cost to define a subroutine """ # NOTE: call_cost=5 gives better results for some reason # but that is really not correct if use_usages: amt = self.usages() else: amt = self.frequency() if not true_cost: cost = self.cost() else: cost = self.real_cost(call_cost=call_cost) # TODO: # - If substring ends in "endchar", we need no "return" # added and as such subr_overhead will be one byte # smaller. # - The call_cost should be 3 or 4 if the position of the subr # is greater return ( cost * amt # avoided copies - cost # cost of subroutine body - call_cost * amt # cost of calling - subr_overhead) # cost of subr definition def real_cost(self, call_cost=5): """Account for subroutine calls in cost computation. Not cached because the subroutines used will change over time.""" cost = self.cost() cost += sum(-it[1].cost() + call_cost if not it[1]._flatten else it[1].real_cost(call_cost=call_cost) for it in self.encoding()) return cost def cost(self): """Return the size (in bytes) that the bytecode for this takes up""" assert self.cost_map is not None try: try: return self.__cost except AttributeError: self.__cost = sum([self.cost_map[t] for t in self.value()]) return self.__cost except: raise Exception('Translated token not recognized') def encoding(self): return self._encoding def usages(self): return self._usages def frequency(self): return self.freq def __eq__(self, other): if not isinstance(other, CandidateSubr): return NotImplemented return self.length == other.length and self.location == other.location def __ne__(self, other): if not isinstance(other, CandidateSubr): return NotImplemented return not(self == other) def __hash__(self): return hash((self.length, self.location)) def __repr__(self): return "" % (self.length, self.freq) class SubstringFinder(object): """ This class facilitates the finding of repeated substrings within a glyph_set. Typical usage involves creation of an instance and then calling `get_substrings`, which returns a sorted list of `CandidateSubr`s. Instance variables: suffixes -- sorted array of suffixes data -- A 2-level array of charstrings: - The first level separates by glyph - The second level separates by token in a glyph's charstring alphabet_size -- size of alphabet length -- sum of the lengths of the individual glyphstrings rev_keymap -- map from simple alphabet -> original tokens cost_map -- map from simple alphabet -> bytecost of token glyph_set_keys -- glyph_set_keys[i] gives the glyph id for data[i] _completed_suffixes -- boolean whether the suffix array is ready and sorted """ __slots__ = ["suffixes", "data", "alphabet_size", "length", "substrings", "rev_keymap", "glyph_set_keys", "_completed_suffixes", "cost_map"] def __init__(self, glyph_set): self.rev_keymap = [] self.cost_map = [] self.data = [] self.suffixes = [] self.length = 0 self.process_chstrings(glyph_set) self._completed_suffixes = False def process_chstrings(self, glyph_set): """Remap the charstring alphabet and put into self.data""" self.glyph_set_keys = sorted(glyph_set.keys()) keymap = {} # maps charstring tokens -> simple integer alphabet next_key = 0 for k in self.glyph_set_keys: char_string = glyph_set[k]._glyph char_string.decompile() program = [] piter = iter(enumerate(char_string.program)) for i, tok in piter: assert tok not in ("callsubr", "callgsubr", "return") assert tok != "endchar" or i == len(char_string.program) - 1 if tok in ("hintmask", "cntrmask"): # Attach next token to this, as a subroutine # call cannot be placed between this token and # the following. _, tokennext = next(piter) tok = (tok, tokennext) if not tok in keymap: keymap[tok] = next_key self.rev_keymap.append(tok) self.cost_map.append(tokenCost(tok)) next_key += 1 program.append(keymap[tok]) program = tuple(program) chstr_len = len(program) self.length += chstr_len glyph_idx = len(self.data) self.suffixes.extend( map(lambda x: (glyph_idx, x), range(chstr_len)) ) self.data.append(tuple(program)) self.alphabet_size = next_key def get_suffixes(self): """Return the sorted suffix array""" if self._completed_suffixes: return self.suffixes with timer("get suffixes via Python sort"): self.suffixes.sort(key=lambda idx: self.data[idx[0]][idx[1]:]) self._completed_suffixes = True return self.suffixes @timer("get LCP array") def get_lcp(self): """Returns the LCP array""" if not self._completed_suffixes: self.get_suffixes() assert self._completed_suffixes rank = [[0 for _ in range(len(d_list))] for d_list in self.data] lcp = [0 for _ in range(self.length)] # compute rank array for i in range(self.length): glyph_idx, tok_idx = self.suffixes[i] rank[glyph_idx][tok_idx] = i for glyph_idx in range(len(self.data)): cur_h = 0 chstring = self.data[glyph_idx] for tok_idx in range(len(chstring)): cur_rank = rank[glyph_idx][tok_idx] if cur_rank > 0: last_glidx, last_tidx = self.suffixes[cur_rank - 1] last_chstring = self.data[last_glidx] while last_tidx + cur_h < len(last_chstring) and \ tok_idx + cur_h < len(chstring) and \ last_chstring[last_tidx + cur_h] == self.data[glyph_idx][tok_idx + cur_h]: cur_h += 1 lcp[cur_rank] = cur_h if cur_h > 0: cur_h -= 1 return lcp def get_substrings(self, min_freq=2, check_positive=True, sort_by_length=False): """ Return repeated substrings (type CandidateSubr) from the charstrings sorted by subroutine savings with freq >= min_freq using the LCP array. Arguments: min_freq -- the minimum frequency required to include a substring check_positive -- if True, only allow substrings with positive subr_saving sort_by_length -- if True, return substrings sorted by length, else by saving """ self.get_suffixes() lcp = self.get_lcp() with timer("extract substrings"): start_indices = deque() self.substrings = [] for i, min_l in enumerate(lcp): # First min_l items are still the same. # Pop the rest from previous and account for. # Note: non-branching substrings aren't included # TODO: don't allow overlapping substrings into the same set while start_indices and start_indices[-1][0] > min_l: l, start_idx = start_indices.pop() freq = i - start_idx if freq < min_freq: continue substr = CandidateSubr( l, self.suffixes[start_idx], freq, self.data, self.cost_map) if substr.subr_saving() > 0 or not check_positive: self.substrings.append(substr) if not start_indices or min_l > start_indices[-1][0]: start_indices.append((min_l, i - 1)) log.debug("%d substrings found", len(self.substrings)) with timer("sort substrings"): if sort_by_length: self.substrings.sort(key=lambda s: len(s)) else: self.substrings.sort(key=lambda s: s.subr_saving(), reverse=True) return self.substrings class Compreffor(object): """ Manager class for the compreffor. Usage: >> font = TTFont(path_to_font) >> compreffor = Compreffor(font) >> compreffor.compress() >> font.save("/path/to/output.otf") """ SINGLE_PROCESS = False ALPHA = 0.1 K = 0.1 PROCESSES = 12 NROUNDS = 4 LATIN_POOL_CHUNKRATIO = 0.05 POOL_CHUNKRATIO = 0.1 CHUNK_CHARSET_CUTOFF = 1500 NSUBRS_LIMIT = 65533 # 64K - 3 SUBR_NEST_LIMIT = 10 def __init__(self, font, nrounds=None, max_subrs=None, chunk_ratio=None, processes=None, test_mode=False): """ Initialize the compressor. Arguments: font -- the TTFont to compress, must be a CFF font nrounds -- specifies the number of rounds to run max_subrs -- specify the limit on the number of subrs in an INDEX chunk_ratio -- sets the POOL_CHUNKRATIO parameter processes -- specify the number of parallel processes (1 to not parallelize) test_mode -- disables some checks (such as positive subr_saving) """ if isinstance(font, TTFont): assert "CFF " in font assert len(font["CFF "].cff.topDictIndex) == 1 self.font = font else: log.warning("non-TTFont given to Compreffor") self.test_mode = test_mode if chunk_ratio is not None: self.POOL_CHUNKRATIO = chunk_ratio elif font and (len(font["CFF "].cff.topDictIndex[0].charset) < self.CHUNK_CHARSET_CUTOFF): self.POOL_CHUNKRATIO = self.LATIN_POOL_CHUNKRATIO if nrounds is not None: self.NROUNDS = nrounds if processes is not None: if processes < 1: raise ValueError('processes value must be > 0') elif processes == 1: self.SINGLE_PROCESS = True else: self.PROCESSES = processes if max_subrs is not None: self.NSUBRS_LIMIT = max_subrs # only print the progress in `iterative_encode` if the logger is # enabled for DEBUG, and if it outputs to the console's stderr self._progress = (not log.disabled and log.isEnabledFor(logging.DEBUG) and _has_stderr_handler(log)) def compress(self): """Compress the provided font using the iterative method""" top_dict = self.font["CFF "].cff.topDictIndex[0] multi_font = hasattr(top_dict, "FDArray") if not multi_font: n_locals = 1 fdsel = None else: n_locals = len(top_dict.FDArray) fdsel = lambda g: top_dict.CharStrings.getItemAndSelector(g)[1] ans = self.iterative_encode(self.font.getGlyphSet(), fdsel, n_locals) encoding = ans["glyph_encodings"] gsubrs = ans["gsubrs"] lsubrs = ans["lsubrs"] Compreffor.apply_subrs(top_dict, encoding, gsubrs, lsubrs) @staticmethod @timer("apply subroutines") def apply_subrs(top_dict, encoding, gsubrs, lsubrs): multi_font = hasattr(top_dict, "FDArray") gbias = psCharStrings.calcSubrBias(gsubrs) lbias = [psCharStrings.calcSubrBias(subrs) for subrs in lsubrs] if multi_font: for g in top_dict.charset: charstring, sel = top_dict.CharStrings.getItemAndSelector(g) enc = encoding[g] Compreffor.collapse_hintmask(charstring.program) Compreffor.update_program(charstring.program, enc, gbias, lbias, sel) Compreffor.expand_hintmask(charstring.program) for fd in top_dict.FDArray: if not hasattr(fd.Private, "Subrs"): fd.Private.Subrs = cffLib.SubrsIndex() for subrs, subrs_index in zip(itertools.chain([gsubrs], lsubrs), itertools.chain([top_dict.GlobalSubrs], [fd.Private.Subrs for fd in top_dict.FDArray])): for subr in subrs: item = psCharStrings.T2CharString(program=subr._program) subrs_index.append(item) for fd in top_dict.FDArray: if not fd.Private.Subrs: del fd.Private.Subrs else: for glyph, enc in encoding.items(): charstring = top_dict.CharStrings[glyph] Compreffor.collapse_hintmask(charstring.program) Compreffor.update_program(charstring.program, enc, gbias, lbias, 0) Compreffor.expand_hintmask(charstring.program) assert len(lsubrs) == 1 if not hasattr(top_dict.Private, "Subrs"): top_dict.Private.Subrs = cffLib.SubrsIndex() for subr in lsubrs[0]: item = psCharStrings.T2CharString(program=subr._program) top_dict.Private.Subrs.append(item) if not top_dict.Private.Subrs: del top_dict.Private.Subrs for subr in gsubrs: item = psCharStrings.T2CharString(program=subr._program) top_dict.GlobalSubrs.append(item) @staticmethod def test_call_cost(subr, subrs): """See how much it would cost to call subr if it were inserted into subrs""" if len(subrs) >= 2263: if subrs[2262].usages() >= subr.usages(): return 3 if len(subrs) >= 215: if subrs[214].usages() >= subr.usages(): return 2 return 1 @staticmethod def insert_by_usage(subr, subrs): """Insert subr into subrs mainting a sort by usage""" subrs.append(subr) subrs.sort(key=lambda s: s.usages(), reverse=True) def iterative_encode(self, glyph_set, fdselect=None, fdlen=1): """ Choose a subroutinization encoding for all charstrings in `glyph_set` using an iterative Dynamic Programming algorithm. Initially uses the results from SubstringFinder and then iteratively optimizes. Arguments: glyph_set -- the set of charstrings to encode (required) fdselect -- the FDSelect array of the source font, or None fdlen -- the number of FD's in the source font, or 1 if there are none Returns: A three-part dictionary with keys 'gsubrs', 'lsubrs', and 'glyph_encodings'. The 'glyph_encodings' encoding dictionary specifies how to break up each charstring. Encoding[i] describes how to encode glyph i. Each entry is something like [(x_1, c_1), (x_2, c_2), ..., (x_k, c_k)], where x_* is an index into the charstring that indicates where a subr starts and c_* is a CandidateSubr. The 'gsubrs' entry contains an array of global subroutines (CandidateSubr objects) and 'lsubrs' is an array indexed by FDidx, where each entry is a list of local subroutines. """ # generate substrings for marketplace sf = SubstringFinder(glyph_set) if self.test_mode: substrings = sf.get_substrings(min_freq=0, check_positive=False, sort_by_length=False) else: substrings = sf.get_substrings(min_freq=2, check_positive=True, sort_by_length=False) # TODO remove unnecessary substrings? data = sf.data rev_keymap = sf.rev_keymap cost_map = sf.cost_map glyph_set_keys = sf.glyph_set_keys del sf if not self.SINGLE_PROCESS: pool = multiprocessing.Pool(processes=self.PROCESSES) else: class DummyPool: pass pool = DummyPool() pool.map = lambda f, *l, **kwargs: map(f, *l) substr_dict = {} timer.split() log.debug("glyphstrings+substrings=%d", len(data) + len(substrings)) # set up dictionary with initial values for idx, substr in enumerate(substrings): substr._adjusted_cost = substr.cost() substr._price = substr._adjusted_cost substr._usages = substr.freq # this is the frequency that the substring appears, # not necessarily used substr._list_idx = idx substr_dict[substr.value()] = (idx, substr._price) # NOTE: avoid excess data copying on fork # probably can just pass substr # if threading instead for run_count in range(self.NROUNDS): # calibrate prices for idx, substr in enumerate(substrings): marg_cost = float(substr._adjusted_cost) / (substr._usages + self.K) substr._price = marg_cost * self.ALPHA + substr._price * (1 - self.ALPHA) substr_dict[substr.value()] = (idx, substr._price) # minimize substring costs csize = int(math.ceil(self.POOL_CHUNKRATIO*len(substrings))) substr_encodings = pool.map(functools.partial(optimize_charstring, cost_map=cost_map, substr_dict=substr_dict, progress=self._progress), enumerate([s.value() for s in substrings]), chunksize=csize) for substr, result in zip(substrings, substr_encodings): substr._encoding = [(enc_item[0], substrings[enc_item[1]]) for enc_item in result["encoding"]] substr._adjusted_cost = result["market_cost"] del substr_encodings # minimize charstring costs in current market through DP csize = int(math.ceil(self.POOL_CHUNKRATIO*len(data))) encodings = pool.map(functools.partial(optimize_charstring, cost_map=cost_map, substr_dict=substr_dict, progress=self._progress), data, chunksize=csize) encodings = [[(enc_item[0], substrings[enc_item[1]]) for enc_item in i["encoding"]] for i in encodings] # update substring frequencies based on cost minimization for substr in substrings: substr._usages = 0 for calling_substr in substrings: for start, substr in calling_substr._encoding: if substr: substr._usages += 1 for glyph_idx, enc in enumerate(encodings): for start, substr in enc: if substr: substr._usages += 1 if log.isEnabledFor(logging.INFO): log.info("Round %d Done!", (run_count + 1)) log.info("avg: %f", (float(sum(substr._usages for substr in substrings)) / len(substrings))) log.info("max: %d", max(substr._usages for substr in substrings)) log.info("used: %d", sum(substr._usages > 0 for substr in substrings)) if run_count <= self.NROUNDS - 2 and not self.test_mode: with timer("cutdown"): if run_count < self.NROUNDS - 2: bad_substrings = [s for s in substrings if s.subr_saving(use_usages=True) <= 0] substrings = [s for s in substrings if s.subr_saving(use_usages=True) > 0] else: bad_substrings = [s for s in substrings if s.subr_saving(use_usages=True, true_cost=False) <= 0] substrings = [s for s in substrings if s.subr_saving(use_usages=True, true_cost=False) > 0] for substr in bad_substrings: # heuristic to encourage use of called substrings: for idx, called_substr in substr._encoding: called_substr._usages += substr._usages - 1 del substr_dict[substr.value()] for idx, s in enumerate(substrings): s._list_idx = idx if log.isEnabledFor(logging.DEBUG): log.debug("%d substrings with non-positive savings removed", len(bad_substrings)) log.debug("(%d had positive usage)", len([s for s in bad_substrings if s._usages > 0])) log.info("Finished iterative market (%gs)", timer.split()) log.info("%d candidate subrs found", len(substrings)) gsubrs, lsubrs = Compreffor.process_subrs( glyph_set_keys, encodings, fdlen, fdselect, substrings, rev_keymap, self.NSUBRS_LIMIT, self.SUBR_NEST_LIMIT) return {"glyph_encodings": dict(zip(glyph_set_keys, encodings)), "lsubrs": lsubrs, "gsubrs": gsubrs} @staticmethod @timer("post-process subroutines") def process_subrs(glyph_set_keys, encodings, fdlen, fdselect, substrings, rev_keymap, subr_limit, nest_limit): def mark_reachable(cand_subr, fdidx): try: if fdidx not in cand_subr._fdidx: cand_subr._fdidx.append(fdidx) except AttributeError: cand_subr._fdidx = [fdidx] for it in cand_subr._encoding: mark_reachable(it[1], fdidx) if fdselect is not None: for g, enc in zip(glyph_set_keys, encodings): sel = fdselect(g) for it in enc: mark_reachable(it[1], sel) else: for encoding in encodings: for it in encoding: mark_reachable(it[1], 0) subrs = [s for s in substrings if s.usages() > 0 and hasattr(s, '_fdidx') and bool(s._fdidx) and s.subr_saving(use_usages=True, true_cost=True) > 0] bad_substrings = [s for s in substrings if s.usages() == 0 or not hasattr(s, '_fdidx') or not bool(s._fdidx) or s.subr_saving(use_usages=True, true_cost=True) <= 0] log.debug("%d substrings unused or negative saving subrs", len(bad_substrings)) for s in bad_substrings: s._flatten = True gsubrs = [] lsubrs = [[] for _ in range(fdlen)] subrs.sort(key=lambda s: s.subr_saving(use_usages=True, true_cost=True)) while subrs and (any(len(s) < subr_limit for s in lsubrs) or len(gsubrs) < subr_limit): subr = subrs[-1] del subrs[-1] if len(subr._fdidx) == 1: lsub_index = lsubrs[subr._fdidx[0]] if len(gsubrs) < subr_limit: if len(lsub_index) < subr_limit: # both have space gcost = Compreffor.test_call_cost(subr, gsubrs) lcost = Compreffor.test_call_cost(subr, lsub_index) if gcost < lcost: Compreffor.insert_by_usage(subr, gsubrs) subr._global = True else: Compreffor.insert_by_usage(subr, lsub_index) else: # just gsubrs has space Compreffor.insert_by_usage(subr, gsubrs) subr._global = True elif len(lsub_index) < subr_limit: # just lsubrs has space Compreffor.insert_by_usage(subr, lsub_index) else: # we must skip :( bad_substrings.append(subr) else: if len(gsubrs) < subr_limit: # we can put it in globals Compreffor.insert_by_usage(subr, gsubrs) subr._global = True else: # no room for this one bad_substrings.append(subr) bad_substrings.extend([s[1] for s in subrs]) # add any leftover subrs to bad_substrings if fdselect is not None: # CID-keyed: Avoid `callsubr` usage in global subroutines bad_lsubrs = Compreffor.collect_lsubrs_called_from(gsubrs) bad_substrings.extend(bad_lsubrs) lsubrs = [[s for s in lsubrarr if s not in bad_lsubrs] for lsubrarr in lsubrs] for s in bad_substrings: s._flatten = True # fix any nesting issues Compreffor.calc_nesting(gsubrs) for subrs in lsubrs: Compreffor.calc_nesting(subrs) too_nested = [s for s in itertools.chain(*lsubrs) if s._max_call_depth > nest_limit] too_nested.extend([s for s in gsubrs if s._max_call_depth > nest_limit]) for s in too_nested: s._flatten = True bad_substrings.extend(too_nested) lsubrs = [[s for s in lsubrarr if s._max_call_depth <= nest_limit] for lsubrarr in lsubrs] gsubrs = [s for s in gsubrs if s._max_call_depth <= nest_limit] too_nested = len(too_nested) log.debug("%d substrings nested too deep", too_nested) log.debug("%d substrings being flattened", len(bad_substrings)) # reorganize to minimize call cost of most frequent subrs gbias = psCharStrings.calcSubrBias(gsubrs) lbias = [psCharStrings.calcSubrBias(s) for s in lsubrs] for subr_arr, bias in zip(itertools.chain([gsubrs], lsubrs), itertools.chain([gbias], lbias)): subr_arr.sort(key=lambda s: s.usages(), reverse=True) if bias == 1131: subr_arr[:] = subr_arr[216:1240] + subr_arr[0:216] + subr_arr[1240:] elif bias == 32768: subr_arr[:] = (subr_arr[2264:33901] + subr_arr[216:1240] + subr_arr[0:216] + subr_arr[1240:2264] + subr_arr[33901:]) for idx, subr in enumerate(subr_arr): subr._position = idx for subr in sorted(bad_substrings, key=lambda s: len(s)): # NOTE: it is important this is run in order so shorter # substrings are run before longer ones if hasattr(subr, '_fdidx') and len(subr._fdidx) > 0: program = [rev_keymap[tok] for tok in subr.value()] Compreffor.update_program(program, subr.encoding(), gbias, lbias, None) Compreffor.expand_hintmask(program) subr._program = program for subr_arr, sel in zip(itertools.chain([gsubrs], lsubrs), itertools.chain([None], range(fdlen))): for subr in subr_arr: program = [rev_keymap[tok] for tok in subr.value()] if program[-1] not in ("endchar", "return"): program.append("return") Compreffor.update_program(program, subr.encoding(), gbias, lbias, sel) Compreffor.expand_hintmask(program) subr._program = program return (gsubrs, lsubrs) @staticmethod def collect_lsubrs_called_from(gsubrs): """ Collect local subroutines called from any entries in `gsubrs`. This method returns them as a set for after flattening in order to avoid `callsubr` usage in global subroutines. """ lsubrs = set() def collect(subr): for _, s in subr._encoding: if not s._global: lsubrs.add(s) collect(s) for subr in gsubrs: collect(subr) return lsubrs @staticmethod def calc_nesting(subrs): """Update each entry of subrs with their call depth. This is stored in the '_max_call_depth' attribute of the subr""" def increment_subr_depth(subr, depth): if not hasattr(subr, "_max_call_depth") or subr._max_call_depth < depth: subr._max_call_depth = depth callees = deque([it[1] for it in subr._encoding]) while len(callees): next_subr = callees.pop() if next_subr._flatten: callees.extend([it[1] for it in next_subr._encoding]) elif (not hasattr(next_subr, "_max_call_depth") or next_subr._max_call_depth < depth + 1): increment_subr_depth(next_subr, depth + 1) for subr in subrs: if not hasattr(subr, "_max_call_depth"): increment_subr_depth(subr, 1) @staticmethod def update_program(program, encoding, gbias, lbias_arr, fdidx): """ Applies the provided `encoding` to the provided `program`. I.e., all specified subroutines are actually called in the program. This mutates the input program and also returns it. Arguments: program -- the program to update encoding -- the encoding to use. a list of (idx, cand_subr) tuples gbias -- bias into the global subrs INDEX lbias_arr -- bias into each of the lsubrs INDEXes fdidx -- the FD that this `program` belongs to, or None if global """ offset = 0 for item in encoding: subr = item[1] s = slice(item[0] - offset, item[0] + subr.length - offset) if subr._flatten: program[s] = subr._program offset += subr.length - len(subr._program) else: assert hasattr(subr, "_position"), \ "CandidateSubr without position in Subrs encountered" if subr._global: operator = "callgsubr" bias = gbias else: # assert this is a local or global only used by one FD assert len(subr._fdidx) == 1 assert fdidx == None or subr._fdidx[0] == fdidx operator = "callsubr" bias = lbias_arr[subr._fdidx[0]] program[s] = [subr._position - bias, operator] offset += subr.length - 2 return program @staticmethod def collapse_hintmask(program): """Takes in a charstring and returns the same charstring with hintmasks combined into a single element""" piter = iter(enumerate(program)) for i, tok in piter: if tok in ("hintmask", "cntrmask"): program[i:i+2] = [(program[i], program[i+1])] @staticmethod def expand_hintmask(program): """Expands collapsed hintmask tokens into two tokens""" piter = iter(enumerate(program)) for i, tok in piter: if isinstance(tok, tuple): assert tok[0] in ("hintmask", "cntrmask") program[i:i+1] = tok def _has_stderr_handler(logger): """ Return True if any of the logger's handlers outputs to sys.stderr. """ c = logger while c: if c.handlers: for h in c.handlers: if hasattr(h, 'stream') and h.stream is sys.stderr: return True if not c.propagate: break else: c = c.parent return False def optimize_charstring(charstring, cost_map, substr_dict, progress=False): """Optimize a charstring (encoded using keymap) using the substrings in substr_dict. This is the Dynamic Programming portion of `iterative_encode`.""" if len(charstring) > 1 and type(charstring[1]) == tuple: if type(charstring[0]) == int: skip_idx = charstring[0] charstring = charstring[1] else: skip_idx = None results = [0 for _ in range(len(charstring) + 1)] next_enc_idx = [None for _ in range(len(charstring))] next_enc_substr = [None for _ in range(len(charstring))] for i in reversed(range(len(charstring))): min_option = float("inf") min_enc_idx = len(charstring) min_enc_substr = None cur_cost = 0 for j in range(i + 1, len(charstring) + 1): cur_cost += cost_map[charstring[j - 1]] if charstring[i:j] in substr_dict: substr = substr_dict[charstring[i:j]] if substr[0] != skip_idx: option = substr[1] + results[j] substr = substr[0] else: assert i == 0 and j == len(charstring) substr = None option = cur_cost + results[j] else: # note: must not be branching, so just make _price actual cost substr = None option = cur_cost + results[j] if option < min_option: min_option = option min_enc_idx = j min_enc_substr = substr results[i] = min_option next_enc_idx[i] = min_enc_idx next_enc_substr[i] = min_enc_substr market_cost = results[0] encoding = [] cur_enc_idx = 0 last = len(next_enc_idx) while cur_enc_idx < last: last_idx = cur_enc_idx cur_enc_substr = next_enc_substr[cur_enc_idx] cur_enc_idx = next_enc_idx[cur_enc_idx] if cur_enc_substr is not None: encoding.append((last_idx, cur_enc_substr)) if progress: sys.stderr.write(".") sys.stderr.flush() return {"encoding": encoding, "market_cost": market_cost} # this is here for symmetry with cxxCompressor.compreff def compreff(font, **options): """ Main function that compresses `font`, a TTFont object, in place. """ Compreffor(font, **options).compress() def human_size(num): """Return a number of bytes in human-readable units""" num = float(num) for s in ['bytes', 'KB', 'MB']: if num < 1024.0: return '%3.1f %s' % (num, s) else: num /= 1024.0 return '%3.1f %s' % (num, 'GB') compreffor-0.5.5/src/python/compreffor/test/000077500000000000000000000000001447637674100211375ustar00rootroot00000000000000compreffor-0.5.5/src/python/compreffor/test/__init__.py000066400000000000000000000011261447637674100232500ustar00rootroot00000000000000# # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. compreffor-0.5.5/src/python/compreffor/test/dummy.py000066400000000000000000000032151447637674100226450ustar00rootroot00000000000000# # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import collections.abc class DummyGlyphSet(collections.abc.MutableMapping): """Behaves like a glyphset for testing purposes""" def __init__(self, *args, **kwargs): self.storage = {} self.update(dict(*args, **kwargs)) # interpret initial args def __getitem__(self, key): return self.storage[key] def __setitem__(self, key, value): self.storage[key] = self.DummyCharString(value) def __delitem__(self, key): del self.storage[key] def __iter__(self): return iter(self.storage) def __len__(self): return len(self.storage) class DummyCharString(object): program = None def __init__(self, data): self.program = data self._glyph = self def decompile(self): pass def __iter__(self): return iter(self.program) def __repr__(self): return repr(self.program) def __str__(self): return str(self.program) def __len__(self): return len(self.program) compreffor-0.5.5/src/python/compreffor/test/pyCompressor_test.py000066400000000000000000000242031447637674100252560ustar00rootroot00000000000000# # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import random from compreffor import pyCompressor from compreffor.test.dummy import DummyGlyphSet class TestCffCompressor(unittest.TestCase): def setUp(self): self.glyph_set = DummyGlyphSet({'a': (0, 1, 20, 21, 22, 2), 'b': (7, 0, 1, 20, 21, 22, 2), 'c': (0, 1, 20, 21, 22, 9, 3, 17)}) self.sf = pyCompressor.SubstringFinder(self.glyph_set) self.short_sf = pyCompressor.SubstringFinder(DummyGlyphSet({'a': (1, 2, 3), 'b': (8, 1, 4)})) self.rand_gs = DummyGlyphSet() num_glyphs = random.randint(5, 20) for i in range(num_glyphs): length = random.randint(2, 30) self.rand_gs[i] = tuple(random.randint(0, 100) for _ in range(length)) self.random_sf = pyCompressor.SubstringFinder(DummyGlyphSet(self.rand_gs)) length = 3 locations = [(0, 0), (1, 4)] charstrings = [(348, 374, 'rmoveto', 'endchar'), (123, -206, -140, 'hlineto', 348, 374, 'rmoveto', 'endchar')] self.cand_subr = pyCompressor.CandidateSubr(length, locations[0], 2, charstrings) self.empty_compreffor = pyCompressor.Compreffor(None, test_mode=True) def test_iterative_encode(self): """Test iterative_encode function""" ans = self.empty_compreffor.iterative_encode(self.glyph_set) self.assertIsInstance(ans, dict) encs = ans["glyph_encodings"] expected_subr_length = 5 # subr is (0, 1, 20, 21, 22) for glyph_enc in encs.values(): self.assertTrue(any(cs[1].length == expected_subr_length for cs in glyph_enc)) def test_get_substrings_all(self): """Test get_substrings without restrictions""" ans = [s.value() for s in self.sf.get_substrings(0, False)] expected_values = [(0, 1, 2, 3, 4, 5), (0, 1, 2, 3, 4), (1, 2, 3, 4, 5), (1, 2, 3, 4), \ (2, 3, 4, 5), (2, 3, 4), (3, 4, 5), (3, 4), (4, 5), (4,), (5,)] self.assertEqual(ans, expected_values) def test_get_substrings_standard(self): """Check to make sure all substrings have freq >= 2 and positive savings""" ans = self.sf.get_substrings() for substr in ans: self.assertTrue(substr.freq >= 2) self.assertTrue(substr.subr_saving() > 0) def test_get_suffixes(self): """Test the results of suffix array construction.""" ans = self.short_sf.get_suffixes() self.assertEqual(ans, [(0, 0), (1, 1), (0, 1), (0, 2), (1, 0), (1, 2)]) def test_get_suffixes_random(self): """Check suffix array invariants on random input""" ans = self.random_sf.get_suffixes() # check there are the right number of suffixes expected_num = sum([len(chstring) for chstring in self.rand_gs.values()]) actual_num = len(ans) self.assertEqual(actual_num, expected_num) # check that the order is correct last_glidx, last_tidx = ans[0] last = self.random_sf.data[last_glidx][last_tidx:] for glyph_idx, tok_idx in ans[1:]: current = self.random_sf.data[glyph_idx][tok_idx:] self.assertTrue(last <= current) def test_get_lcp(self): """Test the lcp array generation""" expected = [0, 6, 5, 0, 5, 4, 0, 4, 3, 0, 3, 2, 0, 2, 1, 0, 1, 0, 0, 0, 0] self.assertEqual(self.sf.get_lcp(), expected) def test_human_size(self): """Test the human_size function for various numbers of bytes""" human_size = pyCompressor.human_size self.assertEqual(human_size(2), "2.0 bytes") self.assertEqual(human_size(2050), "2.0 KB") self.assertEqual(human_size(3565158), "3.4 MB") self.assertEqual(human_size(6120328397), "5.7 GB") def test_collect_lsubrs_called_from(self): """Test collecting local subrs called from any global subrs""" g1 = pyCompressor.CandidateSubr(3, (0, 10)) g1._global = True g2 = pyCompressor.CandidateSubr(3, (0, 20)) g2._global = True g3 = pyCompressor.CandidateSubr(3, (0, 30)) g3._global = True l1 = pyCompressor.CandidateSubr(3, (0, 40)) l1._global = False l2 = pyCompressor.CandidateSubr(3, (0, 50)) l2._global = False l3 = pyCompressor.CandidateSubr(3, (0, 60)) l3._global = False g1._encoding = [(3, l1)] g2._encoding = [(3, l2), (6, g3)] g3._encoding = [] l1._encoding = [] l2._encoding = [(3, l3)] l3._encoding = [] lsubrs = self.empty_compreffor.collect_lsubrs_called_from([g1, g2, g3]) self.assertSetEqual(lsubrs, {l1, l2, l3}) def test_update_program_local(self): """Test update_program with only one replacement""" program = [7, 2, 10, 4, 8, 7, 0] substr = pyCompressor.CandidateSubr(3, (0, 1)) substr._position = 5 substr._fdidx = [0] substr._global = False encoding = [(1, substr)] bias = 0 self.empty_compreffor.update_program(program, encoding, bias, [bias], 0) self.assertEqual(program, [7, 5, "callsubr", 8, 7, 0]) def test_update_program_global(self): """Test update_program with only one replacement""" program = [7, 2, 10, 4, 8, 7, 0] substr = pyCompressor.CandidateSubr(3, (0, 1)) substr._position = 5 substr._fdidx = [0] substr._global = True encoding = [(1, substr)] bias = 0 self.empty_compreffor.update_program(program, encoding, bias, [bias], 0) self.assertEqual(program, [7, 5, "callgsubr", 8, 7, 0]) def test_update_program_multiple(self): """Test update_program with two replacements""" program = [7, 2, 10, 4, 8, 7, 0] substr = pyCompressor.CandidateSubr(3, (0, 1)) substr._position = 5 substr._global = True substr2 = pyCompressor.CandidateSubr(2, (0, 5)) substr2._position = 21 substr2._global = True encoding = [(1, substr), (5, substr2)] bias = 0 self.empty_compreffor.update_program(program, encoding, bias, [bias], 0) self.assertEqual(program, [7, 5, "callgsubr", 8, 21, "callgsubr"]) # TODO: make this test actually work def test_multiple_nested_subr_calls(self): """Test to make sure we can handle nested subrs. This is really just a case to make check we're encoding optimally.""" glyph_set = {'a': (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 20), 'b': (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 21), 'c': (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 22), 'd': (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 23), 'e': (0, 1, 2, 3, 4, 5, 6, 7, 14, 15, 16, 17, 18, 19, 24), 'f': (0, 1, 2, 3, 4, 5, 6, 7, 14, 15, 16, 17, 18, 19, 25), 'g': (0, 1, 2, 3, 4, 5, 6, 7, 14, 15, 16, 17, 18, 19, 26),} glyph_set = DummyGlyphSet(glyph_set) ans = self.empty_compreffor.iterative_encode(glyph_set) print(ans["glyph_encodings"]) print(ans["lsubrs"]) print([s._encoding for s in ans["lsubrs"][0]]) def test_expand_hintmask_single_middle(self): """Non-edge usage of expand_hintmask""" data = [1, 2, 3, 4, 5, ('hintmask', 7), 8, 9, 10] self.empty_compreffor.expand_hintmask(data) self.assertEqual(data, [1, 2, 3, 4, 5, 'hintmask', 7, 8, 9, 10]) def test_expand_hintmask_multi_middle(self): """Non-edge usage of expand_hintmask with two items""" data = [1, ('hintmask', 3), 4, 5, ('hintmask', 7), 8, 9, 10] self.empty_compreffor.expand_hintmask(data) self.assertEqual(data, [1, 'hintmask', 3, 4, 5, 'hintmask', 7, 8, 9, 10]) def test_expand_hintmask_multi_end(self): """Non-edge usage of expand_hintmask with two items, one at end""" data = [1, 2, 3, 4, 5, ('hintmask', 7), 8, ('hintmask', 10)] self.empty_compreffor.expand_hintmask(data) self.assertEqual(data, [1, 2, 3, 4, 5, 'hintmask', 7, 8, 'hintmask', 10]) def test_collapse_hintmask_single_middle(self): """Non-edge usage of collapse_hintmask""" data = [1, 2, 3, 4, 5, 'hintmask', 7, 8, 9, 10] self.empty_compreffor.collapse_hintmask(data) self.assertEqual(data, [1, 2, 3, 4, 5, ('hintmask', 7), 8, 9, 10]) def test_collapse_hintmask_multi_middle(self): """Non-edge usage of collapse_hintmask with two items""" data = [1, 'hintmask', 3, 4, 5, 'hintmask', 7, 8, 9, 10] self.empty_compreffor.collapse_hintmask(data) self.assertEqual(data, [1, ('hintmask', 3), 4, 5, ('hintmask', 7), 8, 9, 10]) def test_collapse_hintmask_multi_end(self): """Non-edge usage of collapse_hintmask with two items, one at end""" data = [1, 2, 3, 4, 5, 'hintmask', 7, 8, 'hintmask', 10] self.empty_compreffor.collapse_hintmask(data) self.assertEqual(data, [1, 2, 3, 4, 5, ('hintmask', 7), 8, ('hintmask', 10)]) def test_tokenCost(self): """Make sure single tokens can have their cost calculated""" tokenCost = pyCompressor.tokenCost self.assertEqual(tokenCost('hlineto'), 1) self.assertEqual(tokenCost('flex'), 2) self.assertEqual(tokenCost(107), 1) self.assertEqual(tokenCost(108), 2) def test_candidatesubr_len(self): """Make sure len returns the correct length""" self.assertEqual(len(self.cand_subr), 3) def test_candidatesubr_value(self): """Make sure the value is correct""" expected_value = (348, 374, 'rmoveto') self.assertEqual(self.cand_subr.value(), expected_value) compreffor-0.5.5/src/python/compreffor/test/util.py000066400000000000000000000102441447637674100224670ustar00rootroot00000000000000# # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from fontTools import ttLib from fontTools.misc import psCharStrings from compreffor import decompress, timer log = logging.getLogger(__name__) @timer("check compression integrity") def check_compression_integrity(orignal_file, compressed_file): """Compares two fonts to confirm they are functionally equivalent""" orig_font = ttLib.TTFont(orignal_file) orig_gset = orig_font.getGlyphSet() comp_font = ttLib.TTFont(compressed_file) comp_gset = comp_font.getGlyphSet() assert orig_gset.keys() == comp_gset.keys() decompress(orig_font, make_temp=False) decompress(comp_font, make_temp=False) passed = True for g in orig_gset.keys(): orig_glyph = orig_gset[g]._glyph comp_glyph = comp_gset[g]._glyph orig_glyph.decompile() if not (orig_glyph.program == comp_glyph.program): log.warning("Difference found in glyph '%s'" % (g,)) passed = False if passed: log.info("Fonts match!") return True else: log.warning("Fonts have differences :(") return False @timer("check subroutine nesting depth") def check_call_depth(compressed_file): """Runs `check_cff_call_depth` on a file""" f = ttLib.TTFont(compressed_file) return check_cff_call_depth(f["CFF "].cff) def check_cff_call_depth(cff): """Checks that the Charstrings in the provided CFFFontSet obey the rules for subroutine nesting. Return True if the subroutine nesting level does not exceed the maximum limit (10), else return False. """ SUBR_NESTING_LIMIT = 10 assert len(cff.topDictIndex) == 1 td = cff.topDictIndex[0] class track_info: pass track_info.max_for_all = 0 gsubrs = cff.GlobalSubrs gbias = psCharStrings.calcSubrBias(gsubrs) def follow_program(program, depth, subrs): bias = psCharStrings.calcSubrBias(subrs) if len(program) > 0: last = program[0] for tok in program[1:]: if tok == "callsubr": assert type(last) == int next_subr = subrs[last + bias] if (not hasattr(next_subr, "_max_call_depth") or next_subr._max_call_depth < depth + 1): increment_subr_depth(next_subr, depth + 1, subrs) elif tok == "callgsubr": assert type(last) == int next_subr = gsubrs[last + gbias] if (not hasattr(next_subr, "_max_call_depth") or next_subr._max_call_depth < depth + 1): increment_subr_depth(next_subr, depth + 1, subrs) last = tok else: log.warning("Compiled subr encountered") def increment_subr_depth(subr, depth, subrs=None): if not hasattr(subr, "_max_call_depth") or subr._max_call_depth < depth: subr._max_call_depth = depth if subr._max_call_depth > track_info.max_for_all: track_info.max_for_all = subr._max_call_depth program = subr.program follow_program(program, depth, subrs) for cs in td.CharStrings.values(): cs.decompile() follow_program(cs.program, 0, getattr(cs.private, "Subrs", [])) if track_info.max_for_all <= SUBR_NESTING_LIMIT: log.info("Subroutine nesting depth ok! [max nesting depth of %d]", track_info.max_for_all) return True else: log.warning("Subroutine nesting depth too deep :( [max nesting depth " "of %d]", track_info.max_for_all) return False compreffor-0.5.5/tools/000077500000000000000000000000001447637674100150465ustar00rootroot00000000000000compreffor-0.5.5/tools/analyzer.py000077500000000000000000000065411447637674100172560ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Quick analysis tool for the compreffor. Compresses a directory of fonts and writes a CSV containing various stats about the compression. Usage (command line): >>> ./analyzer.py /path/to/font/dir ... A CSV will be written to the current working directory with the name data.csv. """ import csv import os import time from compreffor import cxxCompressor from fontTools import subset from fontTools.ttLib import TTFont def sum_subrs(font): td = font['CFF '].cff.topDictIndex[0] ans = len(td.GlobalSubrs) try: ans += sum(len(fd.Private.Subrs) if hasattr(fd.Private, 'Subrs') else 0 for fd in td.FDArray) except AttributeError: pass return ans if __name__ == '__main__': names = [] times = [] orig_sizes = [] full_sizes = [] compressed_sizes = [] nsubrs = [] nsubrs_orig = [] for root, dirs, files in os.walk(os.argv[1]): for filename in files: if os.path.splitext(filename)[1] == '.otf': fname = os.path.join(root, filename) print "Handling %s" % filename # decompress print("\tDecompressing...") font = TTFont(fname) orig_subrs = sum_subrs(font) orig_size = os.path.getsize(fname) options = subset.Options() options.decompress = True subsetter = subset.Subsetter(options=options) subsetter.populate(glyphs=font.getGlyphOrder()) subsetter.subset(font) name_parts = os.path.splitext(fname) new_fname = name_parts[0] + '-decomp' + name_parts[1] font.save(new_fname) full_size = os.path.getsize(new_fname) print("\tSubroutinizing...") print("----") start_time = time.time() cxxCompressor.main(filename=new_fname, verbose=True) times.append(time.time() - start_time) print("----") print("\tTabulating results...") comp_fname = name_parts[0] + '-decomp.compressed' + name_parts[1] comp_subrs = sum_subrs(TTFont(comp_fname)) comp_size = os.path.getsize(comp_fname) orig_sizes.append(orig_size) full_sizes.append(full_size) compressed_sizes.append(comp_size) names.append(filename) nsubrs_orig.append(orig_subrs) nsubrs.append(comp_subrs) with open('data.csv', 'w') as csvf: nwriter = csv.writer(csvf) nwriter.writerow(['Name', 'Time to compress', 'Original Size', 'Expanded Size', 'Compressed Size', 'Original # of Subrs', 'Compressed # of Subrs']) nwriter.writerows(zip(names, times, orig_sizes, full_sizes, compressed_sizes, nsubrs_orig, nsubrs)) compreffor-0.5.5/tools/gen_data.py000077500000000000000000000022141447637674100171640ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This script generates a data source file for the C++ compressor. Usage: >>> ./gen_data.py /path/to/font.otf /path/to/output """ import array import subprocess import sys from fontTools.ttLib import TTFont from compreffor.cxxCompressor import write_data if __name__ == '__main__': if len(sys.argv) < 3: print "missing arguments" else: f = TTFont(sys.argv[1]) td = f['CFF '].cff.topDictIndex[0] with open(sys.argv[2], 'w') as out_f: data = write_data(td) out_f.write(data) compreffor-0.5.5/tools/optimizeParameter.py000077500000000000000000000057701447637674100211350ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import os import sys import time import numpy as np from subprocess import call COMMAND_TEMPLATE = "python ../compreffor/pyCompressor.py /path/to/font.otf --chunkratio %f" def time_run(value): command = COMMAND_TEMPLATE % value null = open(os.devnull, "wb") start_time = time.time() call(command.split(), stdout=null) run_time = time.time() - start_time return run_time def minimize_runtime(start_val, stop_val, samples, passes): left = start_val right = stop_val step = float(right - left) / samples for i in range(passes): print "Testing range (%f, %f) with %f steps" % (left, right, step) values = np.arange(left, right + step, step) times = map(time_run, values) lowest = min(enumerate(times), key=lambda x: x[1]) low_val, low_time = values[lowest[0]], lowest[1] print "Current lowest: %f with %gs" % (low_val, low_time) left = low_val - 2 * step while left <= 0: left += step right = low_val + 2 * step step = float(right - left) / samples return low_val def plot_values(start_val, stop_val, step): from matplotlib import pyplot values = np.arange(start_val, stop_val + step, step) times = map(time_run, values) pyplot.plot(values, times) pyplot.title("Time to run vs. Changing parameter") pyplot.xlabel("Parameter Value") pyplot.ylabel("Time to run (seconds)") pyplot.show() if __name__ == "__main__": parser = argparse.ArgumentParser(description='Minimize runtime.') parser.add_argument('-p', help='Plot values', action='store_true', default=False, required=False, dest="plot") parser.add_argument('start_val', help='What value to start at', type=float) parser.add_argument('stop_val', help='What value to stop at', type=float) parser.add_argument('step', help='What value to step by (for plotter) or number of samples (for minimizer)', type=float) parser.add_argument('passes', help='How many passes to run (for minimizer)', type=int, nargs="?") args = parser.parse_args() if not args.plot: assert args.passes != None, "passes argument required" min_val = minimize_runtime(args.start_val, args.stop_val, args.step, args.passes) print "Minimized time value: %f" % min_val else: plot_values(args.start_val, args.stop_val, args.step) compreffor-0.5.5/tools/simpleCffCompressor.py000077500000000000000000000063571447637674100214230ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse from compreffor import pyCompressor class SimpleSubstringFinder(pyCompressor.SubstringFinder): def get_substrings(self, min_freq=2, check_positive=True, sort_by_length=False): movetos = set() for idx, tok in enumerate(self.rev_keymap): if isinstance(tok, basestring) and tok[-6:] == "moveto": movetos.add(idx) try: hmask = self.rev_keymap.index("hintmask") except ValueError: hmask = None matches = {} for glyph_idx, program in enumerate(self.data): cur_start = 0 last_op = -1 for pos, tok in enumerate(program): if tok in movetos: stop = last_op + 1 if stop - cur_start > 0: if program[cur_start:stop] in matches: matches[program[cur_start:stop]].freq += 1 else: span = pyCompressor.CandidateSubr(stop - cur_start, (glyph_idx, cur_start), 1, self.data, self.cost_map) matches[program[cur_start:stop]] = span cur_start = pos + 1 elif tok == hmask: last_op = pos + 1 elif type(self.rev_keymap[tok]) == str: last_op = pos constraints = lambda s: (s.freq >= min_freq and (s.subr_saving() > 0 or not check_positive)) self.substrings = filter(constraints, matches.values()) if sort_by_length: self.substrings.sort(key=lambda s: len(s)) else: self.substrings.sort(key=lambda s: s.subr_saving(), reverse=True) return self.substrings if __name__ == '__main__': parser = argparse.ArgumentParser(description='Subroutinize a font.') parser.add_argument('filename', help='Where to find the font', nargs='*') parser.add_argument('-t', required=False, action='store_true', dest='test', default=False) parser.add_argument('-v', required=False, action='store_true', dest='verbose_test', default=False) parser.add_argument('-c', required=False, action='store_true', dest='check', default=False) kwargs = vars(parser.parse_args()) pyCompressor.SubstringFinder = SimpleSubstringFinder pyCompressor.main(**kwargs) compreffor-0.5.5/tools/subr_grapher.py000077500000000000000000000255211447637674100201130ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from fontTools.ttLib import TTFont from fontTools import cffLib from fontTools.misc import psCharStrings from fontTools.pens import basePen import matplotlib.pyplot as plt import functools import itertools import os import argparse """ Prints out some stats about a set of fonts, mostly related to subroutines. Dependencies: - matplotlib - fontTools Usage: >>> ./subr_grapher.py font1.otf font2.otf font3.otf cff_table.cff NOTE: if the file extension is `cff`, it will be interpreted as a raw CFF table. """ SINGLE_BYTE_OPS = set(['hstem', 'vstem', 'vmoveto', 'rlineto', 'hlineto', 'vlineto', 'rrcurveto', 'callsubr', 'return', 'endchar', 'blend', 'hstemhm', 'hintmask', 'cntrmask', 'rmoveto', 'hmoveto', 'vstemhm', 'rcurveline', 'rlinecurve', 'vvcurveto', 'hhcurveto', # 'shortint', # not really an operatr 'callgsubr', 'vhcurveto', 'hvcurveto']) def tokenCost(token): """Calculate the bytecode size of a T2 Charstring token""" tp = type(token) if issubclass(tp, basestring): if token[:8] in ("hintmask", "cntrmask"): return 1 + len(token[9:]) elif token in SINGLE_BYTE_OPS: return 1 else: return 2 elif tp == tuple: assert token[0] in ("hintmask", "cntrmask") return 1 + len(token[1]) elif tp == int: if -107 <= token <= 107: return 1 elif 108 <= token <= 1131 or -1131 <= token <= -108: return 2 else: return 3 elif tp == float: return 5 assert 0 def get_cff(filename): if os.path.splitext(filename)[1] == '.cff': res = cffLib.CFFFontSet() res.decompile(open(filename), None) return res else: return TTFont(filename)['CFF '].cff def get_cs_bytes(td, fds): count = 0 for cs in td.GlobalSubrs: count += len(cs.bytecode) for fd in fds: try: for cs in fd.Private.Subrs: count += len(cs.bytecode) except AttributeError: pass for cs in td.CharStrings.values(): count += len(cs.bytecode) return count def print_n_subroutines(name, td, fds): print("%s:\n\tGlobal Subrs: %d" % (name, len(td.GlobalSubrs))) for i, fd in enumerate(fds): try: x = len(fd.Private.Subrs) except AttributeError: x = 0 print("\tFD %d Subrs: %d" % (i, x)) def get_savings(td, fds): gsavings = [-(s.subr_cost + 2) if s.program else 0 for s in td.GlobalSubrs] lsavings = [[-(s.subr_cost + 2) if s.program else 0 for s in fd.Private.Subrs] for fd in fds] gusages = [0 for _ in td.GlobalSubrs] lusages = [[0 for _ in fd.Private.Subrs] for fd in fds] gbias = psCharStrings.calcSubrBias(td.GlobalSubrs) lbias = map(lambda fd: psCharStrings.calcSubrBias(fd.Private.Subrs) if hasattr(fd.Private, 'Subrs') else 0, fds) def count_subr(idx, is_global, fdidx=-1): if is_global: gsavings[idx + gbias] += (td.GlobalSubrs[idx + gbias].subr_saving - tokenCost(idx) - 1) gusages[idx + gbias] += 1 subr = td.GlobalSubrs[idx + gbias] else: assert fdidx >= 0 lsavings[fdidx][idx + lbias[fdidx]] += (fds[fdidx].Private.Subrs[idx + lbias[fdidx]].subr_saving - tokenCost(idx) - 1) lusages[fdidx][idx + lbias[fdidx]] += 1 subr = fds[fdidx].Private.Subrs[idx + lbias[fdidx]] # follow called subrs: for before, tok in zip(subr.program, subr.program[1:]): if tok == 'callgsubr': count_subr(before, True, fdidx) elif tok == 'callsubr': count_subr(before, False, fdidx) for g in td.charset: cs, sel = td.CharStrings.getItemAndSelector(g) for before, tok in zip(cs.program, cs.program[1:]): if tok == 'callgsubr': count_subr(before, True, sel) elif tok == 'callsubr': count_subr(before, False, sel) return ((gsavings, lsavings), (gusages, lusages)) def decompile_charstrings(td, fds): for cs in td.GlobalSubrs: cs.subr_cost = cs.subr_saving = len(cs.bytecode) for fd in fds: try: for cs in fd.Private.Subrs: cs.subr_cost = cs.subr_saving = len(cs.bytecode) except AttributeError: pass for g in td.charset: cs, sel = td.CharStrings.getItemAndSelector(g) cs.decompile() for cs in td.GlobalSubrs: if cs.program and cs.program[-1] == 'return': cs.subr_saving -= 1 for fd in fds: try: for cs in fd.Private.Subrs: if cs.program and cs.program[-1] == 'return': cs.subr_saving -= 1 except AttributeError: pass def get_raw_usages(td, fds): gusages = [0 for _ in td.GlobalSubrs] lusages = [[0 for _ in fd.Private.Subrs] for fd in fds] gbias = psCharStrings.calcSubrBias(td.GlobalSubrs) lbias = map(lambda fd: psCharStrings.calcSubrBias(fd.Private.Subrs) if hasattr(fd.Private, 'Subrs') else 0, fds) gsels = [None for _ in td.GlobalSubrs] for g in td.charset: cs, sel = td.CharStrings.getItemAndSelector(g) for before, tok in zip(cs.program, cs.program[1:]): if tok == 'callgsubr': gusages[before + gbias] += 1 gsels[before + gbias] = sel elif tok == 'callsubr': lusages[sel][before + lbias[sel]] += 1 for cs, sel in zip(td.GlobalSubrs, gsels): for before, tok in zip(cs.program, cs.program[1:]): if tok == 'callgsubr': gusages[before + gbias] += 1 elif tok == 'callsubr': lusages[sel][before + lbias[sel]] += 1 for sel, fd in enumerate(fds): if hasattr(fd.Private, 'Subrs'): for cs in fd.Private.Subrs: for before, tok in zip(cs.program, cs.program[1:]): if tok == 'callgsubr': gusages[before + gbias] += 1 elif tok == 'callsubr': lusages[sel][before + lbias[sel]] += 1 return (gusages, lusages) def main(filenames, show_graphs): names = map(os.path.basename, filenames) cffs = map(get_cff, filenames) tds = map(lambda f: f.topDictIndex[0], cffs) fds = map(lambda td: td.FDArray if hasattr(td, 'FDArray') else [], tds) n_bytes = map(get_cs_bytes, tds, fds) for name, b in zip(names, n_bytes): print("%s:\n\t%d bytes" % (name, b)) map(decompile_charstrings, tds, fds) map(print_n_subroutines, names, tds, fds) sav_usag = map(get_savings, tds, fds) for name, (savings, usages) in zip(names, sav_usag): tot_savings = savings[0] + list(itertools.chain.from_iterable(savings[1])) tot_usages = usages[0] + list(itertools.chain.from_iterable(usages[1])) avg = float(sum(tot_savings)) / len(tot_savings) print("%s:\n\tAverage savings per subr: %f\n\tMax saving subr: %d\n\tMax usage subr: %d" % (name, avg, max(tot_savings), max(tot_usages))) if show_graphs: # plot subrs SHOW_START = 0 SHOW_LEN = 200 mins = [] maxes = [] plt.figure(0) for savings, usages in sav_usag: tot_savings = savings[0] + list(itertools.chain.from_iterable(savings[1])) plot_savings = sorted(tot_savings, reverse=True)[SHOW_START:SHOW_START+SHOW_LEN] plt.plot(range(len(plot_savings)), plot_savings) mins.append(min(plot_savings)) maxes.append(max(plot_savings)) plt.ylim([min(mins) - 1, max(maxes) + 1]) plt.title("Subroutine Savings") plt.xlabel("Subroutine") plt.ylabel("Savings (bytes)") raw_usages = map(get_raw_usages, tds, fds) fig = 1 for gusages, lusages in raw_usages: for idx, usages in zip(['Global'] + range(len(lusages)), [gusages] + lusages): if usages: bias = psCharStrings.calcSubrBias(usages) if bias == 1131: orig_order_usages = usages[1024:1240] + usages[0:1024] + usages[1240:] elif bias == 32768: orig_order_usages = (usages[32661:32877] + usages[31637:32661] + usages[32877:33901] + usages[0:31637] + usages[33901:]) else: orig_order_usages = usages plt.figure(fig) plt.plot(range(len(orig_order_usages)), orig_order_usages, color='b') plt.title("Subroutine usages for FD %s" % idx) plt.axvline(215, 0, max(orig_order_usages), color='r') plt.axvline(2263, 0, max(orig_order_usages), color='r') plt.ylim([0, max(orig_order_usages)]) plt.xlim([0, len(orig_order_usages)]) fig += 1 plt.show() if __name__ == '__main__': parser = argparse.ArgumentParser( description="""FontTools Compreffor will take a CFF-flavored OpenType font and automatically detect repeated routines and generate subroutines to minimize the disk space needed to represent a font.""") parser.add_argument('filenames', help="the path to font files", nargs='+') parser.add_argument('-g', '--show-graphs', help="show graphs", action='store_true', default=False) kwargs = vars(parser.parse_args()) main(**kwargs) compreffor-0.5.5/tox.ini000066400000000000000000000003301447637674100152150ustar00rootroot00000000000000[tox] envlist = py3{8,9,10,11,12} [testenv] deps = pytest -rrequirements.txt commands = # run the test suite against installed package; pass extra args to pytest pytest --pyargs compreffor {posargs}