pax_global_header00006660000000000000000000000064143401233430014507gustar00rootroot0000000000000052 comment=1c33aace8f1732449600907086e41c6a34fdc51d ufo2ft-2.30.0/000077500000000000000000000000001434012334300127765ustar00rootroot00000000000000ufo2ft-2.30.0/.codecov.yml000066400000000000000000000001211434012334300152130ustar00rootroot00000000000000comment: false coverage: status: project: false patch: false ufo2ft-2.30.0/.coveragerc000066400000000000000000000016011434012334300151150ustar00rootroot00000000000000[run] # measure 'branch' coverage in addition to 'statement' coverage # See: http://coverage.readthedocs.org/en/coverage-4.0.3/branch.html#branch branch = True # list of directories or packages to measure source = ufo2ft # these are treated as equivalent when combining data [paths] source = Lib/ufo2ft .tox/*/lib/python*/site-packages/ufo2ft .tox/pypy*/site-packages/ufo2ft [report] # Regexes for lines to exclude from consideration exclude_lines = # keywords to use in inline comments to skip coverage pragma: no cover # don't complain if tests don't hit defensive assertion code raise AssertionError raise NotImplementedError # don't complain if non-runnable code isn't run if 0: if __name__ == .__main__.: # ignore source code that can’t be found ignore_errors = True # when running a summary report, show missing lines show_missing = True ufo2ft-2.30.0/.gitattributes000066400000000000000000000004411434012334300156700ustar00rootroot00000000000000# Set the default behavior, in case people don't have core.autocrlf set. * text=lf # Explicitly declare text files you want to always be normalized and converted # to native line endings on checkout. *.cfg text *.ini text *.md text *.py text *.toml text *.txt text *.yaml text *.yml text ufo2ft-2.30.0/.github/000077500000000000000000000000001434012334300143365ustar00rootroot00000000000000ufo2ft-2.30.0/.github/workflows/000077500000000000000000000000001434012334300163735ustar00rootroot00000000000000ufo2ft-2.30.0/.github/workflows/ci.yml000066400000000000000000000077171434012334300175250ustar00rootroot00000000000000name: Test + Deploy on: push: branches: [main] tags: ["v*.*.*"] pull_request: branches: [main] jobs: lint: runs-on: ubuntu-latest # https://github.community/t/github-actions-does-not-respect-skip-ci/17325/8 if: "! contains(toJSON(github.event.commits.*.message), '[skip ci]')" steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: "3.x" - name: Install dependencies run: pip install tox - name: Run style and static checks run: tox -e lint test: runs-on: ${{ matrix.platform }} if: "! contains(toJSON(github.event.commits.*.message), '[skip ci]')" strategy: matrix: python-version: [3.7, 3.9] platform: [ubuntu-latest, windows-latest] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: pip install tox - name: Test with tox run: tox -e py-cov - name: Produce coverage files run: tox -e htmlcov - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: file: coverage.xml flags: unittests name: codecov-umbrella fail_ci_if_error: true # see https://github.com/codecov/codecov-action/issues/557 token: ${{ secrets.CODECOV_TOKEN }} deploy: # only run if the commit is tagged... if: startsWith(github.ref, 'refs/tags/v') # ... 
and both the lint and test jobs completed successfully needs: - lint - test runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: # setuptools_scm requires the git clone to not be 'shallow' fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v2 with: python-version: "3.x" - name: Extract release notes from annotated tag message id: release_notes env: # e.g. v0.1.0a1, v1.2.0b2 or v2.3.0rc3, but not v1.0.0 PRERELEASE_TAG_PATTERN: "v[[:digit:]]+\\.[[:digit:]]+\\.[[:digit:]]+([ab]|rc)[[:digit:]]+" run: | # GH checkout action doesn't preserve tag annotations, we must fetch them # https://github.com/actions/checkout/issues/290 git fetch --tags --force # strip leading 'refs/tags/' to get the tag name TAG_NAME="${GITHUB_REF##*/}" # Dump tag message to temporary .md file (excluding the PGP signature at the bottom) TAG_MESSAGE=$(git tag -l --format='%(contents)' $TAG_NAME | sed -n '/-----BEGIN PGP SIGNATURE-----/q;p') echo "$TAG_MESSAGE" > "${{ runner.temp }}/release_notes.md" # if the tag has a pre-release suffix mark the Github Release accordingly if egrep -q "$PRERELEASE_TAG_PATTERN" <<< "$TAG_NAME"; then echo "Tag contains a pre-release suffix" echo "IS_PRERELEASE=true" >> "$GITHUB_ENV" else echo "Tag does not contain pre-release suffix" echo "IS_PRERELEASE=false" >> "$GITHUB_ENV" fi - name: Create GitHub release id: create_release uses: actions/create-release@v1 env: # This token is provided by Actions, you do not need to create your own token GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: tag_name: ${{ github.ref }} release_name: ${{ github.ref }} body_path: "${{ runner.temp }}/release_notes.md" draft: false prerelease: ${{ env.IS_PRERELEASE }} - name: Build and publish env: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | if [ "$IS_PRERELEASE" == true ]; then echo "DEBUG: This is a pre-release" else echo "DEBUG: This is a final release" fi pipx run build pipx run twine check dist/* pipx run twine upload dist/* ufo2ft-2.30.0/.gitignore000066400000000000000000000006351434012334300147720ustar00rootroot00000000000000# Byte-compiled / optimized files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / Packaging *.egg *.egg-info *.eggs MANIFEST build dist # Unit test / coverage files .tox/* .cache/ .coverage .coverage.* htmlcov/ .pytest_cache/ # OSX Finder .DS_Store # pyenv python configuration file .python-version # autosaved emacs files *~ # autogenerated by setuptools-scm Lib/ufo2ft/_version.py ufo2ft-2.30.0/.pyup.yml000066400000000000000000000002501434012334300145710ustar00rootroot00000000000000# controls the frequency of updates (undocumented beta feature) schedule: every week # do not pin dependencies unless they have explicit version specifiers pin: False ufo2ft-2.30.0/LICENSE000066400000000000000000000020631434012334300140040ustar00rootroot00000000000000The MIT License Copyright (c) 2009 Type Supply LLC Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.ufo2ft-2.30.0/Lib/000077500000000000000000000000001434012334300135045ustar00rootroot00000000000000ufo2ft-2.30.0/Lib/ufo2ft/000077500000000000000000000000001434012334300147115ustar00rootroot00000000000000ufo2ft-2.30.0/Lib/ufo2ft/__init__.py000066400000000000000000000747311434012334300170360ustar00rootroot00000000000000import logging import os from enum import IntEnum from fontTools import varLib from fontTools.designspaceLib import DesignSpaceDocument from fontTools.designspaceLib.split import splitInterpolable, splitVariableFonts from fontTools.misc.loggingTools import Timer from fontTools.otlLib.optimize.gpos import GPOS_COMPACT_MODE_ENV_KEY from ufo2ft.constants import SPARSE_OTF_MASTER_TABLES, SPARSE_TTF_MASTER_TABLES from ufo2ft.errors import InvalidDesignSpaceData from ufo2ft.featureCompiler import ( MTI_FEATURES_PREFIX, FeatureCompiler, MtiFeatureCompiler, ) from ufo2ft.outlineCompiler import OutlineOTFCompiler, OutlineTTFCompiler from ufo2ft.postProcessor import PostProcessor from ufo2ft.preProcessor import ( OTFPreProcessor, TTFInterpolatablePreProcessor, TTFPreProcessor, ) from ufo2ft.util import ( _getDefaultNotdefGlyph, ensure_all_sources_have_names, init_kwargs, prune_unknown_kwargs, ) try: from ._version import version as __version__ except ImportError: __version__ = "0.0.0+unknown" logger = logging.getLogger(__name__) timer = Timer(logging.getLogger("ufo2ft.timer"), level=logging.DEBUG) class CFFOptimization(IntEnum): NONE = 0 SPECIALIZE = 1 SUBROUTINIZE = 2 @timer("preprocess UFO") def call_preprocessor(ufo_or_ufos, *, preProcessorClass, **kwargs): logger.info("Pre-processing glyphs") if kwargs["skipExportGlyphs"] is None: if isinstance(ufo_or_ufos, (list, tuple)): kwargs["skipExportGlyphs"] = set() for ufo in ufo_or_ufos: kwargs["skipExportGlyphs"].update( ufo.lib.get("public.skipExportGlyphs", []) ) else: kwargs["skipExportGlyphs"] = ufo_or_ufos.lib.get( "public.skipExportGlyphs", [] ) # Preprocessors expect this parameter under a different name. 
if "cubicConversionError" in kwargs: kwargs["conversionError"] = kwargs.pop("cubicConversionError") callables = [preProcessorClass] if hasattr(preProcessorClass, "initDefaultFilters"): callables.append(preProcessorClass.initDefaultFilters) preProcessor = preProcessorClass( ufo_or_ufos, **prune_unknown_kwargs(kwargs, *callables) ) return preProcessor.process() @timer("compile a basic TTF") def call_outline_compiler(ufo, glyphSet, *, outlineCompilerClass, **kwargs): kwargs = prune_unknown_kwargs(kwargs, outlineCompilerClass) outlineCompiler = outlineCompilerClass(ufo, glyphSet=glyphSet, **kwargs) return outlineCompiler.compile() def call_postprocessor(otf, ufo, glyphSet, *, postProcessorClass, **kwargs): with timer("postprocess TTF"): if postProcessorClass is not None: postProcessor = postProcessorClass(otf, ufo, glyphSet=glyphSet) kwargs = prune_unknown_kwargs(kwargs, postProcessor.process) otf = postProcessor.process(**kwargs) return otf base_args = dict( postProcessorClass=PostProcessor, featureCompilerClass=None, featureWriters=None, filters=None, glyphOrder=None, useProductionNames=None, removeOverlaps=False, overlapsBackend=None, inplace=False, layerName=None, skipExportGlyphs=None, debugFeatureFile=None, notdefGlyph=None, colrLayerReuse=True, feaIncludeDir=None, ) compileOTF_args = { **base_args, **dict( preProcessorClass=OTFPreProcessor, outlineCompilerClass=OutlineOTFCompiler, optimizeCFF=CFFOptimization.SUBROUTINIZE, roundTolerance=None, cffVersion=1, subroutinizer=None, _tables=None, ), } def compileOTF(ufo, **kwargs): """Create FontTools CFF font from a UFO. *removeOverlaps* performs a union operation on all the glyphs' contours. *optimizeCFF* (int) defines whether the CFF charstrings should be specialized and subroutinized. By default both optimization are enabled. A value of 0 disables both; 1 only enables the specialization; 2 (default) does both specialization and subroutinization. *roundTolerance* (float) controls the rounding of point coordinates. It is defined as the maximum absolute difference between the original float and the rounded integer value. By default, all floats are rounded to integer (tolerance 0.5); a value of 0 completely disables rounding; values in between only round floats which are close to their integral part within the tolerated range. *featureWriters* argument is a list of BaseFeatureWriter subclasses or pre-initialized instances. Features will be written by each feature writer in the given order. If featureWriters is None, the default feature writers [KernFeatureWriter, MarkFeatureWriter] are used. *filters* argument is a list of BaseFilters subclasses or pre-initialized instances. Filters with 'pre' attribute set to True will be pre-filters called before the default filters, otherwise they will be post-filters, called after the default filters. Filters will modify glyphs or the glyph set. The default filters cannot be disabled. *useProductionNames* renames glyphs in TrueType 'post' or OpenType 'CFF ' tables based on the 'public.postscriptNames' mapping in the UFO lib, if present. Otherwise, uniXXXX names are generated from the glyphs' unicode values. The default value (None) will first check if the UFO lib has the 'com.github.googlei18n.ufo2ft.useProductionNames' key. If this is missing or True (default), the glyphs are renamed. Set to False to keep the original names. **inplace** (bool) specifies whether the filters should modify the input UFO's glyphs, a copy should be made first. *layerName* specifies which layer should be compiled. 
When compiling something other than the default layer, feature compilation is skipped. *skipExportGlyphs* is a list or set of glyph names to not be exported to the final font. If these glyphs are used as components in any other glyph, those components get decomposed. If the parameter is not passed in, the UFO's "public.skipExportGlyphs" lib key will be consulted. If it doesn't exist, all glyphs are exported. UFO groups and kerning will be pruned of skipped glyphs. *cffVersion* (int) is the CFF format, choose between 1 (default) and 2. *subroutinizer* (Optional[str]) is the name of the library to use for compressing CFF charstrings, if subroutinization is enabled by optimizeCFF parameter. Choose between "cffsubr" or "compreffor". By default "cffsubr" is used for both CFF 1 and CFF 2. NOTE: cffsubr is required for subroutinizing CFF2 tables, as compreffor currently doesn't support it. """ kwargs = init_kwargs(kwargs, compileOTF_args) glyphSet = call_preprocessor(ufo, **kwargs) logger.info("Building OpenType tables") optimizeCFF = CFFOptimization(kwargs.pop("optimizeCFF")) tables = kwargs.pop("_tables") otf = call_outline_compiler( ufo, glyphSet, **kwargs, optimizeCFF=optimizeCFF >= CFFOptimization.SPECIALIZE, tables=tables, ) # Only the default layer is likely to have all glyphs used in feature code. if kwargs["layerName"] is None: compileFeatures(ufo, otf, glyphSet=glyphSet, **kwargs) return call_postprocessor( otf, ufo, glyphSet, **kwargs, optimizeCFF=optimizeCFF >= CFFOptimization.SUBROUTINIZE, ) compileTTF_args = { **base_args, **dict( preProcessorClass=TTFPreProcessor, outlineCompilerClass=OutlineTTFCompiler, convertCubics=True, cubicConversionError=None, reverseDirection=True, rememberCurveType=True, flattenComponents=False, ), } def compileTTF(ufo, **kwargs): """Create FontTools TrueType font from a UFO. *removeOverlaps* performs a union operation on all the glyphs' contours. *flattenComponents* un-nests glyphs so that they have at most one level of components. *convertCubics* and *cubicConversionError* specify how the conversion from cubic to quadratic curves should be handled. *layerName* specifies which layer should be compiled. When compiling something other than the default layer, feature compilation is skipped. *skipExportGlyphs* is a list or set of glyph names to not be exported to the final font. If these glyphs are used as components in any other glyph, those components get decomposed. If the parameter is not passed in, the UFO's "public.skipExportGlyphs" lib key will be consulted. If it doesn't exist, all glyphs are exported. UFO groups and kerning will be pruned of skipped glyphs. """ kwargs = init_kwargs(kwargs, compileTTF_args) glyphSet = call_preprocessor(ufo, **kwargs) logger.info("Building OpenType tables") otf = call_outline_compiler(ufo, glyphSet, **kwargs) # Only the default layer is likely to have all glyphs used in feature code. if kwargs["layerName"] is None: compileFeatures(ufo, otf, glyphSet=glyphSet, **kwargs) return call_postprocessor(otf, ufo, glyphSet, **kwargs) compileInterpolatableTTFs_args = { **base_args, **dict( preProcessorClass=TTFInterpolatablePreProcessor, outlineCompilerClass=OutlineTTFCompiler, cubicConversionError=None, reverseDirection=True, flattenComponents=False, layerNames=None, colrLayerReuse=False, ), } def compileInterpolatableTTFs(ufos, **kwargs): """Create FontTools TrueType fonts from a list of UFOs with interpolatable outlines. Cubic curves are converted compatibly to quadratic curves using the Cu2Qu conversion algorithm. 
Return an iterator object that yields a TTFont instance for each UFO. *layerNames* refers to the layer names to use glyphs from in the order of the UFOs in *ufos*. By default, this is a list of `[None]` times the number of UFOs, i.e. using the default layer from all the UFOs. When the layerName is not None for a given UFO, the corresponding TTFont object will contain only a minimum set of tables ("head", "hmtx", "glyf", "loca", "maxp", "post" and "vmtx"), and no OpenType layout tables. *skipExportGlyphs* is a list or set of glyph names to not be exported to the final font. If these glyphs are used as components in any other glyph, those components get decomposed. If the parameter is not passed in, the union of all UFO's "public.skipExportGlyphs" lib keys will be used. If they don't exist, all glyphs are exported. UFO groups and kerning will be pruned of skipped glyphs. """ from ufo2ft.util import _LazyFontName kwargs = init_kwargs(kwargs, compileInterpolatableTTFs_args) if kwargs["layerNames"] is None: kwargs["layerNames"] = [None] * len(ufos) assert len(ufos) == len(kwargs["layerNames"]) glyphSets = call_preprocessor(ufos, **kwargs) for ufo, glyphSet, layerName in zip(ufos, glyphSets, kwargs["layerNames"]): fontName = _LazyFontName(ufo) if layerName is not None: logger.info("Building OpenType tables for %s-%s", fontName, layerName) else: logger.info("Building OpenType tables for %s", fontName) ttf = call_outline_compiler( ufo, glyphSet, **kwargs, tables=SPARSE_TTF_MASTER_TABLES if layerName else None, ) # Only the default layer is likely to have all glyphs used in feature # code. if layerName is None: if kwargs["debugFeatureFile"]: kwargs["debugFeatureFile"].write("\n### %s ###\n" % fontName) compileFeatures(ufo, ttf, glyphSet=glyphSet, **kwargs) ttf = call_postprocessor(ttf, ufo, glyphSet, **kwargs) if layerName is not None: # for sparse masters (i.e. containing only a subset of the glyphs), we # need to include the post table in order to store glyph names, so that # fontTools.varLib can interpolate glyphs with same name across masters. # However we want to prevent the underlinePosition/underlineThickness # fields in such sparse masters to be included when computing the deltas # for the MVAR table. Thus, we set them to this unlikely, limit value # (-36768) which is a signal varLib should ignore them when building MVAR. ttf["post"].underlinePosition = -0x8000 ttf["post"].underlineThickness = -0x8000 yield ttf def compileInterpolatableTTFsFromDS(designSpaceDoc, **kwargs): """Create FontTools TrueType fonts from the DesignSpaceDocument UFO sources with interpolatable outlines. Cubic curves are converted compatibly to quadratic curves using the Cu2Qu conversion algorithm. If the Designspace contains a "public.skipExportGlyphs" lib key, these glyphs will not be exported to the final font. If these glyphs are used as components in any other glyph, those components get decomposed. If the lib key doesn't exist in the Designspace, all glyphs are exported (keys in individual UFOs are ignored). UFO groups and kerning will be pruned of skipped glyphs. The DesignSpaceDocument should contain SourceDescriptor objects with 'font' attribute set to an already loaded defcon.Font object (or compatible UFO Font class). If 'font' attribute is unset or None, an AttributeError exception is thrown. Return a copy of the DesignSpaceDocument object (or the same one if inplace=True) with the source's 'font' attribute set to the corresponding TTFont instance. 
For sources that have the 'layerName' attribute defined, the corresponding TTFont object will contain only a minimum set of tables ("head", "hmtx", "glyf", "loca", "maxp", "post" and "vmtx"), and no OpenType layout tables. """ kwargs = init_kwargs(kwargs, compileInterpolatableTTFs_args) ufos, kwargs["layerNames"] = [], [] for source in designSpaceDoc.sources: if source.font is None: raise AttributeError( "designspace source '%s' is missing required 'font' attribute" % getattr(source, "name", "") ) ufos.append(source.font) # 'layerName' is None for the default layer kwargs["layerNames"].append(source.layerName) kwargs["skipExportGlyphs"] = designSpaceDoc.lib.get("public.skipExportGlyphs", []) if kwargs["notdefGlyph"] is None: kwargs["notdefGlyph"] = _getDefaultNotdefGlyph(designSpaceDoc) ttfs = compileInterpolatableTTFs(ufos, **kwargs) if kwargs["inplace"]: result = designSpaceDoc else: result = designSpaceDoc.deepcopyExceptFonts() for source, ttf in zip(result.sources, ttfs): source.font = ttf return result compileInterpolatableOTFs_args = { **base_args, **dict( preProcessorClass=OTFPreProcessor, outlineCompilerClass=OutlineOTFCompiler, featureCompilerClass=None, roundTolerance=None, optimizeCFF=CFFOptimization.NONE, colrLayerReuse=False, ), } def compileInterpolatableOTFsFromDS(designSpaceDoc, **kwargs): """Create FontTools CFF fonts from the DesignSpaceDocument UFO sources with interpolatable outlines. Interpolatable means without subroutinization and specializer optimizations and no removal of overlaps. If the Designspace contains a "public.skipExportGlyphs" lib key, these glyphs will not be exported to the final font. If these glyphs are used as components in any other glyph, those components get decomposed. If the lib key doesn't exist in the Designspace, all glyphs are exported (keys in individual UFOs are ignored). UFO groups and kerning will be pruned of skipped glyphs. The DesignSpaceDocument should contain SourceDescriptor objects with 'font' attribute set to an already loaded defcon.Font object (or compatible UFO Font class). If 'font' attribute is unset or None, an AttributeError exception is thrown. Return a copy of the DesignSpaceDocument object (or the same one if inplace=True) with the source's 'font' attribute set to the corresponding TTFont instance. For sources that have the 'layerName' attribute defined, the corresponding TTFont object will contain only a minimum set of tables ("head", "hmtx", "CFF ", "maxp", "vmtx" and "VORG"), and no OpenType layout tables. 
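    A minimal usage sketch (the designspace path and output file names are
    illustrative assumptions; every source must point at an existing UFO):

        from fontTools.designspaceLib import DesignSpaceDocument
        import ufoLib2
        from ufo2ft import compileInterpolatableOTFsFromDS

        ds = DesignSpaceDocument.fromfile("MyFamily.designspace")
        for source in ds.sources:
            # each source needs an already loaded UFO object
            source.font = ufoLib2.Font.open(source.path)
        result = compileInterpolatableOTFsFromDS(ds)
        for i, source in enumerate(result.sources):
            source.font.save(f"master_{i}.otf")  # each font is a TTFont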
""" kwargs = init_kwargs(kwargs, compileInterpolatableOTFs_args) for source in designSpaceDoc.sources: if source.font is None: raise AttributeError( "designspace source '%s' is missing required 'font' attribute" % getattr(source, "name", "") ) kwargs["skipExportGlyphs"] = designSpaceDoc.lib.get("public.skipExportGlyphs", []) if kwargs["notdefGlyph"] is None: kwargs["notdefGlyph"] = _getDefaultNotdefGlyph(designSpaceDoc) otfs = [] for source in designSpaceDoc.sources: otfs.append( compileOTF( ufo=source.font, **{ **kwargs, **dict( layerName=source.layerName, removeOverlaps=False, overlapsBackend=None, optimizeCFF=CFFOptimization.NONE, _tables=SPARSE_OTF_MASTER_TABLES if source.layerName else None, ), }, ) ) if kwargs["inplace"]: result = designSpaceDoc else: result = designSpaceDoc.deepcopyExceptFonts() for source, otf in zip(result.sources, otfs): source.font = otf return result def compileFeatures( ufo, ttFont=None, glyphSet=None, featureCompilerClass=None, debugFeatureFile=None, **kwargs, ): """Compile OpenType Layout features from `ufo` into FontTools OTL tables. If `ttFont` is None, a new TTFont object is created containing the new tables, else the provided `ttFont` is updated with the new tables. If no explicit `featureCompilerClass` is provided, the one used will depend on whether the ufo contains any MTI feature files in its 'data' directory (thus the `MTIFeatureCompiler` is used) or not (then the default FeatureCompiler for Adobe FDK features is used). If skipExportGlyphs is provided (see description in the ``compile*`` functions), the feature compiler will prune groups (removing them if empty) and kerning of the UFO of these glyphs. The feature file is left untouched. `debugFeatureFile` can be a file or file-like object opened in text mode, in which to dump the text content of the feature file, useful for debugging auto-generated OpenType features like kern, mark, mkmk etc. """ if featureCompilerClass is None: if any( fn.startswith(MTI_FEATURES_PREFIX) and fn.endswith(".mti") for fn in ufo.data.fileNames ): featureCompilerClass = MtiFeatureCompiler else: featureCompilerClass = FeatureCompiler kwargs = prune_unknown_kwargs(kwargs, featureCompilerClass) featureCompiler = featureCompilerClass(ufo, ttFont, glyphSet=glyphSet, **kwargs) otFont = featureCompiler.compile() if debugFeatureFile: if hasattr(featureCompiler, "writeFeatures"): featureCompiler.writeFeatures(debugFeatureFile) return otFont compileVariableTTF_args = { **base_args, **dict( preProcessorClass=TTFInterpolatablePreProcessor, outlineCompilerClass=OutlineTTFCompiler, cubicConversionError=None, reverseDirection=True, flattenComponents=False, excludeVariationTables=(), optimizeGvar=True, ), } def compileVariableTTF(designSpaceDoc, **kwargs): """Create FontTools TrueType variable font from the DesignSpaceDocument UFO sources with interpolatable outlines, using fontTools.varLib.build. *optimizeGvar*, if set to False, will not perform IUP optimization on the generated 'gvar' table. *excludeVariationTables* is a list of sfnt table tags (str) that is passed on to fontTools.varLib.build, to skip building some variation tables. The rest of the arguments works the same as in the other compile functions. Returns a new variable TTFont object. """ kwargs = init_kwargs(kwargs, compileVariableTTF_args) fonts = compileVariableTTFs(designSpaceDoc, **kwargs) if len(fonts) != 1: raise ValueError( "Tried to build a DesignSpace version 5 with multiple variable " "fonts using the old ufo2ft API `compileVariableTTF`. 
" "Use the new API instead `compileVariableTTFs`" ) return next(iter(fonts.values())) compileVariableTTFs_args = { **compileVariableTTF_args, **dict(variableFontNames=None), } def compileVariableTTFs(designSpaceDoc: DesignSpaceDocument, **kwargs): """Create FontTools TrueType variable fonts for each variable font defined in the given DesignSpaceDocument, using their UFO sources with interpolatable outlines, using fontTools.varLib.build. *optimizeGvar*, if set to False, will not perform IUP optimization on the generated 'gvar' table. *excludeVariationTables* is a list of sfnt table tags (str) that is passed on to fontTools.varLib.build, to skip building some variation tables. *variableFontNames* is an optional list of names of variable fonts to build. If not provided, all variable fonts listed in the given designspace will by built. The rest of the arguments works the same as in the other compile functions. Returns a dictionary that maps each variable font filename to a new variable TTFont object. If no variable fonts are defined in the Designspace, returns an empty dictionary. .. versionadded:: 2.28.0 """ kwargs = init_kwargs(kwargs, compileVariableTTFs_args) optimizeGvar = kwargs.pop("optimizeGvar") excludeVariationTables = kwargs.pop("excludeVariationTables") variableFontNames = kwargs.pop("variableFontNames") colrLayerReuse = kwargs.pop("colrLayerReuse") # Pop inplace because we'll make a copy at this level so deeper functions # don't need to worry inplace = kwargs.pop("inplace") if not inplace: designSpaceDoc = designSpaceDoc.deepcopyExceptFonts() vfNameToBaseUfo = _compileNeededSources( kwargs, designSpaceDoc, variableFontNames, compileInterpolatableTTFsFromDS ) if not vfNameToBaseUfo: return {} logger.info("Building variable TTF fonts: %s", ", ".join(vfNameToBaseUfo)) with timer("merge fonts to variable"): vfNameToTTFont = varLib.build_many( designSpaceDoc, exclude=excludeVariationTables, optimize=optimizeGvar, skip_vf=lambda vf_name: variableFontNames and vf_name not in variableFontNames, colr_layer_reuse=colrLayerReuse, ) for vfName, varfont in list(vfNameToTTFont.items()): vfNameToTTFont[vfName] = call_postprocessor( varfont, vfNameToBaseUfo[vfName], glyphSet=None, **kwargs ) return vfNameToTTFont compileVariableCFF2_args = { **base_args, **dict( preProcessorClass=OTFPreProcessor, outlineCompilerClass=OutlineOTFCompiler, roundTolerance=None, excludeVariationTables=(), optimizeCFF=CFFOptimization.SPECIALIZE, ), } def compileVariableCFF2(designSpaceDoc, **kwargs): """Create FontTools CFF2 variable font from the DesignSpaceDocument UFO sources with interpolatable outlines, using fontTools.varLib.build. *excludeVariationTables* is a list of sfnt table tags (str) that is passed on to fontTools.varLib.build, to skip building some variation tables. *optimizeCFF* (int) defines whether the CFF charstrings should be specialized and subroutinized. 1 (default) only enables the specialization; 2 (default) does both specialization and subroutinization. The value 0 is supposed to disable both optimizations, however it's currently unused, because fontTools has some issues generating a VF with non-specialized CFF2 charstrings: fonttools/fonttools#1979. NOTE: Subroutinization of variable CFF2 requires the "cffsubr" extra requirement. The rest of the arguments works the same as in the other compile functions. Returns a new variable TTFont object. 
""" kwargs = init_kwargs(kwargs, compileVariableCFF2_args) fonts = compileVariableCFF2s(designSpaceDoc, **kwargs) if len(fonts) != 1: raise ValueError( "Tried to build a DesignSpace version 5 with multiple variable " "fonts using the old ufo2ft API `compileVariableCFF2`. " "Use the new API instead `compileVariableCFF2s`" ) return next(iter(fonts.values())) compileVariableCFF2s_args = { **compileVariableCFF2_args, **dict(variableFontNames=None), } def compileVariableCFF2s(designSpaceDoc, **kwargs): """Create FontTools CFF2 variable fonts for each variable font defined in the given DesignSpaceDocument, using their UFO sources with interpolatable outlines, using fontTools.varLib.build. *excludeVariationTables* is a list of sfnt table tags (str) that is passed on to fontTools.varLib.build, to skip building some variation tables. *optimizeCFF* (int) defines whether the CFF charstrings should be specialized and subroutinized. 1 (default) only enables the specialization; 2 (default) does both specialization and subroutinization. The value 0 is supposed to disable both optimizations, however it's currently unused, because fontTools has some issues generating a VF with non-specialized CFF2 charstrings: fonttools/fonttools#1979. NOTE: Subroutinization of variable CFF2 requires the "cffsubr" extra requirement. *variableFontNames* is an optional list of filenames of variable fonts to build. If not provided, all variable fonts listed in the given designspace will by built. The rest of the arguments works the same as in the other compile functions. Returns a dictionary that maps each variable font filename to a new variable TTFont object. .. versionadded:: 2.28.0 """ kwargs = init_kwargs(kwargs, compileVariableCFF2s_args) excludeVariationTables = kwargs.pop("excludeVariationTables") optimizeCFF = CFFOptimization(kwargs.pop("optimizeCFF")) variableFontNames = kwargs.pop("variableFontNames") colrLayerReuse = kwargs.pop("colrLayerReuse") # Pop inplace because we'll make a copy at this level so deeper functions # don't need to worry inplace = kwargs.pop("inplace") if not inplace: designSpaceDoc = designSpaceDoc.deepcopyExceptFonts() vfNameToBaseUfo = _compileNeededSources( kwargs, designSpaceDoc, variableFontNames, compileInterpolatableOTFsFromDS ) if not vfNameToBaseUfo: logger.warning("No variable fonts to build") return {} logger.info(f"Building variable CFF2 fonts: {', '.join(vfNameToBaseUfo)}") vfNameToTTFont = varLib.build_many( designSpaceDoc, exclude=excludeVariationTables, # NOTE optimize=False won't change anything until this PR is merged # https://github.com/fonttools/fonttools/pull/1979 optimize=optimizeCFF >= CFFOptimization.SPECIALIZE, skip_vf=lambda vf_name: variableFontNames and vf_name not in variableFontNames, colr_layer_reuse=colrLayerReuse, ) for vfName, varfont in list(vfNameToTTFont.items()): vfNameToTTFont[vfName] = call_postprocessor( varfont, vfNameToBaseUfo[vfName], glyphSet=None, **kwargs, optimizeCFF=optimizeCFF >= CFFOptimization.SUBROUTINIZE, ) return vfNameToTTFont def _compileNeededSources( kwargs, designSpaceDoc, variableFontNames, compileInterpolatableFunc ): # We'll need to map elements to TTFonts, to do so make sure that # each has a name. 
ensure_all_sources_have_names(designSpaceDoc) # Go through VFs to build and gather list of needed sources to compile interpolableSubDocs = [ subDoc for _location, subDoc in splitInterpolable(designSpaceDoc) ] vfNameToBaseUfo = {} sourcesToCompile = set() for subDoc in interpolableSubDocs: for vfName, vfDoc in splitVariableFonts(subDoc): if variableFontNames is not None and vfName not in variableFontNames: # This VF is not needed so we don't need to compile its sources continue default_source = vfDoc.findDefault() if default_source is None: raise InvalidDesignSpaceData("No default source.") vfNameToBaseUfo[vfName] = default_source.font for source in vfDoc.sources: sourcesToCompile.add(source.name) # Match sources to compile to their Descriptor in the original designspace sourcesByName = {} for source in designSpaceDoc.sources: if source.name in sourcesToCompile: sourcesByName[source.name] = source # Compile all needed sources in each interpolable subspace to make sure # they're all compatible; that also ensures that sub-vfs within the same # interpolable sub-space are compatible too. for subDoc in interpolableSubDocs: # Only keep the sources that we've identified earlier as need-to-compile subDoc.sources = [s for s in subDoc.sources if s.name in sourcesToCompile] if not subDoc.sources: continue # FIXME: Hack until we get a fontTools config module. Disable GPOS # compaction while building masters because the compaction will be undone # anyway by varLib merge and then done again on the VF gpos_compact_value = os.environ.pop(GPOS_COMPACT_MODE_ENV_KEY, None) try: ttfDesignSpace = compileInterpolatableFunc( subDoc, **{ **kwargs, **dict( useProductionNames=False, # will rename glyphs after varfont is built # No need to post-process intermediate fonts. postProcessorClass=None, ), }, ) finally: if gpos_compact_value is not None: os.environ[GPOS_COMPACT_MODE_ENV_KEY] = gpos_compact_value # Stick TTFs back into original big DS for ttfSource in ttfDesignSpace.sources: sourcesByName[ttfSource.name].font = ttfSource.font return vfNameToBaseUfo ufo2ft-2.30.0/Lib/ufo2ft/constants.py000066400000000000000000000067471434012334300173150ustar00rootroot00000000000000SPARSE_TTF_MASTER_TABLES = frozenset( ["glyf", "head", "hmtx", "loca", "maxp", "post", "vmtx"] ) SPARSE_OTF_MASTER_TABLES = frozenset(["CFF ", "VORG", "head", "hmtx", "maxp", "vmtx"]) UFO2FT_PREFIX = "com.github.googlei18n.ufo2ft." GLYPHS_PREFIX = "com.schriftgestaltung." FILTERS_KEY = UFO2FT_PREFIX + "filters" MTI_FEATURES_PREFIX = UFO2FT_PREFIX + "mtiFeatures" FEATURE_WRITERS_KEY = UFO2FT_PREFIX + "featureWriters" USE_PRODUCTION_NAMES = UFO2FT_PREFIX + "useProductionNames" GLYPHS_DONT_USE_PRODUCTION_NAMES = GLYPHS_PREFIX + "Don't use Production Names" KEEP_GLYPH_NAMES = UFO2FT_PREFIX + "keepGlyphNames" COLOR_LAYERS_KEY = UFO2FT_PREFIX + "colorLayers" COLOR_PALETTES_KEY = UFO2FT_PREFIX + "colorPalettes" COLOR_LAYER_MAPPING_KEY = UFO2FT_PREFIX + "colorLayerMapping" # sequence of [glyphs, clipBox], where 'glyphs' is in turn a sequence of # glyph names, and 'clipBox' a 5- or 4-item sequence of numbers: # Sequence[ # Sequence[ # Sequence[str, ...], # glyph names # Union[ # Sequence[float, float, float, float, float], # variable box # Sequence[float, float, float, float], # non-variable box # ] # ], # ... 
# ] COLR_CLIP_BOXES_KEY = UFO2FT_PREFIX + "colrClipBoxes" OPENTYPE_CATEGORIES_KEY = "public.openTypeCategories" OPENTYPE_META_KEY = "public.openTypeMeta" UNICODE_VARIATION_SEQUENCES_KEY = "public.unicodeVariationSequences" INDIC_SCRIPTS = [ "Beng", # Bengali "Cham", # Cham "Deva", # Devanagari "Gujr", # Gujarati "Guru", # Gurmukhi "Knda", # Kannada "Mlym", # Malayalam "Orya", # Oriya "Sinh", # Sinhala "Taml", # Tamil "Telu", # Telugu ] USE_SCRIPTS = [ # Correct as at Unicode 14.0 "Tibt", # Tibetan "Mong", # Mongolian # HB has Sinhala commented out here?! "Buhd", # Buhid "Hano", # Hanunoo "Tglg", # Tagalog "Tagb", # Tagbanwa "Limb", # Limbu "Tale", # Tai Le "Bugi", # Buginese "Khar", # Kharosthi "Sylo", # Syloti Nagri "tfng", # Tifinagh "Bali", # Balinese "Nko ", # Nko "Phag", # Phags Pa "Cham", # Cham "Kali", # Kayah Li "Lepc", # Lepcha "Rjng", # Rejang "Saur", # Saurashtra "Sund", # Sundanese "Egyp", # Egyptian Hieroglyphs "Java", # Javanese "Kthi", # Kaithi "Mtei", # Meetei Mayek "Lana", # Tai Tham "Tavt", # Tai Viet "Batk", # Batak "Brah", # Brahmi "Mand", # Mandaic "Cakm", # Chakma "Plrd", # Miao "Shrd", # Sharada "Takr", # Takri "Dupl", # Duployan "Gran", # Grantha "Khoj", # Khojki "Sind", # Khudawadi "Mahj", # Mahajani "Mani", # Manichaean "Modi", # Modi "Hmng", # Pahawh Hmong "Phlp", # Psalter Pahlavi "Sidd", # Siddham "Tirh", # Tirhuta "Ahom", # Ahom "Mult", # Multani "Adlm", # Adlam "Nhks", # Bhaiksuki "Marc", # Marchen "Newa", # Newa "Gonm", # Masaram Gondi "Soyo", # Soyombo "Zanb", # Zanabazar Square "Dogr", # Dogra "Gong", # Gunjala Gondi "Rohg", # Hanifi Rohingya "Maka", # Makasar "Medf", # Medefaidrin "Sogo", # Old Sogdian "Sogd", # Sogdian "Elym", # Elymaic "Nand", # Nandinagari "Hmnp", # Nyiakeng Puachue Hmong "Wcho", # Wancho "Chrs", # Chorasmian "Diak", # Dives Akuru "Kits", # Khitan Small Script "Yezi", # Yezidi "Cpmn", # Cypro Minoan "Ougr", # Old Uyghur "Tnsa", # Tangsa "Toto", # Toto "Vith", # Vithkuqi ] ufo2ft-2.30.0/Lib/ufo2ft/errors.py000066400000000000000000000006421434012334300166010ustar00rootroot00000000000000class Error(Exception): """Base exception class for all ufo2ft errors.""" pass class InvalidFontData(Error): """Raised when input font contains invalid data.""" pass class InvalidFeaturesData(Error): """Raised when input font contains invalid features data.""" pass class InvalidDesignSpaceData(Error): """Raised when input DesignSpace document contains invalid data.""" pass ufo2ft-2.30.0/Lib/ufo2ft/featureCompiler.py000066400000000000000000000315131434012334300204140ustar00rootroot00000000000000import logging import os from collections import OrderedDict from inspect import isclass from io import StringIO from tempfile import NamedTemporaryFile from fontTools import mtiLib from fontTools.feaLib.builder import addOpenTypeFeaturesFromString from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound from fontTools.feaLib.parser import Parser from fontTools.misc.loggingTools import Timer from ufo2ft.constants import MTI_FEATURES_PREFIX from ufo2ft.featureWriters import ( CursFeatureWriter, GdefFeatureWriter, KernFeatureWriter, MarkFeatureWriter, ast, isValidFeatureWriter, loadFeatureWriters, ) logger = logging.getLogger(__name__) timer = Timer(logging.getLogger("ufo2ft.timer"), level=logging.DEBUG) def parseLayoutFeatures(font, includeDir=None): """Parse OpenType layout features in the UFO and return a feaLib.ast.FeatureFile instance. includeDir is an optional directory path to search for included feature files, if omitted the font.path is used. 
If the latter is also not set, the feaLib Lexer uses the current working directory. """ featxt = font.features.text or "" if not featxt: return ast.FeatureFile() buf = StringIO(featxt) ufoPath = font.path if includeDir is None and ufoPath is not None: # The UFO v3 specification says "Any include() statements must be relative to # the UFO path, not to the features.fea file itself". We set the `name` # attribute on the buffer to the actual feature file path, which feaLib will # pick up and use to attribute errors to the correct file, and explicitly set # the include directory to the parent of the UFO. ufoPath = os.path.normpath(ufoPath) buf.name = os.path.join(ufoPath, "features.fea") includeDir = os.path.dirname(ufoPath) glyphNames = set(font.keys()) includeDir = os.path.normpath(includeDir) if includeDir else None try: parser = Parser(buf, glyphNames, includeDir=includeDir) doc = parser.parse() except IncludedFeaNotFound as e: if ufoPath and os.path.exists(os.path.join(ufoPath, e.args[0])): logger.warning( "Please change the file name in the include(...); " "statement to be relative to the UFO itself, " "instead of relative to the 'features.fea' file " "contained in it." ) raise return doc class BaseFeatureCompiler: """Base class for generating OpenType features and compiling OpenType layout tables from these. """ def __init__(self, ufo, ttFont=None, glyphSet=None, **kwargs): """ Args: ufo: an object representing a UFO (defcon.Font or equivalent) containing the features source data. ttFont: a fontTools TTFont object where the generated OpenType tables are added. If None, an empty TTFont is used, with the same glyph order as the ufo object. glyphSet: a (optional) dict containing pre-processed copies of the UFO glyphs. """ self.ufo = ufo if ttFont is None: from fontTools.ttLib import TTFont from ufo2ft.util import makeOfficialGlyphOrder ttFont = TTFont() ttFont.setGlyphOrder(makeOfficialGlyphOrder(ufo)) self.ttFont = ttFont glyphOrder = ttFont.getGlyphOrder() if glyphSet is not None: assert set(glyphOrder) == set(glyphSet.keys()) else: glyphSet = ufo self.glyphSet = OrderedDict((gn, glyphSet[gn]) for gn in glyphOrder) def setupFeatures(self): """Make the features source. **This should not be called externally.** Subclasses must override this method. """ raise NotImplementedError def buildTables(self): """Compile OpenType feature tables from the source. **This should not be called externally.** Subclasses must override this method. """ raise NotImplementedError def setupFile_features(self): """DEPRECATED. Use 'setupFeatures' instead.""" _deprecateMethod("setupFile_features", "setupFeatures") self.setupFeatures() def setupFile_featureTables(self): """DEPRECATED. Use 'setupFeatures' instead.""" _deprecateMethod("setupFile_featureTables", "buildTables") self.buildTables() def compile(self): if "setupFile_features" in self.__class__.__dict__: _deprecateMethod("setupFile_features", "setupFeatures") self.setupFile_features() else: self.setupFeatures() if "setupFile_featureTables" in self.__class__.__dict__: _deprecateMethod("setupFile_featureTables", "buildTables") self.setupFile_featureTables() else: self.buildTables() return self.ttFont def _deprecateMethod(arg, repl): import warnings warnings.warn( f"{arg!r} method is deprecated; use {repl!r} instead", category=UserWarning, stacklevel=3, ) class FeatureCompiler(BaseFeatureCompiler): """Generate automatic features and compile OpenType tables from Adobe Feature File stored in the UFO, using fontTools.feaLib as compiler. 
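    Typical standalone usage (sketch; `ufo` is assumed to be an already
    loaded defcon or ufoLib2 font object):

        from ufo2ft.featureCompiler import FeatureCompiler

        compiler = FeatureCompiler(ufo)
        ttFont = compiler.compile()  # TTFont containing the compiled layout tables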
""" defaultFeatureWriters = [ KernFeatureWriter, MarkFeatureWriter, GdefFeatureWriter, CursFeatureWriter, ] def __init__( self, ufo, ttFont=None, glyphSet=None, featureWriters=None, feaIncludeDir=None, **kwargs, ): """ Args: featureWriters: a list of BaseFeatureWriter subclasses or pre-initialized instances. The default value (None) means that: - first, the UFO lib will be searched for a list of featureWriters under the key "com.github.googlei18n.ufo2ft.featureWriters" (see loadFeatureWriters). - if that is not found, the default list of writers will be used: (see FeatureCompiler.defaultFeatureWriters, and the individual feature writer classes for the list of features generated). If the featureWriters list is empty, no automatic feature is generated and only pre-existing features are compiled. The ``featureWriters`` parameter overrides both the writers from the UFO lib and the default writers list. To extend instead of replace the latter, the list can contain a special value ``...`` (i.e. the ``ellipsis`` singleton, not the str literal '...') which gets replaced by either the UFO.lib writers or the default ones; thus one can insert additional writers either before or after these. feaIncludeDir: a directory to be used as the include directory for the feature file. If None, the include directory is set to the parent directory of the UFO, provided the UFO has a path. """ BaseFeatureCompiler.__init__(self, ufo, ttFont, glyphSet) self.feaIncludeDir = feaIncludeDir self.initFeatureWriters(featureWriters) if kwargs.get("mtiFeatures") is not None: import warnings warnings.warn( "mtiFeatures argument is ignored; " "you should use MtiLibFeatureCompiler", category=UserWarning, stacklevel=2, ) def _load_custom_feature_writers(self, featureWriters=None): # Args: # ufo: Font # featureWriters: Optional[List[Union[FeatureWriter, EllipsisType]]]) # Returns: List[FeatureWriter] # by default, load the feature writers from the lib or the default ones; # ellipsis is used as a placeholder so one can optionally insert additional # featureWriters=[w1, ..., w2] either before or after these, or override # them by omitting the ellipsis. if featureWriters is None: featureWriters = [...] result = [] seen_ellipsis = False for writer in featureWriters: if writer is ...: if seen_ellipsis: raise ValueError("ellipsis not allowed more than once") writers = loadFeatureWriters(self.ufo) if writers is not None: result.extend(writers) else: result.extend(self.defaultFeatureWriters) seen_ellipsis = True else: klass = writer if isclass(writer) else type(writer) if not isValidFeatureWriter(klass): raise TypeError(f"Invalid feature writer: {writer!r}") result.append(writer) return result def initFeatureWriters(self, featureWriters=None): """Initialize feature writer classes as specified in the UFO lib. If none are defined in the UFO, the default feature writers are used (see FeatureCompiler.defaultFeatureWriters). The 'featureWriters' argument can be used to override these. The method sets the `self.featureWriters` attribute with the list of writers. Note that the writers that generate GSUB features are placed first in this list, before all others. This is because the GSUB table may be used in the subsequent feature writers to resolve substitutions from glyphs with unicodes to their alternates. 
""" featureWriters = self._load_custom_feature_writers(featureWriters) gsubWriters = [] others = [] for writer in featureWriters: if isclass(writer): writer = writer() if writer.tableTag == "GSUB": gsubWriters.append(writer) else: others.append(writer) self.featureWriters = gsubWriters + others def setupFeatures(self): """ Make the features source. **This should not be called externally.** Subclasses may override this method to handle the file creation in a different way if desired. """ with timer("run feature writers"): if self.featureWriters: featureFile = parseLayoutFeatures(self.ufo, self.feaIncludeDir) for writer in self.featureWriters: writer.write(self.ufo, featureFile, compiler=self) # stringify AST to get correct line numbers in error messages self.features = featureFile.asFea() else: # no featureWriters, simply read existing features' text self.features = self.ufo.features.text or "" def writeFeatures(self, outfile): if hasattr(self, "features"): outfile.write(self.features) def buildTables(self): """ Compile OpenType feature tables from the source. Raises a FeaLibError if the feature compilation was unsuccessful. **This should not be called externally.** Subclasses may override this method to handle the table compilation in a different way if desired. """ if not self.features: return # the path is used by the lexer to follow 'include' statements; # if we generated some automatic features, includes have already been # resolved, and we work from a string which does't exist on disk path = self.ufo.path if not self.featureWriters else None with timer("build OpenType features"): try: addOpenTypeFeaturesFromString(self.ttFont, self.features, filename=path) except FeatureLibError: if path is None: # if compilation fails, create temporary file for inspection data = self.features.encode("utf-8") with NamedTemporaryFile(delete=False) as tmp: tmp.write(data) logger.error( "Compilation failed! Inspect temporary file: %r", tmp.name ) raise class MtiFeatureCompiler(BaseFeatureCompiler): """Compile OpenType layout tables from MTI feature files using fontTools.mtiLib. 
""" def setupFeatures(self): ufo = self.ufo features = {} # includes the length of the "/" separator prefixLength = len(MTI_FEATURES_PREFIX) + 1 for fn in ufo.data.fileNames: if fn.startswith(MTI_FEATURES_PREFIX) and fn.endswith(".mti"): content = ufo.data[fn].decode("utf-8") features[fn[prefixLength:-4]] = content self.mtiFeatures = features def buildTables(self): for tag, features in self.mtiFeatures.items(): table = mtiLib.build(features.splitlines(), self.ttFont) assert table.tableTag == tag self.ttFont[tag] = table ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/000077500000000000000000000000001434012334300177245ustar00rootroot00000000000000ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/__init__.py000066400000000000000000000112361434012334300220400ustar00rootroot00000000000000import importlib import logging from inspect import getfullargspec, isclass from ufo2ft.constants import FEATURE_WRITERS_KEY from ufo2ft.util import _loadPluginFromString from .baseFeatureWriter import BaseFeatureWriter from .cursFeatureWriter import CursFeatureWriter from .gdefFeatureWriter import GdefFeatureWriter from .kernFeatureWriter import KernFeatureWriter from .markFeatureWriter import MarkFeatureWriter __all__ = [ "BaseFeatureWriter", "CursFeatureWriter", "GdefFeatureWriter", "KernFeatureWriter", "MarkFeatureWriter", "loadFeatureWriters", ] logger = logging.getLogger(__name__) def isValidFeatureWriter(klass): """Return True if 'klass' is a valid feature writer class. A valid feature writer class is a class (of type 'type'), that has two required attributes: 1) 'tableTag' (str), which can be "GSUB", "GPOS", or other similar tags. 2) 'write' (bound method), with the signature matching the same method from the BaseFeatureWriter class: def write(self, font, feaFile, compiler=None) """ if not isclass(klass): logger.error("%r is not a class", klass) return False if not hasattr(klass, "tableTag"): logger.error("%r does not have required 'tableTag' attribute", klass) return False if not hasattr(klass, "write"): logger.error("%r does not have a required 'write' method", klass) return False if getfullargspec(klass.write).args != getfullargspec(BaseFeatureWriter.write).args: logger.error("%r 'write' method has incorrect signature", klass) return False return True def loadFeatureWriters(ufo, ignoreErrors=True): """Check UFO lib for key "com.github.googlei18n.ufo2ft.featureWriters", containing a list of dicts, each having the following key/value pairs: For example: { "module": "myTools.featureWriters", # default: ufo2ft.featureWriters "class": "MyKernFeatureWriter", # required "options": {"doThis": False, "doThat": True}, } Import each feature writer class from the specified module (default is the built-in ufo2ft.featureWriters), and instantiate it with the given 'options' dict. Return the list of feature writer objects. If the 'featureWriters' key is missing from the UFO lib, return None. If an exception occurs and 'ignoreErrors' is True, the exception message is logged and the invalid writer is skipped, otrherwise it's propagated. 
""" if FEATURE_WRITERS_KEY not in ufo.lib: return None writers = [] for wdict in ufo.lib[FEATURE_WRITERS_KEY]: try: moduleName = wdict.get("module", __name__) className = wdict["class"] options = wdict.get("options", {}) if not isinstance(options, dict): raise TypeError(type(options)) module = importlib.import_module(moduleName) klass = getattr(module, className) if not isValidFeatureWriter(klass): raise TypeError(klass) writer = klass(**options) except Exception: if ignoreErrors: logger.exception("failed to load feature writer: %r", wdict) continue raise writers.append(writer) return writers def loadFeatureWriterFromString(spec): """Take a string specifying a feature writer class to load (either a built-in writer or one defined in an external, user-defined module), initialize it with given options and return the writer object. The string must conform to the following notation: - an optional python module, followed by '::' - a required class name; the class must have a method call 'write' with the same signature as the BaseFeatureWriter. - an optional list of keyword-only arguments enclosed by parentheses Raises ValueError if the string doesn't conform to this specification; TypeError if imported name is not a feature writer class; and ImportError if the user-defined module cannot be imported. Examples: >>> loadFeatureWriterFromString("KernFeatureWriter") >>> w = loadFeatureWriterFromString("KernFeatureWriter(ignoreMarks=False)") >>> w.options.ignoreMarks False >>> w = loadFeatureWriterFromString("MarkFeatureWriter(features=['mkmk'])") >>> w.features == frozenset(['mkmk']) True >>> loadFeatureWriterFromString("ufo2ft.featureWriters::KernFeatureWriter") """ return _loadPluginFromString(spec, "ufo2ft.featureWriters", isValidFeatureWriter) ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/__main__.py000066400000000000000000000022101434012334300220110ustar00rootroot00000000000000import argparse import logging from io import StringIO from fontTools.misc.cliTools import makeOutputFileName from ufo2ft.featureCompiler import FeatureCompiler from ufo2ft.featureWriters import loadFeatureWriterFromString, logger try: import ufoLib2 loader = ufoLib2.Font.open except ImportError: import defcon loader = defcon.Font logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description="Apply feature writers to a UFO file") parser.add_argument("--output", "-o", metavar="OUTPUT", help="output file name") parser.add_argument("ufo", metavar="UFO", help="UFO file") parser.add_argument( "writers", metavar="WRITER", nargs="*", help="list of feature writers to enable", ) args = parser.parse_args() if not args.output: args.output = makeOutputFileName(args.ufo) ufo = loader(args.ufo) writers = [loadFeatureWriterFromString(w) for w in args.writers] compiler = FeatureCompiler(ufo, featureWriters=writers or None) compiler.setupFeatures() buf = StringIO() compiler.writeFeatures(buf) ufo.features.text = buf.getvalue() logger.info("Written on %s" % args.output) ufo.save(args.output) ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/ast.py000066400000000000000000000177071434012334300211010ustar00rootroot00000000000000"""Helpers to build or extract data from feaLib AST objects.""" import collections import functools import operator import re # we re-export here all the feaLib AST classes so they can be used from # writer modules with a single `from ufo2ft.featureWriters import ast` import sys from fontTools import unicodedata from fontTools.feaLib import ast self = sys.modules[__name__] for name in getattr(ast, "__all__", 
dir(ast)): if isinstance(getattr(ast, name), type): setattr(self, name, getattr(ast, name)) del sys, self, name def getScriptLanguageSystems(feaFile): """Return dictionary keyed by Unicode script code containing lists of (OT_SCRIPT_TAG, [OT_LANGUAGE_TAG, ...]) tuples (excluding "DFLT"). """ languagesByScript = collections.OrderedDict() for ls in [ st for st in feaFile.statements if isinstance(st, ast.LanguageSystemStatement) ]: if ls.script == "DFLT": continue languagesByScript.setdefault(ls.script, []).append(ls.language) langSysMap = collections.OrderedDict() for script, languages in languagesByScript.items(): sc = unicodedata.ot_tag_to_script(script) langSysMap.setdefault(sc, []).append((script, languages)) return langSysMap def iterFeatureBlocks(feaFile, tag=None): for statement in feaFile.statements: if isinstance(statement, ast.FeatureBlock): if tag is not None and statement.name != tag: continue yield statement def findFeatureTags(feaFile): return {f.name for f in iterFeatureBlocks(feaFile)} def findCommentPattern(feaFile, pattern): """ Yield a tuple of statements, starting with the parent block, followed by nested blocks if present, ending with the comment matching a given pattern. There is not parent block if the matched comment is a the root level. """ for statement in feaFile.statements: if hasattr(statement, "statements"): for res in findCommentPattern(statement, pattern): yield (statement, *res) elif isinstance(statement, ast.Comment): if re.match(pattern, str(statement)): yield (statement,) def findTable(feaLib, tag): for statement in feaLib.statements: if isinstance(statement, ast.TableBlock) and statement.name == tag: return statement def iterClassDefinitions(feaFile, featureTag=None): if featureTag is None: # start from top-level class definitions for s in feaFile.statements: if isinstance(s, ast.GlyphClassDefinition): yield s # then iterate over per-feature class definitions for fea in iterFeatureBlocks(feaFile, tag=featureTag): for s in fea.statements: if isinstance(s, ast.GlyphClassDefinition): yield s LOOKUP_FLAGS = { "RightToLeft": 1, "IgnoreBaseGlyphs": 2, "IgnoreLigatures": 4, "IgnoreMarks": 8, } def makeLookupFlag(flags=None, markAttachment=None, markFilteringSet=None): if isinstance(flags, str): value = LOOKUP_FLAGS[flags] elif flags is not None: value = functools.reduce(operator.or_, [LOOKUP_FLAGS[n] for n in flags], 0) else: value = 0 if markAttachment is not None: assert isinstance(markAttachment, ast.GlyphClassDefinition) markAttachment = ast.GlyphClassName(markAttachment) if markFilteringSet is not None: assert isinstance(markFilteringSet, ast.GlyphClassDefinition) markFilteringSet = ast.GlyphClassName(markFilteringSet) return ast.LookupFlagStatement( value, markAttachment=markAttachment, markFilteringSet=markFilteringSet ) def makeGlyphClassDefinitions(groups, feaFile=None, stripPrefix=""): """Given a groups dictionary ({str: list[str]}), create feaLib GlyphClassDefinition objects for each group. Return a dict keyed by the original group name. If `stripPrefix` (str) is provided and a group name starts with it, the string will be stripped from the beginning of the class name. 
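    For example (sketch):

        groups = {"public.kern1.O": ["O", "D", "Q"]}
        classDefs = makeGlyphClassDefinitions(groups, stripPrefix="public.")
        # classDefs is keyed by the original group name;
        # classDefs["public.kern1.O"].name == "kern1.O"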
""" classDefs = {} if feaFile is not None: classNames = {cdef.name for cdef in iterClassDefinitions(feaFile)} else: classNames = set() lengthPrefix = len(stripPrefix) for groupName, members in sorted(groups.items()): originalGroupName = groupName if stripPrefix and groupName.startswith(stripPrefix): groupName = groupName[lengthPrefix:] className = makeFeaClassName(groupName, classNames) classNames.add(className) classDef = makeGlyphClassDefinition(className, members) classDefs[originalGroupName] = classDef return classDefs def makeGlyphClassDefinition(className, members): glyphNames = [ast.GlyphName(g) for g in members] glyphClass = ast.GlyphClass(glyphNames) classDef = ast.GlyphClassDefinition(className, glyphClass) return classDef def makeFeaClassName(name, existingClassNames=None): """Make a glyph class name which is legal to use in feature text. Ensures the name only includes characters in "A-Za-z0-9._", and isn't already defined. """ name = re.sub(r"[^A-Za-z0-9._]", r"", name) if existingClassNames is None: return name i = 1 origName = name while name in existingClassNames: name = "%s_%d" % (origName, i) i += 1 return name def addLookupReferences( feature, lookups, script=None, languages=None, exclude_dflt=False ): """Add references to named lookups to the feature's statements. If `script` (str) and `languages` (sequence of str) are provided, only register the lookup for the given script and languages, optionally with `exclude_dflt` directive. Otherwise add a global reference which will be registered for all the scripts and languages in the feature file's `languagesystems` statements. """ assert lookups if not script: for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) return feature.statements.append(ast.ScriptStatement(script)) if exclude_dflt: for language in languages or ("dflt",): feature.statements.append( ast.LanguageStatement(language, include_default=False) ) for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) else: feature.statements.append(ast.LanguageStatement("dflt", include_default=True)) for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) for language in languages or (): if language == "dflt": continue feature.statements.append( ast.LanguageStatement(language, include_default=True) ) _GDEFGlyphClasses = collections.namedtuple( "_GDEFGlyphClasses", "base ligature mark component" ) def getGDEFGlyphClasses(feaLib): """Return GDEF GlyphClassDef base/mark/ligature/component glyphs, or None if no GDEF table is defined in the feature file. 
""" for s in feaLib.statements: if isinstance(s, ast.TableBlock) and s.name == "GDEF": for st in s.statements: if isinstance(st, ast.GlyphClassDefStatement): return _GDEFGlyphClasses( frozenset(st.baseGlyphs.glyphSet()) if st.baseGlyphs is not None else frozenset(), frozenset(st.ligatureGlyphs.glyphSet()) if st.ligatureGlyphs is not None else frozenset(), frozenset(st.markGlyphs.glyphSet()) if st.markGlyphs is not None else frozenset(), frozenset(st.componentGlyphs.glyphSet()) if st.componentGlyphs is not None else frozenset(), ) return _GDEFGlyphClasses(None, None, None, None) ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/baseFeatureWriter.py000066400000000000000000000354161434012334300237320ustar00rootroot00000000000000import logging from collections import OrderedDict, namedtuple from types import SimpleNamespace from ufo2ft.constants import OPENTYPE_CATEGORIES_KEY from ufo2ft.errors import InvalidFeaturesData from ufo2ft.featureWriters import ast INSERT_FEATURE_MARKER = r"\s*# Automatic Code.*" class BaseFeatureWriter: """Abstract features writer. The `tableTag` class attribute (str) states the tag of the OpenType Layout table which the generated features are intended for. For example: "GPOS", "GSUB", "BASE", etc. The `features` class attribute defines the set of all the features that this writer supports. If you want to only write some of the available features you can provide a smaller sequence to 'features' constructor argument. By the default all the features supported by this writer will be outputted. Two writing modes are defined here: 1) "skip" (default) will not write features if already present; 2) "append" will add additional lookups to an existing feature, if present, or it will add a new one at the end of all features. Subclasses can set a different default mode or define a different set of `_SUPPORTED_MODES`. The `options` class attribute contains a mapping of option names with their default values. These can be overridden on an instance by passing keyword arguments to the constructor. """ tableTag = None features = frozenset() mode = "skip" insertFeatureMarker = INSERT_FEATURE_MARKER options = {} _SUPPORTED_MODES = frozenset(["skip", "append"]) def __init__(self, features=None, mode=None, **kwargs): if features is not None: features = frozenset(features) assert features, "features cannot be empty" unsupported = features.difference(self.__class__.features) if unsupported: raise ValueError("unsupported: %s" % ", ".join(unsupported)) self.features = features if mode is not None: self.mode = mode if self.mode not in self._SUPPORTED_MODES: raise ValueError(self.mode) options = dict(self.__class__.options) for k in kwargs: if k not in options: raise TypeError("unsupported keyword argument: %r" % k) options[k] = kwargs[k] self.options = SimpleNamespace(**options) logger = ".".join([self.__class__.__module__, self.__class__.__name__]) self.log = logging.getLogger(logger) def setContext(self, font, feaFile, compiler=None): """Populate a temporary `self.context` namespace, which is reset after each new call to `_write` method. Subclasses can override this to provide contextual information which depends on other data, or set any temporary attributes. The default implementation sets: - the current font; - the current FeatureFile object; - the current compiler instance (only present when this writer was instantiated from a FeatureCompiler); - a set of features (tags) to be generated. If self.mode is "skip", these are all the features which are _not_ already present. 
Returns the context namespace instance. """ todo = set(self.features) insertComments = None if self.mode == "skip": if self.insertFeatureMarker is not None: insertComments = self.collectInsertMarkers( feaFile, self.insertFeatureMarker, todo ) # find existing feature blocks existing = ast.findFeatureTags(feaFile) # ignore features with insert marker if insertComments: existing.difference_update(insertComments.keys()) # remove existing feature without insert marker from todo list todo.difference_update(existing) self.context = SimpleNamespace( font=font, feaFile=feaFile, compiler=compiler, todo=todo, insertComments=insertComments, ) return self.context def shouldContinue(self): """Decide whether to start generating features or return early. Returns a boolean: True to proceed, False to skip. Sublcasses may override this to skip generation based on the presence or lack of other required pieces of font data. """ if not self.context.todo: self.log.debug("No features to be generated; skipped") return False return True def write(self, font, feaFile, compiler=None): """Write features and class definitions for this font to a feaLib FeatureFile object. Returns True if feature file was modified, False if no new features were generated. """ self.setContext(font, feaFile, compiler=compiler) try: if self.shouldContinue(): return self._write() else: return False finally: del self.context def _write(self): """Subclasses must override this.""" raise NotImplementedError def _insert( self, feaFile, classDefs=None, anchorDefs=None, markClassDefs=None, lookups=None, features=None, ): """ Insert feature, its classDefs or markClassDefs and lookups at insert marker comment. If the insert marker is at the top of a feature block, the feature is inserted before that block, and after if the insert marker is at the bottom. """ statements = feaFile.statements inserted = {} # First handle those with a known location, i.e. insert markers insertComments = self.context.insertComments indices = [] for ix, feature in enumerate(features): if insertComments and feature.name in insertComments: block, comment = insertComments[feature.name] markerIndex = block.statements.index(comment) onlyCommentsBefore = all( isinstance(s, ast.Comment) for s in block.statements[:markerIndex] ) onlyCommentsAfter = all( isinstance(s, ast.Comment) for s in block.statements[markerIndex:] ) # Remove insert marker(s) from feature block. del block.statements[markerIndex] # insertFeatureMarker is in a block with only comments. # Replace that block with new feature block. if onlyCommentsBefore and onlyCommentsAfter: index = statements.index(block) statements.remove(block) # insertFeatureMarker is at the top of a feature block # or only preceded by other comments. elif onlyCommentsBefore: index = statements.index(block) # insertFeatureMarker is at the bottom of a feature block # or only followed by other comments elif onlyCommentsAfter: index = statements.index(block) + 1 # insertFeatureMarker is in the middle of a feature block # preceded and followed by statements that are not comments # # Glyphs3 can insert a feature block when rules are before # and after the insert marker. # See # https://github.com/googlefonts/ufo2ft/issues/351#issuecomment-765294436 # This is currently not supported. else: raise InvalidFeaturesData( "Insert marker has rules before and after, feature " f"{block.name} cannot be inserted. This is not supported." 
) statements.insert(index, feature) indices.append(index) inserted[id(feature)] = True # Now walk feature list backwards and insert any dependent features for i in range(ix - 1, -1, -1): if id(features[i]) in inserted: break # Insert this before the current one i.e. at same array index statements.insert(index, features[i]) # All the indices recorded previously have now shifted up by one indices = [index] + [j + 1 for j in indices] inserted[id(features[i])] = True # Finally, deal with any remaining features for feature in features: if id(feature) in inserted: continue index = len(statements) statements.insert(index, feature) indices.append(index) # Write classDefs, anchorsDefs, markClassDefs, lookups at earliest # opportunity. others = [] minindex = min(indices) for defs in [classDefs, anchorDefs, markClassDefs]: if defs: others.extend(defs) others.append(ast.Comment("")) # Insert lookups if lookups: if minindex > 0 and not others: others.append(ast.Comment("")) others.extend(lookups) if others: feaFile.statements = statements = ( statements[:minindex] + others + statements[minindex:] ) @staticmethod def collectInsertMarkers(feaFile, insertFeatureMarker, featureTags): """ Returns a dictionary of tuples (block, comment) keyed by feature tag with the block that contains the comment matching the insert feature marker, for given feature tags. """ insertComments = dict() for match in ast.findCommentPattern(feaFile, insertFeatureMarker): blocks, comment = match[:-1], match[-1] if len(blocks) == 1 and isinstance(blocks[0], ast.FeatureBlock): block = blocks[0] if block.name in featureTags and block.name not in insertComments: insertComments[block.name] = (block, comment) return insertComments def makeUnicodeToGlyphNameMapping(self): """Return the Unicode to glyph name mapping for the current font.""" # Try to get the "best" Unicode cmap subtable if this writer is running # in the context of a FeatureCompiler, else create a new mapping from # the UFO glyphs compiler = self.context.compiler cmap = None if compiler is not None: table = compiler.ttFont.get("cmap") if table is not None: cmap = table.getBestCmap() if cmap is None: from ufo2ft.util import makeUnicodeToGlyphNameMapping if compiler is not None: glyphSet = compiler.glyphSet else: glyphSet = self.context.font cmap = makeUnicodeToGlyphNameMapping(glyphSet) return cmap def getOrderedGlyphSet(self): """Return OrderedDict[glyphName, glyph] sorted by glyphOrder.""" compiler = self.context.compiler if compiler is not None: return compiler.glyphSet from ufo2ft.util import _GlyphSet, makeOfficialGlyphOrder font = self.context.font # subset glyphSet by skipExportGlyphs if any glyphSet = _GlyphSet.from_layer( font, skipExportGlyphs=set(font.lib.get("public.skipExportGlyphs", [])), ) glyphOrder = makeOfficialGlyphOrder(glyphSet, font.glyphOrder) return OrderedDict((gn, glyphSet[gn]) for gn in glyphOrder) def compileGSUB(self): """Compile a temporary GSUB table from the current feature file.""" from ufo2ft.util import compileGSUB compiler = self.context.compiler if compiler is not None: # The result is cached in the compiler instance, so if another # writer requests one it is not compiled again. 
if hasattr(compiler, "_gsub"): return compiler._gsub glyphOrder = compiler.ttFont.getGlyphOrder() else: # the 'real' glyph order doesn't matter because the table is not # compiled to binary, only the glyph names are used glyphOrder = sorted(self.context.font.keys()) gsub = compileGSUB(self.context.feaFile, glyphOrder) if compiler and not hasattr(compiler, "_gsub"): compiler._gsub = gsub return gsub def getOpenTypeCategories(self): """Return 'public.openTypeCategories' values as a tuple of sets of unassigned, bases, ligatures, marks, components.""" font = self.context.font unassigned, bases, ligatures, marks, components = ( set(), set(), set(), set(), set(), ) openTypeCategories = font.lib.get(OPENTYPE_CATEGORIES_KEY, {}) for glyphName, category in openTypeCategories.items(): if category == "unassigned": unassigned.add(glyphName) elif category == "base": bases.add(glyphName) elif category == "ligature": ligatures.add(glyphName) elif category == "mark": marks.add(glyphName) elif category == "component": components.add(glyphName) else: self.log.warning( f"The '{OPENTYPE_CATEGORIES_KEY}' value of {glyphName} in " f"{font.info.familyName} {font.info.styleName} is '{category}' " "when it should be 'unassigned', 'base', 'ligature', 'mark' " "or 'component'." ) return namedtuple( "OpenTypeCategories", "unassigned base ligature mark component" )( frozenset(unassigned), frozenset(bases), frozenset(ligatures), frozenset(marks), frozenset(components), ) def getGDEFGlyphClasses(self): """Return a tuple of GDEF GlyphClassDef base, ligature, mark, component glyph names. Sets are `None` if no 'public.openTypeCategories' values are defined or if no GDEF table is defined in the feature file. """ feaFile = self.context.feaFile if ast.findTable(feaFile, "GDEF") is not None: return ast.getGDEFGlyphClasses(feaFile) unassigned, bases, ligatures, marks, components = self.getOpenTypeCategories() if not any((unassigned, bases, ligatures, marks, components)): return ast._GDEFGlyphClasses(None, None, None, None) return ast._GDEFGlyphClasses( frozenset(bases), frozenset(ligatures), frozenset(marks), frozenset(components), ) ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/cursFeatureWriter.py000066400000000000000000000101551434012334300237650ustar00rootroot00000000000000from fontTools.misc.fixedTools import otRound from ufo2ft.featureWriters import BaseFeatureWriter, ast from ufo2ft.util import classifyGlyphs, unicodeScriptDirection class CursFeatureWriter(BaseFeatureWriter): """Generate a curs feature base on glyph anchors. The default mode is 'skip': i.e. if the 'curs' feature is already present in the feature file, it is not generated again. The optional 'append' mode will add extra lookups to an already existing features, if any. By default, anchors names 'entry' and 'exit' will be used to connect the 'entry' anchor of a glyph with the 'exit' anchor of the preceding glyph. 
""" tableTag = "GPOS" features = frozenset(["curs"]) def _makeCursiveFeature(self): cmap = self.makeUnicodeToGlyphNameMapping() if any(unicodeScriptDirection(uv) == "LTR" for uv in cmap): gsub = self.compileGSUB() dirGlyphs = classifyGlyphs(unicodeScriptDirection, cmap, gsub) shouldSplit = "LTR" in dirGlyphs else: shouldSplit = False lookups = [] ordereredGlyphSet = self.getOrderedGlyphSet().items() if shouldSplit: # Make LTR lookup LTRlookup = self._makeCursiveLookup( ( glyph for (glyphName, glyph) in ordereredGlyphSet if glyphName in dirGlyphs["LTR"] ), direction="LTR", ) if LTRlookup: lookups.append(LTRlookup) # Make RTL lookup with other glyphs RTLlookup = self._makeCursiveLookup( ( glyph for (glyphName, glyph) in ordereredGlyphSet if glyphName not in dirGlyphs["LTR"] ), direction="RTL", ) if RTLlookup: lookups.append(RTLlookup) else: lookup = self._makeCursiveLookup( (glyph for (glyphName, glyph) in ordereredGlyphSet) ) if lookup: lookups.append(lookup) if lookups: feature = ast.FeatureBlock("curs") feature.statements.extend(lookups) return feature def _makeCursiveLookup(self, glyphs, direction=None): statements = self._makeCursiveStatements(glyphs) if not statements: return suffix = "" if direction == "LTR": suffix = "_ltr" elif direction == "RTL": suffix = "_rtl" lookup = ast.LookupBlock(name=f"curs{suffix}") if direction != "LTR": lookup.statements.append(ast.makeLookupFlag(("IgnoreMarks", "RightToLeft"))) else: lookup.statements.append(ast.makeLookupFlag("IgnoreMarks")) lookup.statements.extend(statements) return lookup def _makeCursiveStatements(self, glyphs): cursiveAnchors = dict() statements = [] for glyph in glyphs: entryAnchor = exitAnchor = None for anchor in glyph.anchors: if entryAnchor and exitAnchor: break if anchor.name == "entry": entryAnchor = ast.Anchor(x=otRound(anchor.x), y=otRound(anchor.y)) elif anchor.name == "exit": exitAnchor = ast.Anchor(x=otRound(anchor.x), y=otRound(anchor.y)) # A glyph can have only one of the cursive anchors (e.g. if it # attaches on one side only) if entryAnchor or exitAnchor: cursiveAnchors[ast.GlyphName(glyph.name)] = (entryAnchor, exitAnchor) if cursiveAnchors: for glyphName, anchors in cursiveAnchors.items(): statement = ast.CursivePosStatement(glyphName, *anchors) statements.append(statement) return statements def _write(self): feaFile = self.context.feaFile feature = self._makeCursiveFeature() if not feature: return False self._insert(feaFile=feaFile, features=[feature]) return True ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/gdefFeatureWriter.py000066400000000000000000000071731434012334300237240ustar00rootroot00000000000000from ufo2ft.featureWriters import BaseFeatureWriter, ast class GdefFeatureWriter(BaseFeatureWriter): """Generates a GDEF table based on OpenType Category and glyph anchors. It skips generating the GDEF if a GDEF is defined in the features. It uses the 'public.openTypeCategories' values to create the GDEF ClassDefs and the ligature caret anchors to create the GDEF ligature carets. 
""" tableTag = "GDEF" features = frozenset(["GlyphClassDefs", "LigatureCarets"]) insertFeatureMarker = None def setContext(self, font, feaFile, compiler=None): ctx = super().setContext(font, feaFile, compiler=compiler) ctx.gdefTableBlock = ast.findTable(self.context.feaFile, "GDEF") if ctx.gdefTableBlock: for fea in ctx.gdefTableBlock.statements: if isinstance(fea, ast.GlyphClassDefStatement): ctx.todo.discard("GlyphClassDefs") elif isinstance(fea, ast.LigatureCaretByIndexStatement) or isinstance( fea, ast.LigatureCaretByPosStatement ): ctx.todo.discard("LigatureCarets") if not ctx.todo: break ctx.orderedGlyphSet = self.getOrderedGlyphSet() if "GlyphClassDefs" in ctx.todo: ctx.openTypeCategories = self.getOpenTypeCategories() if not any(ctx.openTypeCategories): ctx.todo.remove("GlyphClassDefs") if "LigatureCarets" in ctx.todo: ctx.ligatureCarets = self._getLigatureCarets() if not ctx.ligatureCarets: ctx.todo.remove("LigatureCarets") return ctx def _getLigatureCarets(self): carets = dict() for glyphName, glyph in self.context.orderedGlyphSet.items(): glyphCarets = set() for anchor in glyph.anchors: if ( anchor.name and anchor.name.startswith("caret_") and anchor.x is not None ): glyphCarets.add(round(anchor.x)) elif ( anchor.name and anchor.name.startswith("vcaret_") and anchor.y is not None ): glyphCarets.add(round(anchor.y)) if glyphCarets: carets[glyphName] = sorted(glyphCarets) return carets def _sortedGlyphClass(self, glyphNames): return sorted(n for n in self.context.orderedGlyphSet if n in glyphNames) def _write(self): feaFile = self.context.feaFile gdefTableBlock = self.context.gdefTableBlock if not gdefTableBlock: gdefTableBlock = ast.TableBlock("GDEF") feaFile.statements.append(gdefTableBlock) if "GlyphClassDefs" in self.context.todo: categories = self.context.openTypeCategories glyphClassDefs = ast.GlyphClassDefStatement( ast.GlyphClass(self._sortedGlyphClass(categories.base)), ast.GlyphClass(self._sortedGlyphClass(categories.mark)), ast.GlyphClass(self._sortedGlyphClass(categories.ligature)), ast.GlyphClass(self._sortedGlyphClass(categories.component)), ) gdefTableBlock.statements.append(glyphClassDefs) if "LigatureCarets" in self.context.todo: ligatureCarets = [ ast.LigatureCaretByPosStatement(ast.GlyphName(glyphName), carets) for glyphName, carets in self.context.ligatureCarets.items() ] gdefTableBlock.statements.extend(ligatureCarets) return True ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/kernFeatureWriter.py000066400000000000000000000465711434012334300237630ustar00rootroot00000000000000from types import SimpleNamespace from fontTools import unicodedata from ufo2ft.constants import INDIC_SCRIPTS, USE_SCRIPTS from ufo2ft.featureWriters import BaseFeatureWriter, ast from ufo2ft.util import classifyGlyphs, quantize, unicodeScriptDirection SIDE1_PREFIX = "public.kern1." SIDE2_PREFIX = "public.kern2." # In HarfBuzz the 'dist' feature is automatically enabled for these shapers: # src/hb-ot-shape-complex-myanmar.cc # src/hb-ot-shape-complex-use.cc # src/hb-ot-shape-complex-indic.cc # src/hb-ot-shape-complex-khmer.cc # We derived the list of scripts associated to each dist-enabled shaper from # `hb_ot_shape_complex_categorize` in src/hb-ot-shape-complex-private.hh DIST_ENABLED_SCRIPTS = set(INDIC_SCRIPTS) | set(["Khmr", "Mymr"]) | set(USE_SCRIPTS) RTL_BIDI_TYPES = {"R", "AL"} LTR_BIDI_TYPES = {"L", "AN", "EN"} def unicodeBidiType(uv): """Return "R" for characters with RTL direction, or "L" for LTR (whether 'strong' or 'weak'), or None for neutral direction. 
""" char = chr(uv) bidiType = unicodedata.bidirectional(char) if bidiType in RTL_BIDI_TYPES: return "R" elif bidiType in LTR_BIDI_TYPES: return "L" else: return None class KerningPair: __slots__ = ("side1", "side2", "value", "directions", "bidiTypes") def __init__(self, side1, side2, value, directions=None, bidiTypes=None): if isinstance(side1, str): self.side1 = ast.GlyphName(side1) elif isinstance(side1, ast.GlyphClassDefinition): self.side1 = ast.GlyphClassName(side1) else: raise AssertionError(side1) if isinstance(side2, str): self.side2 = ast.GlyphName(side2) elif isinstance(side2, ast.GlyphClassDefinition): self.side2 = ast.GlyphClassName(side2) else: raise AssertionError(side2) self.value = value self.directions = directions or set() self.bidiTypes = bidiTypes or set() @property def firstIsClass(self): return isinstance(self.side1, ast.GlyphClassName) @property def secondIsClass(self): return isinstance(self.side2, ast.GlyphClassName) @property def glyphs(self): if self.firstIsClass: classDef1 = self.side1.glyphclass glyphs1 = {g.asFea() for g in classDef1.glyphSet()} else: glyphs1 = {self.side1.asFea()} if self.secondIsClass: classDef2 = self.side2.glyphclass glyphs2 = {g.asFea() for g in classDef2.glyphSet()} else: glyphs2 = {self.side2.asFea()} return glyphs1 | glyphs2 def __repr__(self): return "<{} {} {} {}{}{}>".format( self.__class__.__name__, self.side1, self.side2, self.value, " %r" % self.directions if self.directions else "", " %r" % self.bidiTypes if self.bidiTypes else "", ) class KernFeatureWriter(BaseFeatureWriter): """Generates a kerning feature based on groups and rules contained in an UFO's kerning data. There are currently two possible writing modes: 2) "skip" (default) will not write anything if the features are already present; 1) "append" will add additional lookups to an existing feature, if present, or it will add a new one at the end of all features. If the `quantization` argument is given in the filter options, the resulting anchors are rounded to the nearest multiple of the quantization value. 
""" tableTag = "GPOS" features = frozenset(["kern", "dist"]) options = dict(ignoreMarks=True, quantization=1) def setContext(self, font, feaFile, compiler=None): ctx = super().setContext(font, feaFile, compiler=compiler) ctx.gdefClasses = self.getGDEFGlyphClasses() ctx.kerning = self.getKerningData(font, feaFile, self.getOrderedGlyphSet()) feaScripts = ast.getScriptLanguageSystems(feaFile) ctx.scriptGroups = self._groupScriptsByTagAndDirection(feaScripts) return ctx def shouldContinue(self): if not self.context.kerning.pairs: self.log.debug("No kerning data; skipped") return False if "dist" in self.context.todo and "dist" not in self.context.scriptGroups: self.log.debug( "No dist-enabled scripts defined in languagesystem " "statements; dist feature will not be generated" ) self.context.todo.remove("dist") return super().shouldContinue() def _write(self): lookups = self._makeKerningLookups() if not lookups: self.log.debug("kerning lookups empty; skipped") return False features = self._makeFeatureBlocks(lookups) if not features: self.log.debug("kerning features empty; skipped") return False # extend feature file with the new generated statements feaFile = self.context.feaFile # first add the glyph class definitions side1Classes = self.context.kerning.side1Classes side2Classes = self.context.kerning.side2Classes newClassDefs = [] for classes in (side1Classes, side2Classes): newClassDefs.extend([c for _, c in sorted(classes.items())]) lookupGroups = [] for _, lookupGroup in sorted(lookups.items()): lookupGroups.extend(lookupGroup) self._insert( feaFile=feaFile, classDefs=newClassDefs, lookups=lookupGroups, features=[features[tag] for tag in ["kern", "dist"] if tag in features], ) return True @classmethod def getKerningData(cls, font, feaFile=None, glyphSet=None): side1Classes, side2Classes = cls.getKerningClasses(font, feaFile, glyphSet) pairs = cls.getKerningPairs(font, side1Classes, side2Classes, glyphSet) return SimpleNamespace( side1Classes=side1Classes, side2Classes=side2Classes, pairs=pairs ) @staticmethod def getKerningGroups(font, glyphSet=None): if glyphSet: allGlyphs = set(glyphSet.keys()) else: allGlyphs = set(font.keys()) side1Groups = {} side2Groups = {} for name, members in font.groups.items(): # prune non-existent or skipped glyphs members = [g for g in members if g in allGlyphs] if not members: # skip empty groups continue # skip groups without UFO3 public.kern{1,2} prefix if name.startswith(SIDE1_PREFIX): side1Groups[name] = members elif name.startswith(SIDE2_PREFIX): side2Groups[name] = members return side1Groups, side2Groups @classmethod def getKerningClasses(cls, font, feaFile=None, glyphSet=None): side1Groups, side2Groups = cls.getKerningGroups(font, glyphSet) side1Classes = ast.makeGlyphClassDefinitions( side1Groups, feaFile, stripPrefix="public." ) side2Classes = ast.makeGlyphClassDefinitions( side2Groups, feaFile, stripPrefix="public." 
) return side1Classes, side2Classes @staticmethod def getKerningPairs(font, side1Classes, side2Classes, glyphSet=None): if glyphSet: allGlyphs = set(glyphSet.keys()) else: allGlyphs = set(font.keys()) kerning = font.kerning pairsByFlags = {} for (side1, side2) in kerning: # filter out pairs that reference missing groups or glyphs if side1 not in side1Classes and side1 not in allGlyphs: continue if side2 not in side2Classes and side2 not in allGlyphs: continue flags = (side1 in side1Classes, side2 in side2Classes) pairsByFlags.setdefault(flags, set()).add((side1, side2)) result = [] for flags, pairs in sorted(pairsByFlags.items()): for side1, side2 in sorted(pairs): value = kerning[side1, side2] if all(flags) and value == 0: # ignore zero-valued class kern pairs continue firstIsClass, secondIsClass = flags if firstIsClass: side1 = side1Classes[side1] if secondIsClass: side2 = side2Classes[side2] result.append(KerningPair(side1, side2, value)) return result def _intersectPairs(self, attribute, glyphSets): allKeys = set() for pair in self.context.kerning.pairs: for key, glyphs in glyphSets.items(): if not pair.glyphs.isdisjoint(glyphs): getattr(pair, attribute).add(key) allKeys.add(key) return allKeys @staticmethod def _groupScriptsByTagAndDirection(feaScripts): # Read scripts/languages defined in feaFile's 'languagesystem' # statements and group them by the feature tag (kern or dist) # they are associated with, and the global script's horizontal # direction (DFLT is excluded) scriptGroups = {} for scriptCode, scriptLangSys in feaScripts.items(): if scriptCode: direction = unicodedata.script_horizontal_direction(scriptCode) else: direction = "LTR" if scriptCode in DIST_ENABLED_SCRIPTS: tag = "dist" else: tag = "kern" scriptGroups.setdefault(tag, {}).setdefault(direction, []).extend( scriptLangSys ) return scriptGroups @staticmethod def _makePairPosRule(pair, rtl=False, quantization=1): enumerated = pair.firstIsClass ^ pair.secondIsClass value = quantize(pair.value, quantization) if rtl and "L" in pair.bidiTypes: # numbers are always shaped LTR even in RTL scripts rtl = False valuerecord = ast.ValueRecord( xPlacement=value if rtl else None, yPlacement=0 if rtl else None, xAdvance=value, yAdvance=0 if rtl else None, ) return ast.PairPosStatement( glyphs1=pair.side1, valuerecord1=valuerecord, glyphs2=pair.side2, valuerecord2=None, enumerated=enumerated, ) def _makeKerningLookup( self, name, pairs, exclude=None, rtl=False, ignoreMarks=True ): assert pairs rules = [] for pair in pairs: if exclude is not None and exclude(pair): self.log.debug("pair excluded from '%s' lookup: %r", name, pair) continue rules.append( self._makePairPosRule( pair, rtl=rtl, quantization=self.options.quantization ) ) if rules: lookup = ast.LookupBlock(name) if ignoreMarks and self.options.ignoreMarks: lookup.statements.append(ast.makeLookupFlag("IgnoreMarks")) lookup.statements.extend(rules) return lookup def _makeKerningLookups(self): cmap = self.makeUnicodeToGlyphNameMapping() if any(unicodeScriptDirection(uv) == "RTL" for uv in cmap): # If there are any characters from globally RTL scripts in the # cmap, we compile a temporary GSUB table to resolve substitutions # and group glyphs by script horizontal direction and bidirectional # type. We then mark each kerning pair with these properties when # any of the glyphs involved in a pair intersects these groups. 
gsub = self.compileGSUB() dirGlyphs = classifyGlyphs(unicodeScriptDirection, cmap, gsub) directions = self._intersectPairs("directions", dirGlyphs) shouldSplit = "RTL" in directions if shouldSplit: bidiGlyphs = classifyGlyphs(unicodeBidiType, cmap, gsub) self._intersectPairs("bidiTypes", bidiGlyphs) else: shouldSplit = False marks = self.context.gdefClasses.mark lookups = {} if shouldSplit: # make one DFLT lookup with script-agnostic characters, and two # LTR/RTL lookups excluding pairs from the opposite group. # We drop kerning pairs with ambiguous direction: i.e. those containing # glyphs from scripts with different overall horizontal direction, or # glyphs with incompatible bidirectional type (e.g. arabic letters vs # arabic numerals). pairs = [] for pair in self.context.kerning.pairs: if ("RTL" in pair.directions and "LTR" in pair.directions) or ( "R" in pair.bidiTypes and "L" in pair.bidiTypes ): self.log.warning( "skipped kern pair with ambiguous direction: %r", pair ) continue pairs.append(pair) if not pairs: return lookups if self.options.ignoreMarks: # If there are pairs with a mix of mark/base then the IgnoreMarks # flag is unnecessary and should not be set basePairs, markPairs = self._splitBaseAndMarkPairs(pairs, marks) if basePairs: self._makeSplitDirectionKernLookups(lookups, basePairs) if markPairs: self._makeSplitDirectionKernLookups( lookups, markPairs, ignoreMarks=False, suffix="_marks" ) else: self._makeSplitDirectionKernLookups(lookups, pairs) else: # only make a single (implicitly LTR) lookup including all base/base pairs # and a single lookup including all base/mark pairs (if any) pairs = self.context.kerning.pairs if self.options.ignoreMarks: basePairs, markPairs = self._splitBaseAndMarkPairs(pairs, marks) lookups["LTR"] = [] if basePairs: lookups["LTR"].append( self._makeKerningLookup("kern_ltr", basePairs) ) if markPairs: lookups["LTR"].append( self._makeKerningLookup( "kern_ltr_marks", markPairs, ignoreMarks=False ) ) else: lookups["LTR"] = [self._makeKerningLookup("kern_ltr", pairs)] return lookups def _splitBaseAndMarkPairs(self, pairs, marks): basePairs, markPairs = [], [] if marks: for pair in pairs: if any(glyph in marks for glyph in pair.glyphs): markPairs.append(pair) else: basePairs.append(pair) else: basePairs[:] = pairs return basePairs, markPairs def _makeSplitDirectionKernLookups( self, lookups, pairs, ignoreMarks=True, suffix="" ): dfltKern = self._makeKerningLookup( "kern_dflt" + suffix, pairs, exclude=(lambda pair: {"LTR", "RTL"}.intersection(pair.directions)), rtl=False, ignoreMarks=ignoreMarks, ) if dfltKern: lookups.setdefault("DFLT", []).append(dfltKern) ltrKern = self._makeKerningLookup( "kern_ltr" + suffix, pairs, exclude=(lambda pair: not pair.directions or "RTL" in pair.directions), rtl=False, ignoreMarks=ignoreMarks, ) if ltrKern: lookups.setdefault("LTR", []).append(ltrKern) rtlKern = self._makeKerningLookup( "kern_rtl" + suffix, pairs, exclude=(lambda pair: not pair.directions or "LTR" in pair.directions), rtl=True, ignoreMarks=ignoreMarks, ) if rtlKern: lookups.setdefault("RTL", []).append(rtlKern) def _makeFeatureBlocks(self, lookups): features = {} if "kern" in self.context.todo: kern = ast.FeatureBlock("kern") self._registerKernLookups(kern, lookups) if kern.statements: features["kern"] = kern if "dist" in self.context.todo: dist = ast.FeatureBlock("dist") self._registerDistLookups(dist, lookups) if dist.statements: features["dist"] = dist return features def _registerKernLookups(self, feature, lookups): if "DFLT" in lookups: 
ast.addLookupReferences(feature, lookups["DFLT"]) scriptGroups = self.context.scriptGroups if "dist" in self.context.todo: distScripts = scriptGroups["dist"] else: distScripts = {} kernScripts = scriptGroups.get("kern", {}) ltrScripts = kernScripts.get("LTR", []) rtlScripts = kernScripts.get("RTL", []) ltrLookups = lookups.get("LTR") rtlLookups = lookups.get("RTL") if ltrLookups and rtlLookups: if ltrScripts and rtlScripts: for script, langs in ltrScripts: ast.addLookupReferences(feature, ltrLookups, script, langs) for script, langs in rtlScripts: ast.addLookupReferences(feature, rtlLookups, script, langs) elif ltrScripts: ast.addLookupReferences(feature, rtlLookups, script="DFLT") for script, langs in ltrScripts: ast.addLookupReferences(feature, ltrLookups, script, langs) elif rtlScripts: ast.addLookupReferences(feature, ltrLookups, script="DFLT") for script, langs in rtlScripts: ast.addLookupReferences(feature, rtlLookups, script, langs) else: if not (distScripts.get("LTR") and distScripts.get("RTL")): raise ValueError( "cannot use DFLT script for both LTR and RTL kern " "lookups; add 'languagesystems' to features for at " "least one LTR or RTL script using the kern feature" ) elif ltrLookups: if not (rtlScripts or distScripts): ast.addLookupReferences(feature, ltrLookups) else: ast.addLookupReferences(feature, ltrLookups, script="DFLT") for script, langs in ltrScripts: ast.addLookupReferences(feature, ltrLookups, script, langs) elif rtlLookups: if not (ltrScripts or distScripts): ast.addLookupReferences(feature, rtlLookups) else: ast.addLookupReferences(feature, rtlLookups, script="DFLT") for script, langs in rtlScripts: ast.addLookupReferences(feature, rtlLookups, script, langs) def _registerDistLookups(self, feature, lookups): scripts = self.context.scriptGroups["dist"] ltrLookups = lookups.get("LTR") if ltrLookups: for script, langs in scripts.get("LTR", []): ast.addLookupReferences(feature, ltrLookups, script, langs) rtlLookups = lookups.get("RTL") if rtlLookups: for script, langs in scripts.get("RTL", []): ast.addLookupReferences(feature, rtlLookups, script, langs) ufo2ft-2.30.0/Lib/ufo2ft/featureWriters/markFeatureWriter.py000066400000000000000000001114751434012334300237520ustar00rootroot00000000000000import itertools import re from collections import OrderedDict, defaultdict from functools import partial from fontTools.misc.fixedTools import otRound from fontTools.unicodedata import script_extension from ufo2ft.constants import INDIC_SCRIPTS, USE_SCRIPTS from ufo2ft.featureWriters import BaseFeatureWriter, ast from ufo2ft.util import classifyGlyphs, quantize, unicodeInScripts class AbstractMarkPos: """Object containing all the mark attachments for glyph 'name'. The 'marks' is a list of NamedAnchor objects. Provides methods to filter marks given some callable, and convert itself to feaLib AST 'pos' statements for mark2base, mark2liga and mark2mark lookups. 
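    For example, a MarkToBasePos for a hypothetical base glyph "e" with a
    single NamedAnchor "top" at (250, 520) whose markClass is @MC_top would
    convert to roughly:

        pos base e <anchor 250 520> mark @MC_top;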
""" Statement = None def __init__(self, name, marks): self.name = name self.marks = marks def _filterMarks(self, include): return [anchor for anchor in self.marks if include(anchor)] def _marksAsAST(self): return [ (ast.Anchor(x=otRound(anchor.x), y=otRound(anchor.y)), anchor.markClass) for anchor in sorted(self.marks, key=lambda a: a.name) ] def asAST(self): marks = self._marksAsAST() return self.Statement(ast.GlyphName(self.name), marks) def __str__(self): return self.asAST().asFea() # pragma: no cover def filter(self, include): marks = self._filterMarks(include) return self.__class__(self.name, marks) if any(marks) else None def getMarkGlyphToMarkClasses(self): """Return a list of pairs (markGlyph, markClasses).""" markGlyphToMarkClasses = defaultdict(set) for namedAnchor in self.marks: for markGlyph in namedAnchor.markClass.glyphs: markGlyphToMarkClasses[markGlyph].add(namedAnchor.markClass.name) return markGlyphToMarkClasses.items() class MarkToBasePos(AbstractMarkPos): Statement = ast.MarkBasePosStatement class MarkToMarkPos(AbstractMarkPos): Statement = ast.MarkMarkPosStatement class MarkToLigaPos(AbstractMarkPos): Statement = ast.MarkLigPosStatement def _filterMarks(self, include): return [ [anchor for anchor in component if include(anchor)] for component in self.marks ] def _marksAsAST(self): return [ [ (ast.Anchor(x=otRound(anchor.x), y=otRound(anchor.y)), anchor.markClass) for anchor in sorted(component, key=lambda a: a.name) ] for component in self.marks ] def getMarkGlyphToMarkClasses(self): """Return a list of pairs (markGlyph, markClasses).""" markGlyphToMarkClasses = defaultdict(set) for component in self.marks: for namedAnchor in component: for markGlyph in namedAnchor.markClass.glyphs: markGlyphToMarkClasses[markGlyph].add(namedAnchor.markClass.name) return markGlyphToMarkClasses.items() MARK_PREFIX = LIGA_SEPARATOR = "_" LIGA_NUM_RE = re.compile(r".*?(\d+)$") def parseAnchorName( anchorName, markPrefix=MARK_PREFIX, ligaSeparator=LIGA_SEPARATOR, ligaNumRE=LIGA_NUM_RE, ignoreRE=None, ): """Parse anchor name and return a tuple that specifies: 1) whether the anchor is a "mark" anchor (bool); 2) the "key" name of the anchor, i.e. the name after stripping all the prefixes and suffixes, which identifies the class it belongs to (str); 3) An optional number (int), starting from 1, which identifies that index of the ligature component the anchor refers to. The 'ignoreRE' argument is an optional regex pattern (str) identifying sub-strings in the anchor name that should be ignored when parsing the three elements above. 
""" number = None if ignoreRE is not None: anchorName = re.sub(ignoreRE, "", anchorName) m = ligaNumRE.match(anchorName) if not m: key = anchorName else: number = m.group(1) key = anchorName.rstrip(number) separator = ligaSeparator if key.endswith(separator): assert separator key = key[: -len(separator)] number = int(number) else: # not a valid ligature anchor name key = anchorName number = None if anchorName.startswith(markPrefix) and key: if number is not None: raise ValueError("mark anchor cannot be numbered: %r" % anchorName) isMark = True key = key[len(markPrefix) :] if not key: raise ValueError("mark anchor key is nil: %r" % anchorName) else: isMark = False return isMark, key, number class NamedAnchor: """A position with a name, and an associated markClass.""" __slots__ = ("name", "x", "y", "isMark", "key", "number", "markClass") # subclasses can customize these to use different anchor naming schemes markPrefix = MARK_PREFIX ignoreRE = None ligaSeparator = LIGA_SEPARATOR ligaNumRE = LIGA_NUM_RE def __init__(self, name, x, y, markClass=None): self.name = name self.x = x self.y = y isMark, key, number = parseAnchorName( name, markPrefix=self.markPrefix, ligaSeparator=self.ligaSeparator, ligaNumRE=self.ligaNumRE, ignoreRE=self.ignoreRE, ) if number is not None: if number < 1: raise ValueError("ligature component indexes must start from 1") else: assert key, name self.isMark = isMark self.key = key self.number = number self.markClass = markClass @property def markAnchorName(self): return self.markPrefix + self.key def __repr__(self): items = ("{}={!r}".format(k, getattr(self, k)) for k in ("name", "x", "y")) return "%s(%s)" % (type(self).__name__, ", ".join(items)) def colorGraph(adjacency): """Color the graph defined by the provided adjacency lists. The input is a dict of iterables. Each entry of the dict is one vertex, and the value is a list of neighbours of that vertex. The input graph is expected to be undirected and the input should reflect that (have symmetric adjacency for A -> B and B -> A). Vertices that don't have neighbours should still be present in the input. The output is a list of lists, each list being one color assignment, and its members being vertices. """ # Basic implementation # https://en.wikipedia.org/wiki/Greedy_coloring colors = dict() # Sorted for reproducibility, probably not the optimal vertex order for node in sorted(adjacency): usedNeighbourColors = { colors[neighbour] for neighbour in adjacency[node] if neighbour in colors } colors[node] = firstAvailable(usedNeighbourColors) groups = defaultdict(list) for node, color in colors.items(): groups[color].append(node) return list(groups.values()) def firstAvailable(colorSet): """Return smallest non-negative integer not in the given set of colors.""" count = 0 while True: if count not in colorSet: return count count += 1 class MarkFeatureWriter(BaseFeatureWriter): """Generates a mark, mkmk, abvm and blwm features based on glyph anchors. The default mode is 'skip': i.e. if any of the supported features is already present in the feature file, it is not generated again. The optional 'append' mode will add extra lookups to already existing features, if any. New markClass definitions with unique names are generated when the mark anchors from UFO glyphs are different from those already defined in the feature file, otherwise the existing markClass definitions are reused in the newly appended lookups. 
Anchors prefixed with "_" are considered mark anchors; any glyph containing those is as such considered a mark glyph, thus added to markClass definitions, and in mark-to-mark lookups (if the glyph also contains other non-underscore-prefixed anchors). Anchors suffixed with a number, e.g. "top_1", "bottom_2", etc., are used for ligature glyphs. The number refers to the index (counting from 1) of the ligature component where the mark is meant to be attached. It is possible that a ligature component has no marks defined, in which case one can have an anchor with an empty name and only the number (e.g. '_3'), which is encoded as '' in the generated 'pos ligature' statement. If the glyph set contains glyphs whose unicode codepoint's script extension property intersects with one of the scripts which are processed by the Indic, USE, or Khmer complex shapers, then the "abvm" and "blwm" features are also generated for those glyphs, as well as for alternate glyphs only accessible via GSUB substitutions. The "abvm" (above-base marks) and "blwm" (below-base marks) features include all mark2base, mark2liga and mark2mark attachments for Indic/USE/Khmer glyphs containing anchors from predefined lists of "above" and "below" anchor names (see below). If these glyphs contain anchors with names not in those lists, the anchors' vertical position relative to the half of the UPEM square is used to decide whether they are considered above or below. If the `quantization` argument is given in the filter options, the resulting anchors are rounded to the nearest multiple of the quantization value. """ options = dict(quantization=1) tableTag = "GPOS" features = frozenset(["mark", "mkmk", "abvm", "blwm"]) # subclasses may override this to use different anchor naming schemes NamedAnchor = NamedAnchor # @MC_top, @MC_bottom, etc. markClassPrefix = "MC" abvmAnchorNames = { "top", "topleft", "topright", "candra", "bindu", "candrabindu", "imatra", } blwmAnchorNames = {"bottom", "bottomleft", "bottomright", "nukta"} scriptsUsingAbvm = set(INDIC_SCRIPTS + USE_SCRIPTS + ["Khmr"]) # Glyphs moves "_bottom" and "_top" (if present) to the top of # the list and then picks the first to use in the mark feature. 
# https://github.com/googlei18n/noto-source/issues/122#issuecomment-403952188 anchorSortKey = {"_bottom": -2, "_top": -1} def setContext(self, font, feaFile, compiler=None): ctx = super().setContext(font, feaFile, compiler=compiler) ctx.gdefClasses = self.getGDEFGlyphClasses() ctx.anchorLists = self._getAnchorLists() ctx.anchorPairs = self._getAnchorPairs() ctx.feaScripts = set(ast.getScriptLanguageSystems(feaFile).keys()) def shouldContinue(self): if not self.context.anchorPairs: self.log.debug("No mark-attaching anchors found; skipped") return False return super().shouldContinue() def _getAnchorLists(self): gdefClasses = self.context.gdefClasses if gdefClasses.base is not None: # only include the glyphs listed in the GDEF.GlyphClassDef groups include = gdefClasses.base | gdefClasses.ligature | gdefClasses.mark else: # no GDEF table defined in feature file, include all glyphs include = None result = OrderedDict() for glyphName, glyph in self.getOrderedGlyphSet().items(): if include is not None and glyphName not in include: continue anchorDict = OrderedDict() for anchor in glyph.anchors: anchorName = anchor.name if not anchorName: self.log.warning( "unnamed anchor discarded in glyph '%s'", glyphName ) continue if anchorName in anchorDict: self.log.warning( "duplicate anchor '%s' in glyph '%s'", anchorName, glyphName ) x = quantize(anchor.x, self.options.quantization) y = quantize(anchor.y, self.options.quantization) a = self.NamedAnchor(name=anchorName, x=x, y=y) anchorDict[anchorName] = a if anchorDict: result[glyphName] = list(anchorDict.values()) return result def _getAnchorPairs(self): markAnchorNames = set() for anchors in self.context.anchorLists.values(): markAnchorNames.update(a.name for a in anchors if a.isMark) anchorPairs = {} for anchors in self.context.anchorLists.values(): for anchor in anchors: if anchor.isMark: continue markAnchorName = anchor.markAnchorName if markAnchorName in markAnchorNames: anchorPairs[anchor.name] = markAnchorName return anchorPairs def _pruneUnusedAnchors(self): baseAnchorNames = set(self.context.anchorPairs.keys()) markAnchorNames = set(self.context.anchorPairs.values()) attachingAnchorNames = baseAnchorNames | markAnchorNames for glyphName, anchors in list(self.context.anchorLists.items()): for anchor in list(anchors): if anchor.name not in attachingAnchorNames and anchor.key: anchors.remove(anchor) if not anchors: del self.context.anchorLists[glyphName] def _groupMarkGlyphsByAnchor(self): gdefMarks = self.context.gdefClasses.mark markAnchorNames = set(self.context.anchorPairs.values()) markGlyphNames = set() groups = {} for glyphName, anchors in self.context.anchorLists.items(): # if the feature file has a GDEF table with GlyphClassDef defined, # only include mark glyphs that are referenced in there, otherwise # include any glyphs that contain an "_" prefixed anchor. if gdefMarks is not None and glyphName not in gdefMarks: continue markAnchors = [a for a in anchors if a.name in markAnchorNames] if not markAnchors: continue # Use all mark anchors. The rest of the algorithm will make sure # that the generated lookups will not have overlapping mark classes. 
for anchor in markAnchors: group = groups.setdefault(anchor.name, OrderedDict()) assert glyphName not in group group[glyphName] = anchor markGlyphNames.add(glyphName) self.context.markGlyphNames = markGlyphNames return groups def _makeMarkClassDefinitions(self): markGlyphSets = self._groupMarkGlyphsByAnchor() currentClasses = self.context.feaFile.markClasses allMarkClasses = self.context.markClasses = {} classPrefix = self.markClassPrefix newDefs = [] for markAnchorName, glyphAnchorPairs in sorted(markGlyphSets.items()): className = ast.makeFeaClassName(classPrefix + markAnchorName) for glyphName, anchor in glyphAnchorPairs.items(): mcd = self._defineMarkClass( glyphName, anchor.x, anchor.y, className, currentClasses ) if mcd is not None: newDefs.append(mcd) # this may be different because of name clashes className = mcd.markClass.name allMarkClasses[anchor.key] = currentClasses[className] return newDefs def _defineMarkClass(self, glyphName, x, y, className, markClasses): anchor = ast.Anchor(x=otRound(x), y=otRound(y)) markClass = markClasses.get(className) if markClass is None: markClass = ast.MarkClass(className) markClasses[className] = markClass else: if glyphName in markClass.glyphs: mcdef = markClass.glyphs[glyphName] if self._anchorsAreEqual(anchor, mcdef.anchor): self.log.debug( "Glyph %s already defined in markClass @%s", glyphName, className, ) return None else: # same mark glyph defined with different anchors for the # same markClass; make a new unique markClass definition newClassName = ast.makeFeaClassName(className, markClasses) markClass = ast.MarkClass(newClassName) markClasses[newClassName] = markClass glyphName = ast.GlyphName(glyphName) mcdef = ast.MarkClassDefinition(markClass, anchor, glyphName) markClass.addDefinition(mcdef) return mcdef @staticmethod def _anchorsAreEqual(a1, a2): # TODO add __eq__ to feaLib AST objects? return all( getattr(a1, attr) == getattr(a2, attr) for attr in ("x", "y", "contourpoint", "xDeviceTable", "yDeviceTable") ) def _setBaseAnchorMarkClasses(self): markClasses = self.context.markClasses for anchors in self.context.anchorLists.values(): for anchor in anchors: if anchor.isMark or not anchor.key or anchor.key not in markClasses: continue anchor.markClass = markClasses[anchor.key] def _groupMarkClasses(self, markGlyphToMarkClasses): # To compute the number of lookups that we need to build, we want # the minimum number of lookups such that, whenever a mark glyph # belongs to several mark classes, these classes are not in the same # lookup. A trivial solution is to make 1 lookup per mark class # but that's a bit wasteful, we might be able to do better by grouping # mark classes that do not conflict. # This is a graph coloring problem: the graph nodes are mark classes, # edges are between classes that would conflict and the colors are # the lookups in which they can go. adjacency = { # We'll get the same markClass several times in the dict # comprehension below but it's ok, only one will be kept. markClass: set() for markClasses in markGlyphToMarkClasses.values() for markClass in markClasses } for _markGlyph, markClasses in markGlyphToMarkClasses.items(): for markClass, other in itertools.combinations(markClasses, 2): adjacency[markClass].add(other) adjacency[other].add(markClass) colorGroups = colorGraph(adjacency) # Sort the groups, because the group that contains MC_top or MC_bottom # needs to go to the end (as specified in self.anchorSortKey) so that # they are applied last and "win" in case of conflict. 
# We also sort alphabetically for reproducibility, both within each # group and between groups. return sorted( [sorted(group) for group in colorGroups], key=lambda group: ( # The first part sorts _top and _bottom at the end. # There's a minus sign in front of the min because the original # self.anchorSortKey was designed to put the _top and _bottom # at the start (and now we want them at the end). -min( # Remove the MC prefix because that's how the mark classes # are looking at this stage (the original # self.anchorSortKey was applied at a different stage of # the algorithm, on anchors instead of mark classes) self.anchorSortKey.get(self._removeClassPrefix(markClass), 0) for markClass in group ), # Second part of the tuple sorts the groups lexicographically group, ), ) def _logIfAmbiguous(self, attachments, groupedMarkClasses): """Warn about ambiguous situations and log the current resolution. An anchor attachment is ambiguous if for the same mark glyph, more than one mark class can be used to attach it to the base. """ for attachment in attachments: for markGlyph, markClasses in attachment.getMarkGlyphToMarkClasses(): if len(markClasses) > 1: self.log.info( "The base glyph %s and mark glyph %s are ambiguously " "connected by several anchor classes: %s. " "The last one will prevail.", attachment.name, markGlyph, ", ".join( markClass for group in groupedMarkClasses for markClass in group if markClass in markClasses ), ) def _removeClassPrefix(self, markClass): assert markClass.startswith(self.markClassPrefix) return markClass[len(self.markClassPrefix) :] def _groupAttachments(self, attachments): """Group the given attachments so that no group contains conflicting anchor classes for the same glyph. """ # Idea for mark2base: # attachments is a list of mark to base pairs, linked together through # an anchor name We have to put them into one or more lookups with the # constraint that the same mark glyph cannot appear twice in the same # lookup while using different anchor names. # Idea for mark2liga: # attachments is a list of mark to liga positioning. Each links # together a base ligature with several marks, through numbered anchor # names. # We have to put them into one or more lookups with the constraint that # the same mark glyph cannot appear twice in the same lookup while # using different anchor names. # To do so, if a single attachment refers to to the same mark twice # through different anchor names, we may have to split the attachment # into two attachments, using null anchors instead of one or the other # mark class in each split attachment. markGlyphToMarkClasses = defaultdict(set) for attachment in attachments: for markGlyph, markClasses in attachment.getMarkGlyphToMarkClasses(): markGlyphToMarkClasses[markGlyph].update(markClasses) groupedMarkClasses = self._groupMarkClasses(markGlyphToMarkClasses) self._logIfAmbiguous(attachments, groupedMarkClasses) lookups = [] for markClasses in groupedMarkClasses: lookup = [] # Filter existing attachments for attachment in attachments: # One attachment has one base glyph and many marks, each of # the class NamedAnchor. Each NamedAnchor has one markClass. # We keep the NamedAnchor if the markClass is allowed in the # current lookup. 
def include(anchor): return anchor.markClass.name in markClasses # noqa: B023 filteredAttachment = attachment.filter(include) if filteredAttachment: lookup.append(filteredAttachment) lookups.append(lookup) return lookups def _makeMarkToBaseAttachments(self): markGlyphNames = self.context.markGlyphNames baseClass = self.context.gdefClasses.base result = [] for glyphName, anchors in self.context.anchorLists.items(): # exclude mark glyphs, or glyphs not listed in GDEF Base if glyphName in markGlyphNames or ( baseClass is not None and glyphName not in baseClass ): continue baseMarks = [] for anchor in anchors: if anchor.markClass is None or anchor.number is not None: # skip anchors for which no mark class is defined; also # skip '_1', '_2', etc. suffixed anchors for this lookup # type; these will be are added in the mark2liga lookup continue assert not anchor.isMark baseMarks.append(anchor) if not baseMarks: continue result.append(MarkToBasePos(glyphName, baseMarks)) return result def _makeMarkToMarkAttachments(self): markGlyphNames = self.context.markGlyphNames # we make a dict of lists containing mkmk pos rules keyed by # anchor name, so we can create one mkmk lookup per markClass # each with different mark filtering sets. results = {} for glyphName, anchors in self.context.anchorLists.items(): if glyphName not in markGlyphNames: continue for anchor in anchors: # skip anchors for which no mark class is defined if anchor.markClass is None or anchor.isMark: continue if anchor.number is not None: self.log.warning( "invalid ligature anchor '%s' in mark glyph '%s'; " "skipped", anchor.name, glyphName, ) continue pos = MarkToMarkPos(glyphName, [anchor]) results.setdefault(anchor.key, []).append(pos) return results def _makeMarkToLigaAttachments(self): markGlyphNames = self.context.markGlyphNames ligatureClass = self.context.gdefClasses.ligature result = [] for glyphName, anchors in self.context.anchorLists.items(): # exclude mark glyphs, or glyphs not listed in GDEF Ligature if glyphName in markGlyphNames or ( ligatureClass is not None and glyphName not in ligatureClass ): continue componentAnchors = {} for anchor in anchors: if anchor.markClass is None and anchor.key: # skip anchors for which no mark class is defined continue assert not anchor.isMark number = anchor.number if number is None: # we handled these in the mark2base lookup continue # unnamed anchors with only a number suffix "_1", "_2", etc. 
# are understood as the ligature component having if not anchor.key: componentAnchors[number] = [] else: componentAnchors.setdefault(number, []).append(anchor) if not componentAnchors: continue ligatureMarks = [] # ligature components are indexed from 1; any missing intermediate # anchor number means the component has for number in range(1, max(componentAnchors.keys()) + 1): ligatureMarks.append(componentAnchors.get(number, [])) result.append(MarkToLigaPos(glyphName, ligatureMarks)) return result @staticmethod def _iterAttachments(attachments, include=None, marksFilter=None): for pos in attachments: if include is not None and not include(pos.name): continue if marksFilter is not None: pos = pos.filter(marksFilter) if pos is None: continue yield pos def _makeMarkLookup(self, lookupName, attachments, include, marksFilter=None): statements = [ pos.asAST() for pos in self._iterAttachments(attachments, include, marksFilter) ] if statements: lkp = ast.LookupBlock(lookupName) lkp.statements.extend(statements) return lkp def _makeMarkFilteringSetClass(self, lookupName, attachments, markClass, include): markGlyphs = (glyphName for glyphName in markClass.glyphs if include(glyphName)) baseGlyphs = ( pos.name for pos in attachments if pos.name not in markClass.glyphs ) members = itertools.chain(markGlyphs, baseGlyphs) className = "MFS_%s" % lookupName return ast.makeGlyphClassDefinitions( {className: members}, feaFile=self.context.feaFile )[className] def _makeMarkToMarkLookup( self, anchorName, attachments, include, marksFilter=None, featureTag=None ): attachments = list(self._iterAttachments(attachments, include, marksFilter)) if not attachments: return prefix = (featureTag + "_") if featureTag is not None else "" lookupName = f"{prefix}mark2mark_{anchorName}" filteringClass = self._makeMarkFilteringSetClass( lookupName, attachments, markClass=self.context.markClasses[anchorName], include=include, ) lkp = ast.LookupBlock(lookupName) lkp.statements.append(filteringClass) lkp.statements.append(ast.makeLookupFlag(markFilteringSet=filteringClass)) lkp.statements.extend(pos.asAST() for pos in attachments) return lkp def _makeMarkFeature(self, include): baseLkps = [] for i, attachments in enumerate(self.context.groupedMarkToBaseAttachments): lookup = self._makeMarkLookup( f"mark2base{'_' + str(i) if i > 0 else ''}", attachments, include ) if lookup: baseLkps.append(lookup) ligaLkps = [] for i, attachments in enumerate(self.context.groupedMarkToLigaAttachments): lookup = self._makeMarkLookup( f"mark2liga{'_' + str(i) if i > 0 else ''}", attachments, include ) if lookup: ligaLkps.append(lookup) if not baseLkps and not ligaLkps: return feature = ast.FeatureBlock("mark") for baseLkp in baseLkps: feature.statements.append(baseLkp) for ligaLkp in ligaLkps: feature.statements.append(ligaLkp) return feature def _makeMkmkFeature(self, include): feature = ast.FeatureBlock("mkmk") for anchorName, attachments in sorted( self.context.markToMarkAttachments.items() ): lkp = self._makeMarkToMarkLookup(anchorName, attachments, include) if lkp is not None: feature.statements.append(lkp) return feature if feature.statements else None def _isAboveMark(self, anchor): if anchor.name in self.abvmAnchorNames: return True if anchor.name in self.blwmAnchorNames or anchor.name.startswith("bottom"): return False # Glyphs uses (used to use?) a heuristic to guess whether an anchor # should go into abvm or blwm. 
(See # https://github.com/googlefonts/ufo2ft/issues/179#issuecomment-390391382) # However, this causes issues in variable fonts where an anchor in one # master is assigned to a different feature from the same anchor in # another master if the Y-coordinates happen to straddle the threshold # coordinate. For simplicity, we just place all unknown anchors into # the abvm feature. return True def _isBelowMark(self, anchor): return not self._isAboveMark(anchor) def _makeAbvmOrBlwmFeature(self, tag, include): if tag == "abvm": marksFilter = self._isAboveMark elif tag == "blwm": marksFilter = self._isBelowMark else: raise AssertionError(tag) baseLkps = [] for i, attachments in enumerate(self.context.groupedMarkToBaseAttachments): lookup = self._makeMarkLookup( f"{tag}_mark2base{'_' + str(i) if i > 0 else ''}", attachments, include=include, marksFilter=marksFilter, ) if lookup: baseLkps.append(lookup) ligaLkps = [] for i, attachments in enumerate(self.context.groupedMarkToLigaAttachments): lookup = self._makeMarkLookup( f"{tag}_mark2liga{'_' + str(i) if i > 0 else ''}", attachments, include=include, marksFilter=marksFilter, ) if lookup: ligaLkps.append(lookup) mkmkLookups = [] for anchorName, attachments in sorted( self.context.markToMarkAttachments.items() ): lkp = self._makeMarkToMarkLookup( anchorName, attachments, include=include, marksFilter=marksFilter, featureTag=tag, ) if lkp is not None: mkmkLookups.append(lkp) if not any([baseLkps, ligaLkps, mkmkLookups]): return feature = ast.FeatureBlock(tag) for baseLkp in baseLkps: feature.statements.append(baseLkp) for ligaLkp in ligaLkps: feature.statements.append(ligaLkp) feature.statements.extend(mkmkLookups) return feature def _makeFeatures(self): ctx = self.context ctx.groupedMarkToBaseAttachments = self._groupAttachments( self._makeMarkToBaseAttachments() ) ctx.groupedMarkToLigaAttachments = self._groupAttachments( self._makeMarkToLigaAttachments() ) ctx.markToMarkAttachments = self._makeMarkToMarkAttachments() abvmGlyphs, notAbvmGlyphs = self._getAbvmGlyphs() def isAbvm(glyphName): return glyphName in abvmGlyphs def isNotAbvm(glyphName): return glyphName in notAbvmGlyphs features = {} todo = ctx.todo if "mark" in todo: mark = self._makeMarkFeature(include=isNotAbvm) if mark is not None: features["mark"] = mark if "mkmk" in todo: mkmk = self._makeMkmkFeature(include=isNotAbvm) if mkmk is not None: features["mkmk"] = mkmk if "abvm" in todo or "blwm" in todo: if abvmGlyphs: for tag in ("abvm", "blwm"): if tag not in todo: continue feature = self._makeAbvmOrBlwmFeature(tag, include=isAbvm) if feature is not None: features[tag] = feature return features def _getAbvmGlyphs(self): glyphSet = set(self.getOrderedGlyphSet().keys()) scriptsUsingAbvm = self.scriptsUsingAbvm if self.context.feaScripts: # https://github.com/googlefonts/ufo2ft/issues/579 Some characters # can be used in multiple scripts and some of these scripts might # need an abvm feature and some might not, so we filter-out the # abvm scripts that the font does not intend to support. 
scriptsUsingAbvm = scriptsUsingAbvm & self.context.feaScripts if scriptsUsingAbvm: cmap = self.makeUnicodeToGlyphNameMapping() unicodeIsAbvm = partial(unicodeInScripts, scripts=scriptsUsingAbvm) def unicodeIsNotAbvm(uv): return bool(script_extension(chr(uv)) - self.scriptsUsingAbvm) if any(unicodeIsAbvm(uv) for uv in cmap): # If there are any characters from Indic/USE/Khmer scripts in # the cmap, we compile a temporary GSUB table to resolve # substitutions and get the set of all the relevant glyphs, # including alternate glyphs. gsub = self.compileGSUB() glyphGroups = classifyGlyphs(unicodeIsAbvm, cmap, gsub) # the 'glyphGroups' dict is keyed by the return value of the # classifying include, so here 'True' means all the # Indic/USE/Khmer glyphs abvmGlyphs = glyphGroups.get(True, set()) # If a character can be used in Indic/USE/Khmer scripts as well # as other scripts, we want to return it in both 'abvmGlyphs' # (done above) and 'notAbvmGlyphs' (done below) sets. glyphGroups = classifyGlyphs(unicodeIsNotAbvm, cmap, gsub) notAbvmGlyphs = glyphGroups.get(True, set()) # Since cmap might not cover all glyphs, we union with the # glyph set. notAbvmGlyphs |= glyphSet - abvmGlyphs return abvmGlyphs, notAbvmGlyphs return set(), glyphSet def _write(self): self._pruneUnusedAnchors() newClassDefs = self._makeMarkClassDefinitions() self._setBaseAnchorMarkClasses() features = self._makeFeatures() if not features: return False feaFile = self.context.feaFile self._insert( feaFile=feaFile, markClassDefs=newClassDefs, features=[features[tag] for tag in sorted(features.keys())], ) return True ufo2ft-2.30.0/Lib/ufo2ft/filters/000077500000000000000000000000001434012334300163615ustar00rootroot00000000000000ufo2ft-2.30.0/Lib/ufo2ft/filters/__init__.py000066400000000000000000000111231434012334300204700ustar00rootroot00000000000000import importlib import logging from inspect import getfullargspec, isclass from ufo2ft.constants import FILTERS_KEY from ufo2ft.util import _loadPluginFromString from .base import BaseFilter from .cubicToQuadratic import CubicToQuadraticFilter from .decomposeComponents import DecomposeComponentsFilter from .decomposeTransformedComponents import DecomposeTransformedComponentsFilter from .dottedCircleFilter import DottedCircleFilter from .explodeColorLayerGlyphs import ExplodeColorLayerGlyphsFilter from .flattenComponents import FlattenComponentsFilter from .propagateAnchors import PropagateAnchorsFilter from .removeOverlaps import RemoveOverlapsFilter from .sortContours import SortContoursFilter from .transformations import TransformationsFilter __all__ = [ "BaseFilter", "CubicToQuadraticFilter", "DecomposeComponentsFilter", "DecomposeTransformedComponentsFilter", "DottedCircleFilter", "ExplodeColorLayerGlyphsFilter", "FlattenComponentsFilter", "PropagateAnchorsFilter", "RemoveOverlapsFilter", "SortContoursFilter", "TransformationsFilter", "loadFilters", "loadFilterFromString", ] logger = logging.getLogger(__name__) def getFilterClass(filterName, pkg="ufo2ft.filters"): """Given a filter name, import and return the filter class. By default, filter modules are searched within the ``ufo2ft.filters`` package. """ # TODO add support for third-party plugin discovery? 
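# Concretely (an illustrative note, using filters that ship with this
# package), the naming convention implemented below means that
#
#     getFilterClass("Flatten Components")  # -> FlattenComponentsFilter
#     getFilterClass("Remove Overlaps")     # -> RemoveOverlapsFilter
#
# i.e. "Flatten Components" is looked up as the module
# ufo2ft.filters.flattenComponents and the class FlattenComponentsFilter.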
# if filter name is 'Foo Bar', the module should be called 'fooBar' filterName = filterName.replace(" ", "") moduleName = filterName[0].lower() + filterName[1:] module = importlib.import_module(".".join([pkg, moduleName])) # if filter name is 'Foo Bar', the class should be called 'FooBarFilter' className = filterName[0].upper() + filterName[1:] + "Filter" return getattr(module, className) def loadFilters(ufo): """Parse custom filters from the ufo's lib.plist. Return two lists, one for the filters that are applied before decomposition of composite glyphs, another for the filters that are applied after decomposition. """ preFilters, postFilters = [], [] for filterDict in ufo.lib.get(FILTERS_KEY, []): namespace = filterDict.get("namespace", "ufo2ft.filters") try: filterClass = getFilterClass(filterDict["name"], namespace) except (ImportError, AttributeError): from pprint import pformat logger.exception("Failed to load filter: %s", pformat(filterDict)) continue filterObj = filterClass( *filterDict.get("args", []), include=filterDict.get("include"), exclude=filterDict.get("exclude"), pre=filterDict.get("pre", False), **filterDict.get("kwargs", {}), ) if filterObj.pre: preFilters.append(filterObj) else: postFilters.append(filterObj) return preFilters, postFilters def isValidFilter(klass): """Return True if 'klass' is a valid filter class. A valid filter class is a class (of type 'type'), that has a '__call__' (bound method), with the signature matching the same method from the BaseFilter class: def __call__(self, font, glyphSet=None) """ if not isclass(klass): logger.error(f"{klass!r} is not a class") return False if not callable(klass): logger.error(f"{klass!r} is not callable") return False if getfullargspec(klass.__call__).args != getfullargspec(BaseFilter.__call__).args: logger.error(f"{klass!r} '__call__' method has incorrect signature") return False return True def loadFilterFromString(spec): """Take a string specifying a filter class to load (either a built-in filter or one defined in an external, user-defined module), initialize it with given options and return the filter object. The string must conform to the following notation: - an optional python module, followed by '::' - a required class name; the class must have a method called 'filter' with the same signature as the BaseFilter. - an optional list of keyword-only arguments enclosed by parentheses Raises ValueError if the string doesn't conform to this specification; TypeError if imported name is not a filter class; and ImportError if the user-defined module cannot be imported. 
Examples: >>> loadFilterFromString("ufo2ft.filters.removeOverlaps::RemoveOverlapsFilter") """ return _loadPluginFromString(spec, "ufo2ft.filters", isValidFilter) ufo2ft-2.30.0/Lib/ufo2ft/filters/__main__.py000066400000000000000000000027161434012334300204610ustar00rootroot00000000000000import argparse import logging from fontTools.misc.cliTools import makeOutputFileName from ufo2ft.filters import loadFilterFromString, logger try: import ufoLib2 loader = ufoLib2.Font.open except ImportError: import defcon loader = defcon.Font logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description="Filter a UFO file") parser.add_argument("--output", "-o", metavar="OUTPUT", help="output file name") include_group = parser.add_mutually_exclusive_group(required=False) include_group.add_argument( "--include", metavar="GLYPHS", help="comma-separated list of glyphs to filter" ) include_group.add_argument( "--exclude", metavar="GLYPHS", help="comma-separated list of glyphs to not filter" ) parser.add_argument("ufo", metavar="UFO", help="UFO file") parser.add_argument("filters", metavar="FILTER", nargs="+", help="filter name") args = parser.parse_args() if not args.output: args.output = makeOutputFileName(args.ufo) ufo = loader(args.ufo) include = None if args.include: include_set = set(args.include.split(",")) def include(g): return g.name in include_set elif args.exclude: exclude_set = set(args.exclude.split(",")) def include(g): return g.name not in exclude_set for filtername in args.filters: f = loadFilterFromString(filtername) if include is not None: f.include = include f(ufo) logger.info("Written on %s" % args.output) ufo.save(args.output) ufo2ft-2.30.0/Lib/ufo2ft/filters/base.py000066400000000000000000000200401434012334300176410ustar00rootroot00000000000000import logging from types import SimpleNamespace from fontTools.misc.loggingTools import Timer from ufo2ft.util import _GlyphSet, _LazyFontName, getMaxComponentDepth # reuse the "ufo2ft.filters" logger logger = logging.getLogger("ufo2ft.filters") # library-level logger specialized for timing info which apps like fontmake # can selectively configure timing_logger = logging.getLogger("ufo2ft.timer") class BaseFilter: # tuple of strings listing the names of required positional arguments # which will be set as attributes of the filter instance _args = () # dictionary containing the names of optional keyword arguments and # their default values, which will be set as instance attributes _kwargs = {} # pre-filter when True, post-filter when False, meaning before or after default # filters _pre = False def __init__(self, *args, **kwargs): self.options = options = SimpleNamespace() num_required = len(self._args) num_args = len(args) # process positional arguments as keyword arguments if num_args < num_required: args = ( *args, *(kwargs.pop(a) for a in self._args[num_args:] if a in kwargs), ) num_args = len(args) duplicated_args = [k for k in self._args if k in kwargs] if duplicated_args: num_duplicated = len(duplicated_args) raise TypeError( "got {} duplicated positional argument{}: {}".format( num_duplicated, "s" if num_duplicated > 1 else "", ", ".join(duplicated_args), ) ) # process positional arguments if num_args < num_required: missing = [repr(a) for a in self._args[num_args:]] num_missing = len(missing) raise TypeError( "missing {} required positional argument{}: {}".format( num_missing, "s" if num_missing > 1 else "", ", ".join(missing) ) ) elif num_args > num_required: extra = [repr(a) for a in args[num_required:]] num_extra = 
len(extra) raise TypeError( "got {} unsupported positional argument{}: {}".format( num_extra, "s" if num_extra > 1 else "", ", ".join(extra) ) ) for key, value in zip(self._args, args): setattr(options, key, value) # process optional keyword arguments for key, default in self._kwargs.items(): setattr(options, key, kwargs.pop(key, default)) # process special pre argument self.pre = kwargs.pop("pre", self._pre) # process special include/exclude arguments include = kwargs.pop("include", None) exclude = kwargs.pop("exclude", None) if include is not None and exclude is not None: raise ValueError("'include' and 'exclude' arguments are mutually exclusive") if callable(include): # 'include' can be a function (e.g. lambda) that takes a # glyph object and returns True/False based on some test self.include = include self._include_repr = lambda: repr(include) elif include is not None: # or it can be a list of glyph names to be included included = set(include) self.include = lambda g: g.name in included self._include_repr = lambda: repr(include) elif exclude is not None: # alternatively one can provide a list of names to not include excluded = set(exclude) self.include = lambda g: g.name not in excluded self._exclude_repr = lambda: repr(exclude) else: # by default, all glyphs are included self.include = lambda g: True # raise if any unsupported keyword arguments if kwargs: num_left = len(kwargs) raise TypeError( "got {}unsupported keyword argument{}: {}".format( "an " if num_left == 1 else "", "s" if len(kwargs) > 1 else "", ", ".join(f"'{k}'" for k in kwargs), ) ) # run the filter's custom initialization code self.start() def __repr__(self): items = [] if self._args: items.append( ", ".join(repr(getattr(self.options, arg)) for arg in self._args) ) if self._kwargs: items.append( ", ".join( "{}={!r}".format(k, getattr(self.options, k)) for k in sorted(self._kwargs) ) ) if hasattr(self, "_include_repr"): items.append(f"include={self._include_repr()}") elif hasattr(self, "_exclude_repr"): items.append(f"exclude={self._exclude_repr()}") return "{}({})".format(type(self).__name__, ", ".join(items)) def start(self): """Subclasses can perform here custom initialization code.""" pass def set_context(self, font, glyphSet): """Populate a `self.context` namespace, which is reset before each new filter call. Subclasses can override this to provide contextual information which depends on other data in the font that is not available in the glyphs objects currently being filtered, or set any other temporary attributes. The default implementation simply sets the current font and glyphSet, and initializes an empty set that keeps track of the names of the glyphs that were modified. Returns the namespace instance. """ self.context = SimpleNamespace(font=font, glyphSet=glyphSet) self.context.modified = set() return self.context def filter(self, glyph): """This is where the filter is applied to a single glyph. Subclasses must override this method, and return True when the glyph was modified. """ raise NotImplementedError @property def name(self): return self.__class__.__name__ def __call__(self, font, glyphSet=None): """Run this filter on all the included glyphs. Return the set of glyph names that were modified, if any. If `glyphSet` (dict) argument is provided, run the filter on the glyphs contained therein (which may be copies). Otherwise, run the filter in-place on the font's default glyph set. 
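For example (an illustrative sketch; the font path and glyph names are hypothetical):

    >>> import ufoLib2
    >>> from ufo2ft.filters import DecomposeComponentsFilter
    >>> ufo = ufoLib2.Font.open("MyFont.ufo")
    >>> decompose = DecomposeComponentsFilter(include=["Aacute", "Agrave"])
    >>> modified = decompose(ufo)  # set of glyph names that were changed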
""" fontName = _LazyFontName(font) if glyphSet is not None and getattr(glyphSet, "name", None): logger.info("Running %s on %s-%s", self.name, fontName, glyphSet.name) else: logger.info("Running %s on %s", self.name, fontName) if glyphSet is None: glyphSet = _GlyphSet.from_layer(font) context = self.set_context(font, glyphSet) filter_ = self.filter include = self.include modified = context.modified # process composite glyphs in decreasing component depth order (i.e. composites # with more deeply nested components before shallower ones) to avoid # order-dependent interferences while filtering glyphs with nested components # https://github.com/googlefonts/ufo2ft/issues/621 orderedGlyphs = sorted( glyphSet.keys(), key=lambda g: -getMaxComponentDepth(glyphSet[g], glyphSet) ) with Timer() as t: for glyphName in orderedGlyphs: if glyphName in modified: continue glyph = glyphSet[glyphName] if include(glyph) and filter_(glyph): modified.add(glyphName) num = len(modified) if num > 0: timing_logger.debug( "Took %.3fs to run %s on %d glyph%s", t, self.name, len(modified), "" if num == 1 else "s", ) return modified ufo2ft-2.30.0/Lib/ufo2ft/filters/cubicToQuadratic.py000066400000000000000000000044541434012334300221700ustar00rootroot00000000000000import logging from cu2qu.pens import Cu2QuPointPen from cu2qu.ufo import CURVE_TYPE_LIB_KEY, DEFAULT_MAX_ERR from ufo2ft.filters import BaseFilter from ufo2ft.fontInfoData import getAttrWithFallback logger = logging.getLogger(__name__) class CubicToQuadraticFilter(BaseFilter): _kwargs = { "conversionError": None, "reverseDirection": True, "rememberCurveType": False, } def set_context(self, font, glyphSet): ctx = super().set_context(font, glyphSet) relativeError = self.options.conversionError or DEFAULT_MAX_ERR ctx.absoluteError = relativeError * getAttrWithFallback(font.info, "unitsPerEm") ctx.stats = {} return ctx def __call__(self, font, glyphSet=None): if self.options.rememberCurveType: # check first in the global font lib, then in layer lib for lib in (font.lib, getattr(glyphSet, "lib", {})): curve_type = lib.get(CURVE_TYPE_LIB_KEY, "cubic") if curve_type == "quadratic": logger.info("Curves already converted to quadratic") return set() elif curve_type == "cubic": pass # keep converting else: raise NotImplementedError(curve_type) modified = super().__call__(font, glyphSet) if modified: stats = self.context.stats logger.info( "New spline lengths: %s" % (", ".join("%s: %d" % (ln, stats[ln]) for ln in sorted(stats.keys()))) ) if self.options.rememberCurveType: # 'lib' here is the layer's lib, as defined in for loop variable curve_type = lib.get(CURVE_TYPE_LIB_KEY, "cubic") if curve_type != "quadratic": lib[CURVE_TYPE_LIB_KEY] = "quadratic" return modified def filter(self, glyph): if not len(glyph): return False pen = Cu2QuPointPen( glyph.getPointPen(), self.context.absoluteError, reverse_direction=self.options.reverseDirection, stats=self.context.stats, ) contours = list(glyph) glyph.clearContours() for contour in contours: contour.drawPoints(pen) return True ufo2ft-2.30.0/Lib/ufo2ft/filters/decomposeComponents.py000066400000000000000000000005721434012334300227630ustar00rootroot00000000000000from fontTools.misc.transform import Transform import ufo2ft.util from ufo2ft.filters import BaseFilter class DecomposeComponentsFilter(BaseFilter): def filter(self, glyph): if not glyph.components: return False ufo2ft.util.deepCopyContours(self.context.glyphSet, glyph, glyph, Transform()) glyph.clearComponents() return True 
ufo2ft-2.30.0/Lib/ufo2ft/filters/decomposeTransformedComponents.py000066400000000000000000000012311434012334300251610ustar00rootroot00000000000000from fontTools.misc.transform import Identity, Transform import ufo2ft.util from ufo2ft.filters import BaseFilter class DecomposeTransformedComponentsFilter(BaseFilter): def filter(self, glyph): if not glyph.components: return False needs_decomposition = False for component in glyph.components: if component.transformation[:4] != Identity[:4]: needs_decomposition = True break if not needs_decomposition: return False ufo2ft.util.deepCopyContours(self.context.glyphSet, glyph, glyph, Transform()) glyph.clearComponents() return True ufo2ft-2.30.0/Lib/ufo2ft/filters/dottedCircleFilter.py000066400000000000000000000234671434012334300225220ustar00rootroot00000000000000""" Dotted Circle Filter This filter checks whether a font contains a glyph for U+25CC (DOTTED CIRCLE), which is inserted by complex shapers to display mark glyphs which have no associated base glyph, usually as a result of broken clusters but also for pedagogical reasons. (For example, to display the marks in a table of glyphs.) If no dotted circle glyph is present in the font, then one is drawn and added. Next, the filter creates any additional anchors for the dotted circle glyph to ensure that all marks can be attached to it. It does this by gathering a list of anchors, finding the set of base glyphs for each anchor, computing the average position of the anchor on the base glyph (relative to the glyph's width), and then creating an anchor at that average position on the dotted circle glyph. The filter must be run as a "pre" filter. This can be done from the command line like so:: fontmake -o ttf -g MyFont.glyphs --filter "DottedCircleFilter(pre=True)" or in the ``lib.plist`` file of a UFO:: com.github.googlei18n.ufo2ft.filters name DottedCircleFilter pre The filter supports the following options: margin When drawing a dotted circle, the vertical space in units around the dotted circle. sidebearings When drawing a dotted circle, additional horizontal space in units around the dotted circle. dots Number of dots in the circle. """ import logging import math from statistics import mean from fontTools.misc.fixedTools import otRound from ufo2ft.constants import OPENTYPE_CATEGORIES_KEY from ufo2ft.featureCompiler import parseLayoutFeatures from ufo2ft.featureWriters import ast from ufo2ft.filters import BaseFilter from ufo2ft.util import _getNewGlyphFactory, _GlyphSet, _LazyFontName, _setGlyphMargin logger = logging.getLogger(__name__) DO_NOTHING = -1 # Sentinel value (not a valid glyph name) # Length of cubic Bezier handle used when drawing quarter circles. 
# See https://pomax.github.io/bezierinfo/#circles_cubic CIRCULAR_SUPERNESS = 0.551784777779014 def circle(pen, origin, radius): w = (origin[0] - radius, origin[1]) n = (origin[0], origin[1] + radius) e = (origin[0] + radius, origin[1]) s = (origin[0], origin[1] - radius) pen.moveTo(w) pen.curveTo( (w[0], w[1] + radius * CIRCULAR_SUPERNESS), (n[0] - radius * CIRCULAR_SUPERNESS, n[1]), n, ) pen.curveTo( (n[0] + radius * CIRCULAR_SUPERNESS, n[1]), (e[0], e[1] + radius * CIRCULAR_SUPERNESS), e, ) pen.curveTo( (e[0], e[1] - radius * CIRCULAR_SUPERNESS), (s[0] + radius * CIRCULAR_SUPERNESS, s[1]), s, ) pen.curveTo( (s[0] - radius * CIRCULAR_SUPERNESS, s[1]), (w[0], w[1] - radius * CIRCULAR_SUPERNESS), w, ) pen.closePath() class DottedCircleFilter(BaseFilter): _kwargs = {"margin": 80, "sidebearing": 160, "dots": 12} def __call__(self, font, glyphSet=None): fontName = _LazyFontName(font) if glyphSet is not None and getattr(glyphSet, "name", None): logger.info("Running %s on %s-%s", self.name, fontName, glyphSet.name) else: logger.info("Running %s on %s", self.name, fontName) if glyphSet is None: glyphSet = _GlyphSet.from_layer(font) self.set_context(font, glyphSet) added_glyph = False dotted_circle_glyph = self.check_dotted_circle() if dotted_circle_glyph == DO_NOTHING: return [] if not dotted_circle_glyph: dotted_circle_glyph = self.draw_dotted_circle(glyphSet) added_glyph = True added_anchors = self.check_and_add_anchors(dotted_circle_glyph) if added_anchors: self.ensure_base(dotted_circle_glyph) if added_glyph or added_anchors: return [dotted_circle_glyph.name] else: return [] def check_dotted_circle(self): """Check for the presence of a dotted circle glyph and return it""" font = self.context.font glyphset = self.context.glyphSet dotted_circle = next((g.name for g in font if 0x25CC in g.unicodes), None) if dotted_circle: if dotted_circle not in glyphset: logger.debug( "Found dotted circle glyph %s in font but not in glyphset", dotted_circle, ) return DO_NOTHING logger.debug("Found dotted circle glyph %s", dotted_circle) return glyphset[dotted_circle] def draw_dotted_circle(self, glyphSet): """Add a new dotted circle glyph, drawing its outlines""" font = self.context.font logger.debug("Adding dotted circle glyph") proto = font.layers.defaultLayer.instantiateGlyphObject() glyph = _getNewGlyphFactory(proto)(name="uni25CC", unicodes=[0x25CC]) pen = glyph.getPen() bigradius = (font.info.xHeight - 2 * self.options.margin) / 2 littleradius = bigradius / 6 left = self.options.sidebearing + littleradius right = self.options.sidebearing + bigradius * 2 - littleradius middleY = font.info.xHeight / 2 middleX = (left + right) / 2 subangle = 2 * math.pi / self.options.dots for t in range(self.options.dots): angle = t * subangle cx = middleX + bigradius * math.cos(angle) cy = middleY + bigradius * math.sin(angle) circle(pen, (cx, cy), littleradius) _setGlyphMargin(glyph, "right", self.options.sidebearing) glyphSet["uni25CC"] = glyph return glyph def check_and_add_anchors(self, dotted_circle_glyph): """Check that all mark-attached anchors are present on the dotted circle glyph, synthesizing a position for any missing anchors.""" font = self.context.font # First we will gather information about all the anchors in the # font at present; for the anchors on marks (starting with "_") # we just want to know their names, so we can match them with # bases later. For the anchors on bases, we also want to store # the position of the anchor so we can average them. 
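# For example (hypothetical numbers): if "top" sits at x=250 on a 500-unit
# wide base (x/width = 0.5, y = 700) and at x=300 on a 600-unit wide base
# (x/width = 0.5, y = 720), then on a 620-unit wide dotted circle the
# synthesized "top" anchor lands at (620 * 0.5, mean(700, 720)) = (310, 710).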
all_anchors = {} any_added = False for glyph in font: width = None try: bounds = glyph.getBounds(font) if bounds: width = bounds.xMax - bounds.xMin except AttributeError: bounds = glyph.bounds if bounds: width = bounds[2] - bounds[0] if width is None: width = glyph.width for anchor in glyph.anchors: if anchor.name.startswith("_"): all_anchors[anchor.name] = [] continue if not width: continue x_percentage = anchor.x / width all_anchors.setdefault(anchor.name, []).append((x_percentage, anchor.y)) # Now we move to the dotted circle. What anchors do we have already? dsanchors = set([a.name for a in dotted_circle_glyph.anchors]) for anchor, positions in all_anchors.items(): # Skip existing anchors on the dotted-circle, and any anchors # which don't have a matching mark glyph (mark-to-lig etc.). if anchor in dsanchors or f"_{anchor}" not in all_anchors: continue # And now we're creating a new one anchor_x = dotted_circle_glyph.width * mean([v[0] for v in positions]) anchor_y = mean([v[1] for v in positions]) logger.debug( "Adding anchor %s to dotted circle glyph at %i,%i", anchor, anchor_x, anchor_y, ) dotted_circle_glyph.appendAnchor( {"x": otRound(anchor_x), "y": otRound(anchor_y), "name": anchor} ) any_added = True return any_added # We have added some anchors to the dotted circle glyph. Now we need to # ensure the glyph is a base (and specifically a base glyph, not just # unclassified), or else it won't be in the list of base glyphs when # we come to the mark features writer, and all our work will be for nothing. # Also note that if we had a dotted circle glyph in the font already and # we have come from Glyphs, glyphsLib would only consider the glyph to # be a base if it has anchors, and it might not have had any when glyphsLib # wrote the GDEF table. # So we have to go digging around for a GDEF table and modify it. def ensure_base(self, dotted_circle_glyph): dotted_circle = dotted_circle_glyph.name font = self.context.font feaFile = parseLayoutFeatures(font) if ast.findTable(feaFile, "GDEF") is None: # We have no GDEF table. GDEFFeatureWriter will create one # using the font's lib. if OPENTYPE_CATEGORIES_KEY in font.lib: font.lib[OPENTYPE_CATEGORIES_KEY][dotted_circle] = "base" return # We have GDEF table, so we need to find the GlyphClassDef, and add # ourselves to the baseGlyphs set. for st in feaFile.statements: if isinstance(st, ast.TableBlock) and st.name == "GDEF": for st2 in st.statements: if isinstance(st2, ast.GlyphClassDefStatement): if ( st2.baseGlyphs and dotted_circle not in st2.baseGlyphs.glyphSet() ): st2.baseGlyphs.glyphs.append(dotted_circle) # And then put the modified feature file back into the font font.features.text = feaFile.asFea() ufo2ft-2.30.0/Lib/ufo2ft/filters/explodeColorLayerGlyphs.py000066400000000000000000000064721434012334300235670ustar00rootroot00000000000000from ufo2ft.constants import COLOR_LAYER_MAPPING_KEY, COLOR_LAYERS_KEY from ufo2ft.filters import BaseFilter from ufo2ft.util import _GlyphSet class ExplodeColorLayerGlyphsFilter(BaseFilter): """This filter doesn't really filter glyphs, but copies glyphs from UFO layers to alternate glyphs in the default layer, for use in the COLR table. 
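For example (hypothetical layer names), a glyph "A" whose color layer
mapping is [("color1", 0), ("color2", 1)] has its outlines from the
"color1" and "color2" layers copied to default-layer glyphs "A.color1"
and "A.color2" (assuming those layer glyphs differ from the default "A"),
and the font lib's COLOR_LAYERS_KEY entry for "A" becomes
[("A.color1", 0), ("A.color2", 1)].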
""" def set_context(self, font, glyphSet): context = super().set_context(font, glyphSet) context.globalColorLayerMapping = font.lib.get(COLOR_LAYER_MAPPING_KEY) context.layerGlyphSets = {} context.colorLayerGlyphNames = set() # glyph names that we added if COLOR_LAYERS_KEY not in font.lib: font.lib[COLOR_LAYERS_KEY] = {} else: # if the font already contains an explicit COLOR_LAYERS_KEY, we # assume the color layers have already been 'exploded' once. context.skipCurrentFont = True return context def _getLayer(self, font, layerName): layer = self.context.layerGlyphSets.get(layerName) if layer is None: layer = _GlyphSet.from_layer(font, layerName) self.context.layerGlyphSets[layerName] = layer return layer def _copyGlyph(self, layerGlyphSet, glyphSet, glyphName, layerName): layerGlyph = layerGlyphSet[glyphName] layerGlyphName = f"{glyphName}.{layerName}" if layerGlyphName in glyphSet: if layerGlyphName in self.context.colorLayerGlyphNames: # We've added this glyph already, so we're done return layerGlyphName from ufo2ft.errors import InvalidFontData raise InvalidFontData( f"a glyph named {layerGlyphName} already exists, " "conflicting with a requested color layer glyph." ) for component in layerGlyph.components: baseLayerGlyphName = self._copyGlyph( layerGlyphSet, glyphSet, component.baseGlyph, layerName ) component.baseGlyph = baseLayerGlyphName glyphSet[layerGlyphName] = layerGlyph self.context.colorLayerGlyphNames.add(layerGlyphName) return layerGlyphName def filter(self, glyph): if getattr(self.context, "skipCurrentFont", False): return False font = self.context.font glyphSet = self.context.glyphSet colorLayers = font.lib[COLOR_LAYERS_KEY] colorLayerMapping = glyph.lib.get(COLOR_LAYER_MAPPING_KEY) if colorLayerMapping is None: colorLayerMapping = self.context.globalColorLayerMapping if colorLayerMapping is None: # No color layer info for this glyph return False layers = [] for layerName, colorID in colorLayerMapping: layerGlyphSet = self._getLayer(font, layerName) if glyph.name in layerGlyphSet: if glyph == layerGlyphSet[glyph.name]: layerGlyphName = glyph.name else: layerGlyphName = self._copyGlyph( layerGlyphSet, glyphSet, glyph.name, layerName ) layers.append((layerGlyphName, colorID)) if layers: colorLayers[glyph.name] = layers return True else: return False ufo2ft-2.30.0/Lib/ufo2ft/filters/flattenComponents.py000066400000000000000000000043031434012334300224360ustar00rootroot00000000000000import logging from fontTools.misc.transform import Transform from ufo2ft.filters import BaseFilter logger = logging.getLogger(__name__) class FlattenComponentsFilter(BaseFilter): def __call__(self, font, glyphSet=None): if super().__call__(font, glyphSet): modified = self.context.modified if modified: logger.info("Flattened composite glyphs: %i" % len(modified)) return modified def filter(self, glyph): flattened = False if not glyph.components: return flattened pen = glyph.getPen() for comp in list(glyph.components): flattened_tuples = _flattenComponent( self.context.glyphSet, comp, found_in=glyph ) if flattened_tuples[0] != (comp.baseGlyph, comp.transformation): flattened = True glyph.removeComponent(comp) for flattened_tuple in flattened_tuples: pen.addComponent(*flattened_tuple) if flattened: self.context.modified.add(glyph.name) return flattened def _flattenComponent(glyphSet, component, found_in): """Returns a list of tuples (baseGlyph, transform) of nested component.""" if component.baseGlyph not in glyphSet: raise ValueError( f"Could not find component '{component.baseGlyph}' used in 
'{found_in.name}'" ) glyph = glyphSet[component.baseGlyph] # Any contour will cause components to be decomposed if not glyph.components or len(glyph) > 0: transformation = Transform(*component.transformation) return [(component.baseGlyph, transformation)] all_flattened_components = [] for nested in glyph.components: flattened_components = _flattenComponent(glyphSet, nested, found_in=glyph) for i, (name, tr) in enumerate(flattened_components): flat_tr = Transform(*component.transformation) flat_tr = flat_tr.translate(tr.dx, tr.dy) flat_tr = flat_tr.transform((tr.xx, tr.xy, tr.yx, tr.yy, 0, 0)) flattened_components[i] = (name, flat_tr) all_flattened_components.extend(flattened_components) return all_flattened_components ufo2ft-2.30.0/Lib/ufo2ft/filters/propagateAnchors.py000066400000000000000000000153341434012334300222410ustar00rootroot00000000000000# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import fontTools.pens.boundsPen from fontTools.misc.transform import Transform from ufo2ft.filters import BaseFilter logger = logging.getLogger(__name__) class PropagateAnchorsFilter(BaseFilter): def set_context(self, font, glyphSet): ctx = super().set_context(font, glyphSet) ctx.processed = set() return ctx def __call__(self, font, glyphSet=None): if super().__call__(font, glyphSet): modified = self.context.modified if modified: logger.info("Glyphs with propagated anchors: %i" % len(modified)) return modified def filter(self, glyph): if not glyph.components: return False before = len(glyph.anchors) _propagate_glyph_anchors( self.context.glyphSet, glyph, self.context.processed, self.context.modified, ) return len(glyph.anchors) > before def _propagate_glyph_anchors(glyphSet, composite, processed, modified): """ Propagate anchors from base glyphs to a given composite glyph, and to all composite glyphs used in between. """ if composite.name in processed: return processed.add(composite.name) if not composite.components: return base_components = [] mark_components = [] anchor_names = set() to_add = {} for component in composite.components: try: glyph = glyphSet[component.baseGlyph] except KeyError: logger.warning( "Anchors not propagated for inexistent component {} " "in glyph {}".format(component.baseGlyph, composite.name) ) else: _propagate_glyph_anchors(glyphSet, glyph, processed, modified) if any(a.name.startswith("_") for a in glyph.anchors): mark_components.append(component) else: base_components.append(component) anchor_names |= {a.name for a in glyph.anchors} if mark_components and not base_components and _is_ligature_mark(composite): # The composite is a mark that is composed of other marks (E.g. # "circumflexcomb_tildecomb"). Promote the mark that is positioned closest # to the origin to a base. 
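# For example, in "circumflexcomb_tildecomb" (the mark stack mentioned above)
# the tilde component typically sits above the circumflex, so the circumflex
# component's bounds start closer to (0, 0); it is therefore promoted to a
# base and contributes its anchors, while the tilde keeps acting as a mark.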
try: component = _component_closest_to_origin(mark_components, glyphSet) except Exception as e: raise Exception( "Error while determining which component of composite " "'{}' is the lowest: {}".format(composite.name, str(e)) ) from e mark_components.remove(component) base_components.append(component) glyph = glyphSet[component.baseGlyph] anchor_names |= {a.name for a in glyph.anchors} for anchor_name in anchor_names: # don't add if composite glyph already contains this anchor OR any # associated ligature anchors (e.g. "top_1, top_2" for "top") if not any(a.name.startswith(anchor_name) for a in composite.anchors): _get_anchor_data(to_add, glyphSet, base_components, anchor_name) for component in mark_components: _adjust_anchors(to_add, glyphSet, component) # we sort propagated anchors to append in a deterministic order for name, (x, y) in sorted(to_add.items()): anchor_dict = {"name": name, "x": x, "y": y} try: composite.appendAnchor(anchor_dict) except TypeError: # pragma: no cover # fontParts API composite.appendAnchor(name, (x, y)) if to_add: modified.add(composite.name) def _get_anchor_data(anchor_data, glyphSet, components, anchor_name): """Get data for an anchor from a list of components.""" anchors = [] for component in components: for anchor in glyphSet[component.baseGlyph].anchors: if anchor.name == anchor_name: anchors.append((anchor, component)) break if len(anchors) > 1: for i, (anchor, component) in enumerate(anchors): t = Transform(*component.transformation) name = "%s_%d" % (anchor.name, i + 1) anchor_data[name] = t.transformPoint((anchor.x, anchor.y)) elif anchors: anchor, component = anchors[0] t = Transform(*component.transformation) anchor_data[anchor.name] = t.transformPoint((anchor.x, anchor.y)) def _adjust_anchors(anchor_data, glyphSet, component): """ Adjust base anchors to which a mark component may have been attached, by moving the base anchor attached to a mark anchor to the position of the mark component's base anchor. """ glyph = glyphSet[component.baseGlyph] t = Transform(*component.transformation) for anchor in glyph.anchors: # only adjust if this anchor has data and the component also contains # the associated mark anchor (e.g. "_top" for "top") if anchor.name in anchor_data and any( a.name == "_" + anchor.name for a in glyph.anchors ): anchor_data[anchor.name] = t.transformPoint((anchor.x, anchor.y)) def _component_closest_to_origin(components, glyph_set): """Return the component whose (xmin, ymin) bounds are closest to origin. This ensures that a component that is moved below another is actually recognized as such. Looking only at the transformation offset can be misleading. """ return min(components, key=lambda comp: _distance((0, 0), _bounds(comp, glyph_set))) def _distance(pos1, pos2): x1, y1 = pos1 x2, y2 = pos2 return (x1 - x2) ** 2 + (y1 - y2) ** 2 def _is_ligature_mark(glyph): return not glyph.name.startswith("_") and "_" in glyph.name def _bounds(component, glyph_set): """Return the (xmin, ymin) of the bounds of `component`.""" if hasattr(component, "bounds"): # e.g. defcon return component.bounds[:2] elif hasattr(component, "draw"): # e.g. 
ufoLib2 pen = fontTools.pens.boundsPen.BoundsPen(glyphSet=glyph_set) component.draw(pen) return pen.bounds[:2] else: raise ValueError( f"Don't know to to compute the bounds of component '{component}' " ) ufo2ft-2.30.0/Lib/ufo2ft/filters/removeOverlaps.py000066400000000000000000000030601434012334300217430ustar00rootroot00000000000000import logging from enum import Enum from ufo2ft.filters import BaseFilter logger = logging.getLogger(__name__) class RemoveOverlapsFilter(BaseFilter): class Backend(Enum): BOOLEAN_OPERATIONS = "booleanOperations" SKIA_PATHOPS = "pathops" # use booleanOperations by default, unless pathops specified as backend _kwargs = {"backend": Backend.BOOLEAN_OPERATIONS} def start(self): self.options.backend = self.Backend(self.options.backend) if self.options.backend is self.Backend.BOOLEAN_OPERATIONS: from booleanOperations import BooleanOperationsError, union self.union = union self.Error = BooleanOperationsError self.penGetter = "getPointPen" logger.debug("using booleanOperations as RemoveOverlapsFilter backend") elif self.options.backend is self.Backend.SKIA_PATHOPS: from pathops import PathOpsError, union self.union = union self.Error = PathOpsError self.penGetter = "getPen" logger.debug("using skia-pathops as RemoveOverlapsFilter backend") else: raise AssertionError(self.options.backend) def filter(self, glyph): if not len(glyph): return False contours = list(glyph) glyph.clearContours() pen = getattr(glyph, self.penGetter)() try: self.union(contours, pen) except self.Error: logger.error("Failed to remove overlaps for %s", glyph.name) raise return True ufo2ft-2.30.0/Lib/ufo2ft/filters/sortContours.py000066400000000000000000000030261434012334300214600ustar00rootroot00000000000000import logging import fontTools.pens.boundsPen from ufo2ft.filters import BaseFilter logger = logging.getLogger(__name__) class SortContoursFilter(BaseFilter): """Sort contours by their bounding box. ATTENTION: This filter should be run after decomposition! Mixed contours and components cannot meaningfully be sorted. This is to work around the undefined contour order in pyclipper, see https://sourceforge.net/p/polyclipping/bugs/195/. It only strikes on glyphs that contain a lot of contours on the same height (think word marks or glyphs like U+FFFC OBJECT REPLACEMENT CHARACTER, U+034F COMBINING GRAPHEME JOINER or U+2591 LIGHT SHADE). """ def filter(self, glyph): if len(glyph) == 0: # As in, no contours. 
return False if glyph.components: logger.warning( "Glyph '%s' contains components which will not be sorted.", glyph.name, ) contours = sorted( (c for c in glyph), key=lambda contour: _control_bounding_box(contour) ) glyph.clearContours() if hasattr(glyph, "appendContour"): # defcon for contour in contours: glyph.appendContour(contour) else: # ufoLib2 glyph.contours.extend(contours) return True def _control_bounding_box(contour): pen = fontTools.pens.boundsPen.ControlBoundsPen(None) p2s_pen = fontTools.pens.pointPen.PointToSegmentPen(pen) contour.drawPoints(p2s_pen) return pen.bounds ufo2ft-2.30.0/Lib/ufo2ft/filters/transformations.py000066400000000000000000000111361434012334300221660ustar00rootroot00000000000000import logging import math from enum import IntEnum from fontTools.misc.fixedTools import otRound from fontTools.misc.transform import Identity, Transform from fontTools.pens.recordingPen import RecordingPointPen from fontTools.pens.transformPen import TransformPointPen as _TransformPointPen from ufo2ft.filters import BaseFilter from ufo2ft.fontInfoData import getAttrWithFallback log = logging.getLogger(__name__) class TransformPointPen(_TransformPointPen): def __init__(self, outPointPen, transformation, modified=None): super().__init__(outPointPen, transformation) self.modified = modified if modified is not None else set() self._inverted = self._transformation.inverse() def addComponent(self, baseGlyph, transformation, identifier=None, **kwargs): if baseGlyph in self.modified: # multiply the component's transformation matrix with the inverse # of the filter's transformation matrix to compensate for the # transformation already applied to the base glyph transformation = Transform(*transformation).transform(self._inverted) super().addComponent(baseGlyph, transformation, identifier=identifier, **kwargs) class TransformationsFilter(BaseFilter): class Origin(IntEnum): CAP_HEIGHT = 0 HALF_CAP_HEIGHT = 1 X_HEIGHT = 2 HALF_X_HEIGHT = 3 BASELINE = 4 _kwargs = { "OffsetX": 0, "OffsetY": 0, "ScaleX": 100, "ScaleY": 100, "Slant": 0, "Origin": 4, # BASELINE } def start(self): self.options.Origin = self.Origin(self.options.Origin) def get_origin_height(self, font, origin): if origin is self.Origin.BASELINE: return 0 elif origin is self.Origin.CAP_HEIGHT: return getAttrWithFallback(font.info, "capHeight") elif origin is self.Origin.HALF_CAP_HEIGHT: return otRound(getAttrWithFallback(font.info, "capHeight") / 2) elif origin is self.Origin.X_HEIGHT: return getAttrWithFallback(font.info, "xHeight") elif origin is self.Origin.HALF_X_HEIGHT: return otRound(getAttrWithFallback(font.info, "xHeight") / 2) else: raise AssertionError(origin) def set_context(self, font, glyphSet): ctx = super().set_context(font, glyphSet) origin_height = self.get_origin_height(font, self.options.Origin) m = Identity dx, dy = self.options.OffsetX, self.options.OffsetY if dx != 0 or dy != 0: m = m.translate(dx, dy) sx, sy = self.options.ScaleX, self.options.ScaleY angle = self.options.Slant # TODO Add support for "Cursify" option # cursify = self.options.SlantCorrection if sx != 100 or sy != 100 or angle != 0: # vertically shift glyph to the specified 'Origin' before # scaling and/or slanting, then move it back if origin_height != 0: m = m.translate(0, origin_height) if sx != 100 or sy != 100: m = m.scale(sx / 100, sy / 100) if angle != 0: m = m.skew(math.radians(angle)) if origin_height != 0: m = m.translate(0, -origin_height) ctx.matrix = m return ctx def filter(self, glyph): matrix = self.context.matrix if matrix == 
Identity or not (glyph or glyph.components or glyph.anchors): return False # nothing to do modified = self.context.modified glyphSet = self.context.glyphSet for component in glyph.components: base_name = component.baseGlyph if base_name in modified: continue base_glyph = glyphSet[base_name] if self.include(base_glyph) and self.filter(base_glyph): # base glyph is included but was not transformed yet; we # call filter recursively until all the included bases are # transformed, or there are no more components modified.add(base_name) rec = RecordingPointPen() glyph.drawPoints(rec) glyph.clearContours() glyph.clearComponents() outpen = glyph.getPointPen() filterpen = TransformPointPen(outpen, matrix, modified) rec.replay(filterpen) # anchors are not drawn through the pen API, # must be transformed separately for a in glyph.anchors: a.x, a.y = matrix.transformPoint((a.x, a.y)) glyph.width, glyph.height = matrix.transformVector((glyph.width, glyph.height)) return True ufo2ft-2.30.0/Lib/ufo2ft/fontInfoData.py000066400000000000000000000411261434012334300176430ustar00rootroot00000000000000""" This file provides fallback data for info attributes that are required for building OTFs. There are two main functions that are important: * :func:`~getAttrWithFallback` * :func:`~preflightInfo` There are a set of other functions that are used internally for synthesizing values for specific attributes. These can be used externally as well. """ import calendar import logging import math import os import time import unicodedata from datetime import datetime from fontTools import ufoLib from fontTools.misc.fixedTools import otRound from fontTools.misc.textTools import binary2num logger = logging.getLogger(__name__) # ----------------- # Special Fallbacks # ----------------- # generic _styleMapStyleNames = ["regular", "bold", "italic", "bold italic"] def ascenderFallback(info): upm = getAttrWithFallback(info, "unitsPerEm") return otRound(upm * 0.8) def descenderFallback(info): upm = getAttrWithFallback(info, "unitsPerEm") return -otRound(upm * 0.2) def capHeightFallback(info): upm = getAttrWithFallback(info, "unitsPerEm") return otRound(upm * 0.7) def xHeightFallback(info): upm = getAttrWithFallback(info, "unitsPerEm") return otRound(upm * 0.5) def styleMapFamilyNameFallback(info): """ Fallback to *openTypeNamePreferredFamilyName* if *styleMapStyleName* or, if *styleMapStyleName* isn't defined, *openTypeNamePreferredSubfamilyName* is *regular*, *bold*, *italic* or *bold italic*, otherwise fallback to *openTypeNamePreferredFamilyName openTypeNamePreferredFamilyName*. """ familyName = getAttrWithFallback(info, "openTypeNamePreferredFamilyName") styleName = info.styleMapStyleName if not styleName: styleName = getAttrWithFallback(info, "openTypeNamePreferredSubfamilyName") if styleName is None: styleName = "" elif styleName.lower() in _styleMapStyleNames: styleName = "" return (familyName + " " + styleName).strip() def styleMapStyleNameFallback(info): """ Fallback to *openTypeNamePreferredSubfamilyName* if it is one of *regular*, *bold*, *italic*, *bold italic*, otherwise fallback to *regular*. 
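For example, a preferred subfamily name of "Bold" maps to "bold", while
"Condensed" or "Black" falls back to "regular".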
""" styleName = getAttrWithFallback(info, "openTypeNamePreferredSubfamilyName") if styleName is None: styleName = "regular" elif styleName.strip().lower() not in _styleMapStyleNames: styleName = "regular" else: styleName = styleName.strip().lower() return styleName # head _date_format = "%Y/%m/%d %H:%M:%S" def dateStringForNow(): return time.strftime(_date_format, time.gmtime()) def openTypeHeadCreatedFallback(info): """ Fallback to the environment variable SOURCE_DATE_EPOCH if set, otherwise now. """ if "SOURCE_DATE_EPOCH" in os.environ: t = datetime.utcfromtimestamp(int(os.environ["SOURCE_DATE_EPOCH"])) return t.strftime(_date_format) else: return dateStringForNow() # hhea def openTypeHheaAscenderFallback(info): """ Fallback to *ascender + typoLineGap*. """ return getAttrWithFallback(info, "ascender") + getAttrWithFallback( info, "openTypeOS2TypoLineGap" ) def openTypeHheaDescenderFallback(info): """ Fallback to *descender*. """ return getAttrWithFallback(info, "descender") def openTypeHheaCaretSlopeRiseFallback(info): """ Fallback to *openTypeHheaCaretSlopeRise*. If the italicAngle is zero, return 1. If italicAngle is non-zero, compute the slope rise from the complementary openTypeHheaCaretSlopeRun, if the latter is defined. Else, default to an arbitrary fixed reference point (1000). """ italicAngle = getAttrWithFallback(info, "italicAngle") if italicAngle != 0: if ( hasattr(info, "openTypeHheaCaretSlopeRun") and info.openTypeHheaCaretSlopeRun is not None ): slopeRun = info.openTypeHheaCaretSlopeRun return otRound(slopeRun / math.tan(math.radians(-italicAngle))) else: return 1000 # just an arbitrary non-zero reference point return 1 def openTypeHheaCaretSlopeRunFallback(info): """ Fallback to *openTypeHheaCaretSlopeRun*. If the italicAngle is zero, return 0. If italicAngle is non-zero, compute the slope run from the complementary openTypeHheaCaretSlopeRise. """ italicAngle = getAttrWithFallback(info, "italicAngle") if italicAngle != 0: slopeRise = getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") return otRound(math.tan(math.radians(-italicAngle)) * slopeRise) return 0 # name def openTypeNameVersionFallback(info): """ Fallback to *versionMajor.versionMinor* in the form 0.000. """ versionMajor = getAttrWithFallback(info, "versionMajor") versionMinor = getAttrWithFallback(info, "versionMinor") return "Version %d.%s" % (versionMajor, str(versionMinor).zfill(3)) def openTypeNameUniqueIDFallback(info): """ Fallback to *openTypeNameVersion;openTypeOS2VendorID;postscriptFontName*. """ version = getAttrWithFallback(info, "openTypeNameVersion").replace("Version ", "") vendor = getAttrWithFallback(info, "openTypeOS2VendorID") fontName = getAttrWithFallback(info, "postscriptFontName") return f"{version};{vendor};{fontName}" def openTypeNamePreferredFamilyNameFallback(info): """ Fallback to *familyName*. """ return getAttrWithFallback(info, "familyName") def openTypeNamePreferredSubfamilyNameFallback(info): """ Fallback to *styleName*. """ return getAttrWithFallback(info, "styleName") def openTypeNameWWSFamilyNameFallback(info): # not yet supported return None def openTypeNameWWSSubfamilyNameFallback(info): # not yet supported return None # OS/2 def openTypeOS2TypoAscenderFallback(info): """ Fallback to *ascender*. """ return getAttrWithFallback(info, "ascender") def openTypeOS2TypoDescenderFallback(info): """ Fallback to *descender*. 
""" return getAttrWithFallback(info, "descender") def openTypeOS2TypoLineGapFallback(info): """ Fallback to *UPM * 1.2 - ascender + descender*, or zero if that's negative. """ return max( int(getAttrWithFallback(info, "unitsPerEm") * 1.2) - getAttrWithFallback(info, "ascender") + getAttrWithFallback(info, "descender"), 0, ) def openTypeOS2WinAscentFallback(info): """ Fallback to *ascender + typoLineGap*. """ return getAttrWithFallback(info, "ascender") + getAttrWithFallback( info, "openTypeOS2TypoLineGap" ) def openTypeOS2WinDescentFallback(info): """ Fallback to *descender*. """ return abs(getAttrWithFallback(info, "descender")) # postscript _postscriptFontNameExceptions = set("[](){}<>/%") _postscriptFontNameAllowed = {chr(i) for i in range(33, 127)} def normalizeStringForPostscript(s, allowSpaces=True): normalized = [] for c in s: if c == " " and not allowSpaces: continue if c in _postscriptFontNameExceptions: continue if c not in _postscriptFontNameAllowed: # Use compatibility decomposed form, to keep parts in ascii c = unicodedata.normalize("NFKD", c) if not set(c) < _postscriptFontNameAllowed: c = c.encode("ascii", errors="replace").decode() normalized.append(c) return "".join(normalized) def normalizeNameForPostscript(name): return normalizeStringForPostscript(name, allowSpaces=False) def postscriptFontNameFallback(info): """ Fallback to a string containing only valid characters as defined in the specification. This will draw from *openTypeNamePreferredFamilyName* and *openTypeNamePreferredSubfamilyName*. """ name = "{}-{}".format( getAttrWithFallback(info, "openTypeNamePreferredFamilyName"), getAttrWithFallback(info, "openTypeNamePreferredSubfamilyName"), ) return normalizeNameForPostscript(name) def postscriptFullNameFallback(info): """ Fallback to *openTypeNamePreferredFamilyName openTypeNamePreferredSubfamilyName*. """ return "{} {}".format( getAttrWithFallback(info, "openTypeNamePreferredFamilyName"), getAttrWithFallback(info, "openTypeNamePreferredSubfamilyName"), ) def postscriptSlantAngleFallback(info): """ Fallback to *italicAngle*. """ return getAttrWithFallback(info, "italicAngle") def postscriptUnderlineThicknessFallback(info): """Return UPM * 0.05 (50 for 1000 UPM) and warn.""" logger.debug("Underline thickness not set in UFO, defaulting to UPM * 0.05") return getAttrWithFallback(info, "unitsPerEm") * 0.05 def postscriptUnderlinePositionFallback(info): """Return UPM * -0.075 (-75 for 1000 UPM) and warn.""" logger.debug("Underline position not set in UFO, defaulting to UPM * -0.075") return getAttrWithFallback(info, "unitsPerEm") * -0.075 def postscriptBlueScaleFallback(info): """ Fallback to a calculated value: 3/(4 * *maxZoneHeight*) where *maxZoneHeight* is the tallest zone from *postscriptBlueValues* and *postscriptOtherBlues*. If zones are not set, return 0.039625. 
""" blues = getAttrWithFallback(info, "postscriptBlueValues") otherBlues = getAttrWithFallback(info, "postscriptOtherBlues") maxZoneHeight = 0 blueScale = 0.039625 if blues: assert len(blues) % 2 == 0 for x, y in zip(blues[:-1:2], blues[1::2]): maxZoneHeight = max(maxZoneHeight, abs(y - x)) if otherBlues: assert len(otherBlues) % 2 == 0 for x, y in zip(otherBlues[:-1:2], otherBlues[1::2]): maxZoneHeight = max(maxZoneHeight, abs(y - x)) if maxZoneHeight != 0: blueScale = 3 / (4 * maxZoneHeight) return blueScale # -------------- # Attribute Maps # -------------- staticFallbackData = dict( versionMajor=0, versionMinor=0, copyright=None, trademark=None, familyName="New Font", styleName="Regular", unitsPerEm=1000, italicAngle=0, # not needed year=None, note=None, openTypeHeadLowestRecPPEM=6, openTypeHeadFlags=[0, 1], openTypeHheaLineGap=0, openTypeHheaCaretOffset=0, openTypeNameDesigner=None, openTypeNameDesignerURL=None, openTypeNameManufacturer=None, openTypeNameManufacturerURL=None, openTypeNameLicense=None, openTypeNameLicenseURL=None, openTypeNameDescription=None, openTypeNameCompatibleFullName=None, openTypeNameSampleText=None, openTypeNameRecords=[], openTypeOS2WidthClass=5, openTypeOS2WeightClass=400, openTypeOS2Selection=[], openTypeOS2VendorID="NONE", openTypeOS2Panose=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], openTypeOS2FamilyClass=[0, 0], openTypeOS2UnicodeRanges=None, openTypeOS2CodePageRanges=None, openTypeOS2Type=[2], openTypeOS2SubscriptXSize=None, openTypeOS2SubscriptYSize=None, openTypeOS2SubscriptXOffset=None, openTypeOS2SubscriptYOffset=None, openTypeOS2SuperscriptXSize=None, openTypeOS2SuperscriptYSize=None, openTypeOS2SuperscriptXOffset=None, openTypeOS2SuperscriptYOffset=None, openTypeOS2StrikeoutSize=None, openTypeOS2StrikeoutPosition=None, # fallback to None on these # as the user should be in # complete control openTypeVheaVertTypoAscender=None, openTypeVheaVertTypoDescender=None, openTypeVheaVertTypoLineGap=None, # fallback to horizontal caret: # a value of 0 for the rise # and a value of 1 for the run. 
openTypeVheaCaretSlopeRise=0, openTypeVheaCaretSlopeRun=1, openTypeVheaCaretOffset=0, postscriptUniqueID=None, postscriptWeightName=None, postscriptIsFixedPitch=False, postscriptBlueValues=[], postscriptOtherBlues=[], postscriptFamilyBlues=[], postscriptFamilyOtherBlues=[], postscriptStemSnapH=[], postscriptStemSnapV=[], postscriptBlueFuzz=0, postscriptBlueShift=7, postscriptForceBold=0, postscriptDefaultWidthX=200, postscriptNominalWidthX=0, # not used in OTF postscriptDefaultCharacter=None, postscriptWindowsCharacterSet=None, # not used in OTF macintoshFONDFamilyID=None, macintoshFONDName=None, ) specialFallbacks = dict( ascender=ascenderFallback, descender=descenderFallback, capHeight=capHeightFallback, xHeight=xHeightFallback, styleMapFamilyName=styleMapFamilyNameFallback, styleMapStyleName=styleMapStyleNameFallback, openTypeHeadCreated=openTypeHeadCreatedFallback, openTypeHheaAscender=openTypeHheaAscenderFallback, openTypeHheaDescender=openTypeHheaDescenderFallback, openTypeHheaCaretSlopeRise=openTypeHheaCaretSlopeRiseFallback, openTypeHheaCaretSlopeRun=openTypeHheaCaretSlopeRunFallback, openTypeNameVersion=openTypeNameVersionFallback, openTypeNameUniqueID=openTypeNameUniqueIDFallback, openTypeNamePreferredFamilyName=openTypeNamePreferredFamilyNameFallback, openTypeNamePreferredSubfamilyName=openTypeNamePreferredSubfamilyNameFallback, openTypeNameWWSFamilyName=openTypeNameWWSFamilyNameFallback, openTypeNameWWSSubfamilyName=openTypeNameWWSSubfamilyNameFallback, openTypeOS2TypoAscender=openTypeOS2TypoAscenderFallback, openTypeOS2TypoDescender=openTypeOS2TypoDescenderFallback, openTypeOS2TypoLineGap=openTypeOS2TypoLineGapFallback, openTypeOS2WinAscent=openTypeOS2WinAscentFallback, openTypeOS2WinDescent=openTypeOS2WinDescentFallback, postscriptFontName=postscriptFontNameFallback, postscriptFullName=postscriptFullNameFallback, postscriptSlantAngle=postscriptSlantAngleFallback, postscriptUnderlineThickness=postscriptUnderlineThicknessFallback, postscriptUnderlinePosition=postscriptUnderlinePositionFallback, postscriptBlueScale=postscriptBlueScaleFallback, ) requiredAttributes = set(ufoLib.fontInfoAttributesVersion2) - ( set(staticFallbackData.keys()) | set(specialFallbacks.keys()) ) recommendedAttributes = { "styleMapFamilyName", "versionMajor", "versionMinor", "copyright", "trademark", "openTypeHeadCreated", "openTypeNameDesigner", "openTypeNameDesignerURL", "openTypeNameManufacturer", "openTypeNameManufacturerURL", "openTypeNameLicense", "openTypeNameLicenseURL", "openTypeNameDescription", "openTypeNameSampleText", "openTypeOS2WidthClass", "openTypeOS2WeightClass", "openTypeOS2VendorID", "openTypeOS2Panose", "openTypeOS2FamilyClass", "openTypeOS2UnicodeRanges", "openTypeOS2CodePageRanges", "openTypeOS2TypoLineGap", "openTypeOS2Type", "postscriptBlueValues", "postscriptOtherBlues", "postscriptFamilyBlues", "postscriptFamilyOtherBlues", "postscriptStemSnapH", "postscriptStemSnapV", } # ------------ # Main Methods # ------------ def getAttrWithFallback(info, attr): """ Get the value for *attr* from the *info* object. If the object does not have the attribute or the value for the atribute is None, this will either get a value from a predefined set of attributes or it will synthesize a value from the available data. """ if hasattr(info, attr) and getattr(info, attr) is not None: value = getattr(info, attr) else: if attr in specialFallbacks: value = specialFallbacks[attr](info) else: value = staticFallbackData[attr] return value def preflightInfo(info): """ Returns a dict containing two items. 
The value for each item will be a list of info attribute names. ================== === missingRequired Required data that is missing. missingRecommended Recommended data that is missing. ================== === """ missingRequired = set() missingRecommended = set() for attr in requiredAttributes: if not hasattr(info, attr) or getattr(info, attr) is None: missingRequired.add(attr) for attr in recommendedAttributes: if not hasattr(info, attr) or getattr(info, attr) is None: missingRecommended.add(attr) return dict(missingRequired=missingRequired, missingRecommended=missingRecommended) # ----------------- # Low Level Support # ----------------- # these should not be used outside of this package def intListToNum(intList, start, length): all = [] bin = "" for i in range(start, start + length): if i in intList: b = "1" else: b = "0" bin = b + bin if not (i + 1) % 8: all.append(bin) bin = "" if bin: all.append(bin) all.reverse() all = " ".join(all) return binary2num(all) def dateStringToTimeValue(date): try: t = time.strptime(date, "%Y/%m/%d %H:%M:%S") return calendar.timegm(t) except ValueError: return 0 ufo2ft-2.30.0/Lib/ufo2ft/maxContextCalc.py000066400000000000000000000003341434012334300202000ustar00rootroot00000000000000"""NOTE: this module was moved to fonttools, it is kept here only for backward compatibility. Please import it from the new location. """ from fontTools.otlLib.maxContextCalc import maxCtxFont __all__ = ["maxCtxFont"] ufo2ft-2.30.0/Lib/ufo2ft/outlineCompiler.py000066400000000000000000001656101434012334300204460ustar00rootroot00000000000000import logging import math from collections import Counter, namedtuple from io import BytesIO from types import SimpleNamespace from fontTools.cffLib import ( CharStrings, GlobalSubrsIndex, IndexedStrings, PrivateDict, SubrsIndex, TopDict, TopDictIndex, ) from fontTools.misc.arrayTools import unionRect from fontTools.misc.fixedTools import otRound from fontTools.pens.boundsPen import ControlBoundsPen from fontTools.pens.pointPen import SegmentToPointPen from fontTools.pens.reverseContourPen import ReverseContourPen from fontTools.pens.t2CharStringPen import T2CharStringPen from fontTools.pens.ttGlyphPen import TTGlyphPointPen from fontTools.ttLib import TTFont, newTable from fontTools.ttLib.standardGlyphOrder import standardGlyphOrder from fontTools.ttLib.tables._g_l_y_f import USE_MY_METRICS, Glyph from fontTools.ttLib.tables._h_e_a_d import mac_epoch_diff from fontTools.ttLib.tables.O_S_2f_2 import Panose from ufo2ft.constants import ( COLOR_LAYERS_KEY, COLOR_PALETTES_KEY, COLR_CLIP_BOXES_KEY, OPENTYPE_META_KEY, UNICODE_VARIATION_SEQUENCES_KEY, ) from ufo2ft.errors import InvalidFontData from ufo2ft.fontInfoData import ( dateStringForNow, dateStringToTimeValue, getAttrWithFallback, intListToNum, normalizeStringForPostscript, ) from ufo2ft.util import ( _copyGlyph, calcCodePageRanges, makeOfficialGlyphOrder, makeUnicodeToGlyphNameMapping, ) logger = logging.getLogger(__name__) BoundingBox = namedtuple("BoundingBox", ["xMin", "yMin", "xMax", "yMax"]) EMPTY_BOUNDING_BOX = BoundingBox(0, 0, 0, 0) def _isNonBMP(s): for c in s: if ord(c) > 65535: return True return False def _getVerticalOrigin(font, glyph): if hasattr(glyph, "verticalOrigin") and glyph.verticalOrigin is not None: verticalOrigin = glyph.verticalOrigin else: os2 = font.get("OS/2") typo_ascender = os2.sTypoAscender if os2 is not None else 0 verticalOrigin = typo_ascender return otRound(verticalOrigin) class BaseOutlineCompiler: """Create a feature-less outline binary.""" sfntVersion 
= None tables = frozenset( [ "head", "hmtx", "hhea", "name", "maxp", "cmap", "OS/2", "post", "vmtx", "vhea", "COLR", "CPAL", "meta", ] ) def __init__( self, font, glyphSet=None, glyphOrder=None, tables=None, notdefGlyph=None, colrLayerReuse=True, ): self.ufo = font # use the previously filtered glyphSet, if any if glyphSet is None: glyphSet = {g.name: g for g in font} self.makeMissingRequiredGlyphs(font, glyphSet, self.sfntVersion, notdefGlyph) self.allGlyphs = glyphSet # store the glyph order if glyphOrder is None: glyphOrder = font.glyphOrder self.glyphOrder = self.makeOfficialGlyphOrder(glyphOrder) # make a reusable character mapping self.unicodeToGlyphNameMapping = self.makeUnicodeToGlyphNameMapping() if tables is not None: self.tables = tables self.colrLayerReuse = colrLayerReuse # cached values defined later on self._glyphBoundingBoxes = None self._fontBoundingBox = None self._compiledGlyphs = None def compile(self): """ Compile the OpenType binary. """ self.otf = TTFont(sfntVersion=self.sfntVersion) # only compile vertical metrics tables if vhea metrics are defined vertical_metrics = [ "openTypeVheaVertTypoAscender", "openTypeVheaVertTypoDescender", "openTypeVheaVertTypoLineGap", ] self.vertical = all( getAttrWithFallback(self.ufo.info, metric) is not None for metric in vertical_metrics ) self.colorLayers = ( COLOR_LAYERS_KEY in self.ufo.lib and COLOR_PALETTES_KEY in self.ufo.lib ) self.meta = OPENTYPE_META_KEY in self.ufo.lib # write the glyph order self.otf.setGlyphOrder(self.glyphOrder) # populate basic tables self.setupTable_head() self.setupTable_hmtx() self.setupTable_hhea() self.setupTable_name() self.setupTable_maxp() self.setupTable_cmap() self.setupTable_OS2() self.setupTable_post() if self.vertical: self.setupTable_vmtx() self.setupTable_vhea() if self.colorLayers: self.setupTable_COLR() self.setupTable_CPAL() if self.meta: self.setupTable_meta() self.setupOtherTables() self.importTTX() return self.otf def compileGlyphs(self): """Compile glyphs and return dict keyed by glyph name. **This should not be called externally.** Subclasses must override this method to handle compilation of glyphs. """ raise NotImplementedError def getCompiledGlyphs(self): if self._compiledGlyphs is None: self._compiledGlyphs = self.compileGlyphs() return self._compiledGlyphs def makeGlyphsBoundingBoxes(self): """ Make bounding boxes for all the glyphs, and return a dictionary of BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names. The bounding box of empty glyphs (without contours or components) is set to None. The bbox values are integers. **This should not be called externally.** Subclasses must override this method to handle the bounds creation for their specific glyph type. """ raise NotImplementedError @property def glyphBoundingBoxes(self): if self._glyphBoundingBoxes is None: self._glyphBoundingBoxes = self.makeGlyphsBoundingBoxes() return self._glyphBoundingBoxes def makeFontBoundingBox(self): """ Make a bounding box for the font. **This should not be called externally.** Subclasses may override this method to handle the bounds creation in a different way if desired. 
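# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# makeFontBoundingBox() computes the union of all glyph bounding boxes;
# fontTools' unionRect does the per-pair merge. The boxes below are
# hypothetical (xMin, yMin, xMax, yMax) tuples.
from fontTools.misc.arrayTools import unionRect

_boxes = [(30, -10, 520, 700), (0, -200, 480, 650), (50, 0, 610, 720)]
_font_box = _boxes[0]
for _box in _boxes[1:]:
    _font_box = unionRect(_font_box, _box)
assert _font_box == (0, -200, 610, 720)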
""" fontBox = None for glyphBox in self.glyphBoundingBoxes.values(): if glyphBox is None: continue if fontBox is None: fontBox = glyphBox else: fontBox = unionRect(fontBox, glyphBox) if fontBox is None: # unlikely fontBox = EMPTY_BOUNDING_BOX return fontBox @property def fontBoundingBox(self): if self._fontBoundingBox is None: self._fontBoundingBox = self.makeFontBoundingBox() return self._fontBoundingBox def makeUnicodeToGlyphNameMapping(self): """ Make a ``unicode : glyph name`` mapping for the font. **This should not be called externally.** Subclasses may override this method to handle the mapping creation in a different way if desired. """ return makeUnicodeToGlyphNameMapping(self.allGlyphs, self.glyphOrder) @staticmethod def makeMissingRequiredGlyphs(font, glyphSet, sfntVersion, notdefGlyph=None): """ Add .notdef to the glyph set if it is not present. **This should not be called externally.** Subclasses may override this method to handle the glyph creation in a different way if desired. """ if ".notdef" in glyphSet: return reverseContour = sfntVersion == "\000\001\000\000" if notdefGlyph: notdefGlyph = _copyGlyph(notdefGlyph, reverseContour=reverseContour) else: unitsPerEm = otRound(getAttrWithFallback(font.info, "unitsPerEm")) ascender = otRound(getAttrWithFallback(font.info, "ascender")) descender = otRound(getAttrWithFallback(font.info, "descender")) defaultWidth = otRound(unitsPerEm * 0.5) notdefGlyph = StubGlyph( name=".notdef", width=defaultWidth, unitsPerEm=unitsPerEm, ascender=ascender, descender=descender, reverseContour=reverseContour, ) glyphSet[".notdef"] = notdefGlyph def makeOfficialGlyphOrder(self, glyphOrder): """ Make the final glyph order. **This should not be called externally.** Subclasses may override this method to handle the order creation in a different way if desired. """ return makeOfficialGlyphOrder(self.allGlyphs, glyphOrder) # -------------- # Table Builders # -------------- def setupTable_gasp(self): if "gasp" not in self.tables: return self.otf["gasp"] = gasp = newTable("gasp") gasp_ranges = dict() for record in self.ufo.info.openTypeGaspRangeRecords: rangeMaxPPEM = record["rangeMaxPPEM"] behavior_bits = record["rangeGaspBehavior"] rangeGaspBehavior = intListToNum(behavior_bits, 0, 4) gasp_ranges[rangeMaxPPEM] = rangeGaspBehavior gasp.gaspRange = gasp_ranges def setupTable_head(self): """ Make the head table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. 
""" if "head" not in self.tables: return self.otf["head"] = head = newTable("head") font = self.ufo head.checkSumAdjustment = 0 head.tableVersion = 1.0 head.magicNumber = 0x5F0F3CF5 # version numbers # limit minor version to 3 digits as recommended in OpenType spec: # https://www.microsoft.com/typography/otspec/recom.htm versionMajor = getAttrWithFallback(font.info, "versionMajor") versionMinor = getAttrWithFallback(font.info, "versionMinor") fullFontRevision = float("%d.%03d" % (versionMajor, versionMinor)) head.fontRevision = round(fullFontRevision, 3) if head.fontRevision != fullFontRevision: logger.warning( "Minor version in %s has too many digits and won't fit into " "the head table's fontRevision field; rounded to %s.", fullFontRevision, head.fontRevision, ) # upm head.unitsPerEm = otRound(getAttrWithFallback(font.info, "unitsPerEm")) # times head.created = ( dateStringToTimeValue(getAttrWithFallback(font.info, "openTypeHeadCreated")) - mac_epoch_diff ) head.modified = dateStringToTimeValue(dateStringForNow()) - mac_epoch_diff # bounding box xMin, yMin, xMax, yMax = self.fontBoundingBox head.xMin = otRound(xMin) head.yMin = otRound(yMin) head.xMax = otRound(xMax) head.yMax = otRound(yMax) # style mapping styleMapStyleName = getAttrWithFallback(font.info, "styleMapStyleName") macStyle = [] if styleMapStyleName == "bold": macStyle = [0] elif styleMapStyleName == "bold italic": macStyle = [0, 1] elif styleMapStyleName == "italic": macStyle = [1] head.macStyle = intListToNum(macStyle, 0, 16) # misc head.flags = intListToNum( getAttrWithFallback(font.info, "openTypeHeadFlags"), 0, 16 ) head.lowestRecPPEM = otRound( getAttrWithFallback(font.info, "openTypeHeadLowestRecPPEM") ) head.fontDirectionHint = 2 head.indexToLocFormat = 0 head.glyphDataFormat = 0 def setupTable_name(self): """ Make the name table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "name" not in self.tables: return font = self.ufo self.otf["name"] = name = newTable("name") name.names = [] # Set name records from font.info.openTypeNameRecords for nameRecord in getAttrWithFallback(font.info, "openTypeNameRecords"): nameId = nameRecord["nameID"] platformId = nameRecord["platformID"] platEncId = nameRecord["encodingID"] langId = nameRecord["languageID"] # on Python 2, plistLib (used by ufoLib) returns unicode strings # only when plist data contain non-ascii characters, and returns # ascii-encoded bytes when it can. 
On the other hand, fontTools's # name table `setName` method wants unicode strings, so we must # decode them first nameVal = nameRecord["string"] name.setName(nameVal, nameId, platformId, platEncId, langId) # Build name records familyName = getAttrWithFallback(font.info, "styleMapFamilyName") styleName = getAttrWithFallback(font.info, "styleMapStyleName").title() preferredFamilyName = getAttrWithFallback( font.info, "openTypeNamePreferredFamilyName" ) preferredSubfamilyName = getAttrWithFallback( font.info, "openTypeNamePreferredSubfamilyName" ) fullName = f"{preferredFamilyName} {preferredSubfamilyName}" nameVals = { 0: getAttrWithFallback(font.info, "copyright"), 1: familyName, 2: styleName, 3: getAttrWithFallback(font.info, "openTypeNameUniqueID"), 4: fullName, 5: getAttrWithFallback(font.info, "openTypeNameVersion"), 6: getAttrWithFallback(font.info, "postscriptFontName"), 7: getAttrWithFallback(font.info, "trademark"), 8: getAttrWithFallback(font.info, "openTypeNameManufacturer"), 9: getAttrWithFallback(font.info, "openTypeNameDesigner"), 10: getAttrWithFallback(font.info, "openTypeNameDescription"), 11: getAttrWithFallback(font.info, "openTypeNameManufacturerURL"), 12: getAttrWithFallback(font.info, "openTypeNameDesignerURL"), 13: getAttrWithFallback(font.info, "openTypeNameLicense"), 14: getAttrWithFallback(font.info, "openTypeNameLicenseURL"), 16: preferredFamilyName, 17: preferredSubfamilyName, 18: getAttrWithFallback(font.info, "openTypeNameCompatibleFullName"), 19: getAttrWithFallback(font.info, "openTypeNameSampleText"), 21: getAttrWithFallback(font.info, "openTypeNameWWSFamilyName"), 22: getAttrWithFallback(font.info, "openTypeNameWWSSubfamilyName"), } # don't add typographic names if they are the same as the legacy ones if nameVals[1] == nameVals[16]: del nameVals[16] if nameVals[2] == nameVals[17]: del nameVals[17] # postscript font name if nameVals[6]: nameVals[6] = normalizeStringForPostscript(nameVals[6]) for nameId in sorted(nameVals.keys()): nameVal = nameVals[nameId] if not nameVal: continue platformId = 3 platEncId = 10 if _isNonBMP(nameVal) else 1 langId = 0x409 # Set built name record if not set yet if name.getName(nameId, platformId, platEncId, langId): continue name.setName(nameVal, nameId, platformId, platEncId, langId) def setupTable_maxp(self): """ Make the maxp table. **This should not be called externally.** Subclasses must override or supplement this method to handle the table creation for either CFF or TT data. """ raise NotImplementedError def setupTable_cmap(self): """ Make the cmap table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "cmap" not in self.tables: return from fontTools.ttLib.tables._c_m_a_p import cmap_format_4 nonBMP = {k: v for k, v in self.unicodeToGlyphNameMapping.items() if k > 65535} if nonBMP: mapping = { k: v for k, v in self.unicodeToGlyphNameMapping.items() if k <= 65535 } else: mapping = dict(self.unicodeToGlyphNameMapping) # mac cmap4_0_3 = cmap_format_4(4) cmap4_0_3.platformID = 0 cmap4_0_3.platEncID = 3 cmap4_0_3.language = 0 cmap4_0_3.cmap = mapping # windows cmap4_3_1 = cmap_format_4(4) cmap4_3_1.platformID = 3 cmap4_3_1.platEncID = 1 cmap4_3_1.language = 0 cmap4_3_1.cmap = mapping # store self.otf["cmap"] = cmap = newTable("cmap") cmap.tableVersion = 0 cmap.tables = [cmap4_0_3, cmap4_3_1] # If we have glyphs outside Unicode BMP, we must set another # subtable that can hold longer codepoints for them. 
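# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# setupTable_name() stores its records in a fontTools 'name' table via
# setName(); the (platformID, platEncID, langID) triple used for Windows names
# is (3, 1, 0x409), switching to encoding 10 only for strings outside the BMP.
from fontTools.ttLib import newTable

_name = newTable("name")
_name.names = []
_name.setName("My Family", 1, 3, 1, 0x409)  # nameID 1: family name (hypothetical)
_name.setName("Regular", 2, 3, 1, 0x409)    # nameID 2: subfamily name
assert _name.getDebugName(1) == "My Family"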
if nonBMP: from fontTools.ttLib.tables._c_m_a_p import cmap_format_12 nonBMP.update(mapping) # mac cmap12_0_4 = cmap_format_12(12) cmap12_0_4.platformID = 0 cmap12_0_4.platEncID = 4 cmap12_0_4.language = 0 cmap12_0_4.cmap = nonBMP # windows cmap12_3_10 = cmap_format_12(12) cmap12_3_10.platformID = 3 cmap12_3_10.platEncID = 10 cmap12_3_10.language = 0 cmap12_3_10.cmap = nonBMP # update tables registry cmap.tables = [cmap4_0_3, cmap4_3_1, cmap12_0_4, cmap12_3_10] # unicode variation sequences uvsMapping = self.ufo.lib.get(UNICODE_VARIATION_SEQUENCES_KEY) if uvsMapping: from fontTools.ttLib.tables._c_m_a_p import cmap_format_14 cmap14_0_5 = cmap_format_14(14) cmap14_0_5.platformID = 0 cmap14_0_5.platEncID = 5 cmap14_0_5.language = 0 cmap14_0_5.cmap = {} if nonBMP: mapping = nonBMP uvsDict = dict() # public.unicodeVariationSequences uses hex strings as keys and # a dict of dicts, while cmap uses ints and a dict of tuples. for hexvs, glyphMapping in uvsMapping.items(): uvsList = [] for hexvalue, glyphName in glyphMapping.items(): value = int(hexvalue, 16) if glyphName == mapping[value]: uvsList.append((value, None)) else: uvsList.append((value, glyphName)) uvsDict[int(hexvs, 16)] = uvsList cmap14_0_5.uvsDict = uvsDict # update tables registry cmap.tables.append(cmap14_0_5) def setupTable_OS2(self): """ Make the OS/2 table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "OS/2" not in self.tables: return self.otf["OS/2"] = os2 = newTable("OS/2") font = self.ufo os2.version = 0x0004 # average glyph width os2.xAvgCharWidth = 0 hmtx = self.otf.get("hmtx") if hmtx is not None: widths = [width for width, _ in hmtx.metrics.values() if width > 0] if widths: os2.xAvgCharWidth = otRound(sum(widths) / len(widths)) # weight and width classes os2.usWeightClass = getAttrWithFallback(font.info, "openTypeOS2WeightClass") os2.usWidthClass = getAttrWithFallback(font.info, "openTypeOS2WidthClass") # embedding os2.fsType = intListToNum( getAttrWithFallback(font.info, "openTypeOS2Type"), 0, 16 ) # subscript, superscript, strikeout values, taken from AFDKO: # FDK/Tools/Programs/makeotf/makeotf_lib/source/hotconv/hot.c unitsPerEm = getAttrWithFallback(font.info, "unitsPerEm") italicAngle = float(getAttrWithFallback(font.info, "italicAngle")) xHeight = getAttrWithFallback(font.info, "xHeight") def adjustOffset(offset, angle): """Adjust Y offset based on italic angle, to get X offset.""" return offset * math.tan(math.radians(-angle)) if angle else 0 v = getAttrWithFallback(font.info, "openTypeOS2SubscriptXSize") if v is None: v = unitsPerEm * 0.65 os2.ySubscriptXSize = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SubscriptYSize") if v is None: v = unitsPerEm * 0.6 os2.ySubscriptYSize = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SubscriptYOffset") if v is None: v = unitsPerEm * 0.075 os2.ySubscriptYOffset = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SubscriptXOffset") if v is None: v = adjustOffset(-os2.ySubscriptYOffset, italicAngle) os2.ySubscriptXOffset = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptXSize") if v is None: v = os2.ySubscriptXSize os2.ySuperscriptXSize = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptYSize") if v is None: v = os2.ySubscriptYSize os2.ySuperscriptYSize = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptYOffset") if v is None: v = unitsPerEm * 0.35 os2.ySuperscriptYOffset = 
otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptXOffset") if v is None: v = adjustOffset(os2.ySuperscriptYOffset, italicAngle) os2.ySuperscriptXOffset = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2StrikeoutSize") if v is None: v = getAttrWithFallback(font.info, "postscriptUnderlineThickness") os2.yStrikeoutSize = otRound(v) v = getAttrWithFallback(font.info, "openTypeOS2StrikeoutPosition") if v is None: v = xHeight * 0.6 if xHeight else unitsPerEm * 0.22 os2.yStrikeoutPosition = otRound(v) # family class ibmFontClass, ibmFontSubclass = getAttrWithFallback( font.info, "openTypeOS2FamilyClass" ) os2.sFamilyClass = (ibmFontClass << 8) + ibmFontSubclass # panose data = getAttrWithFallback(font.info, "openTypeOS2Panose") panose = Panose() panose.bFamilyType = data[0] panose.bSerifStyle = data[1] panose.bWeight = data[2] panose.bProportion = data[3] panose.bContrast = data[4] panose.bStrokeVariation = data[5] panose.bArmStyle = data[6] panose.bLetterForm = data[7] panose.bMidline = data[8] panose.bXHeight = data[9] os2.panose = panose # Unicode ranges uniRanges = getAttrWithFallback(font.info, "openTypeOS2UnicodeRanges") if uniRanges is not None: os2.ulUnicodeRange1 = intListToNum(uniRanges, 0, 32) os2.ulUnicodeRange2 = intListToNum(uniRanges, 32, 32) os2.ulUnicodeRange3 = intListToNum(uniRanges, 64, 32) os2.ulUnicodeRange4 = intListToNum(uniRanges, 96, 32) else: os2.recalcUnicodeRanges(self.otf) # codepage ranges codepageRanges = getAttrWithFallback(font.info, "openTypeOS2CodePageRanges") if codepageRanges is None: unicodes = self.unicodeToGlyphNameMapping.keys() codepageRanges = calcCodePageRanges(unicodes) os2.ulCodePageRange1 = intListToNum(codepageRanges, 0, 32) os2.ulCodePageRange2 = intListToNum(codepageRanges, 32, 32) # vendor id os2.achVendID = getAttrWithFallback(font.info, "openTypeOS2VendorID") # vertical metrics os2.sxHeight = otRound(getAttrWithFallback(font.info, "xHeight")) os2.sCapHeight = otRound(getAttrWithFallback(font.info, "capHeight")) os2.sTypoAscender = otRound( getAttrWithFallback(font.info, "openTypeOS2TypoAscender") ) os2.sTypoDescender = otRound( getAttrWithFallback(font.info, "openTypeOS2TypoDescender") ) os2.sTypoLineGap = otRound( getAttrWithFallback(font.info, "openTypeOS2TypoLineGap") ) os2.usWinAscent = otRound( getAttrWithFallback(font.info, "openTypeOS2WinAscent") ) os2.usWinDescent = otRound( getAttrWithFallback(font.info, "openTypeOS2WinDescent") ) # style mapping selection = list(getAttrWithFallback(font.info, "openTypeOS2Selection")) styleMapStyleName = getAttrWithFallback(font.info, "styleMapStyleName") if styleMapStyleName == "regular": selection.append(6) elif styleMapStyleName == "bold": selection.append(5) elif styleMapStyleName == "italic": selection.append(0) elif styleMapStyleName == "bold italic": selection += [0, 5] os2.fsSelection = intListToNum(selection, 0, 16) # characetr indexes unicodes = [i for i in self.unicodeToGlyphNameMapping.keys() if i is not None] if unicodes: minIndex = min(unicodes) maxIndex = max(unicodes) else: # the font may have *no* unicode values (it really happens!) so # there needs to be a fallback. 
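# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# The adjustOffset() helper in setupTable_OS2() slants the sub/superscript
# offsets by the italic angle: a vertical offset is converted into the matching
# horizontal shift. Numbers below are hypothetical (1000 UPM, italicAngle -10).
import math
from fontTools.misc.fixedTools import otRound

_italic_angle = -10.0
_y_subscript_y_offset = 75  # the default fallback, 0.075 * UPM
_y_subscript_x_offset = otRound(
    -_y_subscript_y_offset * math.tan(math.radians(-_italic_angle))
)
assert _y_subscript_x_offset == -13  # shifted left for a right-leaning italic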
use 0xFFFF, as AFDKO does: # FDK/Tools/Programs/makeotf/makeotf_lib/source/hotconv/map.c minIndex = 0xFFFF maxIndex = 0xFFFF if maxIndex > 0xFFFF: # the spec says that 0xFFFF should be used # as the max if the max exceeds 0xFFFF maxIndex = 0xFFFF os2.fsFirstCharIndex = minIndex os2.fsLastCharIndex = maxIndex os2.usBreakChar = 32 os2.usDefaultChar = 0 # maximum contextual lookup length os2.usMaxContex = 0 def setupTable_hmtx(self): """ Make the hmtx table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "hmtx" not in self.tables: return self.otf["hmtx"] = hmtx = newTable("hmtx") hmtx.metrics = {} for glyphName, glyph in self.allGlyphs.items(): width = otRound(glyph.width) if width < 0: raise ValueError("The width should not be negative: '%s'" % (glyphName)) bounds = self.glyphBoundingBoxes[glyphName] left = bounds.xMin if bounds else 0 hmtx[glyphName] = (width, left) def _setupTable_hhea_or_vhea(self, tag): """ Make the hhea table or the vhea table. This assume the hmtx or the vmtx were respectively made first. """ if tag not in self.tables: return if tag == "hhea": isHhea = True else: isHhea = False self.otf[tag] = table = newTable(tag) mtxTable = self.otf.get(tag[0] + "mtx") font = self.ufo if isHhea: table.tableVersion = 0x00010000 else: table.tableVersion = 0x00011000 # Vertical metrics in hhea, horizontal metrics in vhea # and caret info. # The hhea metrics names are formed as: # "openType" + tag.title() + "Ascender", etc. # While vhea metrics names are formed as: # "openType" + tag.title() + "VertTypo" + "Ascender", etc. # Caret info names only differ by tag.title(). commonPrefix = "openType%s" % tag.title() if isHhea: metricsPrefix = commonPrefix else: metricsPrefix = "openType%sVertTypo" % tag.title() metricsDict = { "ascent": "%sAscender" % metricsPrefix, "descent": "%sDescender" % metricsPrefix, "lineGap": "%sLineGap" % metricsPrefix, "caretSlopeRise": "%sCaretSlopeRise" % commonPrefix, "caretSlopeRun": "%sCaretSlopeRun" % commonPrefix, "caretOffset": "%sCaretOffset" % commonPrefix, } for otfName, ufoName in metricsDict.items(): setattr(table, otfName, otRound(getAttrWithFallback(font.info, ufoName))) # Horizontal metrics in hhea, vertical metrics in vhea advances = [] # width in hhea, height in vhea firstSideBearings = [] # left in hhea, top in vhea secondSideBearings = [] # right in hhea, bottom in vhea extents = [] if mtxTable is not None: for glyphName in self.glyphOrder: advance, firstSideBearing = mtxTable[glyphName] advances.append(advance) bounds = self.glyphBoundingBoxes[glyphName] if bounds is None: continue if isHhea: boundsAdvance = bounds.xMax - bounds.xMin # equation from the hhea spec for calculating xMaxExtent: # Max(lsb + (xMax - xMin)) extent = firstSideBearing + boundsAdvance else: boundsAdvance = bounds.yMax - bounds.yMin # equation from the vhea spec for calculating yMaxExtent: # Max(tsb + (yMax - yMin)). 
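# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# The hhea extent/side-bearing math used in _setupTable_hhea_or_vhea(), spelled
# out for one hypothetical glyph: advance 600, left side bearing 50, outline
# running from x=50 to x=550.
_advance, _lsb = 600, 50
_x_min, _x_max = 50, 550
_bounds_advance = _x_max - _x_min          # 500
_x_max_extent = _lsb + _bounds_advance     # 550, per Max(lsb + (xMax - xMin))
_rsb = _advance - _lsb - _bounds_advance   # 50, the right side bearing
assert (_x_max_extent, _rsb) == (550, 50)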
extent = firstSideBearing + boundsAdvance secondSideBearing = advance - firstSideBearing - boundsAdvance firstSideBearings.append(firstSideBearing) secondSideBearings.append(secondSideBearing) extents.append(extent) setattr( table, "advance%sMax" % ("Width" if isHhea else "Height"), max(advances) if advances else 0, ) setattr( table, "min%sSideBearing" % ("Left" if isHhea else "Top"), min(firstSideBearings) if firstSideBearings else 0, ) setattr( table, "min%sSideBearing" % ("Right" if isHhea else "Bottom"), min(secondSideBearings) if secondSideBearings else 0, ) setattr( table, "%sMaxExtent" % ("x" if isHhea else "y"), max(extents) if extents else 0, ) if isHhea: reserved = range(4) else: # vhea.reserved0 is caretOffset for legacy reasons reserved = range(1, 5) for i in reserved: setattr(table, "reserved%i" % i, 0) table.metricDataFormat = 0 # precompute the number of long{Hor,Ver}Metric records in 'hmtx' table # so we don't need to compile the latter to get this updated numLongMetrics = len(advances) if numLongMetrics > 1: lastAdvance = advances[-1] while advances[numLongMetrics - 2] == lastAdvance: numLongMetrics -= 1 if numLongMetrics <= 1: # all advances are equal numLongMetrics = 1 break setattr(table, "numberOf%sMetrics" % ("H" if isHhea else "V"), numLongMetrics) def setupTable_hhea(self): """ Make the hhea table. This assumes that the hmtx table was made first. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ self._setupTable_hhea_or_vhea("hhea") def setupTable_vmtx(self): """ Make the vmtx table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "vmtx" not in self.tables: return self.otf["vmtx"] = vmtx = newTable("vmtx") vmtx.metrics = {} for glyphName, glyph in self.allGlyphs.items(): height = otRound(glyph.height) if height < 0: raise ValueError( "The height should not be negative: '%s'" % (glyphName) ) verticalOrigin = _getVerticalOrigin(self.otf, glyph) bounds = self.glyphBoundingBoxes[glyphName] top = bounds.yMax if bounds else 0 vmtx[glyphName] = (height, verticalOrigin - top) def setupTable_VORG(self): """ Make the VORG table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "VORG" not in self.tables: return self.otf["VORG"] = vorg = newTable("VORG") vorg.majorVersion = 1 vorg.minorVersion = 0 vorg.VOriginRecords = {} # Find the most frequent verticalOrigin vorg_count = Counter( _getVerticalOrigin(self.otf, glyph) for glyph in self.allGlyphs.values() ) vorg.defaultVertOriginY = vorg_count.most_common(1)[0][0] if len(vorg_count) > 1: for glyphName, glyph in self.allGlyphs.items(): vertOriginY = _getVerticalOrigin(self.otf, glyph) if vertOriginY == vorg.defaultVertOriginY: continue vorg.VOriginRecords[glyphName] = vertOriginY vorg.numVertOriginYMetrics = len(vorg.VOriginRecords) def setupTable_vhea(self): """ Make the vhea table. This assumes that the head and vmtx tables were made first. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ self._setupTable_hhea_or_vhea("vhea") def setupTable_post(self): """ Make the post table. 
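# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# numberOfHMetrics/numberOfVMetrics is trimmed in _setupTable_hhea_or_vhea() so
# that trailing glyphs sharing the same advance width reuse one long metric
# record; hypothetical advances below.
_advances = [500, 600, 600, 600]
_num_long_metrics = len(_advances)
while _num_long_metrics > 1 and _advances[_num_long_metrics - 2] == _advances[-1]:
    _num_long_metrics -= 1
assert _num_long_metrics == 2  # [500, 600] kept; the trailing 600s are implied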
**This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired. """ if "post" not in self.tables: return self.otf["post"] = post = newTable("post") font = self.ufo post.formatType = 3.0 # italic angle italicAngle = float(getAttrWithFallback(font.info, "italicAngle")) post.italicAngle = italicAngle # underline underlinePosition = getAttrWithFallback( font.info, "postscriptUnderlinePosition" ) post.underlinePosition = otRound(underlinePosition) underlineThickness = getAttrWithFallback( font.info, "postscriptUnderlineThickness" ) post.underlineThickness = otRound(underlineThickness) post.isFixedPitch = int( getAttrWithFallback(font.info, "postscriptIsFixedPitch") ) # misc post.minMemType42 = 0 post.maxMemType42 = 0 post.minMemType1 = 0 post.maxMemType1 = 0 def setupTable_COLR(self): """ Compile the COLR table. **This should not be called externally.** """ if "COLR" not in self.tables: return from fontTools.colorLib.builder import buildCOLR layerInfo = self.ufo.lib[COLOR_LAYERS_KEY] glyphMap = self.otf.getReverseGlyphMap() if layerInfo: # unpack (glyphs, clipBox) tuples to a flat dict keyed by glyph name, # as colorLib buildCOLR expects clipBoxes = { glyphName: tuple(box) for glyphs, box in self.ufo.lib.get(COLR_CLIP_BOXES_KEY, ()) for glyphName in glyphs } self.otf["COLR"] = buildCOLR( layerInfo, glyphMap=glyphMap, clipBoxes=clipBoxes, allowLayerReuse=self.colrLayerReuse, ) def setupTable_CPAL(self): """ Compile the CPAL table. **This should not be called externally.** """ if "CPAL" not in self.tables: return from fontTools.colorLib.builder import buildCPAL from fontTools.colorLib.errors import ColorLibError # colorLib wants colors as tuples, plistlib gives us lists palettes = [ [tuple(color) for color in palette] for palette in self.ufo.lib[COLOR_PALETTES_KEY] ] try: self.otf["CPAL"] = buildCPAL(palettes) except ColorLibError as e: raise InvalidFontData("Failed to build CPAL table") from e def setupTable_meta(self): """ Make the meta table. ***This should not be called externally.** Sublcasses may override or supplement this method to handle the table creation in a different way if desired. """ if "meta" not in self.tables: return font = self.ufo self.otf["meta"] = meta = newTable("meta") ufo_meta = font.lib.get(OPENTYPE_META_KEY) for key, value in ufo_meta.items(): if key in ["dlng", "slng"]: if not isinstance(value, list) or not all( isinstance(string, str) for string in value ): raise TypeError( f"public.openTypeMeta '{key}' value should " "be a list of strings" ) meta.data[key] = ",".join(value) elif key in ["appl", "bild"]: if not isinstance(value, bytes): raise TypeError( f"public.openTypeMeta '{key}' value should be bytes." ) meta.data[key] = value elif isinstance(value, bytes): meta.data[key] = value elif isinstance(value, str): meta.data[key] = value.encode("utf-8") else: raise TypeError( f"public.openTypeMeta '{key}' value should be bytes or a string." ) def setupOtherTables(self): """ Make the other tables. The default implementation does nothing. **This should not be called externally.** Subclasses may override this method to add other tables to the font if desired. """ pass def importTTX(self): """ Merge TTX files from data directory "com.github.fonttools.ttx" **This should not be called externally.** Subclasses may override this method to handle the bounds creation in a different way if desired. 
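# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# How setupTable_meta() serializes public.openTypeMeta values: 'dlng'/'slng'
# lists become comma-joined strings, other string values are UTF-8 encoded,
# and bytes pass through unchanged. The lib snippet below is hypothetical.
_ufo_meta = {"dlng": ["en-Latn", "fr-Latn"], "PURP": "testing"}
_meta_data = {}
for _key, _value in _ufo_meta.items():
    if _key in ("dlng", "slng"):
        _meta_data[_key] = ",".join(_value)
    elif isinstance(_value, str):
        _meta_data[_key] = _value.encode("utf-8")
assert _meta_data == {"dlng": "en-Latn,fr-Latn", "PURP": b"testing"}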
""" import os prefix = "com.github.fonttools.ttx" if not hasattr(self.ufo, "data"): return if not self.ufo.data.fileNames: return for path in self.ufo.data.fileNames: foldername, filename = os.path.split(path) if foldername == prefix and filename.endswith(".ttx"): ttx = self.ufo.data[path].decode("utf-8") fp = BytesIO(ttx.encode("utf-8")) # Preserve the original SFNT version when loading a TTX dump. sfntVersion = self.otf.sfntVersion try: self.otf.importXML(fp) finally: self.otf.sfntVersion = sfntVersion class OutlineOTFCompiler(BaseOutlineCompiler): """Compile a .otf font with CFF outlines.""" sfntVersion = "OTTO" tables = BaseOutlineCompiler.tables | {"CFF", "VORG"} def __init__( self, font, glyphSet=None, glyphOrder=None, tables=None, notdefGlyph=None, roundTolerance=None, optimizeCFF=True, ): if roundTolerance is not None: self.roundTolerance = float(roundTolerance) else: # round all coordinates to integers by default self.roundTolerance = 0.5 super().__init__( font, glyphSet=glyphSet, glyphOrder=glyphOrder, tables=tables, notdefGlyph=notdefGlyph, ) self.optimizeCFF = optimizeCFF self._defaultAndNominalWidths = None def getDefaultAndNominalWidths(self): """Return (defaultWidthX, nominalWidthX). If fontinfo.plist doesn't define these explicitly, compute optimal values from the glyphs' advance widths. """ if self._defaultAndNominalWidths is None: info = self.ufo.info # populate the width values if all( getattr(info, attr, None) is None for attr in ("postscriptDefaultWidthX", "postscriptNominalWidthX") ): # no custom values set in fontinfo.plist; compute optimal ones from fontTools.cffLib.width import optimizeWidths widths = [otRound(glyph.width) for glyph in self.allGlyphs.values()] defaultWidthX, nominalWidthX = optimizeWidths(widths) else: defaultWidthX = otRound( getAttrWithFallback(info, "postscriptDefaultWidthX") ) nominalWidthX = otRound( getAttrWithFallback(info, "postscriptNominalWidthX") ) self._defaultAndNominalWidths = (defaultWidthX, nominalWidthX) return self._defaultAndNominalWidths def compileGlyphs(self): """Compile and return the CFF T2CharStrings for this font.""" defaultWidth, nominalWidth = self.getDefaultAndNominalWidths() # The real PrivateDict will be created later on in setupTable_CFF. # For convenience here we use a namespace object to pass the default/nominal # widths that we need to draw the charstrings when computing their bounds. private = SimpleNamespace( defaultWidthX=defaultWidth, nominalWidthX=nominalWidth ) compiledGlyphs = {} for glyphName in self.glyphOrder: glyph = self.allGlyphs[glyphName] cs = self.getCharStringForGlyph(glyph, private) compiledGlyphs[glyphName] = cs return compiledGlyphs def makeGlyphsBoundingBoxes(self): """ Make bounding boxes for all the glyphs, and return a dictionary of BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names. The bounding box of empty glyphs (without contours or components) is set to None. Check that the float values are within the range of the specified self.roundTolerance, and if so use the rounded value; else take the floor or ceiling to ensure that the bounding box encloses the original values. 
""" def toInt(value, else_callback): rounded = otRound(value) if tolerance >= 0.5 or abs(rounded - value) <= tolerance: return rounded else: return int(else_callback(value)) tolerance = self.roundTolerance glyphBoxes = {} charStrings = self.getCompiledGlyphs() for name, cs in charStrings.items(): bounds = cs.calcBounds(charStrings) if bounds is not None: rounded = [] for value in bounds[:2]: rounded.append(toInt(value, math.floor)) for value in bounds[2:]: rounded.append(toInt(value, math.ceil)) bounds = BoundingBox(*rounded) if bounds == EMPTY_BOUNDING_BOX: bounds = None glyphBoxes[name] = bounds return glyphBoxes def getCharStringForGlyph(self, glyph, private, globalSubrs=None): """ Get a Type2CharString for the *glyph* **This should not be called externally.** Subclasses may override this method to handle the charstring creation in a different way if desired. """ width = glyph.width defaultWidth = private.defaultWidthX nominalWidth = private.nominalWidthX if width == defaultWidth: # if width equals the default it can be omitted from charstring width = None else: # subtract the nominal width width -= nominalWidth if width is not None: width = otRound(width) pen = T2CharStringPen(width, self.allGlyphs, roundTolerance=self.roundTolerance) glyph.draw(pen) charString = pen.getCharString(private, globalSubrs, optimize=self.optimizeCFF) return charString def setupTable_maxp(self): """Make the maxp table.""" if "maxp" not in self.tables: return self.otf["maxp"] = maxp = newTable("maxp") maxp.tableVersion = 0x00005000 maxp.numGlyphs = len(self.glyphOrder) def setupOtherTables(self): self.setupTable_CFF() if self.vertical: self.setupTable_VORG() def setupTable_CFF(self): """Make the CFF table.""" if not {"CFF", "CFF "}.intersection(self.tables): return self.otf["CFF "] = cff = newTable("CFF ") cff = cff.cff # NOTE: Set up a back-reference to be used by some CFFFontSet methods # down the line (as of fontTools 4.21.1). 
cff.otFont = self.otf # set up the basics cff.major = 1 cff.minor = 0 cff.hdrSize = 4 cff.offSize = 4 cff.fontNames = [] strings = IndexedStrings() cff.strings = strings private = PrivateDict(strings=strings) private.rawDict.update(private.defaults) globalSubrs = GlobalSubrsIndex(private=private) topDict = TopDict(GlobalSubrs=globalSubrs, strings=strings) topDict.Private = private charStrings = topDict.CharStrings = CharStrings( file=None, charset=None, globalSubrs=globalSubrs, private=private, fdSelect=None, fdArray=None, ) charStrings.charStringsAreIndexed = True topDict.charset = [] charStringsIndex = charStrings.charStringsIndex = SubrsIndex( private=private, globalSubrs=globalSubrs ) cff.topDictIndex = topDictIndex = TopDictIndex() topDictIndex.append(topDict) topDictIndex.strings = strings cff.GlobalSubrs = globalSubrs # populate naming data info = self.ufo.info psName = getAttrWithFallback(info, "postscriptFontName") cff.fontNames.append(psName) topDict = cff.topDictIndex[0] topDict.version = "%d.%d" % ( getAttrWithFallback(info, "versionMajor"), getAttrWithFallback(info, "versionMinor"), ) trademark = getAttrWithFallback(info, "trademark") if trademark: trademark = normalizeStringForPostscript( trademark.replace("\u00A9", "Copyright") ) if trademark != self.ufo.info.trademark: logger.info( "The trademark was normalized for storage in the " "CFF table and consequently some characters were " "dropped: '%s'", trademark, ) if trademark is None: trademark = "" topDict.Notice = trademark copyright = getAttrWithFallback(info, "copyright") if copyright: copyright = normalizeStringForPostscript( copyright.replace("\u00A9", "Copyright") ) if copyright != self.ufo.info.copyright: logger.info( "The copyright was normalized for storage in the " "CFF table and consequently some characters were " "dropped: '%s'", copyright, ) if copyright is None: copyright = "" topDict.Copyright = copyright topDict.FullName = getAttrWithFallback(info, "postscriptFullName") topDict.FamilyName = getAttrWithFallback( info, "openTypeNamePreferredFamilyName" ) topDict.Weight = getAttrWithFallback(info, "postscriptWeightName") # populate various numbers topDict.isFixedPitch = int(getAttrWithFallback(info, "postscriptIsFixedPitch")) topDict.ItalicAngle = float(getAttrWithFallback(info, "italicAngle")) underlinePosition = getAttrWithFallback(info, "postscriptUnderlinePosition") topDict.UnderlinePosition = otRound(underlinePosition) underlineThickness = getAttrWithFallback(info, "postscriptUnderlineThickness") topDict.UnderlineThickness = otRound(underlineThickness) # populate font matrix unitsPerEm = otRound(getAttrWithFallback(info, "unitsPerEm")) topDict.FontMatrix = [1.0 / unitsPerEm, 0, 0, 1.0 / unitsPerEm, 0, 0] # populate the width values defaultWidthX, nominalWidthX = self.getDefaultAndNominalWidths() if defaultWidthX: private.rawDict["defaultWidthX"] = defaultWidthX if nominalWidthX: private.rawDict["nominalWidthX"] = nominalWidthX # populate hint data blueFuzz = otRound(getAttrWithFallback(info, "postscriptBlueFuzz")) blueShift = otRound(getAttrWithFallback(info, "postscriptBlueShift")) blueScale = getAttrWithFallback(info, "postscriptBlueScale") forceBold = getAttrWithFallback(info, "postscriptForceBold") blueValues = getAttrWithFallback(info, "postscriptBlueValues") if isinstance(blueValues, list): blueValues = [otRound(i) for i in blueValues] otherBlues = getAttrWithFallback(info, "postscriptOtherBlues") if isinstance(otherBlues, list): otherBlues = [otRound(i) for i in otherBlues] familyBlues = 
getAttrWithFallback(info, "postscriptFamilyBlues") if isinstance(familyBlues, list): familyBlues = [otRound(i) for i in familyBlues] familyOtherBlues = getAttrWithFallback(info, "postscriptFamilyOtherBlues") if isinstance(familyOtherBlues, list): familyOtherBlues = [otRound(i) for i in familyOtherBlues] stemSnapH = getAttrWithFallback(info, "postscriptStemSnapH") if isinstance(stemSnapH, list): stemSnapH = [otRound(i) for i in stemSnapH] stemSnapV = getAttrWithFallback(info, "postscriptStemSnapV") if isinstance(stemSnapV, list): stemSnapV = [otRound(i) for i in stemSnapV] # only write the blues data if some blues are defined. if any((blueValues, otherBlues, familyBlues, familyOtherBlues)): private.rawDict["BlueFuzz"] = blueFuzz private.rawDict["BlueShift"] = blueShift private.rawDict["BlueScale"] = blueScale private.rawDict["ForceBold"] = forceBold if blueValues: private.rawDict["BlueValues"] = blueValues if otherBlues: private.rawDict["OtherBlues"] = otherBlues if familyBlues: private.rawDict["FamilyBlues"] = familyBlues if familyOtherBlues: private.rawDict["FamilyOtherBlues"] = familyOtherBlues # only write the stems if both are defined. if stemSnapH and stemSnapV: private.rawDict["StemSnapH"] = stemSnapH private.rawDict["StdHW"] = stemSnapH[0] private.rawDict["StemSnapV"] = stemSnapV private.rawDict["StdVW"] = stemSnapV[0] # populate glyphs cffGlyphs = self.getCompiledGlyphs() for glyphName in self.glyphOrder: charString = cffGlyphs[glyphName] charString.private = private charString.globalSubrs = globalSubrs # add to the font if glyphName in charStrings: # XXX a glyph already has this name. should we choke? glyphID = charStrings.charStrings[glyphName] charStringsIndex.items[glyphID] = charString else: charStringsIndex.append(charString) glyphID = len(topDict.charset) charStrings.charStrings[glyphName] = glyphID topDict.charset.append(glyphName) topDict.FontBBox = self.fontBoundingBox class OutlineTTFCompiler(BaseOutlineCompiler): """Compile a .ttf font with TrueType outlines.""" sfntVersion = "\000\001\000\000" tables = BaseOutlineCompiler.tables | {"loca", "gasp", "glyf"} def compileGlyphs(self): """Compile and return the TrueType glyphs for this font.""" allGlyphs = self.allGlyphs ttGlyphs = {} for name in self.glyphOrder: glyph = allGlyphs[name] pen = TTGlyphPointPen(allGlyphs) try: glyph.drawPoints(pen) except NotImplementedError: logger.error("%r has invalid curve format; skipped", name) ttGlyph = Glyph() else: ttGlyph = pen.glyph() ttGlyphs[name] = ttGlyph return ttGlyphs def makeGlyphsBoundingBoxes(self): """Make bounding boxes for all the glyphs. Return a dictionary of BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names. The bounding box of empty glyphs (without contours or components) is set to None. 
""" glyphBoxes = {} ttGlyphs = self.getCompiledGlyphs() for glyphName, glyph in ttGlyphs.items(): glyph.recalcBounds(ttGlyphs) bounds = BoundingBox(glyph.xMin, glyph.yMin, glyph.xMax, glyph.yMax) if bounds == EMPTY_BOUNDING_BOX: bounds = None glyphBoxes[glyphName] = bounds return glyphBoxes def setupTable_maxp(self): """Make the maxp table.""" if "maxp" not in self.tables: return self.otf["maxp"] = maxp = newTable("maxp") maxp.tableVersion = 0x00010000 maxp.numGlyphs = len(self.glyphOrder) maxp.maxZones = 1 maxp.maxTwilightPoints = 0 maxp.maxStorage = 0 maxp.maxFunctionDefs = 0 maxp.maxInstructionDefs = 0 maxp.maxStackElements = 0 maxp.maxSizeOfInstructions = 0 maxp.maxComponentElements = max( len(g.components) for g in self.allGlyphs.values() ) def setupTable_post(self): """Make a format 2 post table with the compiler's glyph order.""" super().setupTable_post() if "post" not in self.otf: return post = self.otf["post"] post.formatType = 2.0 # if we set extraNames = [], it will be automatically computed upon compile as # we do below; if we do it upfront we can skip reloading in postProcessor. post.extraNames = [g for g in self.glyphOrder if g not in standardGlyphOrder] post.mapping = {} post.glyphOrder = self.glyphOrder def setupOtherTables(self): self.setupTable_glyf() if self.ufo.info.openTypeGaspRangeRecords: self.setupTable_gasp() def setupTable_glyf(self): """Make the glyf table.""" if not {"glyf", "loca"}.issubset(self.tables): return self.otf["loca"] = newTable("loca") self.otf["glyf"] = glyf = newTable("glyf") glyf.glyphs = {} glyf.glyphOrder = self.glyphOrder hmtx = self.otf.get("hmtx") ttGlyphs = self.getCompiledGlyphs() for name in self.glyphOrder: ttGlyph = ttGlyphs[name] if ttGlyph.isComposite() and hmtx is not None and self.autoUseMyMetrics: self.autoUseMyMetrics(ttGlyph, name, hmtx) glyf[name] = ttGlyph # update various maxp fields based on glyf without needing to compile the font if "maxp" in self.otf: self.otf["maxp"].recalc(self.otf) @staticmethod def autoUseMyMetrics(ttGlyph, glyphName, hmtx): """Set the "USE_MY_METRICS" flag on the first component having the same advance width as the composite glyph, no transform and no horizontal shift (but allow it to shift vertically). This forces the composite glyph to use the possibly hinted horizontal metrics of the sub-glyph, instead of those from the "hmtx" table. """ width = hmtx[glyphName][0] for component in ttGlyph.components: try: baseName, transform = component.getComponentInfo() except AttributeError: # component uses '{first,second}Pt' instead of 'x' and 'y' continue try: baseMetrics = hmtx[baseName] except KeyError: continue # ignore missing components else: if baseMetrics[0] == width and transform[:-1] == (1, 0, 0, 1, 0): component.flags |= USE_MY_METRICS break class StubGlyph: """ This object will be used to create missing glyphs (specifically .notdef) in the provided UFO. 
""" def __init__( self, name, width, unitsPerEm, ascender, descender, unicodes=None, reverseContour=False, ): self.name = name self.width = width self.unitsPerEm = unitsPerEm self.ascender = ascender self.descender = descender self.unicodes = unicodes if unicodes is not None else [] self.components = [] self.anchors = [] if self.unicodes: self.unicode = self.unicodes[0] else: self.unicode = None if name == ".notdef": self.draw = self._drawDefaultNotdef self.drawPoints = self._drawDefaultNotdefPoints self.reverseContour = reverseContour def __len__(self): if self.name == ".notdef": return 1 return 0 @property def height(self): return self.ascender - self.descender def draw(self, pen): pass def drawPoints(self, pen): pass def _drawDefaultNotdef(self, pen): # Draw contour in PostScript direction (counter-clockwise) by default. Reverse # for TrueType. if self.reverseContour: pen = ReverseContourPen(pen) width = otRound(self.unitsPerEm * 0.5) stroke = otRound(self.unitsPerEm * 0.05) ascender = self.ascender descender = self.descender xMin = stroke xMax = width - stroke yMax = ascender yMin = descender pen.moveTo((xMin, yMin)) pen.lineTo((xMax, yMin)) pen.lineTo((xMax, yMax)) pen.lineTo((xMin, yMax)) pen.lineTo((xMin, yMin)) pen.closePath() xMin += stroke xMax -= stroke yMax -= stroke yMin += stroke pen.moveTo((xMin, yMin)) pen.lineTo((xMin, yMax)) pen.lineTo((xMax, yMax)) pen.lineTo((xMax, yMin)) pen.lineTo((xMin, yMin)) pen.closePath() def _drawDefaultNotdefPoints(self, pen): adapterPen = SegmentToPointPen(pen, guessSmooth=False) self.draw(adapterPen) def _get_controlPointBounds(self): pen = ControlBoundsPen(None) self.draw(pen) return pen.bounds controlPointBounds = property(_get_controlPointBounds) ufo2ft-2.30.0/Lib/ufo2ft/postProcessor.py000066400000000000000000000362601434012334300201570ustar00rootroot00000000000000import enum import logging import re from io import BytesIO from fontTools.ttLib import TTFont from fontTools.ttLib.standardGlyphOrder import standardGlyphOrder from ufo2ft.constants import ( GLYPHS_DONT_USE_PRODUCTION_NAMES, KEEP_GLYPH_NAMES, USE_PRODUCTION_NAMES, ) logger = logging.getLogger(__name__) class CFFVersion(enum.IntEnum): CFF = 1 CFF2 = 2 class PostProcessor: """Does some post-processing operations on a compiled OpenType font, using info from the source UFO where necessary. """ GLYPH_NAME_INVALID_CHARS = re.compile("[^0-9a-zA-Z_.]") MAX_GLYPH_NAME_LENGTH = 63 class SubroutinizerBackend(enum.Enum): COMPREFFOR = "compreffor" CFFSUBR = "cffsubr" # can override by passing explicit subroutinizer parameter to process method DEFAULT_SUBROUTINIZER_FOR_CFF_VERSION = { 1: SubroutinizerBackend.CFFSUBR, 2: SubroutinizerBackend.CFFSUBR, } def __init__(self, otf, ufo, glyphSet=None): self.ufo = ufo self.glyphSet = glyphSet if glyphSet is not None else ufo self.otf = otf self._postscriptNames = ufo.lib.get("public.postscriptNames") def process( self, useProductionNames=None, optimizeCFF=True, cffVersion=None, subroutinizer=None, ): """ useProductionNames (Optional[bool]): By default, when value is None, this will rename glyphs using the 'public.postscriptNames' in then UFO lib. If the mapping is not present, no glyph names are renamed. When useProductionNames is None and the UFO lib contains the plist bool key "com.github.googlei18n.ufo2ft.keepGlyphNames" set to False, then the 'post' table is set to format 3.0 and glyph names are dropped altogether from the font, saving a few KBs. Note that this only works for TTF and CFF2 flavored fonts. 
We currently do not support dropping glyph names from CFF 1.0 fonts. When the keepGlyphNames lib key is missing or set to True, the glyph names will be stored in 'post' table format 2.0 for TTF and CFF2 fonts, or in the CFF charset. If useProductionNames is False, no glyphs are renamed whether or not the 'public.postscriptNames' mapping is present. If the value is True, but no 'public.postscriptNames' are present, then uniXXXX names are generated from the glyphs' unicode. The 'com.github.googlei18n.ufo2ft.useProductionNames' key can be set in the UFO lib to control this parameter (plist boolean value). For legacy reasons, an alias key (with an inverted meaning) is also supported: "com.schriftgestaltung.Don't use Production Names"; when this is present if the UFO lib and is set to True, this is equivalent to 'useProductionNames' set to False. optimizeCFF (bool): Subroubtinize CFF or CFF2 table, if present. cffVersion (Optiona[int]): The output CFF format, choose between 1 or 2. By default, it's the same as as the input OTF's CFF or CFF2 table, if any. Ignored for TTFs. subroutinizer (Optional[str]): The name of the library to use for compressing CFF charstrings, if optimizeCFF is True and CFF or CFF2 table is present. Choose between "cffsubr" or "compreffor". By default "cffsubr" is used for both CFF 1 and CFF 2. NOTE: compreffor currently doesn't support input fonts with CFF2 table. """ if self._get_cff_version(self.otf): self.process_cff( optimizeCFF=optimizeCFF, cffVersion=cffVersion, subroutinizer=subroutinizer, ) self.process_glyph_names(useProductionNames) return self.otf def process_cff(self, *, optimizeCFF=True, cffVersion=None, subroutinizer=None): cffInputVersion = self._get_cff_version(self.otf) if not cffInputVersion: raise ValueError("Missing required 'CFF ' or 'CFF2' table") if cffVersion is None: cffOutputVersion = cffInputVersion else: cffOutputVersion = CFFVersion(cffVersion) if optimizeCFF: if subroutinizer is None: backend = self.DEFAULT_SUBROUTINIZER_FOR_CFF_VERSION[cffOutputVersion] else: backend = self.SubroutinizerBackend(subroutinizer) self._subroutinize(backend, self.otf, cffOutputVersion) elif cffInputVersion != cffOutputVersion: if ( cffInputVersion == CFFVersion.CFF and cffOutputVersion == CFFVersion.CFF2 ): self._convert_cff_to_cff2(self.otf) else: raise NotImplementedError( "Unsupported CFF conversion {cffInputVersion} => {cffOutputVersion}" ) def process_glyph_names(self, useProductionNames=None): if useProductionNames is None: keepGlyphNames = self.ufo.lib.get(KEEP_GLYPH_NAMES, True) useProductionNames = self.ufo.lib.get( USE_PRODUCTION_NAMES, not self.ufo.lib.get(GLYPHS_DONT_USE_PRODUCTION_NAMES) and self._postscriptNames is not None, ) else: keepGlyphNames = True if keepGlyphNames: if "CFF " not in self.otf: self.set_post_table_format(self.otf, 2.0) if useProductionNames: logger.info("Renaming glyphs to final production names") # We need to reload the font *before* renaming glyphs, since various # tables may have been build/loaded using the original glyph names. # After reloading, we can immediately set a new glyph order and update # the tables (post or CFF) that stores the new postcript names; any # other tables that get loaded subsequently will use the new glyph names. 
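# --- Illustrative sketch (not part of ufo2ft) --------------------------------
# The renaming step described above replaces each glyph name via a
# production-name map (PostProcessor.rename_glyphs also updates the post/CFF
# name storage); the glyph-order remapping itself is just a lookup with the
# original name as fallback. The mapping below is hypothetical.
_rename_map = {"adieresis": "uni00E4", "f_f_i": "uniFB03"}
_glyph_order = [".notdef", "adieresis", "f_f_i", "ampersand"]
_new_glyph_order = [_rename_map.get(n, n) for n in _glyph_order]
assert _new_glyph_order == [".notdef", "uni00E4", "uniFB03", "ampersand"]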
self.otf = _reloadFont(self.otf) self._rename_glyphs_from_ufo() else: if "CFF " in self.otf: logger.warning( "Dropping glyph names from CFF 1.0 is currently unsupported" ) else: # To drop glyph names from TTF or CFF2, we must reload the font *after* # setting the post format to 3.0, since other tables may still use # the old glyph names. self.set_post_table_format(self.otf, 3.0) self.otf = _reloadFont(self.otf) def _rename_glyphs_from_ufo(self): """Rename glyphs using ufo.lib.public.postscriptNames in UFO.""" rename_map = self._build_production_names() self.rename_glyphs(self.otf, rename_map) @staticmethod def rename_glyphs(otf, rename_map): newGlyphOrder = [rename_map.get(n, n) for n in otf.getGlyphOrder()] otf.setGlyphOrder(newGlyphOrder) if "post" in otf and otf["post"].formatType == 2.0: # annoyingly we need to update extraNames to match the new glyph order, # otherwise, if dumping the font to TTX directly before compiling first, # the post table will not contain the extraNames... otf["post"].extraNames = [ g for g in newGlyphOrder if g not in standardGlyphOrder ] otf["post"].mapping = {} cff_tag = "CFF " if "CFF " in otf else "CFF2" if "CFF2" in otf else None if cff_tag == "CFF " or (cff_tag == "CFF2" and otf.isLoaded(cff_tag)): cff = otf[cff_tag].cff.topDictIndex[0] char_strings = cff.CharStrings.charStrings cff.CharStrings.charStrings = { rename_map.get(n, n): v for n, v in char_strings.items() } cff.charset = [rename_map.get(n, n) for n in cff.charset] def _build_production_names(self): seen = {} rename_map = {} for name in self.otf.getGlyphOrder(): # Ignore glyphs that aren't in the source, as they are usually generated # and we lack information about them. if name not in self.glyphSet: continue prod_name = self._build_production_name(self.glyphSet[name]) # strip invalid characters not allowed in postscript glyph names if name != prod_name: valid_name = self.GLYPH_NAME_INVALID_CHARS.sub("", prod_name) if len(valid_name) > self.MAX_GLYPH_NAME_LENGTH: # if the length of the generated production name is too # long, try to fall back to the original name valid_name = self.GLYPH_NAME_INVALID_CHARS.sub("", name) else: valid_name = self.GLYPH_NAME_INVALID_CHARS.sub("", name) if len(valid_name) > self.MAX_GLYPH_NAME_LENGTH: logger.warning( "glyph name length exceeds 63 characters: '%s'", valid_name ) # add a suffix to make the production names unique rename_map[name] = self._unique_name(valid_name, seen) return rename_map @staticmethod def _unique_name(name, seen): """Append incremental '.N' suffix if glyph is a duplicate.""" if name in seen: n = seen[name] while (name + ".%d" % n) in seen: n += 1 seen[name] = n + 1 name += ".%d" % n seen[name] = 1 return name def _build_production_name(self, glyph): """Build a production name for a single glyph.""" # use PostScript names from UFO lib if available if self._postscriptNames: production_name = self._postscriptNames.get(glyph.name) return production_name if production_name else glyph.name # use name derived from unicode value unicode_val = glyph.unicode if glyph.unicode is not None: return "{}{:04X}".format( "u" if unicode_val > 0xFFFF else "uni", unicode_val ) # use production name + last (non-script) suffix if possible parts = glyph.name.rsplit(".", 1) if len(parts) == 2 and parts[0] in self.glyphSet: return "{}.{}".format( self._build_production_name(self.glyphSet[parts[0]]), parts[1], ) # use ligature name, making sure to look up components with suffixes parts = glyph.name.split(".", 1) if len(parts) == 2: liga_parts = ["{}.{}".format(n, 
parts[1]) for n in parts[0].split("_")] else: liga_parts = glyph.name.split("_") if len(liga_parts) > 1 and all(n in self.glyphSet for n in liga_parts): unicode_vals = [self.glyphSet[n].unicode for n in liga_parts] if all(v and v <= 0xFFFF for v in unicode_vals): return "uni" + "".join("%04X" % v for v in unicode_vals) return "_".join( self._build_production_name(self.glyphSet[n]) for n in liga_parts ) return glyph.name @staticmethod def set_post_table_format(otf, formatType): if formatType not in (2.0, 3.0): raise NotImplementedError(formatType) post = otf.get("post") if post: if post.formatType != formatType: logger.info("Setting post.formatType = %s", formatType) post.formatType = formatType # we want to update extraNames list even if formatType is the same # so we don't have to reload the font if formatType == 2.0: post.extraNames = [ g for g in otf.getGlyphOrder() if g not in standardGlyphOrder ] post.mapping = {} else: for attr in ("extraNames", "mapping"): if hasattr(post, attr): delattr(post, attr) post.glyphOrder = None @staticmethod def _get_cff_version(otf): if "CFF " in otf: return CFFVersion.CFF elif "CFF2" in otf: return CFFVersion.CFF2 else: return None @staticmethod def _convert_cff_to_cff2(otf): from fontTools.varLib.cff import convertCFFtoCFF2 logger.info("Converting CFF table to CFF2") # convertCFFtoCFF2 doesn't strip T2CharStrings' widths, so we do it ourselves # https://github.com/fonttools/fonttools/issues/1835 charstrings = otf["CFF "].cff[0].CharStrings for glyph_name in otf.getGlyphOrder(): cs = charstrings[glyph_name] cs.decompile() cs.program = _stripCharStringWidth(cs.program) convertCFFtoCFF2(otf) @classmethod def _subroutinize(cls, backend, otf, cffVersion): subroutinize = getattr(cls, f"_subroutinize_with_{backend.value}") subroutinize(otf, cffVersion) @classmethod def _subroutinize_with_compreffor(cls, otf, cffVersion): from compreffor import compress if cls._get_cff_version(otf) != CFFVersion.CFF or cffVersion != CFFVersion.CFF: raise NotImplementedError( "Only 'CFF ' 1.0 is supported by compreffor; try using cffsubr" ) logger.info("Subroutinizing CFF table with compreffor") compress(otf) @classmethod def _subroutinize_with_cffsubr(cls, otf, cffVersion): import cffsubr cffInputVersion = cls._get_cff_version(otf) assert cffInputVersion is not None, "Missing required 'CFF ' or 'CFF2' table" msg = f"Subroutinizing {cffInputVersion.name} table with cffsubr" if cffInputVersion != cffVersion: msg += f" (output format: {cffVersion.name})" logger.info(msg) return cffsubr.subroutinize(otf, cff_version=cffVersion, keep_glyph_names=False) # Adapted from fontTools.cff.specializer.programToCommands # https://github.com/fonttools/fonttools/blob/babca16 # /Lib/fontTools/cffLib/specializer.py#L40-L122 # When converting from CFF to CFF2 we need to drop the charstrings' widths. # This function returns a new charstring program without the initial width value. # TODO: Move to fontTools? 
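# For example (an illustrative sketch; the numbers are made up):
#
#     _stripCharStringWidth([600, 50, 50, "rmoveto", 100, "hlineto", "endchar"])
#     # -> [50, 50, 'rmoveto', 100, 'hlineto', 'endchar']
#
# Here 600 is the advance width optionally encoded before the first stack-clearing
# operator in a CFF 1.0 charstring; CFF2 charstrings never encode widths, so the
# value must be dropped when converting.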
def _stripCharStringWidth(program): seenWidthOp = False result = [] stack = [] for token in program: if not isinstance(token, str): stack.append(token) continue if (not seenWidthOp) and token in { "hstem", "hstemhm", "vstem", "vstemhm", "cntrmask", "hintmask", "hmoveto", "vmoveto", "rmoveto", "endchar", }: seenWidthOp = True parity = token in {"hmoveto", "vmoveto"} numArgs = len(stack) if numArgs and (numArgs % 2) ^ parity: stack.pop(0) # pop width result.extend(stack) result.append(token) stack = [] if stack: result.extend(stack) return result def _reloadFont(font: TTFont) -> TTFont: """Recompile a font to arrive at the final internal layout.""" stream = BytesIO() font.save(stream) stream.seek(0) return TTFont(stream) ufo2ft-2.30.0/Lib/ufo2ft/preProcessor.py000066400000000000000000000330161434012334300177540ustar00rootroot00000000000000import itertools from ufo2ft.constants import ( COLOR_LAYER_MAPPING_KEY, COLOR_LAYERS_KEY, COLOR_PALETTES_KEY, ) from ufo2ft.filters import isValidFilter, loadFilters from ufo2ft.filters.decomposeComponents import DecomposeComponentsFilter from ufo2ft.filters.decomposeTransformedComponents import ( DecomposeTransformedComponentsFilter, ) from ufo2ft.fontInfoData import getAttrWithFallback from ufo2ft.util import _GlyphSet def _load_custom_filters(ufo, filters=None): # Args: # ufo: Font # filters: Optional[List[Union[Filter, EllipsisType]]]) # Returns: List[Filter] # by default, load the filters from the lib; ellipsis is used as a placeholder # so one can optionally insert additional filters=[f1, ..., f2] either # before or after these, or override them by omitting the ellipsis. if filters is None: filters = [...] seen_ellipsis = False result = [] for f in filters: if f is ...: if seen_ellipsis: raise ValueError("ellipsis not allowed more than once") result.extend(itertools.chain(*loadFilters(ufo))) seen_ellipsis = True else: if not isValidFilter(type(f)): raise TypeError(f"Invalid filter: {f!r}") result.append(f) return result class BasePreProcessor: """Base class for objects that performs pre-processing operations on the UFO glyphs, such as decomposing composites, removing overlaps, or applying custom filters. By default the input UFO is **not** modified. The ``process`` method returns a dictionary containing the new modified glyphset, keyed by glyph name. If ``inplace`` is True, the input UFO is modified directly without the need to first copy the glyphs. Subclasses can override the ``initDefaultFilters`` method and return a list of built-in filters which are performed in a predefined order, between the user-defined pre- and post-filters. The extra kwargs passed to the constructor can be used to customize the initialization of the default filters. Custom filters can be applied before or after the default filters. These can be specified in the UFO lib.plist under the private key "com.github.googlei18n.ufo2ft.filters". Alternatively the optional ``filters`` parameter can be used. This is a list of filter instances (subclasses of BaseFilter) that overrides those defined in the UFO lib. The list can be empty, meaning no custom filters are run. If ``filters`` contain the special value ``...`` (i.e. the actual ``ellipsis`` singleton, not the str literal '...'), then all the filters from the UFO lib are loaded in its place. This allows to insert additional filters before or after those already defined in the UFO lib, as opposed to discard/replace them which is the default behavior when ``...`` is absent. 
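    A minimal usage sketch, assuming ``ufo`` is an already loaded defcon or
    ufoLib2 ``Font`` (the extra filter and the ``...`` placeholder are only
    illustrative; see the ``OTFPreProcessor`` subclass below)::

        from ufo2ft.filters.decomposeTransformedComponents import (
            DecomposeTransformedComponentsFilter,
        )
        from ufo2ft.preProcessor import OTFPreProcessor

        # keep any filters defined in the UFO lib (the ``...`` placeholder) and
        # additionally decompose transformed components
        preProcessor = OTFPreProcessor(
            ufo, filters=[..., DecomposeTransformedComponentsFilter()]
        )
        glyphSet = preProcessor.process()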
""" def __init__( self, ufo, inplace=False, layerName=None, skipExportGlyphs=None, filters=None, **kwargs, ): self.ufo = ufo self.inplace = inplace self.layerName = layerName self.glyphSet = _GlyphSet.from_layer( ufo, layerName, copy=not inplace, skipExportGlyphs=skipExportGlyphs ) self.defaultFilters = self.initDefaultFilters(**kwargs) filters = _load_custom_filters(ufo, filters) self.preFilters = [f for f in filters if f.pre] self.postFilters = [f for f in filters if not f.pre] def initDefaultFilters(self, **kwargs): return [] # pragma: no cover def process(self): ufo = self.ufo glyphSet = self.glyphSet for func in self.preFilters + self.defaultFilters + self.postFilters: func(ufo, glyphSet) return glyphSet def _init_explode_color_layer_glyphs_filter(ufo, filters): # Initialize ExplodeColorLayerGlyphsFilter, which copies color glyph layers # as standalone glyphs to the default glyph set (for building COLR table), if the # UFO contains the required 'colorPalettes' key, as well as 'colorLayerMapping' lib # keys (in either the font's or glyph's lib). # Skip doing that if an explicit 'colorLayers' key is already present. if ( COLOR_PALETTES_KEY in ufo.lib and COLOR_LAYERS_KEY not in ufo.lib and ( COLOR_LAYER_MAPPING_KEY in ufo.lib or any(COLOR_LAYER_MAPPING_KEY in g.lib for g in ufo) ) ): from ufo2ft.filters.explodeColorLayerGlyphs import ExplodeColorLayerGlyphsFilter filters.append(ExplodeColorLayerGlyphsFilter()) class OTFPreProcessor(BasePreProcessor): """Preprocessor for building CFF-flavored OpenType fonts. By default, it decomposes all the components. If ``removeOverlaps`` is True, it performs a union boolean operation on all the glyphs' contours. By default, booleanOperations is used to remove overlaps. You can choose skia-pathops by setting ``overlapsBackend`` to the enum value ``RemoveOverlapsFilter.SKIA_PATHOPS``, or the string "pathops". """ def initDefaultFilters(self, removeOverlaps=False, overlapsBackend=None): filters = [] _init_explode_color_layer_glyphs_filter(self.ufo, filters) filters.append(DecomposeComponentsFilter()) if removeOverlaps: from ufo2ft.filters.removeOverlaps import RemoveOverlapsFilter if overlapsBackend is not None: filters.append(RemoveOverlapsFilter(backend=overlapsBackend)) else: filters.append(RemoveOverlapsFilter()) return filters class TTFPreProcessor(OTFPreProcessor): """Preprocessor for building TrueType-flavored OpenType fonts. By default, it decomposes all the glyphs with mixed component/contour outlines. If the ``flattenComponents`` setting is True, glyphs with nested components are flattened so that they have at most one level of components. If ``removeOverlaps`` is True, it performs a union boolean operation on all the glyphs' contours. By default, booleanOperations is used to remove overlaps. You can choose skia-pathops by setting ``overlapsBackend`` to the enum value ``RemoveOverlapsFilter.SKIA_PATHOPS``, or the string "pathops". By default, it also converts all the PostScript cubic Bezier curves to TrueType quadratic splines. If the outlines are already quadratic, you can skip this by setting ``convertCubics`` to False. The optional ``conversionError`` argument controls the tolerance of the approximation algorithm. It is measured as the maximum distance between the original and converted curve, and it's relative to the UPM of the font (default: 1/1000 or 0.001). When converting curves to quadratic, it is assumed that the contours' winding direction is set following the PostScript counter-clockwise convention. 
Thus, by default the direction is reversed, in order to conform to opposite clockwise convention for TrueType outlines. You can disable this by setting ``reverseDirection`` to False. If both ``inplace`` and ``rememberCurveType`` options are True, the curve type "quadratic" is saved in font' lib under a private cu2qu key; the preprocessor will not try to convert them again if the curve type is already set to "quadratic". """ def initDefaultFilters( self, removeOverlaps=False, overlapsBackend=None, flattenComponents=False, convertCubics=True, conversionError=None, reverseDirection=True, rememberCurveType=True, ): filters = [] _init_explode_color_layer_glyphs_filter(self.ufo, filters) # len(g) is the number of contours, so we include the all glyphs # that have both components and at least one contour filters.append(DecomposeComponentsFilter(include=lambda g: len(g))) if flattenComponents: from ufo2ft.filters.flattenComponents import FlattenComponentsFilter filters.append(FlattenComponentsFilter()) if removeOverlaps: from ufo2ft.filters.removeOverlaps import RemoveOverlapsFilter if overlapsBackend is not None: filters.append(RemoveOverlapsFilter(backend=overlapsBackend)) else: filters.append(RemoveOverlapsFilter()) if convertCubics: from ufo2ft.filters.cubicToQuadratic import CubicToQuadraticFilter filters.append( CubicToQuadraticFilter( conversionError=conversionError, reverseDirection=reverseDirection, rememberCurveType=rememberCurveType and self.inplace, ) ) return filters class TTFInterpolatablePreProcessor: """Preprocessor for building TrueType-flavored OpenType fonts with interpolatable quadratic outlines. The constructor takes a list of UFO fonts, and the ``process`` method returns the modified glyphsets (list of dicts) in the same order. The pre-processor performs the conversion from cubic to quadratic on all the UFOs at once, then decomposes mixed contour/component glyphs. Additional pre/post custom filter are also applied to each single UFOs, respectively before or after the default filters, if they are specified in the UFO's lib.plist under the private key "com.github.googlei18n.ufo2ft.filters". NOTE: If you use any custom filters, the resulting glyphsets may no longer be interpolation compatible, depending on the particular filter used or whether they are applied to only some vs all of the UFOs. The ``conversionError``, ``reverseDirection``, ``flattenComponents`` and ``rememberCurveType`` arguments work in the same way as in the ``TTFPreProcessor``. """ def __init__( self, ufos, inplace=False, flattenComponents=False, conversionError=None, reverseDirection=True, rememberCurveType=True, layerNames=None, skipExportGlyphs=None, filters=None, ): from cu2qu.ufo import DEFAULT_MAX_ERR self.ufos = ufos self.inplace = inplace self.flattenComponents = flattenComponents if layerNames is None: layerNames = [None] * len(ufos) assert len(ufos) == len(layerNames) self.layerNames = layerNames # For each UFO, make a mapping of name to glyph object (and ensure it # contains none of the glyphs to be skipped, or any references to it). 
self.glyphSets = [ _GlyphSet.from_layer( ufo, layerName, copy=not inplace, skipExportGlyphs=skipExportGlyphs ) for ufo, layerName in zip(ufos, layerNames) ] self._conversionErrors = [ (conversionError or DEFAULT_MAX_ERR) * getAttrWithFallback(ufo.info, "unitsPerEm") for ufo in ufos ] self._reverseDirection = reverseDirection self._rememberCurveType = rememberCurveType self.defaultFilters = [] for ufo in ufos: self.defaultFilters.append([]) _init_explode_color_layer_glyphs_filter(ufo, self.defaultFilters[-1]) filterses = [_load_custom_filters(ufo, filters) for ufo in ufos] self.preFilters = [[f for f in filters if f.pre] for filters in filterses] self.postFilters = [[f for f in filters if not f.pre] for filters in filterses] def process(self): from cu2qu.ufo import fonts_to_quadratic needs_decomposition = set() # first apply all custom pre-filters for funcs, ufo, glyphSet in zip(self.preFilters, self.ufos, self.glyphSets): for func in funcs: if isinstance(func, DecomposeTransformedComponentsFilter): needs_decomposition |= func(ufo, glyphSet) else: func(ufo, glyphSet) # If we decomposed a glyph in some masters, we must ensure it is decomposed in # all masters. (https://github.com/googlefonts/ufo2ft/issues/507) if needs_decomposition: decompose = DecomposeComponentsFilter(include=needs_decomposition) for ufo, glyphSet in zip(self.ufos, self.glyphSets): decompose(ufo, glyphSet) # then apply all default filters for funcs, ufo, glyphSet in zip(self.defaultFilters, self.ufos, self.glyphSets): for func in funcs: func(ufo, glyphSet) fonts_to_quadratic( self.glyphSets, max_err=self._conversionErrors, reverse_direction=self._reverseDirection, dump_stats=True, remember_curve_type=self._rememberCurveType and self.inplace, ) # TrueType fonts cannot mix contours and components, so pick out all glyphs # that have contours (`bool(len(g)) == True`) and decompose their # components, if any. decompose = DecomposeComponentsFilter(include=lambda g: len(g)) for ufo, glyphSet in zip(self.ufos, self.glyphSets): decompose(ufo, glyphSet) if self.flattenComponents: from ufo2ft.filters.flattenComponents import FlattenComponentsFilter for ufo, glyphSet in zip(self.ufos, self.glyphSets): FlattenComponentsFilter()(ufo, glyphSet) # finally apply all custom post-filters for funcs, ufo, glyphSet in zip(self.postFilters, self.ufos, self.glyphSets): for func in funcs: func(ufo, glyphSet) return self.glyphSets ufo2ft-2.30.0/Lib/ufo2ft/util.py000066400000000000000000000511131434012334300162410ustar00rootroot00000000000000import importlib import logging import re from copy import deepcopy from inspect import currentframe, getfullargspec from typing import Set from fontTools import subset, ttLib, unicodedata from fontTools.designspaceLib import DesignSpaceDocument from fontTools.feaLib.builder import addOpenTypeFeatures from fontTools.misc.fixedTools import otRound from fontTools.misc.transform import Identity, Transform from fontTools.pens.reverseContourPen import ReverseContourPen from fontTools.pens.transformPen import TransformPen logger = logging.getLogger(__name__) def makeOfficialGlyphOrder(font, glyphOrder=None): """Make the final glyph order for 'font'. If glyphOrder is None, try getting the font.glyphOrder list. If not explicit glyphOrder is defined, sort glyphs alphabetically. If ".notdef" glyph is present in the font, force this to always be the first glyph (at index 0). 
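    For example, an illustrative sketch using a plain dict as the glyph set::

        >>> font = {"b": None, "a": None, ".notdef": None}
        >>> makeOfficialGlyphOrder(font, glyphOrder=["a"])
        ['.notdef', 'a', 'b']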
""" if glyphOrder is None: glyphOrder = getattr(font, "glyphOrder", ()) names = set(font.keys()) order = [] if ".notdef" in names: names.remove(".notdef") order.append(".notdef") for name in glyphOrder: if name not in names: continue names.remove(name) order.append(name) order.extend(sorted(names)) return order class _GlyphSet(dict): @classmethod def from_layer(cls, font, layerName=None, copy=False, skipExportGlyphs=None): """Return a mapping of glyph names to glyph objects from `font`.""" if layerName is not None: layer = font.layers[layerName] else: layer = font.layers.defaultLayer if copy: self = _copyLayer(layer, obj_type=cls) self.lib = deepcopy(layer.lib) else: self = cls((g.name, g) for g in layer) self.lib = layer.lib # If any glyphs in the skipExportGlyphs list are used as components, decompose # them in the containing glyphs... if skipExportGlyphs: for glyph in self.values(): if any(c.baseGlyph in skipExportGlyphs for c in glyph.components): deepCopyContours(self, glyph, glyph, Transform(), skipExportGlyphs) if hasattr(glyph, "removeComponent"): # defcon for c in [ component for component in glyph.components if component.baseGlyph in skipExportGlyphs ]: glyph.removeComponent(c) else: # ufoLib2 glyph.components[:] = [ c for c in glyph.components if c.baseGlyph not in skipExportGlyphs ] # ... and then remove them from the glyph set, if even present. for glyph_name in skipExportGlyphs: if glyph_name in self: del self[glyph_name] self.name = layer.name if layerName is not None else None return self def _copyLayer(layer, obj_type=dict): try: g = next(iter(layer)) except StopIteration: # layer is empty return obj_type() newGlyph = _getNewGlyphFactory(g) glyphSet = obj_type() for glyph in layer: glyphSet[glyph.name] = _copyGlyph(glyph, glyphFactory=newGlyph) return glyphSet def _getNewGlyphFactory(glyph): # defcon.Glyph doesn't take a name argument, ufoLib2 requires one... 
cls = glyph.__class__ if "name" in getfullargspec(cls.__init__).args: def newGlyph(name, **kwargs): return cls(name=name, **kwargs) else: def newGlyph(name, **kwargs): # use instantiateGlyphObject() to keep any custom sub-element classes # https://github.com/googlefonts/ufo2ft/issues/363 g2 = glyph.layer.instantiateGlyphObject() g2.name = name for k, v in kwargs.items(): setattr(g2, k, v) return g2 return newGlyph def _copyGlyph(glyph, glyphFactory=None, reverseContour=False): # copy everything except unused attributes: 'guidelines', 'note', 'image' if glyphFactory is None: glyphFactory = _getNewGlyphFactory(glyph) copy = glyphFactory(glyph.name) copy.width = glyph.width copy.height = glyph.height copy.unicodes = list(glyph.unicodes) copy.anchors = [dict(a) for a in glyph.anchors] copy.lib = deepcopy(glyph.lib) pointPen = copy.getPointPen() if reverseContour: from fontTools.pens.pointPen import ReverseContourPointPen pointPen = ReverseContourPointPen(pointPen) glyph.drawPoints(pointPen) return copy def _setGlyphMargin(glyph, side, margin): # defcon.Glyph has @property setters for the margins, whereas ufoLib2.Glyph # has regular instance methods assert side in {"left", "right", "top", "bottom"} if hasattr(glyph, f"set{side.title()}Margin"): # ufoLib2 getattr(glyph, f"set{side.title()}Margin")(margin) assert getattr(glyph, f"get{side.title()}Margin")() == margin elif hasattr(glyph, f"{side}Margin"): # defcon descriptor = getattr(type(glyph), f"{side}Margin") descriptor.__set__(glyph, margin) assert descriptor.__get__(glyph) == margin else: raise NotImplementedError(f"Unsupported Glyph class: {type(glyph)!r}") def deepCopyContours( glyphSet, parent, composite, transformation, specificComponents=None ): """Copy contours from component to parent, including nested components. specificComponent: an optional list of glyph name strings. If not passed or None, decompose all components of a glyph unconditionally and completely. If passed, only completely decompose components whose baseGlyph is in the list. """ for nestedComponent in composite.components: # Because this function works recursively, test at each turn if we are going to # recurse into a specificComponent. If so, set the specificComponents argument # to None so we unconditionally decompose the possibly nested component # completely. specificComponentsEffective = specificComponents if specificComponentsEffective: if nestedComponent.baseGlyph not in specificComponentsEffective: continue else: specificComponentsEffective = None try: nestedBaseGlyph = glyphSet[nestedComponent.baseGlyph] except KeyError: logger.warning( "dropping non-existent component '%s' in glyph '%s'", nestedComponent.baseGlyph, parent.name, ) else: deepCopyContours( glyphSet, parent, nestedBaseGlyph, transformation.transform(nestedComponent.transformation), specificComponents=specificComponentsEffective, ) # Check if there are any contours to copy before instantiating pens. if composite != parent and len(composite): if transformation == Identity: pen = parent.getPen() else: pen = TransformPen(parent.getPen(), transformation) # if the transformation has a negative determinant, it will # reverse the contour direction of the component xx, xy, yx, yy = transformation[:4] if xx * yy - xy * yx < 0: pen = ReverseContourPen(pen) for contour in composite: contour.draw(pen) def makeUnicodeToGlyphNameMapping(font, glyphOrder=None): """Make a unicode: glyph name mapping for this glyph set (dict or Font). 
Raises InvalidFontData exception if multiple glyphs are mapped to the same unicode codepoint. """ if glyphOrder is None: glyphOrder = makeOfficialGlyphOrder(font) mapping = {} for glyphName in glyphOrder: glyph = font[glyphName] unicodes = glyph.unicodes for uni in unicodes: if uni not in mapping: mapping[uni] = glyphName else: from ufo2ft.errors import InvalidFontData InvalidFontData( "cannot map '%s' to U+%04X; already mapped to '%s'" % (glyphName, uni, mapping[uni]) ) return mapping def compileGSUB(featureFile, glyphOrder): """Compile and return a GSUB table from `featureFile` (feaLib FeatureFile), using the given `glyphOrder` (list of glyph names). """ font = ttLib.TTFont() font.setGlyphOrder(glyphOrder) addOpenTypeFeatures(font, featureFile, tables={"GSUB"}) return font.get("GSUB") def compileGDEF(featureFile, glyphOrder): """Compile and return a GDEF table from `featureFile` (feaLib FeatureFile), using the given `glyphOrder` (list of glyph names). """ from fontTools.feaLib.ast import TableBlock font = ttLib.TTFont() font.setGlyphOrder(glyphOrder) gdefDefined = False for statement in featureFile.statements: if isinstance(statement, TableBlock) and statement.name == "GDEF": gdefDefined = True if not gdefDefined: addOpenTypeFeatures(font, featureFile, tables={"GDEF", "GPOS", "GSUB"}) else: addOpenTypeFeatures(font, featureFile, tables={"GDEF"}) return font.get("GDEF") def closeGlyphsOverGSUB(gsub, glyphs): """Use the FontTools subsetter to perform a closure over the GSUB table given the initial `glyphs` (set of glyph names, str). Update the set in-place adding all the glyph names that can be reached via GSUB substitutions from this initial set. """ subsetter = subset.Subsetter() subsetter.glyphs = glyphs gsub.closure_glyphs(subsetter) def classifyGlyphs(unicodeFunc, cmap, gsub=None): """'unicodeFunc' is a callable that takes a Unicode codepoint and returns a string denoting some Unicode property associated with the given character (or None if a character is considered 'neutral'). 'cmap' is a dictionary mapping Unicode codepoints to glyph names. 'gsub' is an (optional) fonttools GSUB table object, used to find all the glyphs that are "reachable" via substitutions from the initial sets of glyphs defined in the cmap. Returns a dictionary of glyph sets associated with the given Unicode properties. """ glyphSets = {} neutralGlyphs = set() for uv, glyphName in cmap.items(): key_or_keys = unicodeFunc(uv) if key_or_keys is None: neutralGlyphs.add(glyphName) elif isinstance(key_or_keys, (list, set)): for key in key_or_keys: glyphSets.setdefault(key, set()).add(glyphName) else: glyphSets.setdefault(key_or_keys, set()).add(glyphName) if gsub is not None: if neutralGlyphs: closeGlyphsOverGSUB(gsub, neutralGlyphs) for glyphs in glyphSets.values(): s = glyphs | neutralGlyphs closeGlyphsOverGSUB(gsub, s) glyphs.update(s - neutralGlyphs) return glyphSets def unicodeInScripts(uv, scripts): """Check UnicodeData's ScriptExtension property for unicode codepoint 'uv' and return True if it intersects with the set of 'scripts' provided, False if it does not intersect. Return None for 'Common' script ('Zyyy'). 
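    For example (illustrative)::

        >>> unicodeInScripts(ord("Ж"), {"Cyrl", "Grek"})
        True
        >>> unicodeInScripts(ord("Ж"), {"Arab"})
        False
        >>> unicodeInScripts(ord(" "), {"Cyrl"}) is None  # U+0020 is 'Zyyy' (Common)
        True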
""" sx = unicodedata.script_extension(chr(uv)) if "Zyyy" in sx: return None return not sx.isdisjoint(scripts) # we consider the 'Common' and 'Inherited' scripts as neutral for # determining a script horizontal direction DFLT_SCRIPTS = {"Zyyy", "Zinh"} def unicodeScriptDirection(uv): sc = unicodedata.script(chr(uv)) if sc in DFLT_SCRIPTS: return None return unicodedata.script_horizontal_direction(sc) def calcCodePageRanges(unicodes): """Given a set of Unicode codepoints (integers), calculate the corresponding OS/2 CodePage range bits. This is a direct translation of FontForge implementation: https://github.com/fontforge/fontforge/blob/7b2c074/fontforge/tottf.c#L3158 """ codepageRanges = set() chars = [chr(u) for u in unicodes] hasAscii = set(range(0x20, 0x7E)).issubset(unicodes) hasLineart = "┤" in chars for char in chars: if char == "Þ" and hasAscii: codepageRanges.add(0) # Latin 1 elif char == "Ľ" and hasAscii: codepageRanges.add(1) # Latin 2: Eastern Europe if hasLineart: codepageRanges.add(58) # Latin 2 elif char == "Б": codepageRanges.add(2) # Cyrillic if "Ѕ" in chars and hasLineart: codepageRanges.add(57) # IBM Cyrillic if "╜" in chars and hasLineart: codepageRanges.add(49) # MS-DOS Russian elif char == "Ά": codepageRanges.add(3) # Greek if hasLineart and "½" in chars: codepageRanges.add(48) # IBM Greek if hasLineart and "√" in chars: codepageRanges.add(60) # Greek, former 437 G elif char == "İ" and hasAscii: codepageRanges.add(4) # Turkish if hasLineart: codepageRanges.add(56) # IBM turkish elif char == "א": codepageRanges.add(5) # Hebrew if hasLineart and "√" in chars: codepageRanges.add(53) # Hebrew elif char == "ر": codepageRanges.add(6) # Arabic if "√" in chars: codepageRanges.add(51) # Arabic if hasLineart: codepageRanges.add(61) # Arabic; ASMO 708 elif char == "ŗ" and hasAscii: codepageRanges.add(7) # Windows Baltic if hasLineart: codepageRanges.add(59) # MS-DOS Baltic elif char == "₫" and hasAscii: codepageRanges.add(8) # Vietnamese elif char == "ๅ": codepageRanges.add(16) # Thai elif char == "エ": codepageRanges.add(17) # JIS/Japan elif char == "ㄅ": codepageRanges.add(18) # Chinese: Simplified chars elif char == "ㄱ": codepageRanges.add(19) # Korean wansung elif char == "央": codepageRanges.add(20) # Chinese: Traditional chars elif char == "곴": codepageRanges.add(21) # Korean Johab elif char == "♥" and hasAscii: codepageRanges.add(30) # OEM Character Set # TODO: Symbol bit has a special meaning (check the spec), we need # to confirm if this is wanted by default. 
# elif chr(0xF000) <= char <= chr(0xF0FF): # codepageRanges.add(31) # Symbol Character Set elif char == "þ" and hasAscii and hasLineart: codepageRanges.add(54) # MS-DOS Icelandic elif char == "╚" and hasAscii: codepageRanges.add(62) # WE/Latin 1 codepageRanges.add(63) # US elif hasAscii and hasLineart and "√" in chars: if char == "Å": codepageRanges.add(50) # MS-DOS Nordic elif char == "é": codepageRanges.add(52) # MS-DOS Canadian French elif char == "õ": codepageRanges.add(55) # MS-DOS Portuguese if hasAscii and "‰" in chars and "∑" in chars: codepageRanges.add(29) # Macintosh Character Set (US Roman) # when no codepage ranges can be enabled, fall back to enabling bit 0 # (Latin 1) so that the font works in MS Word: # https://github.com/googlei18n/fontmake/issues/468 if not codepageRanges: codepageRanges.add(0) return codepageRanges class _LazyFontName: def __init__(self, font): self.font = font def __str__(self): from ufo2ft.fontInfoData import getAttrWithFallback return getAttrWithFallback(self.font.info, "postscriptFontName") def getDefaultMasterFont(designSpaceDoc): defaultSource = designSpaceDoc.findDefault() if not defaultSource: from ufo2ft.errors import InvalidDesignSpaceData raise InvalidDesignSpaceData( "Can't find base (neutral) master in DesignSpace document" ) if not defaultSource.font: from ufo2ft.errors import InvalidDesignSpaceData raise InvalidDesignSpaceData( "DesignSpace source '%s' is missing required 'font' attribute" % getattr(defaultSource, "name", "") ) return defaultSource.font def _getDefaultNotdefGlyph(designSpaceDoc): from ufo2ft.errors import InvalidDesignSpaceData try: baseUfo = getDefaultMasterFont(designSpaceDoc) except InvalidDesignSpaceData: notdefGlyph = None else: # unlike ufoLib2, defcon has no Font.get() method try: notdefGlyph = baseUfo[".notdef"] except KeyError: notdefGlyph = None return notdefGlyph # NOTE about the security risk involved in using eval: the function below is # meant to be used to parse string coming from the command-line, which is # inherently "trusted"; if that weren't the case, a potential attacker # could do worse things than segfaulting the Python interpreter... def _kwargsEval(s): return eval( "dict(%s)" % s, {"__builtins__": {"True": True, "False": False, "dict": dict}} ) _pluginSpecRE = re.compile( r"(?:([\w\.]+)::)?" # MODULE_NAME + '::' r"(\w+)" # CLASS_NAME [required] r"(?:\((.*)\))?" # (KWARGS) ) def _loadPluginFromString(spec, moduleName, isValidFunc): spec = spec.strip() m = _pluginSpecRE.match(spec) if not m or (m.end() - m.start()) != len(spec): raise ValueError(spec) moduleName = m.group(1) or moduleName className = m.group(2) kwargs = m.group(3) module = importlib.import_module(moduleName) klass = getattr(module, className) if not isValidFunc(klass): raise TypeError(klass) try: options = _kwargsEval(kwargs) if kwargs else {} except SyntaxError as e: raise ValueError("options have incorrect format: %r" % kwargs) from e return klass(**options) def quantize(number, factor): """Round to a multiple of the given parameter""" return factor * otRound(number / factor) def init_kwargs(kwargs, defaults): """Initialise kwargs default values. To be used as the first function in top-level `ufo2ft.compile*` functions. Raise TypeError with unexpected keyword arguments (missing from 'defaults'). 
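    For example (illustrative)::

        >>> defaults = {"removeOverlaps": False, "optimizeCFF": True}
        >>> init_kwargs({"removeOverlaps": True}, defaults)
        {'removeOverlaps': True, 'optimizeCFF': True}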
""" extra_kwargs = set(kwargs).difference(defaults) if extra_kwargs: # get the name of the function that called init_kwargs func_name = currentframe().f_back.f_code.co_name raise TypeError( f"{func_name}() got unexpected keyword arguments: " f"{', '.join(repr(k) for k in extra_kwargs)}" ) return {k: (kwargs[k] if k in kwargs else v) for k, v in defaults.items()} def prune_unknown_kwargs(kwargs, *callables): """Inspect callables and return a new dict skipping any unknown arguments. To be used after `init_kwargs` to narrow down arguments for underlying code. """ known_args = set() for func in callables: known_args.update(getfullargspec(func).args) return {k: v for k, v in kwargs.items() if k in known_args} def ensure_all_sources_have_names(doc: DesignSpaceDocument) -> None: """Change in-place the given document to make sure that all elements have a unique name assigned. This may rename sources with a "temp_master.N" name, designspaceLib's default stand-in. """ used_names: Set[str] = set() counter = 0 for source in doc.sources: while source.name is None or source.name in used_names: source.name = f"temp_master.{counter}" counter += 1 used_names.add(source.name) def getMaxComponentDepth(glyph, glyphSet, maxComponentDepth=0): """Return the height of a composite glyph's tree of components. This is equal to the depth of its deepest node, where the depth means the number of edges (component references) from the node to the tree's root. For glyphs that contain no components, only contours, this is 0. Composite glyphs have max component depth of 1 or greater. """ if not glyph.components: return maxComponentDepth maxComponentDepth += 1 initialMaxComponentDepth = maxComponentDepth for component in glyph.components: try: baseGlyph = glyphSet[component.baseGlyph] except KeyError: continue componentDepth = getMaxComponentDepth( baseGlyph, glyphSet, initialMaxComponentDepth ) maxComponentDepth = max(maxComponentDepth, componentDepth) return maxComponentDepth ufo2ft-2.30.0/MANIFEST.in000066400000000000000000000002441434012334300145340ustar00rootroot00000000000000include README.rst include LICENSE include tox.ini include *requirements.txt recursive-include tests *.py recursive-include tests/data *.glif *.plist *.fea *.ttx ufo2ft-2.30.0/README.rst000066400000000000000000000154571434012334300145010ustar00rootroot00000000000000|GitHub Actions status| |PyPI Version| |Codecov| |Gitter Chat| ufo2ft ====== ufo2ft ("UFO to FontTools") is a fork of `ufo2fdk `__ whose goal is to generate OpenType font binaries from UFOs without the FDK dependency. The library provides two functions, ``compileOTF`` and ``compileTTF``, which work exactly the same way: .. code:: python from defcon import Font from ufo2ft import compileOTF ufo = Font('MyFont-Regular.ufo') otf = compileOTF(ufo) otf.save('MyFont-Regular.otf') In most cases, the behavior of ufo2ft should match that of ufo2fdk, whose documentation is retained below (and hopefully is still accurate). Naming Data ~~~~~~~~~~~ As with any OpenType compiler, you have to set the font naming data to a particular standard for your naming to be set correctly. In ufo2fdk, you can get away with setting *two* naming attributes in your font.info object for simple fonts: - familyName: The name for your family. For example, "My Garamond". - styleName: The style name for this particular font. For example, "Display Light Italic" ufo2fdk will create all of the other naming data based on thse two fields. 
If you want to use the fully automatic naming system, all of the other name attributes should be set to ``None`` in your font. However, if you want to override the automated system at any level, you can specify particular naming attributes and ufo2fdk will honor your settings. You don't have to set *all* of the attributes, just the ones you don't want to be automated. For example, in the family "My Garamond" you have eight weights. It would be nice to style map the italics to the romans for each weight. To do this, in the individual romans and italics, you need to set the style mapping data. This is done through the ``styleMapFamilyName`` and ``styleMapStyleName`` attributes. In each of your roman and italic pairs you would do this: **My Garamond-Light.ufo** - familyName = "My Garamond" - styleName = "Light" - styleMapFamilyName = "My Garamond Display Light" - styleMapStyleName = "regular" **My Garamond-Light Italic.ufo** - familyName = "My Garamond" - styleName = "Display Light Italic" - styleMapFamilyName = "My Garamond Display Light" - styleMapStyleName = "italic" **My Garamond-Book.ufo** - familyName = "My Garamond" - styleName = "Book" - styleMapFamilyName = "My Garamond Display Book" - styleMapStyleName = "regular" **My Garamond-Book Italic.ufo** - familyName = "My Garamond" - styleName = "Display Book Italic" - styleMapFamilyName = "My Garamond Display Book" - styleMapStyleName = "italic" **etc.** Additionally, if you have defined any naming data, or any data for that matter, in table definitions within your font's features that data will be honored. Feature generation ~~~~~~~~~~~~~~~~~~ If your font's features do not contain kerning/mark/mkmk features, ufo2ft will create them based on your font's kerning/anchor data. In addition to `Adobe OpenType feature files `__, ufo2ft also supports the `MTI/Monotype format `__. For example, a GPOS table in this format would be stored within the UFO at ``data/com.github.googlei18n.ufo2ft.mtiFeatures/GPOS.mti``. Fallbacks ~~~~~~~~~ Most of the fallbacks have static values. To see what is set for these, look at ``fontInfoData.py`` in the source code. In some cases, the fallback values are dynamically generated from other data in the info object. These are handled internally with functions. Merging TTX ~~~~~~~~~~~ If the UFO data directory has a ``com.github.fonttools.ttx`` folder with TTX files ending with ``.ttx``, these will be merged in the generated font. The index TTX (generated when using using ``ttx -s``) is not required. .. |GitHub Actions status| image:: https://github.com/googlefonts/ufo2ft/workflows/Test%20+%20Deploy/badge.svg .. |PyPI Version| image:: https://img.shields.io/pypi/v/ufo2ft.svg :target: https://pypi.org/project/ufo2ft/ .. |Codecov| image:: https://codecov.io/gh/googlefonts/ufo2ft/branch/master/graph/badge.svg :target: https://codecov.io/gh/googlefonts/ufo2ft .. |Gitter Chat| image:: https://badges.gitter.im/fonttools-dev/ufo2ft.svg :alt: Join the chat at https://gitter.im/fonttools-dev/ufo2ft :target: https://gitter.im/fonttools-dev/ufo2ft?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge Color fonts ~~~~~~~~~~~ ufo2ft supports building ``COLR`` and ``CPAL`` tables. If there is ``com.github.googlei18n.ufo2ft.colorPalettes`` key in font lib, and ``com.github.googlei18n.ufo2ft.colorLayerMapping`` key in the font or in any of the glyphs lib, then ufo2ft will build ``CPAL`` table from the color palettes, and ``COLR`` table from the color layers. 
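For example, a minimal sketch that sets both lib keys programmatically before
compiling (the layer names and palette values are only illustrative; the exact
formats of the two keys are described below):

.. code:: python

    from defcon import Font
    from ufo2ft import compileTTF

    ufo = Font('MyColorFont-Regular.ufo')
    ufo.lib["com.github.googlei18n.ufo2ft.colorPalettes"] = [
        [[0.26, 0.0, 0.23, 1.0], [0.86, 0.73, 0.28, 1.0]],
    ]
    ufo.lib["com.github.googlei18n.ufo2ft.colorLayerMapping"] = [
        ["color.1", 1],
        ["color.2", 0],
    ]
    ttf = compileTTF(ufo)
    ttf.save('MyColorFont-Regular.ttf')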
``colorPalettes`` is a array of palettes, each palette is a array of colors and each color is a array of floats representing RGBA colors. For example: .. code:: xml com.github.googlei18n.ufo2ft.colorPalettes 0.26 0.0 0.23 1.0 0.86 0.73 0.28 1.0 ``colorLayerMapping`` is a array of color layers, each color layer is a array of layer name and palette color index. It is a per-glyph key, but if present in the font lib then it will be used for all glyphs that lack it. For example: .. code:: xml com.github.googlei18n.ufo2ft.colorLayerMapping color.1 1 color.2 0 With these this key present, ufo2ft will copy the color layers into individual glyphs and setup ``COLR`` table. Alternatively, if the color layers are already separate UFO glyphs, the ``com.github.googlei18n.ufo2ft.colorLayers`` font lib key can be used. It uses a table keyed by base glyph, and the value is an array of color layers, each color layer is an array of glyph name and palette color index. For example: .. code:: xml com.github.googlei18n.ufo2ft.colorLayers alef-ar alef-ar.color0 2 alefHamzaabove-ar alefHamzaabove-ar.color0 1 alefHamzaabove-ar.color1 2 ufo2ft-2.30.0/dev-requirements.txt000066400000000000000000000000531434012334300170340ustar00rootroot00000000000000coverage pytest black isort flake8-bugbear ufo2ft-2.30.0/requirements.txt000066400000000000000000000003121434012334300162560ustar00rootroot00000000000000fonttools[lxml,ufo]==4.38.0 defcon==0.10.0 cu2qu==1.6.7.post1 compreffor==0.5.1.post1 booleanOperations==0.9.0 cffsubr==0.2.9.post1 skia-pathops==0.7.2 # alternative UFO implementation ufoLib2==0.13.1 ufo2ft-2.30.0/setup.cfg000066400000000000000000000005431434012334300146210ustar00rootroot00000000000000[wheel] universal = 1 [sdist] formats = zip [aliases] test = pytest [metadata] license_file = LICENSE [tool:pytest] minversion = 2.8 testpaths = tests python_files = *_test.py python_classes = *Test addopts = -r a filterwarnings: ignore:tostring:DeprecationWarning ignore:fromstring:DeprecationWarning ignore:.*bytes:DeprecationWarning:fs.base ufo2ft-2.30.0/setup.py000066400000000000000000000040351434012334300145120ustar00rootroot00000000000000#!/usr/bin/env python import sys from setuptools import find_packages, setup needs_pytest = {"pytest", "test"}.intersection(sys.argv) pytest_runner = ["pytest_runner"] if needs_pytest else [] needs_wheel = {"bdist_wheel"}.intersection(sys.argv) wheel = ["wheel"] if needs_wheel else [] with open("README.rst", "r") as f: long_description = f.read() setup( name="ufo2ft", use_scm_version={"write_to": "Lib/ufo2ft/_version.py"}, author="Tal Leming, James Godfrey-Kittle", author_email="tal@typesupply.com", maintainer="Cosimo Lupo", maintainer_email="cosimo@anthrotype.com", description="A bridge between UFOs and FontTools.", long_description=long_description, url="https://github.com/googlefonts/ufo2ft", package_dir={"": "Lib"}, packages=find_packages("Lib"), include_package_data=True, license="MIT", setup_requires=pytest_runner + wheel + ["setuptools_scm"], tests_require=["pytest>=2.8"], install_requires=[ "fonttools[ufo]>=4.38.0", "cu2qu>=1.6.7", "cffsubr>=0.2.8", "booleanOperations>=0.9.0", ], extras_require={ "pathops": ["skia-pathops>=0.5.1"], "cffsubr": [], # keep empty for backward compat "compreffor": ["compreffor>=0.4.6"], }, python_requires=">=3.7", classifiers=[ "Development Status :: 4 - Beta", "Environment :: Console", "Environment :: Other Environment", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Natural 
Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Multimedia :: Graphics", "Topic :: Multimedia :: Graphics :: Graphics Conversion", "Topic :: Multimedia :: Graphics :: Editors :: Vector-Based", "Topic :: Software Development :: Libraries :: Python Modules", ], ) ufo2ft-2.30.0/tests/000077500000000000000000000000001434012334300141405ustar00rootroot00000000000000ufo2ft-2.30.0/tests/__init__.py000066400000000000000000000000001434012334300162370ustar00rootroot00000000000000ufo2ft-2.30.0/tests/conftest.py000066400000000000000000000100071434012334300163350ustar00rootroot00000000000000import os import py import pytest from fontTools import designspaceLib @pytest.fixture(scope="session", params=["defcon", "ufoLib2"]) def ufo_module(request): return pytest.importorskip(request.param) @pytest.fixture(scope="session") def FontClass(ufo_module): if hasattr(ufo_module.Font, "open"): def ctor(path=None): if path is None: return ufo_module.Font() else: return ufo_module.Font.open(path) return ctor return ufo_module.Font @pytest.fixture(scope="session") def InfoClass(ufo_module): return ufo_module.objects.info.Info @pytest.fixture def datadir(): return py.path.local(py.path.local(__file__).dirname).join("data") def getpath(filename): dirname = os.path.dirname(__file__) return os.path.join(dirname, "data", filename) @pytest.fixture def layertestrgufo(FontClass): font = FontClass(getpath("LayerFont-Regular.ufo")) return font @pytest.fixture def layertestbdufo(FontClass): font = FontClass(getpath("LayerFont-Bold.ufo")) return font @pytest.fixture def designspace(layertestrgufo, layertestbdufo): ds = designspaceLib.DesignSpaceDocument() a1 = designspaceLib.AxisDescriptor() a1.tag = "wght" a1.name = "Weight" a1.default = a1.minimum = 350 a1.maximum = 625 ds.addAxis(a1) s1 = designspaceLib.SourceDescriptor() s1.name = "Layer Font Regular" s1.familyName = "Layer Font" s1.styleName = "Regular" s1.filename = "LayerFont-Regular.ufo" s1.location = {"Weight": 350} s1.font = layertestrgufo ds.addSource(s1) s2 = designspaceLib.SourceDescriptor() s2.name = "Layer Font Medium" s2.familyName = "Layer Font" s2.styleName = "Medium" s2.filename = "LayerFont-Regular.ufo" s2.layerName = "Medium" s2.location = {"Weight": 450} s2.font = layertestrgufo ds.addSource(s2) s3 = designspaceLib.SourceDescriptor() s3.name = "Layer Font Bold" s3.familyName = "Layer Font" s3.styleName = "Bold" s3.filename = "LayerFont-Bold.ufo" s3.location = {"Weight": 625} s3.font = layertestbdufo ds.addSource(s3) return ds @pytest.fixture def designspace_v5(FontClass): def draw_rectangle(pen, x_offset, y_offset): pen.moveTo((0 + x_offset, 0 + y_offset)) pen.lineTo((10 + x_offset, 0 + y_offset)) pen.lineTo((10 + x_offset, 10 + y_offset)) pen.lineTo((0 + x_offset, 10 + y_offset)) pen.closePath() def draw_something(glyph, number, is_sans): # Ensure Sans and Serif sources are incompatible to make sure that the # DS5 code treats them separately when using e.g. cu2qu. Use some number # to offset the drawings so we get some variation. 
if is_sans: draw_rectangle(glyph.getPen(), 10 * number, 0) else: draw_rectangle(glyph.getPen(), -10 * number, -20) draw_rectangle(glyph.getPen(), 10 * number, 20) ds5 = designspaceLib.DesignSpaceDocument.fromfile( "tests/data/DSv5/test_v5_MutatorSans_and_Serif.designspace" ) sources = {} # Create base UFOs for index, source in enumerate(ds5.sources): if source.layerName is not None: continue font = FontClass() for name in ("I", "S", "I.narrow", "S.closed", "a"): glyph = font.newGlyph(name) draw_something(glyph, index, "Serif" not in source.filename) font.lib["public.glyphOrder"] = sorted(font.keys()) sources[source.filename] = font # Fill in sparse UFOs for index, source in enumerate(ds5.sources): if source.layerName is None: continue font = sources[source.filename] layer = font.newLayer(source.layerName) for name in ("I", "S", "I.narrow", "S.closed"): glyph = layer.newGlyph(name) draw_something(glyph, index, "Serif" not in source.filename) # Assign UFOs to their attribute for source in ds5.sources: source.font = sources[source.filename] return ds5 ufo2ft-2.30.0/tests/data/000077500000000000000000000000001434012334300150515ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/Bug108.ttx000066400000000000000000000027761434012334300165740ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/000077500000000000000000000000001434012334300166075ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/Bug108.ufo/features.fea000066400000000000000000000000351434012334300211000ustar00rootroot00000000000000include(Bug108_included.fea) ufo2ft-2.30.0/tests/data/Bug108.ufo/fontinfo.plist000066400000000000000000000012731434012334300215110ustar00rootroot00000000000000 familyName Bug 108 note https://github.com/googlei18n/ufo2ft/issues/108 unitsPerEm 1000 xHeight 500 ascender 750 capHeight 750 descender -250 postscriptUnderlinePosition -200 postscriptUnderlineThickness 20 ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/000077500000000000000000000000001434012334300201155ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/_notdef.glif000066400000000000000000000007531434012334300224030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/a.glif000066400000000000000000000004361434012334300212030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/b.glif000066400000000000000000000005111434012334300211760ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/c.glif000066400000000000000000000006241434012334300212040ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/contents.plist000066400000000000000000000006371434012334300230350ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif b b.glif c c.glif space space.glif ufo2ft-2.30.0/tests/data/Bug108.ufo/glyphs/space.glif000066400000000000000000000001771434012334300220600ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/groups.plist000066400000000000000000000002761434012334300212100ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/Bug108.ufo/layercontents.plist000066400000000000000000000004231434012334300225550ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/Bug108.ufo/lib.plist000066400000000000000000000011251434012334300204310ustar00rootroot00000000000000 public.glyphOrder .notdef space a b c public.postscriptNames a uni0061 b uni0062 c uni0063 space uni0020 ufo2ft-2.30.0/tests/data/Bug108.ufo/metainfo.plist000066400000000000000000000004431434012334300214670ustar00rootroot00000000000000 creator copy-paste 
formatVersion 3 ufo2ft-2.30.0/tests/data/Bug108_included.fea000066400000000000000000000000461434012334300203430ustar00rootroot00000000000000feature kern { pos a b -10; } kern; ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/000077500000000000000000000000001434012334300231055ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/fontinfo.plist000066400000000000000000000047551434012334300260170ustar00rootroot00000000000000 ascender 739.0 capHeight 694.0 copyright Copyright (c) 2009--2017 The Cantarell Authors descender -217.0 familyName Cantarell guidelines italicAngle -0.0 openTypeHeadCreated 2009/03/13 21:44:13 openTypeNameDesigner Dave Crossland, Nikolaus Waxweiler, Jacques Le Bailly, Eben Sorkin, Alexei Vanyashin openTypeNameDesignerURL http://abattis.org openTypeNameManufacturerURL http://abattis.org openTypeOS2Panose 2 0 5 3 0 0 0 0 0 0 openTypeOS2Type 3 openTypeOS2VendorID ABAT postscriptBlueScale 0.0625 postscriptBlueValues -10.0 0.0 482.0 492.0 694.0 704.0 739.0 749.0 postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues -227.0 -217.0 postscriptStemSnapH 80 postscriptStemSnapV 70 postscriptUnderlinePosition -100 postscriptUnderlineThickness 50 styleName Regular unitsPerEm 1000 versionMajor 0 versionMinor 111 xHeight 482.0 ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/000077500000000000000000000000001434012334300244135ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/circumflexcomb.loclV_I_E_T_.glif000066400000000000000000000015211434012334300324730ustar00rootroot00000000000000 RMXScaler height 80 com.schriftgestaltung.Glyphs.originalWidth 386.0 ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/circumflexcomb_tildecomb.glif000066400000000000000000000006011434012334300322770ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.originalWidth 432.0 ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/contents.plist000066400000000000000000000011031434012334300273200ustar00rootroot00000000000000 circumflexcomb.loclVIET circumflexcomb.loclV_I_E_T_.glif circumflexcomb_tildecomb circumflexcomb_tildecomb.glif o o.glif ocircumflextilde ocircumflextilde.glif tildecomb.loclVIET tildecomb.loclV_I_E_T_.glif ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/o.glif000066400000000000000000000027271434012334300255240ustar00rootroot00000000000000 o ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/ocircumflextilde.glif000066400000000000000000000004011434012334300306130ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/glyphs/tildecomb.loclV_I_E_T_.glif000066400000000000000000000024511434012334300314360ustar00rootroot00000000000000 RMXScaler height 80 com.schriftgestaltung.Glyphs.originalWidth 450.0 ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/layercontents.plist000066400000000000000000000004371434012334300270600ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/lib.plist000066400000000000000000000016571434012334300247410ustar00rootroot00000000000000 com.github.googlei18n.ufo2ft.filters include o ocircumflextilde circumflexcomb.loclVIET tildecomb.loclVIET circumflexcomb_tildecomb name propagateAnchors pre public.glyphOrder o ocircumflextilde circumflexcomb.loclVIET tildecomb.loclVIET circumflexcomb_tildecomb 
ufo2ft-2.30.0/tests/data/CantarellAnchorPropagation.ufo/metainfo.plist000066400000000000000000000004761434012334300257730ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/ColorTest.ufo/000077500000000000000000000000001434012334300175575ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTest.ufo/fontinfo.plist000066400000000000000000000016051434012334300224600ustar00rootroot00000000000000 ascender 750 capHeight 750 descender -250 familyName ColorTest guidelines postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV styleName Regular unitsPerEm 1000 xHeight 500 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/000077500000000000000000000000001434012334300222635ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/a.glif000066400000000000000000000012141434012334300233440ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/b.glif000066400000000000000000000011641434012334300233510ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/c.glif000066400000000000000000000011641434012334300233520ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/contents.plist000066400000000000000000000005071434012334300251770ustar00rootroot00000000000000 a a.glif b b.glif c c.glif ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color1/layerinfo.plist000066400000000000000000000003671434012334300253360ustar00rootroot00000000000000 color 0,1,0.25,0.7 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/000077500000000000000000000000001434012334300222645ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/a.glif000066400000000000000000000005371434012334300233540ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/b.glif000066400000000000000000000002301434012334300233430ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/c.glif000066400000000000000000000005071434012334300233530ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/contents.plist000066400000000000000000000005071434012334300252000ustar00rootroot00000000000000 a a.glif b b.glif c c.glif ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs.color2/layerinfo.plist000066400000000000000000000003641434012334300253340ustar00rootroot00000000000000 color 0,1,1,0.7 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/000077500000000000000000000000001434012334300210655ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/a.glif000066400000000000000000000005331434012334300221510ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/b.glif000066400000000000000000000012441434012334300221520ustar00rootroot00000000000000 com.github.googlei18n.ufo2ft.colorLayerMapping color1 1 color2 0 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/c.glif000066400000000000000000000012441434012334300221530ustar00rootroot00000000000000 com.github.googlei18n.ufo2ft.colorLayerMapping color2 1 color1 0 ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/contents.plist000066400000000000000000000005071434012334300240010ustar00rootroot00000000000000 a a.glif b b.glif c c.glif ufo2ft-2.30.0/tests/data/ColorTest.ufo/glyphs/layerinfo.plist000066400000000000000000000003671434012334300241400ustar00rootroot00000000000000 color 1,0.75,0,0.7 
ufo2ft-2.30.0/tests/data/ColorTest.ufo/layercontents.plist000066400000000000000000000007231434012334300235300ustar00rootroot00000000000000 foreground glyphs color1 glyphs.color1 color2 glyphs.color2 ufo2ft-2.30.0/tests/data/ColorTest.ufo/lib.plist000066400000000000000000000023401434012334300214010ustar00rootroot00000000000000 com.defcon.sortDescriptor ascending Latin-1 type characterSet com.github.googlei18n.ufo2ft.colorLayerMapping color1 0 color2 1 com.github.googlei18n.ufo2ft.colorPalettes 1 0.3 0.1 1 0 0.4 0.8 1 public.glyphOrder space a b c ufo2ft-2.30.0/tests/data/ColorTest.ufo/metainfo.plist000066400000000000000000000004761434012334300224450ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/000077500000000000000000000000001434012334300202315ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/fontinfo.plist000066400000000000000000000016051434012334300231320ustar00rootroot00000000000000 ascender 750 capHeight 750 descender -250 familyName ColorTest guidelines postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV styleName Regular unitsPerEm 1000 xHeight 500 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/000077500000000000000000000000001434012334300215375ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/a.color1.glif000066400000000000000000000011731434012334300240220ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/a.color2.glif000066400000000000000000000005161434012334300240230ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/a.glif000066400000000000000000000005331434012334300226230ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/contents.plist000066400000000000000000000005431434012334300244530ustar00rootroot00000000000000 a a.glif a.color1 a.color1.glif a.color2 a.color2.glif ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/glyphs/layerinfo.plist000066400000000000000000000003671434012334300246120ustar00rootroot00000000000000 color 1,0.75,0,0.7 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/layercontents.plist000066400000000000000000000004331434012334300242000ustar00rootroot00000000000000 foreground glyphs ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/lib.plist000066400000000000000000000024521434012334300220570ustar00rootroot00000000000000 com.defcon.sortDescriptor ascending Latin-1 type characterSet com.github.googlei18n.ufo2ft.colorLayers a a.color1 0 a.color2 1 com.github.googlei18n.ufo2ft.colorPalettes 1 0.3 0.1 1 0 0.4 0.8 1 public.glyphOrder space a a.color1 a.color2 ufo2ft-2.30.0/tests/data/ColorTestRaw.ufo/metainfo.plist000066400000000000000000000004761434012334300231170ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/000077500000000000000000000000001434012334300211265ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/fontinfo.plist000066400000000000000000000136351434012334300240350ustar00rootroot00000000000000 ascender 1069 capHeight 714 copyright Copyright 2019 Google Inc. All Rights Reserved. 
descender -293 familyName Noto Sans guidelines color 0,0,0,1 name m y 250 italicAngle 0 openTypeGaspRangeRecords rangeGaspBehavior 0 1 2 3 rangeMaxPPEM 65535 openTypeHeadCreated 2019/06/28 21:53:11 openTypeHeadFlags openTypeHheaAscender 1069 openTypeHheaDescender -293 openTypeHheaLineGap 0 openTypeNameDescription Designed by Monotype design team. openTypeNameDesigner Monotype Design Team openTypeNameDesignerURL http://www.monotype.com/studio openTypeNameLicense This Font Software is licensed under the SIL Open Font License, Version 1.1. This Font Software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the SIL Open Font License for the specific language, permissions and limitations governing your use of this Font Software. openTypeNameLicenseURL http://scripts.sil.org/OFL openTypeNameManufacturer Monotype Imaging Inc. openTypeNameManufacturerURL http://www.google.com/get/noto/ openTypeNamePreferredFamilyName Noto Sans openTypeNamePreferredSubfamilyName Regular openTypeNameVersion Version 2.001 openTypeOS2CodePageRanges 0 1 2 3 4 7 8 openTypeOS2Panose 2 11 5 2 4 5 4 2 2 4 openTypeOS2Selection 8 openTypeOS2Type 2 openTypeOS2TypoAscender 1069 openTypeOS2TypoDescender -293 openTypeOS2TypoLineGap 0 openTypeOS2UnicodeRanges 0 1 2 3 4 5 6 7 9 29 30 31 32 33 34 35 36 45 62 64 67 69 91 116 openTypeOS2VendorID GOOG openTypeOS2WeightClass 400 openTypeOS2WidthClass 5 openTypeOS2WinAscent 1069 openTypeOS2WinDescent 293 postscriptBlueFuzz 1 postscriptBlueScale 0.039625 postscriptBlueShift 7 postscriptBlueValues -20 0 postscriptFontName NotoSans-Regular postscriptForceBold postscriptFullName Noto Sans Regular postscriptIsFixedPitch postscriptStemSnapH 68 79 postscriptStemSnapV 75 90 postscriptUnderlinePosition -100 postscriptUnderlineThickness 50 postscriptWeightName Regular styleMapFamilyName Noto Sans styleMapStyleName regular styleName Regular trademark Noto is a trademark of Google Inc. 
unitsPerEm 1000 versionMajor 2 versionMinor 2 xHeight 536 ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/000077500000000000000000000000001434012334300224345ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/contents.plist000066400000000000000000000005751434012334300253550ustar00rootroot00000000000000 graphemejoinercomb graphemejoinercomb.glif uniFFFC uniF_F_F_C_.glif xxx xxx.glif ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/graphemejoinercomb.glif000066400000000000000000000245731434012334300271520ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.category Mark com.schriftgestaltung.Glyphs.lastChange 2017/12/05 22:37:40 com.schriftgestaltung.Glyphs.script com.schriftgestaltung.Glyphs.subCategory Nonspacing ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/layerinfo.plist000066400000000000000000000011371434012334300255030ustar00rootroot00000000000000 lib com.fontlab.layer.locked com.fontlab.layer.name Regular com.fontlab.layer.opacity 1 com.fontlab.layer.service com.fontlab.layer.visible com.fontlab.layer.wireframe ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/uniF_F_F_C_.glif000066400000000000000000000161241434012334300253170ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.category Symbol com.schriftgestaltung.Glyphs.lastChange 2017/12/05 22:37:38 com.schriftgestaltung.Glyphs.script com.schriftgestaltung.Glyphs.subCategory Other ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/glyphs/xxx.glif000066400000000000000000000006211434012334300241250ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/layercontents.plist000066400000000000000000000004371434012334300251010ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/lib.plist000066400000000000000000000012211434012334300227450ustar00rootroot00000000000000 com.github.googlei18n.ufo2ft.filters include uniFFFC graphemejoinercomb name sortContours public.glyphOrder graphemejoinercomb uniFFFC xxx ufo2ft-2.30.0/tests/data/ContourOrderTest.ufo/metainfo.plist000066400000000000000000000004761434012334300240140ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/DSv5/000077500000000000000000000000001434012334300156325ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Weight-CFF2.ttx000066400000000000000000000455151434012334300243200ustar00rootroot00000000000000 Weight Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Weight Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed 50 -200 rmoveto 400 1000 -400 -1000 hlineto 50 50 rmoveto 900 300 -900 -300 vlineto 0 40 10 1 blend hmoveto 10 10 -10 hlineto 0 40 10 1 blend hmoveto 10 10 -10 hlineto 0 40 10 1 blend hmoveto 10 10 -10 hlineto 0 40 10 1 blend hmoveto 10 10 -10 hlineto 1 vsindex 0 10 1 blend hmoveto 10 10 -10 hlineto wght 0x0 300.0 300.0 700.0 256 ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Weight-TTF.ttx000066400000000000000000000477731434012334300243050ustar00rootroot00000000000000 Weight Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed 
MutatorMathTest-SansBoldCondensed MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Weight Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed wght 0x0 300.0 300.0 700.0 256 ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Weight_Width-CFF2.ttx000066400000000000000000001021441434012334300254470ustar00rootroot00000000000000 Weight Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed Sans Light Extended MutatorMathTest-SansLightExtended Sans Bold Extended MutatorMathTest-SansBoldExtended Sans Medium MutatorMathTest-SansMedium MutatorMathTest-SansMedium Sans Bold MutatorMathTest-SansBold Sans Medium Extended MutatorMathTest-SansMediumExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed Normal Extended S1 S2 New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Weight Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed Sans Light Extended MutatorMathTest-SansLightExtended Sans Bold Extended MutatorMathTest-SansBoldExtended Sans Medium MutatorMathTest-SansMedium MutatorMathTest-SansMedium Sans Bold MutatorMathTest-SansBold Sans Medium Extended MutatorMathTest-SansMediumExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed Normal Extended S1 S2 50 -200 rmoveto 400 1000 -400 -1000 hlineto 50 50 rmoveto 900 300 -900 -300 vlineto 0 40 10 20 -10 0 14.30922 1 blend hmoveto 10 10 -10 hlineto 0 40 10 20 -10 0 14.30922 1 blend hmoveto 10 10 -10 hlineto 0 40 10 20 -10 0 14.30922 1 blend hmoveto 10 10 -10 hlineto 0 40 10 20 -10 0 14.30922 1 blend hmoveto 10 10 -10 hlineto 1 vsindex 0 10 20 0 1 blend hmoveto 10 10 -10 hlineto wght 0x0 300.0 300.0 700.0 256 wdth 0x0 50.0 50.0 200.0 257 ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Weight_Width-TTF.ttx000066400000000000000000001024141434012334300254240ustar00rootroot00000000000000 Weight Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed Sans Light Extended MutatorMathTest-SansLightExtended Sans Bold Extended MutatorMathTest-SansBoldExtended Sans Medium MutatorMathTest-SansMedium MutatorMathTest-SansMedium Sans Bold MutatorMathTest-SansBold Sans Medium Extended MutatorMathTest-SansMediumExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed Normal Extended S1 S2 New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Weight Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Bold Condensed MutatorMathTest-SansBoldCondensed Sans Light Extended MutatorMathTest-SansLightExtended Sans Bold Extended MutatorMathTest-SansBoldExtended Sans Medium MutatorMathTest-SansMedium MutatorMathTest-SansMedium Sans Bold MutatorMathTest-SansBold Sans Medium Extended MutatorMathTest-SansMediumExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light Medium Bold width Condensed Normal Extended S1 S2 wght 0x0 300.0 300.0 700.0 256 wdth 0x0 50.0 50.0 200.0 257 
ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Width-CFF2.ttx000066400000000000000000000434541434012334300241500ustar00rootroot00000000000000 Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Light Extended MutatorMathTest-SansLightExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light width Condensed Normal Extended New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Light Extended MutatorMathTest-SansLightExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light width Condensed Normal Extended 50 -200 rmoveto 400 1000 -400 -1000 hlineto 50 50 rmoveto 900 300 -900 -300 vlineto 0 20 1 blend hmoveto 10 10 -10 hlineto 0 20 1 blend hmoveto 10 10 -10 hlineto 0 20 1 blend hmoveto 10 10 -10 hlineto 0 20 1 blend hmoveto 10 10 -10 hlineto 0 20 1 blend hmoveto 10 10 -10 hlineto wdth 0x0 50.0 50.0 200.0 256 ufo2ft-2.30.0/tests/data/DSv5/MutatorSansVariable_Width-TTF.ttx000066400000000000000000000461631434012334300241250ustar00rootroot00000000000000 Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Light Extended MutatorMathTest-SansLightExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light width Condensed Normal Extended New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Width Sans Light Condensed MutatorMathTest-SansLightCondensed Sans Light Extended MutatorMathTest-SansLightExtended MutatorMathTest-SansLightCondensed Regular serif Sans weight Light width Condensed Normal Extended wdth 0x0 50.0 50.0 200.0 256 ufo2ft-2.30.0/tests/data/DSv5/MutatorSerifVariable_Width-CFF2.ttx000066400000000000000000000345031434012334300243070ustar00rootroot00000000000000 Width Serif Light Condensed MutatorMathTest-SerifLightCondensed Regular serif Serif weight Light width Condensed Normal Extended New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Width Serif Light Condensed MutatorMathTest-SerifLightCondensed Regular serif Serif weight Light width Condensed Normal Extended 50 -200 rmoveto 400 1000 -400 -1000 hlineto 50 50 rmoveto 900 300 -900 -300 vlineto -70 -10 1 blend -20 rmoveto 10 10 -10 hlineto 140 20 1 blend 30 rmoveto 10 10 -10 hlineto -70 -10 1 blend -20 rmoveto 10 10 -10 hlineto 140 20 1 blend 30 rmoveto 10 10 -10 hlineto -70 -10 1 blend -20 rmoveto 10 10 -10 hlineto 140 20 1 blend 30 rmoveto 10 10 -10 hlineto -70 -10 1 blend -20 rmoveto 10 10 -10 hlineto 140 20 1 blend 30 rmoveto 10 10 -10 hlineto -70 -10 1 blend -20 rmoveto 10 10 -10 hlineto 140 20 1 blend 30 rmoveto 10 10 -10 hlineto wdth 0x0 50.0 50.0 200.0 256 ufo2ft-2.30.0/tests/data/DSv5/MutatorSerifVariable_Width-TTF.ttx000066400000000000000000000405441434012334300242660ustar00rootroot00000000000000 Width Serif Light Condensed MutatorMathTest-SerifLightCondensed Regular serif Serif weight Light width Condensed Normal Extended New Font Regular 0.000;NONE;NewFont-Regular New Font Regular Version 0.000 NewFont-Regular Width Serif Light Condensed MutatorMathTest-SerifLightCondensed Regular serif Serif weight Light width Condensed Normal Extended wdth 0x0 50.0 50.0 200.0 256 ufo2ft-2.30.0/tests/data/DSv5/test_v5_MutatorSans_and_Serif.designspace000066400000000000000000000163061434012334300257520ustar00rootroot00000000000000 
ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/000077500000000000000000000000001434012334300210465ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/fontinfo.plist000066400000000000000000000020661434012334300237510ustar00rootroot00000000000000 ascender 800 capHeight 700 descender -200 familyName Dotted Circle Test openTypeHeadCreated 2022/04/18 20:12:58 postscriptBlueValues -16.0 0.0 500.0 516.0 700.0 716.0 800.0 816.0 postscriptFontName DottedCircleTest-Regular postscriptOtherBlues -216.0 -200.0 styleName Regular unitsPerEm 1000 versionMajor 1 versionMinor 0 xHeight 500 ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/000077500000000000000000000000001434012334300223545ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/a.glif000066400000000000000000000035121434012334300234400ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2022-04-18 20:32:56 +0000 ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/acutecomb.glif000066400000000000000000000007721434012334300251670ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2022-04-18 20:15:50 +0000 ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/c.glif000066400000000000000000000031671434012334300234500ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2022-04-18 20:34:17 +0000 ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/contents.plist000066400000000000000000000005761434012334300252760ustar00rootroot00000000000000 a a.glif acutecomb acutecomb.glif c c.glif dotbelowcomb dotbelowcomb.glif ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/glyphs/dotbelowcomb.glif000066400000000000000000000014261434012334300257020ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2022-04-18 20:32:37 +0000 ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/layercontents.plist000066400000000000000000000004151434012334300250150ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/lib.plist000066400000000000000000000012561434012334300226750ustar00rootroot00000000000000 com.schriftgestaltung.DisplayStrings cacacaa com.schriftgestaltung.disablesAutomaticAlignment com.schriftgestaltung.fontMasterID m01 com.schriftgestaltung.glyphOrder com.schriftgestaltung.useNiceNames public.glyphOrder a c acutecomb dotbelowcomb ufo2ft-2.30.0/tests/data/DottedCircleTest.ufo/metainfo.plist000066400000000000000000000004701434012334300237260ustar00rootroot00000000000000 creator com.schriftgestaltung.GlyphsUFOExport formatVersion 3 ufo2ft-2.30.0/tests/data/IncompatibleMasters/000077500000000000000000000000001434012334300210165ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/IncompatibleMasters/IncompatibleMasters.designspace000066400000000000000000000015461434012334300272000ustar00rootroot00000000000000 public.skipExportGlyphs b d ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/000077500000000000000000000000001434012334300240445ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/fontinfo.plist000066400000000000000000000024701434012334300267460ustar00rootroot00000000000000 ascender 800.0 capHeight 700.0 descender -200.0 familyName New Font guidelines italicAngle -0.0 openTypeHeadCreated 2019/03/06 11:24:59 openTypeOS2Type 3 postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV postscriptUnderlinePosition -100 postscriptUnderlineThickness 50 styleName Bold unitsPerEm 1000 versionMajor 
1 versionMinor 0 xHeight 500.0 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/000077500000000000000000000000001434012334300253525ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/a.glif000066400000000000000000000007711434012334300264420ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:26:11 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/b.glif000066400000000000000000000044651434012334300264470ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.Export com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:43:17 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/c.glif000066400000000000000000000020031434012334300264320ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 17:23:09 com.schriftgestaltung.componentsAlignment -1 -1 0 -1 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/contents.plist000066400000000000000000000007161434012334300302700ustar00rootroot00000000000000 a a.glif b b.glif c c.glif d d.glif e e.glif f f.glif ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/d.glif000066400000000000000000000016211434012334300264400ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.Export com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:43:08 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/e.glif000066400000000000000000000014031434012334300264370ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 17:23:23 com.schriftgestaltung.componentsAlignment -1 -1 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/f.glif000066400000000000000000000004021434012334300264360ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/glyphs/layerinfo.plist000066400000000000000000000014031434012334300304150ustar00rootroot00000000000000 lib com.schriftgestaltung.layerId 43102E19-C314-492C-BD24-71BD43330434 com.schriftgestaltung.layerOrderInGlyph.a 1 com.schriftgestaltung.layerOrderInGlyph.b 1 com.schriftgestaltung.layerOrderInGlyph.c 1 com.schriftgestaltung.layerOrderInGlyph.d 1 com.schriftgestaltung.layerOrderInGlyph.e 1 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/groups.plist000066400000000000000000000022001434012334300264320ustar00rootroot00000000000000 public.kern1.a a public.kern1.b b public.kern1.c c public.kern1.d d public.kern1.e e public.kern1.f f public.kern2.a a public.kern2.b b public.kern2.c c public.kern2.d d public.kern2.e e public.kern2.f f ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/kerning.plist000066400000000000000000000017041434012334300265600ustar00rootroot00000000000000 a d 10 public.kern1.a public.kern2.a 10 public.kern2.b 10 public.kern2.d 10 public.kern1.b public.kern2.d 10 public.kern1.d public.kern2.f 10 public.kern1.e public.kern2.f 10 public.kern1.f public.kern2.e 10 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/layercontents.plist000066400000000000000000000004371434012334300300170ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/lib.plist000066400000000000000000000045171434012334300256760ustar00rootroot00000000000000 com.schriftgestaltung.appVersion 1192 com.schriftgestaltung.customParameter.GSFont.DisplayStrings c e com.schriftgestaltung.customParameter.GSFont.Enforce Compatibility Check 1 
com.schriftgestaltung.customParameter.GSFont.disablesAutomaticAlignment com.schriftgestaltung.customParameter.GSFont.useNiceNames 1 com.schriftgestaltung.customParameter.GSFontMaster.customValue 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue1 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue2 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue3 0.0 com.schriftgestaltung.customParameter.GSFontMaster.iconName com.schriftgestaltung.customParameter.GSFontMaster.weightValue 700.0 com.schriftgestaltung.customParameter.GSFontMaster.widthValue 100.0 com.schriftgestaltung.fontMasterID 43102E19-C314-492C-BD24-71BD43330434 com.schriftgestaltung.fontMasterOrder 1 com.schriftgestaltung.glyphOrder com.schriftgestaltung.keyboardIncrement 1 com.schriftgestaltung.weight Bold com.schriftgestaltung.weightValue 700.0 com.schriftgestaltung.width Regular com.schriftgestaltung.widthValue 100.0 public.glyphOrder a c e b d f public.skipExportGlyphs b d f ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Bold.ufo/metainfo.plist000066400000000000000000000004761434012334300267320ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/000077500000000000000000000000001434012334300245655ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/fontinfo.plist000066400000000000000000000024731434012334300274720ustar00rootroot00000000000000 ascender 800.0 capHeight 700.0 descender -200.0 familyName New Font guidelines italicAngle -0.0 openTypeHeadCreated 2019/03/06 11:24:59 openTypeOS2Type 3 postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV postscriptUnderlinePosition -100 postscriptUnderlineThickness 50 styleName Regular unitsPerEm 1000 versionMajor 1 versionMinor 0 xHeight 500.0 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/000077500000000000000000000000001434012334300260735ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/a.glif000066400000000000000000000007751434012334300271670ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:26:11 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/b.glif000066400000000000000000000030321434012334300271550ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.Export com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:43:17 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/c.glif000066400000000000000000000017111434012334300271600ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 17:23:09 com.schriftgestaltung.componentsAlignment -1 -1 0 -1 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/contents.plist000066400000000000000000000007161434012334300310110ustar00rootroot00000000000000 a a.glif b b.glif c c.glif d d.glif e e.glif f f.glif ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/d.glif000066400000000000000000000016211434012334300271610ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.Export com.schriftgestaltung.Glyphs.lastChange 2019/03/06 11:43:08 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/e.glif000066400000000000000000000014051434012334300271620ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2019/03/06 17:23:23 
com.schriftgestaltung.componentsAlignment -1 -1 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/f.glif000066400000000000000000000004741434012334300271700ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/glyphs/layerinfo.plist000066400000000000000000000014031434012334300311360ustar00rootroot00000000000000 lib com.schriftgestaltung.layerId 17DAEA70-42F2-48A2-B948-7696177362C3 com.schriftgestaltung.layerOrderInGlyph.a 0 com.schriftgestaltung.layerOrderInGlyph.b 0 com.schriftgestaltung.layerOrderInGlyph.c 0 com.schriftgestaltung.layerOrderInGlyph.d 0 com.schriftgestaltung.layerOrderInGlyph.e 0 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/groups.plist000066400000000000000000000022001434012334300271530ustar00rootroot00000000000000 public.kern1.a a public.kern1.b b public.kern1.c c public.kern1.d d public.kern1.e e public.kern1.f f public.kern2.a a public.kern2.b b public.kern2.c c public.kern2.d d public.kern2.e e public.kern2.f f ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/kerning.plist000066400000000000000000000017041434012334300273010ustar00rootroot00000000000000 a d 10 public.kern1.a public.kern2.a 10 public.kern2.b 10 public.kern2.d 10 public.kern1.b public.kern2.d 10 public.kern1.d public.kern2.f 10 public.kern1.e public.kern2.f 10 public.kern1.f public.kern2.e 10 ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/layercontents.plist000066400000000000000000000004371434012334300305400ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/lib.plist000066400000000000000000000043071434012334300264140ustar00rootroot00000000000000 com.schriftgestaltung.appVersion 1192 com.schriftgestaltung.customParameter.GSFont.DisplayStrings c e com.schriftgestaltung.customParameter.GSFont.Enforce Compatibility Check 1 com.schriftgestaltung.customParameter.GSFont.disablesAutomaticAlignment com.schriftgestaltung.customParameter.GSFont.useNiceNames 1 com.schriftgestaltung.customParameter.GSFontMaster.customValue 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue1 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue2 0.0 com.schriftgestaltung.customParameter.GSFontMaster.customValue3 0.0 com.schriftgestaltung.customParameter.GSFontMaster.iconName com.schriftgestaltung.customParameter.GSFontMaster.weightValue 400.0 com.schriftgestaltung.customParameter.GSFontMaster.widthValue 100.0 com.schriftgestaltung.fontMasterID 17DAEA70-42F2-48A2-B948-7696177362C3 com.schriftgestaltung.fontMasterOrder 0 com.schriftgestaltung.glyphOrder com.schriftgestaltung.keyboardIncrement 1 com.schriftgestaltung.weight Regular com.schriftgestaltung.weightValue 400.0 com.schriftgestaltung.width Regular com.schriftgestaltung.widthValue 100.0 public.glyphOrder a c e b d f ufo2ft-2.30.0/tests/data/IncompatibleMasters/NewFont-Regular.ufo/metainfo.plist000066400000000000000000000004761434012334300274530ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/000077500000000000000000000000001434012334300204225ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/features.fea000066400000000000000000000000521434012334300227120ustar00rootroot00000000000000feature liga { sub a e s s by s; } liga; ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/fontinfo.plist000066400000000000000000000033551434012334300233270ustar00rootroot00000000000000 ascender 750 
capHeight 700 copyright descender -250 familyName Layer Font guidelines italicAngle 0 note openTypeHeadCreated 2018/11/21 11:49:03 openTypeNameDesigner openTypeNameDesignerURL openTypeNameLicense openTypeNameLicenseURL openTypeNameManufacturer openTypeNameManufacturerURL postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV postscriptUnderlinePosition -100 postscriptUnderlineThickness 50 styleName Bold trademark unitsPerEm 1000 versionMajor 0 versionMinor 0 xHeight 500 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/000077500000000000000000000000001434012334300217305ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/_notdef.glif000066400000000000000000000010461434012334300242120ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/a.glif000066400000000000000000000020271434012334300230140ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/contents.plist000066400000000000000000000010001434012334300246310ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif dotabovecomb dotabovecomb.glif e e.glif edotabove edotabove.glif s s.glif ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/dotabovecomb.glif000066400000000000000000000006061434012334300252410ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/e.glif000066400000000000000000000015261434012334300230230ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/edotabove.glif000066400000000000000000000004251434012334300245440ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/glyphs/s.glif000066400000000000000000000013421434012334300230350ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/layercontents.plist000066400000000000000000000004371434012334300243750ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/lib.plist000066400000000000000000000006121434012334300222440ustar00rootroot00000000000000 public.glyphOrder a e s dotabovecomb edotabove ufo2ft-2.30.0/tests/data/LayerFont-Bold.ufo/metainfo.plist000066400000000000000000000004761434012334300233100ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/000077500000000000000000000000001434012334300211435ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/features.fea000066400000000000000000000000521434012334300234330ustar00rootroot00000000000000feature liga { sub a e s s by s; } liga; ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/fontinfo.plist000066400000000000000000000031431434012334300240430ustar00rootroot00000000000000 ascender 750 capHeight 700 copyright descender -250 familyName Layer Font guidelines italicAngle 0 note openTypeHeadCreated 2018/11/21 11:49:03 openTypeNameDesigner openTypeNameDesignerURL openTypeNameLicense openTypeNameLicenseURL openTypeNameManufacturer openTypeNameManufacturerURL postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV styleName Regular trademark unitsPerEm 1000 versionMajor 0 versionMinor 0 xHeight 500 
ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs.M_edium/000077500000000000000000000000001434012334300240275ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs.M_edium/contents.plist000066400000000000000000000003551434012334300267440ustar00rootroot00000000000000 e e.glif ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs.M_edium/e.glif000066400000000000000000000015001434012334300251120ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/000077500000000000000000000000001434012334300224515ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/_notdef.glif000066400000000000000000000010461434012334300247330ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/a.glif000066400000000000000000000020241434012334300235320ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/contents.plist000066400000000000000000000010001434012334300253520ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif dotabovecomb dotabovecomb.glif e e.glif edotabove edotabove.glif s s.glif ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/dotabovecomb.glif000066400000000000000000000005661434012334300257670ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/e.glif000066400000000000000000000014721434012334300235440ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/edotabove.glif000066400000000000000000000004251434012334300252650ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/glyphs/s.glif000066400000000000000000000013421434012334300235560ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/layercontents.plist000066400000000000000000000005741434012334300251200ustar00rootroot00000000000000 public.default glyphs Medium glyphs.M_edium ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/lib.plist000066400000000000000000000006121434012334300227650ustar00rootroot00000000000000 public.glyphOrder a e s dotabovecomb edotabove ufo2ft-2.30.0/tests/data/LayerFont-Regular.ufo/metainfo.plist000066400000000000000000000004761434012334300240310ustar00rootroot00000000000000 creator com.github.fonttools.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/MTIFeatures.ttx000066400000000000000000000023171434012334300177450ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/000077500000000000000000000000001434012334300177715ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/data/000077500000000000000000000000001434012334300207025ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/data/com.github.googlei18n.ufo2ft.mtiFeatures/000077500000000000000000000000001434012334300304075ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/data/com.github.googlei18n.ufo2ft.mtiFeatures/GSUB.mti000066400000000000000000000003421434012334300316610ustar00rootroot00000000000000FontDame GSUB table script table begin latn default 0 script table end feature table begin 0 ccmp 0 feature table end lookup 0 ligature RightToLeft no IgnoreBaseGlyphs no IgnoreLigatures no IgnoreMarks no a b c lookup end ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/fontinfo.plist000066400000000000000000000013011434012334300226630ustar00rootroot00000000000000 familyName MTIFeatures note https://github.com/googlei18n/fontmake/issues/289 unitsPerEm 1000 xHeight 500 ascender 750 capHeight 750 descender -250 postscriptUnderlinePosition -200 
postscriptUnderlineThickness 20 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/000077500000000000000000000000001434012334300212775ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/_notdef.glif000066400000000000000000000007531434012334300235650ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/a.glif000066400000000000000000000004361434012334300223650ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/b.glif000066400000000000000000000005111434012334300223600ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/c.glif000066400000000000000000000006241434012334300223660ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/contents.plist000066400000000000000000000006371434012334300242170ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif b b.glif c c.glif space space.glif ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/glyphs/space.glif000066400000000000000000000001771434012334300232420ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/groups.plist000066400000000000000000000002761434012334300223720ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/kerning.plist000066400000000000000000000006031434012334300225020ustar00rootroot00000000000000 a a -666 b -666 b a -666 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/layercontents.plist000066400000000000000000000004231434012334300237370ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/lib.plist000066400000000000000000000011251434012334300216130ustar00rootroot00000000000000 public.glyphOrder .notdef space a b c public.postscriptNames a uni0061 b uni0062 c uni0063 space uni0020 ufo2ft-2.30.0/tests/data/MTIFeatures.ufo/metainfo.plist000066400000000000000000000004431434012334300226510ustar00rootroot00000000000000 creator copy-paste formatVersion 3 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/000077500000000000000000000000001434012334300221055ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/fontinfo.plist000066400000000000000000000017531434012334300250120ustar00rootroot00000000000000 note https://github.com/googlefonts/ufo2ft/issues/303 ascender 750 capHeight 750 descender -250 familyName MultipleAnchorClasses guidelines postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV styleName Regular unitsPerEm 1000 xHeight 500 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/000077500000000000000000000000001434012334300234135ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/_notdef.glif000066400000000000000000000001531434012334300256730ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/a.glif000066400000000000000000000072441434012334300245050ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/acutecomb.glif000066400000000000000000000017111434012334300262200ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/contents.plist000066400000000000000000000005741434012334300263330ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif acutecomb acutecomb.glif e e.glif ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/e.glif000066400000000000000000000060751434012334300245120ustar00rootroot00000000000000 
ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/glyphs/layerinfo.plist000066400000000000000000000002741434012334300264630ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/layercontents.plist000066400000000000000000000004231434012334300260530ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/lib.plist000066400000000000000000000005321434012334300237300ustar00rootroot00000000000000 public.glyphOrder .notdef a e acutecomb ufo2ft-2.30.0/tests/data/MultipleAnchorClasses.ufo/metainfo.plist000066400000000000000000000004511434012334300247640ustar00rootroot00000000000000 creator com.fontlab.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/000077500000000000000000000000001434012334300235675ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/fontinfo.plist000066400000000000000000000017631434012334300264750ustar00rootroot00000000000000 note https://github.com/googlefonts/ufo2ft/issues/303 ascender 750 capHeight 750 descender -250 familyName MultipleAnchorClassesConflict guidelines postscriptBlueValues postscriptFamilyBlues postscriptFamilyOtherBlues postscriptOtherBlues postscriptStemSnapH postscriptStemSnapV styleName Regular unitsPerEm 1000 xHeight 500 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/000077500000000000000000000000001434012334300250755ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/_notdef.glif000066400000000000000000000001531434012334300273550ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/acutecomb.glif000066400000000000000000000016421434012334300277050ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/ae.glif000066400000000000000000000155241434012334300263340ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/contents.plist000066400000000000000000000005251434012334300300110ustar00rootroot00000000000000 .notdef _notdef.glif acutecomb acutecomb.glif ae ae.glif ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/glyphs/layerinfo.plist000066400000000000000000000002741434012334300301450ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/layercontents.plist000066400000000000000000000004231434012334300275350ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/lib.plist000066400000000000000000000005041434012334300254110ustar00rootroot00000000000000 public.glyphOrder .notdef acutecomb ae ufo2ft-2.30.0/tests/data/MultipleAnchorClassesConflict.ufo/metainfo.plist000066400000000000000000000004511434012334300264460ustar00rootroot00000000000000 creator com.fontlab.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/000077500000000000000000000000001434012334300220075ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/fontinfo.plist000066400000000000000000000121011434012334300247010ustar00rootroot00000000000000 ascender 750 capHeight 750 descender -250 italicAngle -12.5 macintoshFONDFamilyID 15000 macintoshFONDName SomeFont Regular (FOND Name) note A note. 
openTypeHeadCreated 2000/01/01 00:00:00 openTypeHeadFlags 0 1 openTypeHeadLowestRecPPEM 10 openTypeHheaCaretOffset 0 openTypeHheaCaretSlopeRise 1 openTypeHheaCaretSlopeRun 0 openTypeNameCompatibleFullName Some Font Regular (Compatible Full Name) openTypeNameDescription Some Font by Some Designer for Some Foundry. openTypeNameDesigner Some Designer openTypeNameDesignerURL http://somedesigner.com openTypeNameLicense License info for Some Foundry. openTypeNameLicenseURL http://somefoundry.com/license openTypeNameManufacturer Some Foundry openTypeNameManufacturerURL http://somefoundry.com openTypeNamePreferredFamilyName Some Font (Preferred Family Name) openTypeNameSampleText Sample Text for Some Font. openTypeNameUniqueID OpenType name Table Unique ID openTypeNameVersion OpenType name Table Version openTypeNameWWSFamilyName Some Font (WWS Family Name) openTypeNameWWSSubfamilyName Regular (WWS Subfamily Name) openTypeOS2CodePageRanges 0 openTypeOS2FamilyClass 1 1 openTypeOS2Panose 0 1 2 3 4 5 6 7 8 9 openTypeOS2Selection 3 openTypeOS2SubscriptXOffset 0 openTypeOS2SubscriptXSize 200 openTypeOS2SubscriptYOffset -100 openTypeOS2SubscriptYSize 400 openTypeOS2SuperscriptXOffset 0 openTypeOS2SuperscriptXSize 200 openTypeOS2SuperscriptYOffset 200 openTypeOS2SuperscriptYSize 400 openTypeOS2Type openTypeOS2UnicodeRanges 0 1 openTypeOS2VendorID SOME openTypeVheaCaretOffset 0 openTypeVheaCaretSlopeRise 0 openTypeVheaCaretSlopeRun 1 openTypeVheaVertTypoAscender 750 openTypeVheaVertTypoDescender -250 openTypeVheaVertTypoLineGap 200 postscriptBlueFuzz 1 postscriptBlueScale 0.04 postscriptBlueShift 7 postscriptBlueValues 500.0 510.0 postscriptDefaultCharacter .notdef postscriptDefaultWidthX 400 postscriptFamilyBlues 500.0 510.0 postscriptFamilyOtherBlues -260.0 -250.0 postscriptForceBold postscriptIsFixedPitch postscriptNominalWidthX 400.0 postscriptOtherBlues postscriptSlantAngle -12.5 postscriptStemSnapH 0.0 0.0 postscriptStemSnapV 0.0 0.0 postscriptUniqueID 4000000 postscriptWindowsCharacterSet 1 styleName Regular trademark Trademark Some Foundry unitsPerEm 1000 versionMajor 1 versionMinor 0 xHeight 500 year 2008 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/000077500000000000000000000000001434012334300233155ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/_notdef.glif000066400000000000000000000007531434012334300256030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/a.glif000066400000000000000000000005111434012334300243750ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/b.glif000066400000000000000000000010471434012334300244030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/c.glif000066400000000000000000000007051434012334300244040ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name a ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/contents.plist000066400000000000000000000007411434012334300262310ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif b b.glif c c.glif d d.glif e e.glif space space.glif ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/d.glif000066400000000000000000000012571434012334300244100ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name b alignment -1 index 1 name a ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/e.glif000066400000000000000000000012241434012334300244030ustar00rootroot00000000000000 
com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name c alignment -1 index 1 name d ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/glyphs/space.glif000066400000000000000000000002261434012334300252530ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/layercontents.plist000066400000000000000000000004151434012334300257560ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/lib.plist000066400000000000000000000063721434012334300236420ustar00rootroot00000000000000 com.schriftgestaltung.disablesAutomaticAlignment com.schriftgestaltung.font.customParameters name openTypeGaspRangeRecords value rangeGaspBehavior 1 3 rangeMaxPPEM 7 rangeGaspBehavior 0 1 2 3 rangeMaxPPEM 65535 name openTypeNameRecords value encodingID 0 languageID 0 nameID 3 platformID 1 string Unique Font Identifier encodingID 1 languageID 1033 nameID 8 platformID 3 string Some Foundry (Manufacturer Name) com.schriftgestaltung.font.userData GSDimensionPlugin.Dimensions master01 com.schriftgestaltung.fontMasterID 0A7BF222-74C5-44F5-877F-4AAEDEB31DD5 com.schriftgestaltung.glyphOrder .notdef glyph1 glyph2 space a b c d e f g h i j k l com.schriftgestaltung.master.name Bold com.schriftgestaltung.useNiceNames com.schriftgestaltung.weightValue 800 public.glyphOrder .notdef space a b c d e public.postscriptNames a uni0061 b uni0062 c uni0063 d uni0064 e uni0065 f uni0066 g uni0067 h uni0068 i uni0069 j uni006A k uni006B l uni006C space uni0020 ufo2ft-2.30.0/tests/data/NestedComponents-Bold.ufo/metainfo.plist000066400000000000000000000004701434012334300246670ustar00rootroot00000000000000 creator com.schriftgestaltung.GlyphsUFOExport formatVersion 3 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/000077500000000000000000000000001434012334300225305ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/fontinfo.plist000066400000000000000000000155421434012334300254360ustar00rootroot00000000000000 ascender 750 capHeight 750 descender -250 guidelines angle 0 x 250 y 0 angle 0 x -20 y 0 angle 0 x 30 y 0 y 500 y -200 y 700 angle 135 x 0 y 0 angle 45 x 0 y 700 angle 135 x 20 y 0 italicAngle -12.5 macintoshFONDFamilyID 15000 macintoshFONDName SomeFont Regular (FOND Name) note A note. openTypeHeadCreated 2000/01/01 00:00:00 openTypeHeadFlags 0 1 openTypeHeadLowestRecPPEM 10 openTypeHheaAscender 750 openTypeHheaCaretOffset 0 openTypeHheaCaretSlopeRise 1 openTypeHheaCaretSlopeRun 0 openTypeHheaDescender -250 openTypeHheaLineGap 200 openTypeNameCompatibleFullName Some Font Regular (Compatible Full Name) openTypeNameDescription Some Font by Some Designer for Some Foundry. openTypeNameDesigner Some Designer openTypeNameDesignerURL http://somedesigner.com openTypeNameLicense License info for Some Foundry. openTypeNameLicenseURL http://somefoundry.com/license openTypeNameManufacturer Some Foundry openTypeNameManufacturerURL http://somefoundry.com openTypeNamePreferredFamilyName Some Font (Preferred Family Name) openTypeNameSampleText Sample Text for Some Font. 
openTypeNameUniqueID OpenType name Table Unique ID openTypeNameVersion OpenType name Table Version openTypeNameWWSFamilyName Some Font (WWS Family Name) openTypeNameWWSSubfamilyName Regular (WWS Subfamily Name) openTypeOS2CodePageRanges 0 openTypeOS2FamilyClass 1 1 openTypeOS2Panose 0 1 2 3 4 5 6 7 8 9 openTypeOS2Selection 3 openTypeOS2StrikeoutPosition 300 openTypeOS2StrikeoutSize 20 openTypeOS2SubscriptXOffset 0 openTypeOS2SubscriptXSize 200 openTypeOS2SubscriptYOffset -100 openTypeOS2SubscriptYSize 400 openTypeOS2SuperscriptXOffset 0 openTypeOS2SuperscriptXSize 200 openTypeOS2SuperscriptYOffset 200 openTypeOS2SuperscriptYSize 400 openTypeOS2Type openTypeOS2TypoAscender 750 openTypeOS2TypoDescender -250 openTypeOS2TypoLineGap 200 openTypeOS2UnicodeRanges 0 1 openTypeOS2VendorID SOME openTypeOS2WinAscent 750 openTypeOS2WinDescent 250 openTypeVheaCaretOffset 0 openTypeVheaCaretSlopeRise 0 openTypeVheaCaretSlopeRun 1 openTypeVheaVertTypoAscender 750 openTypeVheaVertTypoDescender -250 openTypeVheaVertTypoLineGap 200 postscriptBlueFuzz 1 postscriptBlueScale 0.04 postscriptBlueShift 7 postscriptBlueValues 500.0 510.0 postscriptDefaultCharacter .notdef postscriptDefaultWidthX 400 postscriptFamilyBlues 500.0 510.0 postscriptFamilyOtherBlues -260.0 -250.0 postscriptForceBold postscriptIsFixedPitch postscriptNominalWidthX 400.0 postscriptOtherBlues postscriptSlantAngle -12.5 postscriptStemSnapH 100.0 120.0 postscriptStemSnapV 80.0 90.0 postscriptUnderlinePosition -200 postscriptUnderlineThickness 20 postscriptUniqueID 4000000 postscriptWindowsCharacterSet 1 styleName Regular trademark Trademark Some Foundry unitsPerEm 1000 versionMajor 1 versionMinor 0 xHeight 500 year 2008 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/000077500000000000000000000000001434012334300240365ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/_notdef.glif000066400000000000000000000007711434012334300263240ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/a.glif000066400000000000000000000007341434012334300251250ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2020-12-07 11:48:13 +0000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/b.glif000066400000000000000000000012741434012334300251260ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.lastChange 2020-12-07 11:48:16 +0000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/c.glif000066400000000000000000000010511434012334300251200ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name a com.schriftgestaltung.Glyphs.lastChange 2020-12-07 11:48:18 +0000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/contents.plist000066400000000000000000000007411434012334300267520ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif b b.glif c c.glif d d.glif e e.glif space space.glif ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/d.glif000066400000000000000000000014051434012334300251240ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name b alignment -1 index 1 name a com.schriftgestaltung.Glyphs.lastChange 2020-12-07 11:48:21 +0000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/e.glif000066400000000000000000000013701434012334300251260ustar00rootroot00000000000000 com.schriftgestaltung.Glyphs.ComponentInfo alignment -1 index 0 name c alignment -1 index 1 name d com.schriftgestaltung.Glyphs.lastChange 2020-12-07 
11:48:37 +0000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/glyphs/space.glif000066400000000000000000000002431434012334300257730ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/layercontents.plist000066400000000000000000000004151434012334300264770ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/lib.plist000066400000000000000000000067171434012334300243660ustar00rootroot00000000000000 com.schriftgestaltung.disablesAutomaticAlignment com.schriftgestaltung.font.customParameters name openTypeGaspRangeRecords value rangeGaspBehavior 1 3 rangeMaxPPEM 7 rangeGaspBehavior 0 1 2 3 rangeMaxPPEM 65535 name openTypeNameRecords value encodingID 0 languageID 0 nameID 3 platformID 1 string Unique Font Identifier encodingID 1 languageID 1033 nameID 8 platformID 3 string Some Foundry (Manufacturer Name) com.schriftgestaltung.font.userData GSDimensionPlugin.Dimensions master01 com.schriftgestaltung.fontMaster.customParameters name Alignment Zones value pos -260.0 size 10.0 com.schriftgestaltung.fontMasterID master01 com.schriftgestaltung.glyphOrder .notdef glyph1 glyph2 space a b c d e f g h i j k l com.schriftgestaltung.useNiceNames com.schriftgestaltung.weightValue 400 public.glyphOrder .notdef space a b c d e public.postscriptNames a uni0061 b uni0062 c uni0063 d uni0064 e uni0065 f uni0066 g uni0067 h uni0068 i uni0069 j uni006A k uni006B l uni006C space uni0020 ufo2ft-2.30.0/tests/data/NestedComponents-Regular.ufo/metainfo.plist000066400000000000000000000004701434012334300254100ustar00rootroot00000000000000 creator com.schriftgestaltung.GlyphsUFOExport formatVersion 3 ufo2ft-2.30.0/tests/data/TestFont-CFF-compreffor.ttx000066400000000000000000000425631434012334300221260ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. 
Some Font (WWS Family Name) Regular (WWS Subfamily Name) 256 hlineto -128 510 rlineto endchar 200 -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 return rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto endchar rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto return 100 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto endchar -150 endchar -12 66 hmoveto -107 callsubr 10 100 505 rmoveto -510 210 510 vlineto endchar -26 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto endchar -26 151 197 -104 callsubr endchar -12 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 -105 callsubr 10 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 -105 callsubr -12 66 hmoveto -107 callsubr 10 211 657 -104 callsubr -111 -152 rmoveto -510 210 510 vlineto endchar -106 callsubr 80 rmoveto -107 callsubr -106 callsubr 310 rmoveto 128 -510 128 510 rlineto endchar 200 66 hmoveto 256 hlineto -128 510 rlineto -28 -510 rmoveto -107 callsubr 200 334 hmoveto -128 510 -128 -510 rlineto 88 hmoveto -107 callsubr 0001beef ufo2ft-2.30.0/tests/data/TestFont-CFF.ttx000066400000000000000000000432651434012334300177660ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) -55 -80 rmoveto 509 hlineto 149 -98 callsubr -149 vhcurveto 121 return rmoveto 509 hlineto 150 -98 callsubr -150 vhcurveto endchar rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto return -102 callsubr endchar rmoveto -99 callsubr endchar 66 -100 callsubr return rmoveto -510 210 510 vlineto endchar hmoveto -99 callsubr return 256 hlineto -128 510 rlineto return -50 50 -205 -204 -50 -50 return 143 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto endchar -107 endchar 31 -104 callsubr 53 100 505 -101 callsubr 17 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto endchar 17 151 197 -105 callsubr endchar 31 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 -106 callsubr 53 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 -106 callsubr 31 -104 callsubr 53 211 657 -105 callsubr -111 -152 -101 callsubr -107 callsubr 80 -103 callsubr -107 callsubr 310 rmoveto 128 -510 128 510 rlineto endchar -102 callsubr -28 -510 -103 callsubr 334 hmoveto -128 510 -128 -510 rlineto 88 -100 callsubr endchar 0001beef ufo2ft-2.30.0/tests/data/TestFont-CFF2-cffsubr.ttx000066400000000000000000000432511434012334300214730ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. 
http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 66 -102 callsubr -55 -80 rmoveto 509 hlineto 149 -99 callsubr -149 vhcurveto 121 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto rmoveto 509 hlineto 150 -99 callsubr -150 vhcurveto rmoveto -100 callsubr hmoveto -100 callsubr rmoveto -510 210 510 vlineto 256 hlineto -128 510 rlineto -50 50 -205 -204 -50 -50 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto -107 callsubr 100 505 -101 callsubr 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto 151 197 -105 callsubr 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 -104 callsubr 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 -104 callsubr -107 callsubr 211 657 -105 callsubr -111 -152 -101 callsubr -106 callsubr 80 -103 callsubr -106 callsubr 310 rmoveto 128 -510 128 510 rlineto -107 callsubr -28 -510 -103 callsubr 334 hmoveto -128 510 -128 -510 rlineto 88 -102 callsubr 0001beef ufo2ft-2.30.0/tests/data/TestFont-CFF2-compreffor.ttx000066400000000000000000000427641434012334300222130ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 256 hlineto -128 510 rlineto 200 -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto 100 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto -150 -12 66 hmoveto -107 callsubr 10 100 505 rmoveto -510 210 510 vlineto -26 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto -26 151 197 -104 callsubr -12 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 -105 callsubr 10 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 -105 callsubr -12 66 hmoveto -107 callsubr 10 211 657 -104 callsubr -111 -152 rmoveto -510 210 510 vlineto -106 callsubr 80 rmoveto -107 callsubr -106 callsubr 310 rmoveto 128 -510 128 510 rlineto 200 66 hmoveto 256 hlineto -128 510 rlineto -28 -510 rmoveto -107 callsubr 200 334 hmoveto -128 510 -128 -510 rlineto 88 hmoveto -107 callsubr 0001beef ufo2ft-2.30.0/tests/data/TestFont-CFF2-post3.ttx000066400000000000000000000404011434012334300211030ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. 
http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 66 -102 callsubr -55 -80 rmoveto 509 hlineto 149 -99 callsubr -149 vhcurveto 121 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto rmoveto 509 hlineto 150 -99 callsubr -150 vhcurveto rmoveto -100 callsubr hmoveto -100 callsubr rmoveto -510 210 510 vlineto 256 hlineto -128 510 rlineto -50 50 -205 -204 -50 -50 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto -107 callsubr 100 505 -101 callsubr 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto 151 197 -105 callsubr 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 -104 callsubr 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 -104 callsubr -107 callsubr 211 657 -105 callsubr -111 -152 -101 callsubr -106 callsubr 80 -103 callsubr -106 callsubr 310 rmoveto 128 -510 128 510 rlineto -107 callsubr -28 -510 -103 callsubr 334 hmoveto -128 510 -128 -510 rlineto 88 -102 callsubr 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOptimize-CFF.ttx000066400000000000000000000434761434012334300220650ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. 
Some Font (WWS Family Name) Regular (WWS Subfamily Name) 100 450 0 rmoveto 0 750 rlineto -400 0 rlineto 0 -750 rlineto 350 50 rmoveto -300 0 rlineto 0 650 rlineto 300 0 rlineto endchar -150 endchar -12 66 0 rmoveto 256 0 rlineto -128 510 rlineto endchar 10 100 505 rmoveto 0 -510 rlineto 210 0 rlineto 0 510 rlineto endchar -26 300 -10 rmoveto 0 510 rlineto -150 0 -50 -50 0 -205 rrcurveto 0 -205 50 -50 150 0 rrcurveto endchar -26 151 197 rmoveto -34 0 -27 -27 0 -33 rrcurveto 0 -33 27 -27 34 0 rrcurveto 33 0 27 27 0 33 rrcurveto 0 33 -27 27 -33 0 rrcurveto endchar -12 66 510 rmoveto 128 -435 rlineto 128 435 rlineto -377 -487 rmoveto 509 0 rlineto 0 150 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -150 rrcurveto endchar 10 66 510 rmoveto 256 0 rlineto -128 -435 rlineto -249 -52 rmoveto 509 0 rlineto 0 150 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -150 rrcurveto endchar -12 66 0 rmoveto 256 0 rlineto -128 510 rlineto endchar 10 211 657 rmoveto -34 0 -27 -27 0 -33 rrcurveto 0 -33 27 -27 34 0 rrcurveto 33 0 27 27 0 33 rrcurveto 0 33 -27 27 -33 0 rrcurveto -111 -152 rmoveto 0 -510 rlineto 210 0 rlineto 0 510 rlineto endchar 200 -55 -80 rmoveto 509 0 rlineto 0 149 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -149 rrcurveto 121 80 rmoveto 256 0 rlineto -128 510 rlineto endchar 200 -55 -80 rmoveto 509 0 rlineto 0 149 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -149 rrcurveto 121 310 rmoveto 128 -510 rlineto 128 510 rlineto endchar 200 66 0 rmoveto 256 0 rlineto -128 510 rlineto -28 -510 rmoveto 256 0 rlineto -128 510 rlineto endchar 200 334 0 rmoveto -128 510 rlineto -128 -510 rlineto 88 0 rmoveto 256 0 rlineto -128 510 rlineto endchar 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOptimize-CFF2.ttx000066400000000000000000000434031434012334300221350ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. 
Some Font (WWS Family Name) Regular (WWS Subfamily Name) 450 0 rmoveto 0 750 rlineto -400 0 rlineto 0 -750 rlineto 350 50 rmoveto -300 0 rlineto 0 650 rlineto 300 0 rlineto 66 0 rmoveto 256 0 rlineto -128 510 rlineto 100 505 rmoveto 0 -510 rlineto 210 0 rlineto 0 510 rlineto 300 -10 rmoveto 0 510 rlineto -150 0 -50 -50 0 -205 rrcurveto 0 -205 50 -50 150 0 rrcurveto 151 197 rmoveto -34 0 -27 -27 0 -33 rrcurveto 0 -33 27 -27 34 0 rrcurveto 33 0 27 27 0 33 rrcurveto 0 33 -27 27 -33 0 rrcurveto 66 510 rmoveto 128 -435 rlineto 128 435 rlineto -377 -487 rmoveto 509 0 rlineto 0 150 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -150 rrcurveto 66 510 rmoveto 256 0 rlineto -128 -435 rlineto -249 -52 rmoveto 509 0 rlineto 0 150 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -150 rrcurveto 66 0 rmoveto 256 0 rlineto -128 510 rlineto 211 657 rmoveto -34 0 -27 -27 0 -33 rrcurveto 0 -33 27 -27 34 0 rrcurveto 33 0 27 27 0 33 rrcurveto 0 33 -27 27 -33 0 rrcurveto -111 -152 rmoveto 0 -510 rlineto 210 0 rlineto 0 510 rlineto -55 -80 rmoveto 509 0 rlineto 0 149 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -149 rrcurveto 121 80 rmoveto 256 0 rlineto -128 510 rlineto -55 -80 rmoveto 509 0 rlineto 0 149 -50 50 -205 0 rrcurveto -204 0 -50 -50 0 -149 rrcurveto 121 310 rmoveto 128 -510 rlineto 128 510 rlineto 66 0 rmoveto 256 0 rlineto -128 510 rlineto -28 -510 rmoveto 256 0 rlineto -128 510 rlineto 334 0 rmoveto -128 510 rlineto -128 -510 rlineto 88 0 rmoveto 256 0 rlineto -128 510 rlineto 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOverlaps-CFF-pathops.ttx000066400000000000000000000431311434012334300235200ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. 
Some Font (WWS Family Name) Regular (WWS Subfamily Name) hlineto -128 510 return 66 510 rmoveto 85 -288 rlineto -164 -8 -42 -54 -137 vvcurveto 509 hlineto 140 -44 53 -173 6 vhcurveto 85 288 rlineto return rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto endchar 66 hmoveto 256 -107 callsubr rlineto endchar 100 505 rmoveto -510 210 510 vlineto return hlineto 123 -34 55 -127 16 vhcurveto return 143 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto endchar -107 endchar 31 -104 callsubr 53 -103 callsubr endchar 17 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto endchar 17 151 197 -105 callsubr 31 -106 callsubr endchar 53 -106 callsubr -85 -288 rmoveto -43 -147 -43 147 rlineto 1 15 16 0 17 hhcurveto 13 13 0 -1 12 hvcurveto endchar 31 -104 callsubr 53 -103 callsubr -99 152 -105 callsubr -55 -80 rmoveto 509 -102 callsubr -99 396 -100 -397 rlineto -117 -18 -32 -56 -119 vvcurveto endchar -55 -80 rmoveto 199 hlineto 50 -200 50 200 rlineto 210 -102 callsubr 29 116 rlineto -256 hlineto 29 -117 rlineto -118 -18 -32 -55 -120 vvcurveto endchar 66 hmoveto 356 -107 callsubr -50 -199 -50 199 rlineto endchar 334 hmoveto 88 -107 callsubr -44 -175 -44 175 -128 -510 rlineto endchar 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOverlaps-CFF.ttx000066400000000000000000000430271434012334300220500ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) -55 23 rmoveto 509 hlineto 140 -44 53 -173 6 vhcurveto 85 288 rlineto -256 hlineto 85 -288 rlineto -164 -8 -42 -54 -137 vvcurveto return rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto return 66 hmoveto 256 hlineto -128 510 rlineto endchar rmoveto -510 210 510 vlineto endchar hlineto 123 -34 55 -127 16 vhcurveto return 143 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto endchar -107 endchar 31 -105 callsubr 53 100 505 -104 callsubr 17 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto endchar 17 151 197 -106 callsubr endchar 31 -107 callsubr endchar 53 -107 callsubr 254 200 rmoveto 13 13 0 -1 12 hvcurveto -43 -147 -43 147 rlineto 1 15 16 0 17 hhcurveto endchar 31 -105 callsubr 53 211 657 -106 callsubr -111 -152 -104 callsubr -55 -80 rmoveto 509 -103 callsubr -99 396 -100 -397 rlineto -117 -18 -32 -56 -119 vvcurveto endchar -55 -80 rmoveto 199 hlineto 50 -200 50 200 rlineto 210 -103 callsubr 29 116 rlineto -256 hlineto 29 -117 rlineto -118 -18 -32 -55 -120 vvcurveto endchar 66 hmoveto 356 hlineto -128 510 -50 -199 -50 199 rlineto endchar 294 510 rmoveto -44 -175 -44 175 -128 -510 rlineto 344 hlineto endchar 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOverlaps-TTF-pathops.ttx000066400000000000000000000500651434012334300235630ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. 
Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 0001beef ufo2ft-2.30.0/tests/data/TestFont-NoOverlaps-TTF.ttx000066400000000000000000000500651434012334300221070ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 0001beef ufo2ft-2.30.0/tests/data/TestFont-Specialized-CFF.ttx000066400000000000000000000424301434012334300222110ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. 
Some Font (WWS Family Name) Regular (WWS Subfamily Name) 100 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto endchar -150 endchar -12 66 hmoveto 256 hlineto -128 510 rlineto endchar 10 100 505 rmoveto -510 210 510 vlineto endchar -26 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto endchar -26 151 197 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto endchar -12 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto endchar 10 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto endchar -12 66 hmoveto 256 hlineto -128 510 rlineto endchar 10 211 657 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto -111 -152 rmoveto -510 210 510 vlineto endchar 200 -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 80 rmoveto 256 hlineto -128 510 rlineto endchar 200 -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 310 rmoveto 128 -510 128 510 rlineto endchar 200 66 hmoveto 256 hlineto -128 510 rlineto -28 -510 rmoveto 256 hlineto -128 510 rlineto endchar 200 334 hmoveto -128 510 -128 -510 rlineto 88 hmoveto 256 hlineto -128 510 rlineto endchar 0001beef ufo2ft-2.30.0/tests/data/TestFont-Specialized-CFF2.ttx000066400000000000000000000423351434012334300222770ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 450 hmoveto 750 -400 -750 vlineto 350 50 rmoveto -300 650 300 hlineto 66 hmoveto 256 hlineto -128 510 rlineto 100 505 rmoveto -510 210 510 vlineto 300 -10 rmoveto 510 vlineto -150 -50 -50 -205 -205 50 -50 150 hvcurveto 151 197 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto 66 510 rmoveto 128 -435 128 435 rlineto -377 -487 rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto 66 510 rmoveto 256 hlineto -128 -435 rlineto -249 -52 rmoveto 509 hlineto 150 -50 50 -205 -204 -50 -50 -150 vhcurveto 66 hmoveto 256 hlineto -128 510 rlineto 211 657 rmoveto -34 -27 -27 -33 -33 27 -27 34 33 27 27 33 33 -27 27 -33 hvcurveto -111 -152 rmoveto -510 210 510 vlineto -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 80 rmoveto 256 hlineto -128 510 rlineto -55 -80 rmoveto 509 hlineto 149 -50 50 -205 -204 -50 -50 -149 vhcurveto 121 310 rmoveto 128 -510 128 510 rlineto 66 hmoveto 256 hlineto -128 510 rlineto -28 -510 rmoveto 256 hlineto -128 510 rlineto 334 hmoveto -128 510 -128 -510 rlineto 88 hmoveto 256 hlineto -128 510 rlineto 0001beef ufo2ft-2.30.0/tests/data/TestFont-TTF-post3.ttx000066400000000000000000000446261434012334300210750ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. 
Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 0001beef ufo2ft-2.30.0/tests/data/TestFont.ttx000066400000000000000000000475501434012334300173730ustar00rootroot00000000000000 Unique Font Identifier Copyright © Some Foundry. Some Font Regular (Style Map Family Name) Regular OpenType name Table Unique ID Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) OpenType name Table Version SomeFont-Regular Postscript Font Name Trademark Some Foundry Some Foundry (Manufacturer Name) Some Designer Some Font by Some Designer for Some Foundry. http://somefoundry.com http://somedesigner.com License info for Some Foundry. http://somefoundry.com/license Some Font (Preferred Family Name) Regular (Preferred Subfamily Name) Some Font Regular (Compatible Full Name) Sample Text for Some Font. Some Font (WWS Family Name) Regular (WWS Subfamily Name) 0001beef ufo2ft-2.30.0/tests/data/TestFont.ufo/000077500000000000000000000000001434012334300174075ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/TestFont.ufo/data/000077500000000000000000000000001434012334300203205ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/TestFont.ufo/data/com.github.fonttools.ttx/000077500000000000000000000000001434012334300252235ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/TestFont.ufo/data/com.github.fonttools.ttx/CUST.ttx000066400000000000000000000002711434012334300265420ustar00rootroot00000000000000 0001beef ufo2ft-2.30.0/tests/data/TestFont.ufo/features.fea000066400000000000000000000000001434012334300216700ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/TestFont.ufo/fontinfo.plist000066400000000000000000000215341434012334300223130ustar00rootroot00000000000000 ascender 750 capHeight 750 copyright Copyright © Some Foundry. descender -250 familyName Some Font (Family Name) guidelines x 250 x -20 x 30 y 500 y -200 y 700 angle 135 x 0 y 0 angle 45 x 0 y 700 angle 135 x 20 y 0 italicAngle -12.5 macintoshFONDFamilyID 15000 macintoshFONDName SomeFont Regular (FOND Name) note A note. openTypeGaspRangeRecords rangeGaspBehavior 1 3 rangeMaxPPEM 7 rangeGaspBehavior 0 1 2 3 rangeMaxPPEM 65535 openTypeHeadCreated 2000/01/01 00:00:00 openTypeHeadFlags 0 1 openTypeHeadLowestRecPPEM 10 openTypeHheaAscender 750 openTypeHheaCaretOffset 0 openTypeHheaCaretSlopeRise 1 openTypeHheaCaretSlopeRun 0 openTypeHheaDescender -250 openTypeHheaLineGap 200 openTypeNameCompatibleFullName Some Font Regular (Compatible Full Name) openTypeNameDescription Some Font by Some Designer for Some Foundry. openTypeNameDesigner Some Designer openTypeNameDesignerURL http://somedesigner.com openTypeNameLicense License info for Some Foundry. 
openTypeNameLicenseURL http://somefoundry.com/license openTypeNameManufacturer Some Foundry openTypeNameManufacturerURL http://somefoundry.com openTypeNamePreferredFamilyName Some Font (Preferred Family Name) openTypeNamePreferredSubfamilyName Regular (Preferred Subfamily Name) openTypeNameRecords encodingID 0 languageID 0 nameID 3 platformID 1 string Unique Font Identifier encodingID 1 languageID 1033 nameID 8 platformID 3 string Some Foundry (Manufacturer Name) openTypeNameSampleText Sample Text for Some Font. openTypeNameUniqueID OpenType name Table Unique ID openTypeNameVersion OpenType name Table Version openTypeNameWWSFamilyName Some Font (WWS Family Name) openTypeNameWWSSubfamilyName Regular (WWS Subfamily Name) openTypeOS2CodePageRanges 0 1 openTypeOS2FamilyClass 1 1 openTypeOS2Panose 0 1 2 3 4 5 6 7 8 9 openTypeOS2Selection 3 openTypeOS2StrikeoutPosition 300 openTypeOS2StrikeoutSize 20 openTypeOS2SubscriptXOffset 0 openTypeOS2SubscriptXSize 200 openTypeOS2SubscriptYOffset -100 openTypeOS2SubscriptYSize 400 openTypeOS2SuperscriptXOffset 0 openTypeOS2SuperscriptXSize 200 openTypeOS2SuperscriptYOffset 200 openTypeOS2SuperscriptYSize 400 openTypeOS2Type openTypeOS2TypoAscender 750 openTypeOS2TypoDescender -250 openTypeOS2TypoLineGap 200 openTypeOS2UnicodeRanges 0 1 openTypeOS2VendorID SOME openTypeOS2WeightClass 500 openTypeOS2WidthClass 5 openTypeOS2WinAscent 750 openTypeOS2WinDescent 250 openTypeVheaCaretOffset 0 openTypeVheaCaretSlopeRise 0 openTypeVheaCaretSlopeRun 1 openTypeVheaVertTypoAscender 750 openTypeVheaVertTypoDescender -250 openTypeVheaVertTypoLineGap 200 postscriptBlueFuzz 1 postscriptBlueScale 0.039625 postscriptBlueShift 7 postscriptBlueValues 500 510 postscriptDefaultCharacter .notdef postscriptDefaultWidthX 400 postscriptFamilyBlues 500 510 postscriptFamilyOtherBlues -250 -260 postscriptFontName SomeFont-Regular (Postscript Font Name) postscriptForceBold postscriptFullName Some Font-Regular (Postscript Full Name) postscriptIsFixedPitch postscriptNominalWidthX 400 postscriptOtherBlues -250 -260 postscriptSlantAngle -12.5 postscriptStemSnapH 100 120 postscriptStemSnapV 80 90 postscriptUnderlinePosition -200 postscriptUnderlineThickness 20 postscriptUniqueID 4000000 postscriptWeightName Medium postscriptWindowsCharacterSet 1 styleMapFamilyName Some Font Regular (Style Map Family Name) styleMapStyleName regular styleName Regular (Style Name) trademark Trademark Some Foundry unitsPerEm 1000 versionMajor 1 versionMinor 0 xHeight 500 year 2008 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/000077500000000000000000000000001434012334300207155ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/_notdef.glif000066400000000000000000000007711434012334300232030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/a.glif000066400000000000000000000004531434012334300220020ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/b.glif000066400000000000000000000005261434012334300220040ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/c.glif000066400000000000000000000006411434012334300220030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/contents.plist000066400000000000000000000014201434012334300236240ustar00rootroot00000000000000 .notdef _notdef.glif a a.glif b b.glif c c.glif d d.glif e e.glif f f.glif g g.glif h h.glif i i.glif j j.glif k k.glif l l.glif space space.glif 
ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/d.glif000066400000000000000000000011621434012334300220030ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/e.glif000066400000000000000000000010561434012334300220060ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/f.glif000066400000000000000000000010561434012334300220070ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/g.glif000066400000000000000000000002671434012334300220130ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/h.glif000066400000000000000000000003531434012334300220100ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/i.glif000066400000000000000000000006671434012334300220210ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/j.glif000066400000000000000000000007221434012334300220120ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/k.glif000066400000000000000000000003361434012334300220140ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/l.glif000066400000000000000000000003701434012334300220130ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/glyphs/space.glif000066400000000000000000000002141434012334300226500ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/TestFont.ufo/kerning.plist000066400000000000000000000006511434012334300221230ustar00rootroot00000000000000 a a 5 b -10 space 1 b a -7 ufo2ft-2.30.0/tests/data/TestFont.ufo/layercontents.plist000066400000000000000000000004231434012334300233550ustar00rootroot00000000000000 public.default glyphs ufo2ft-2.30.0/tests/data/TestFont.ufo/lib.plist000066400000000000000000000023351434012334300212350ustar00rootroot00000000000000 public.glyphOrder .notdef glyph1 glyph2 space a b c d e f g h i j k l public.postscriptNames a uni0061 b uni0062 c uni0063 d uni0064 e uni0065 f uni0066 g uni0067 h uni0068 i uni0069 j uni006A k uni006B l uni006C space uni0020 ufo2ft-2.30.0/tests/data/TestFont.ufo/metainfo.plist000066400000000000000000000004531434012334300222700ustar00rootroot00000000000000 creator org.robofab.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/data/TestVariableFont-CFF2-cffsubr.ttx000066400000000000000000000432571434012334300231470ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto -280 -54 -82 188 170 153 163 -124 -355 1 2 blend rmoveto 449 -2 50 -250 rmoveto 400 1000 -400 hlineto 50 -950 rmoveto 900 300 -900 vlineto 468 -1 rmoveto -21 435 -233 70 -205 -76 27 -91 -56 1 blend 172 60 155 -40 -59 2 2 blend 3 -360 56 1 blend rlineto 12 266 59 -2 2 blend rmoveto -352 -23 3 -218 139 -34 221 83 -6 63 -222 -60 -75 52 15 40 13 37 -21 5 blend 2 46 294 35 -78 -30 2 blend rlineto -21 597 -8 28 -107 callsubr 1 vsindex 127 228 -1 70 -25 -105 callsubr 1 -45 -2 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -106 callsubr 6 27 0 0 -27 0 36 0 -29 0 -34 0 31 0 -1 0 2 0 -45 -2 13 28 100 37 0 13 0 -2 55 -40 -54 -32 -86 -30 -57 -85 -60 34 57 84 146 -5 0 21 blend rlineto 127 228 70 -105 callsubr -45 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -27 36 -29 -34 31 -1 2 -45 13 100 10 blend -106 callsubr 55 -54 -86 -57 -60 57 146 7 blend 6 rlineto 167 395 -84 118 -107 callsubr 559 459 rmoveto -235 71 -286 -187 389 -188 -145 -79 -229 98 -28 -91 279 -96 278 187 -369 192 113 76 -22 55 -58 -61 19 49 34 9 9 -56 -2 -41 46 12 
29 24 -57 -31 18 blend 213 -66 rlineto wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-CFF2-post3.ttx000066400000000000000000000412541434012334300225600ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight 50 -250 rmoveto 400 1000 -400 hlineto 50 -950 rmoveto 900 300 -900 vlineto 468 -1 rmoveto -21 435 -233 70 -205 -76 27 -91 -56 1 blend 172 60 155 -40 -59 2 2 blend 3 -360 56 1 blend rlineto 12 266 59 -2 2 blend rmoveto -352 -23 3 -218 139 -34 221 83 -6 63 -222 -60 -75 52 15 40 13 37 -21 5 blend 2 46 294 35 -78 -30 2 blend rlineto 1 vsindex 127 228 -1 70 -25 1 2 blend rmoveto 449 -2 1 -45 -2 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -280 -54 -82 188 170 153 163 -124 -355 6 27 0 0 -27 0 36 0 -29 0 -34 0 31 0 -1 0 2 0 -45 -2 13 28 100 37 0 13 0 -2 55 -40 -54 -32 -86 -30 -57 -85 -60 34 57 84 146 -5 0 21 blend rlineto 127 228 70 1 2 blend rmoveto 449 -2 -45 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -27 36 -29 -34 31 -1 2 -45 13 100 10 blend -280 -54 -82 188 170 153 163 -124 -355 55 -54 -86 -57 -60 57 146 7 blend 6 rlineto 167 395 -84 118 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto 559 459 rmoveto -235 71 -286 -187 389 -188 -145 -79 -229 98 -28 -91 279 -96 278 187 -369 192 113 76 -22 55 -58 -61 19 49 34 9 9 -56 -2 -41 46 12 29 24 -57 -31 18 blend 213 -66 rlineto -21 597 -8 28 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-CFF2-useProductionNames.ttx000066400000000000000000000430531434012334300253360ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight 50 -250 rmoveto 400 1000 -400 hlineto 50 -950 rmoveto 900 300 -900 vlineto 468 -1 rmoveto -21 435 -233 70 -205 -76 27 -91 -56 1 blend 172 60 155 -40 -59 2 2 blend 3 -360 56 1 blend rlineto 12 266 59 -2 2 blend rmoveto -352 -23 3 -218 139 -34 221 83 -6 63 -222 -60 -75 52 15 40 13 37 -21 5 blend 2 46 294 35 -78 -30 2 blend rlineto 1 vsindex 127 228 -1 70 -25 1 2 blend rmoveto 449 -2 1 -45 -2 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -280 -54 -82 188 170 153 163 -124 -355 6 27 0 0 -27 0 36 0 -29 0 -34 0 31 0 -1 0 2 0 -45 -2 13 28 100 37 0 13 0 -2 55 -40 -54 -32 -86 -30 -57 -85 -60 34 57 84 146 -5 0 21 blend rlineto 559 459 rmoveto -235 71 -286 -187 389 -188 -145 -79 -229 98 -28 -91 279 -96 278 187 -369 192 113 76 -22 55 -58 -61 19 49 34 9 9 -56 -2 -41 46 12 29 24 -57 -31 18 blend 213 -66 rlineto 127 228 70 1 2 blend rmoveto 449 -2 -45 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -27 36 -29 -34 31 -1 2 -45 13 100 10 blend -280 -54 -82 188 170 153 163 -124 -355 55 -54 -86 -57 -60 57 146 7 blend 6 rlineto 167 395 -84 118 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto -21 597 -8 28 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-CFF2.ttx000066400000000000000000000425631434012334300215160ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight 50 -250 rmoveto 400 1000 -400 hlineto 50 -950 rmoveto 900 300 -900 vlineto 468 -1 rmoveto -21 435 -233 70 -205 -76 27 -91 -56 1 blend 172 60 155 -40 -59 2 2 blend 3 -360 56 1 blend rlineto 12 266 59 -2 2 blend rmoveto -352 -23 3 -218 
139 -34 221 83 -6 63 -222 -60 -75 52 15 40 13 37 -21 5 blend 2 46 294 35 -78 -30 2 blend rlineto -21 597 -8 28 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto 1 vsindex 127 228 -1 70 -25 1 2 blend rmoveto 449 -2 1 -45 -2 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -280 -54 -82 188 170 153 163 -124 -355 6 27 0 0 -27 0 36 0 -29 0 -34 0 31 0 -1 0 2 0 -45 -2 13 28 100 37 0 13 0 -2 55 -40 -54 -32 -86 -30 -57 -85 -60 34 57 84 146 -5 0 21 blend rlineto 127 228 70 1 2 blend rmoveto 449 -2 -45 -2 2 blend -5 79 -255 208 -276 -252 148 -279 338 63 -17 84 -27 36 -29 -34 31 -1 2 -45 13 100 10 blend -280 -54 -82 188 170 153 163 -124 -355 55 -54 -86 -57 -60 57 146 7 blend 6 rlineto 167 395 -84 118 2 blend rmoveto -16 -94 78 -2 9 88 -19 -48 44 7 -4 29 6 blend rlineto 559 459 rmoveto -235 71 -286 -187 389 -188 -145 -79 -229 98 -28 -91 279 -96 278 187 -369 192 113 76 -22 55 -58 -61 19 49 34 9 9 -56 -2 -41 46 12 29 24 -57 -31 18 blend 213 -66 rlineto wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-TTF-post3.ttx000066400000000000000000000503571434012334300225410ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-TTF-useProductionNames.ttx000066400000000000000000000522171434012334300253150ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/TestVariableFont-TTF.ttx000066400000000000000000000517131434012334300214700ustar00rootroot00000000000000 Weight Layer Font Regular 0.000;NONE;LayerFont-Regular Layer Font Regular Version 0.000 LayerFont-Regular Weight wght 0x0 350.0 350.0 625.0 256 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/000077500000000000000000000000001434012334300202325ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/fontinfo.plist000066400000000000000000000023521434012334300231330ustar00rootroot00000000000000 ascender 2146.0 capHeight 1456.0 copyright Copyright 2011 Google Inc. All Rights Reserved. descender -555.0 familyName Roboto italicAngle 0 openTypeNameDesigner Christian Robertson openTypeNameDesignerURL Google.com openTypeNameLicense Licensed under the Apache License, Version 2.0 openTypeNameLicenseURL http://www.apache.org/licenses/LICENSE-2.0 openTypeNameManufacturer Google openTypeNameManufacturerURL Google.com styleName Regular trademark Roboto is a trademark of Google. 
unitsPerEm 2048.0 versionMajor 0 versionMinor 0 xHeight 1082.0 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/000077500000000000000000000000001434012334300215405ustar00rootroot00000000000000ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/I_.glif000066400000000000000000000005351434012334300227350ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/I_acute.glif000066400000000000000000000003561434012334300237600ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/acute.glif000066400000000000000000000005471434012334300235120ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/contents.plist000066400000000000000000000005761434012334300244620ustar00rootroot00000000000000 I I_.glif Iacute I_acute.glif acute acute.glif romanthree romanthree.glif ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/glyphs/romanthree.glif000066400000000000000000000004131434012334300245450ustar00rootroot00000000000000 ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/layercontents.plist000066400000000000000000000004111434012334300241750ustar00rootroot00000000000000 foreground glyphs ufo2ft-2.30.0/tests/data/UseMyMetrics.ufo/metainfo.plist000066400000000000000000000004451434012334300231140ustar00rootroot00000000000000 creator org.robofab.ufoLib formatVersion 3 ufo2ft-2.30.0/tests/featureCompiler_test.py000066400000000000000000000234651434012334300207110ustar00rootroot00000000000000import logging import re from textwrap import dedent import py import pytest from fontTools import ttLib from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound from ufo2ft.featureCompiler import FeatureCompiler, logger, parseLayoutFeatures from ufo2ft.featureWriters import ( FEATURE_WRITERS_KEY, BaseFeatureWriter, KernFeatureWriter, ast, ) from .testSupport import pushd class ParseLayoutFeaturesTest: def test_include(self, FontClass, tmpdir): tmpdir.join("test.fea").write_text( dedent( """\ # hello world """ ), encoding="utf-8", ) ufo = FontClass() ufo.features.text = dedent( """\ include(test.fea) """ ) ufo.save(str(tmpdir.join("Test.ufo"))) fea = parseLayoutFeatures(ufo) assert "# hello world" in str(fea) def test_include_no_ufo_path(self, FontClass, tmpdir): ufo = FontClass() ufo.features.text = dedent( """\ include(test.fea) """ ) with pushd(str(tmpdir)): with pytest.raises(IncludedFeaNotFound): parseLayoutFeatures(ufo) def test_include_not_found(self, FontClass, tmpdir, caplog): caplog.set_level(logging.ERROR) tmpdir.join("test.fea").write_text( dedent( """\ # hello world """ ), encoding="utf-8", ) ufo = FontClass() ufo.features.text = dedent( """\ include(../test.fea) """ ) ufo.save(str(tmpdir.join("Test.ufo"))) with caplog.at_level(logging.WARNING, logger=logger.name): with pytest.raises(IncludedFeaNotFound): parseLayoutFeatures(ufo) assert len(caplog.records) == 1 assert "change the file name in the include" in caplog.text def test_include_dir(self, FontClass, tmp_path, caplog): features_dir = tmp_path / "features" features_dir.mkdir() (features_dir / "test.fea").write_text( dedent( """\ # hello world """ ), encoding="utf-8", ) ufo = FontClass() ufo.features.text = dedent( """\ include(test.fea) """ ) ufo.save(tmp_path / "Test.ufo") fea = parseLayoutFeatures(ufo, features_dir) assert "# hello world" in str(fea) class DummyFeatureWriter: tableTag = "GPOS" def write(self, font, feaFile, compiler=None): pass class FeatureCompilerTest: def test_ttFont(self, FontClass): ufo = FontClass() ufo.newGlyph("f") ufo.newGlyph("f_f") ufo.features.text = dedent( """\ 
feature liga { sub f f by f_f; } liga; """ ) ttFont = ttLib.TTFont() ttFont.setGlyphOrder(["f", "f_f"]) compiler = FeatureCompiler(ufo, ttFont) compiler.compile() assert "GSUB" in ttFont gsub = ttFont["GSUB"].table assert gsub.FeatureList.FeatureCount == 1 assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "liga" def test_ttFont_None(self, FontClass): ufo = FontClass() ufo.newGlyph("f") ufo.newGlyph("f_f") ufo.features.text = dedent( """\ feature liga { sub f f by f_f; } liga; """ ) compiler = FeatureCompiler(ufo) ttFont = compiler.compile() assert "GSUB" in ttFont gsub = ttFont["GSUB"].table assert gsub.FeatureList.FeatureCount == 1 assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "liga" def test_deprecated_methods(self, FontClass): compiler = FeatureCompiler(FontClass()) with pytest.warns(UserWarning, match="method is deprecated"): compiler.setupFile_features() compiler.features = "" with pytest.warns(UserWarning, match="method is deprecated"): compiler.setupFile_featureTables() class UserCompiler(FeatureCompiler): def setupFile_features(self): self.features = "# hello world" def setupFile_featureTables(self): self.ttFont = ttLib.TTFont() compiler = UserCompiler(FontClass()) with pytest.warns(UserWarning, match="method is deprecated"): compiler.compile() def test_deprecated_mtiFeatures_argument(self, FontClass): with pytest.warns(UserWarning, match="argument is ignored"): FeatureCompiler(FontClass(), mtiFeatures="whatever") def test_featureWriters_empty(self, FontClass): kernWriter = KernFeatureWriter(ignoreMarks=False) ufo = FontClass() ufo.newGlyph("a") ufo.newGlyph("v") ufo.kerning.update({("a", "v"): -40}) compiler = FeatureCompiler(ufo, featureWriters=[kernWriter]) ttFont1 = compiler.compile() assert "GPOS" in ttFont1 compiler = FeatureCompiler(ufo, featureWriters=[]) ttFont2 = compiler.compile() assert "GPOS" not in ttFont2 def test_loadFeatureWriters_from_UFO_lib(self, FontClass): ufo = FontClass() ufo.newGlyph("a") ufo.newGlyph("v") ufo.kerning.update({("a", "v"): -40}) ufo.lib[FEATURE_WRITERS_KEY] = [{"class": "KernFeatureWriter"}] compiler = FeatureCompiler(ufo) ttFont = compiler.compile() assert len(compiler.featureWriters) == 1 assert isinstance(compiler.featureWriters[0], KernFeatureWriter) assert "GPOS" in ttFont def test_loadFeatureWriters_from_both_UFO_lib_and_argument(self, FontClass): ufo = FontClass() ufo.lib[FEATURE_WRITERS_KEY] = [{"class": "KernFeatureWriter"}] compiler = FeatureCompiler(ufo, featureWriters=[..., DummyFeatureWriter]) assert len(compiler.featureWriters) == 2 assert isinstance(compiler.featureWriters[0], KernFeatureWriter) assert isinstance(compiler.featureWriters[1], DummyFeatureWriter) def test_loadFeatureWriters_from_both_defaults_and_argument(self, FontClass): ufo = FontClass() compiler = FeatureCompiler(ufo, featureWriters=[DummyFeatureWriter, ...]) assert len(compiler.featureWriters) == 1 + len( FeatureCompiler.defaultFeatureWriters ) assert isinstance(compiler.featureWriters[0], DummyFeatureWriter) def test_GSUB_writers_run_first(self, FontClass): class FooFeatureWriter(BaseFeatureWriter): tableTag = "GSUB" def write(self, font, feaFile, compiler=None): foo = ast.FeatureBlock("FOO ") foo.statements.append( ast.SingleSubstStatement( "a", "v", prefix="", suffix="", forceChain=None ) ) feaFile.statements.append(foo) featureWriters = [KernFeatureWriter, FooFeatureWriter] ufo = FontClass() ufo.newGlyph("a") ufo.newGlyph("v") ufo.kerning.update({("a", "v"): -40}) compiler = FeatureCompiler(ufo, featureWriters=featureWriters) assert 
len(compiler.featureWriters) == 2 assert compiler.featureWriters[0].tableTag == "GSUB" assert compiler.featureWriters[1].tableTag == "GPOS" ttFont = compiler.compile() assert "GSUB" in ttFont gsub = ttFont["GSUB"].table assert gsub.FeatureList.FeatureCount == 1 assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "FOO " def test_buildTables_FeatureLibError(self, FontClass, caplog): caplog.set_level(logging.CRITICAL) ufo = FontClass() ufo.newGlyph("f") ufo.newGlyph("f.alt01") ufo.newGlyph("f_f") features = dedent( """\ feature BUGS { # invalid lookup MIXED_TYPE { sub f by f.alt01; sub f f by f_f; } MIXED_TYPE; } BUGS; """ ) ufo.features.text = features compiler = FeatureCompiler(ufo) tmpfile = None try: with caplog.at_level(logging.ERROR, logger=logger.name): with pytest.raises(FeatureLibError): compiler.compile() assert len(caplog.records) == 1 assert "Compilation failed! Inspect temporary file" in caplog.text tmpfile = py.path.local(re.findall(".*: '(.*)'$", caplog.text)[0]) assert tmpfile.exists() assert tmpfile.read_text("utf-8") == features finally: if tmpfile is not None: tmpfile.remove(ignore_errors=True) def test_setupFeatures_custom_feaIncludeDir(self, FontClass, tmp_path): (tmp_path / "family.fea").write_text( """\ feature liga { sub f f by f_f; } liga; """ ) ufo = FontClass() ufo.newGlyph("a") ufo.newGlyph("v") ufo.newGlyph("f") ufo.newGlyph("f_f") ufo.kerning.update({("a", "v"): -40}) ufo.features.text = dedent( """\ include(family.fea); """ ) compiler = FeatureCompiler(ufo, feaIncludeDir=str(tmp_path)) compiler.setupFeatures() assert compiler.features == dedent( """\ feature liga { sub f f by f_f; } liga; lookup kern_ltr { lookupflag IgnoreMarks; pos a v -40; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) ufo2ft-2.30.0/tests/featureWriters/000077500000000000000000000000001434012334300171535ustar00rootroot00000000000000ufo2ft-2.30.0/tests/featureWriters/__init__.py000066400000000000000000000012351434012334300212650ustar00rootroot00000000000000from ufo2ft.featureCompiler import parseLayoutFeatures from ufo2ft.featureWriters import ast class FeatureWriterTest: # subclasses must override this FeatureWriter = None @classmethod def writeFeatures(cls, ufo, **kwargs): """Return a new FeatureFile object containing only the newly generated statements, or None if no new feature was generated. """ writer = cls.FeatureWriter(**kwargs) feaFile = parseLayoutFeatures(ufo) n = len(feaFile.statements) if writer.write(ufo, feaFile): new = ast.FeatureFile() new.statements = feaFile.statements[n:] return new ufo2ft-2.30.0/tests/featureWriters/cursFeatureWriter_test.py000066400000000000000000000105021434012334300242470ustar00rootroot00000000000000from textwrap import dedent import pytest from ufo2ft.featureWriters.cursFeatureWriter import CursFeatureWriter from . 
import FeatureWriterTest @pytest.fixture def testufo(FontClass): ufo = FontClass() ufo.newGlyph("a").appendAnchor({"name": "exit", "x": 100, "y": 200}) glyph = ufo.newGlyph("b") glyph.appendAnchor({"name": "entry", "x": 0, "y": 200}) glyph.appendAnchor({"name": "exit", "x": 111, "y": 200}) ufo.newGlyph("c").appendAnchor({"name": "entry", "x": 100, "y": 200}) return ufo class CursFeatureWriterTest(FeatureWriterTest): FeatureWriter = CursFeatureWriter def test_curs_feature(self, testufo): generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ feature curs { lookup curs { lookupflag RightToLeft IgnoreMarks; pos cursive a ; pos cursive b ; pos cursive c ; } curs; } curs; """ ) def test_curs_feature_LTR(self, testufo): testufo["a"].unicode = ord("a") testufo["b"].unicode = ord("b") testufo["c"].unicode = ord("c") generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ feature curs { lookup curs_ltr { lookupflag IgnoreMarks; pos cursive a ; pos cursive b ; pos cursive c ; } curs_ltr; } curs; """ ) def test_curs_feature_mixed(self, testufo): testufo["a"].unicode = ord("a") testufo["b"].unicode = ord("b") testufo["c"].unicode = ord("c") glyph = testufo.newGlyph("a.swsh") glyph.appendAnchor({"name": "entry", "x": 100, "y": 200}) glyph = testufo.newGlyph("alef") glyph.unicode = 0x0627 glyph = testufo.newGlyph("alef.fina") glyph.appendAnchor({"name": "entry", "x": 300, "y": 10}) glyph = testufo.newGlyph("meem") glyph.unicode = 0x0645 glyph = testufo.newGlyph("meem.init") glyph.appendAnchor({"name": "exit", "x": 0, "y": 10}) glyph = testufo.newGlyph("meem.medi") glyph.appendAnchor({"name": "entry", "x": 500, "y": 10}) glyph.appendAnchor({"name": "exit", "x": 0, "y": 10}) glyph = testufo.newGlyph("meem.fina") glyph.appendAnchor({"name": "entry", "x": 500, "y": 10}) testufo.features.text = dedent( """\ feature swsh { sub a by a.swsh; } swsh; feature init { sub meem by meem.init; } init; feature medi { sub meem by meem.medi; } medi; feature fina { sub alef by alef.fina; sub meem by meem.fina; } fina; """ ) testufo.lib["public.glyphOrder"] = [ "a", "b", "c", "a.swsh", "alef", "alef.fina", "meem", "meem.init", "meem.medi", "meem.fina", ] generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ feature curs { lookup curs_ltr { lookupflag IgnoreMarks; pos cursive a ; pos cursive b ; pos cursive c ; pos cursive a.swsh ; } curs_ltr; lookup curs_rtl { lookupflag RightToLeft IgnoreMarks; pos cursive alef.fina ; pos cursive meem.init ; pos cursive meem.medi ; pos cursive meem.fina ; } curs_rtl; } curs; """ ) ufo2ft-2.30.0/tests/featureWriters/featureWriters_test.py000066400000000000000000000045671434012334300236130ustar00rootroot00000000000000from ufo2ft.featureWriters import ( FEATURE_WRITERS_KEY, BaseFeatureWriter, loadFeatureWriterFromString, loadFeatureWriters, ) try: from plistlib import FMT_XML, loads def readPlistFromString(s): return loads(s, fmt=FMT_XML) except ImportError: from plistlib import readPlistFromString import pytest from ..testSupport import _TempModule TEST_LIB_PLIST = readPlistFromString( b""" com.github.googlei18n.ufo2ft.featureWriters class KernFeatureWriter options mode skip """ ) class FooBarWriter(BaseFeatureWriter): tableTag = "GSUB" def __init__(self, **kwargs): pass def write(self, font, feaFile, compiler=None): return False @pytest.fixture(scope="module", autouse=True) def customWriterModule(): """Make a temporary 'myFeatureWriters' module containing a 'FooBarWriter' class for testing the wruter loading machinery. 
""" with _TempModule("myFeatureWriters") as temp_module: temp_module.module.__dict__["FooBarWriter"] = FooBarWriter yield VALID_SPEC_LISTS = [ [{"class": "KernFeatureWriter"}], [ {"class": "KernFeatureWriter", "options": {"ignoreMarks": False}}, {"class": "MarkFeatureWriter", "options": {"features": ["mark"]}}, ], [{"class": "FooBarWriter", "module": "myFeatureWriters", "options": {"a": 1}}], TEST_LIB_PLIST[FEATURE_WRITERS_KEY], ] @pytest.mark.parametrize("specList", VALID_SPEC_LISTS) def test_loadFeatureWriters_valid(specList, FontClass): ufo = FontClass() ufo.lib[FEATURE_WRITERS_KEY] = specList for writer in loadFeatureWriters(ufo, ignoreErrors=False): assert writer.tableTag in {"GSUB", "GPOS"} assert callable(writer.write) VALID_SPEC_STRINGS = [ "KernFeatureWriter", "KernFeatureWriter(ignoreMarks=False)", "MarkFeatureWriter(features=['mark'])", "myFeatureWriters::FooBarWriter(a=1)", ] @pytest.mark.parametrize("spec", VALID_SPEC_STRINGS) def test_loadFeatureWriterFromString_valid(spec, FontClass): writer = loadFeatureWriterFromString(spec) assert writer.tableTag in {"GSUB", "GPOS"} assert callable(writer.write) ufo2ft-2.30.0/tests/featureWriters/gdefFeatureWriter_test.py000066400000000000000000000173621434012334300242130ustar00rootroot00000000000000import logging from textwrap import dedent import pytest from ufo2ft.featureCompiler import parseLayoutFeatures from ufo2ft.featureWriters import GdefFeatureWriter from . import FeatureWriterTest @pytest.fixture def testufo(FontClass): ufo = FontClass() ufo.newGlyph("a") ufo.newGlyph("f") ufo.newGlyph("f.component") ufo.newGlyph("i") liga = ufo.newGlyph("f_f_i") liga.appendAnchor({"name": "caret_2", "x": 400, "y": 0}) liga.appendAnchor({"name": "caret_1", "x": 200, "y": 0}) liga = ufo.newGlyph("f_i") liga.appendAnchor({"name": "caret_", "x": 200, "y": 0}) ufo.newGlyph("acutecomb") ufo.newGlyph("tildecomb") ufo.lib["public.glyphOrder"] = [ "a", "f", "f.component", "i", "f_f_i", "f_i", "acutecomb", "tildecomb", ] return ufo class GdefFeatureWriterTest(FeatureWriterTest): FeatureWriter = GdefFeatureWriter @classmethod def writeGDEF(cls, ufo, **kwargs): writer = cls.FeatureWriter(**kwargs) feaFile = parseLayoutFeatures(ufo) if writer.write(ufo, feaFile): return feaFile def test_no_GDEF_no_openTypeCategories_in_font(self, testufo): newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; } GDEF; """ ) def test_GDEF_in_font(self, testufo): testufo.features.text = dedent( """\ table GDEF { GlyphClassDef [a], [], [acutecomb], []; LigatureCaretByPos f_i 300; } GDEF; """ ) assert self.writeGDEF(testufo) is None def test_openTypeCategories_in_font(self, testufo): testufo.lib["public.openTypeCategories"] = { "a": "base", "f.component": "component", "f_i": "ligature", "acutecomb": "mark", } newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { GlyphClassDef [a], [f_i], [acutecomb], [f.component]; LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; } GDEF; """ ) def test_GDEF_and_openTypeCategories_in_font(self, testufo): testufo.lib["public.openTypeCategories"] = { "a": "base", "f.component": "component", "f_i": "ligature", "acutecomb": "mark", } testufo.features.text = dedent( """\ table GDEF { GlyphClassDef [i], [], [tildecomb], []; LigatureCaretByPos f_i 100; } GDEF; """ ) assert self.writeGDEF(testufo) is None def test_GDEF_LigatureCarets_and_openTypeCategories_in_font(self, testufo): testufo.lib["public.openTypeCategories"] = { 
"a": "base", "f.component": "component", "f_i": "ligature", "acutecomb": "mark", } testufo.features.text = dedent( """\ table GDEF { LigatureCaretByPos f_i 100; } GDEF; """ ) newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { LigatureCaretByPos f_i 100; GlyphClassDef [a], [f_i], [acutecomb], [f.component]; } GDEF; """ ) def test_GDEF_GlyphClassDef_and_carets_in_font(self, testufo): testufo.lib["public.openTypeCategories"] = { "a": "base", "f.component": "component", "f_i": "ligature", "acutecomb": "mark", } testufo.features.text = dedent( """\ table GDEF { GlyphClassDef [], [], [acutecomb tildecomb], []; } GDEF; """ ) newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { GlyphClassDef [], [], [acutecomb tildecomb], []; LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; } GDEF; """ ) def test_mark_and_openTypeCategories_in_font(self, testufo): testufo.lib["public.openTypeCategories"] = { "a": "base", "f.component": "component", "f_f_i": "base", "f_i": "ligature", "acutecomb": "mark", "tildecomb": "component", } testufo.features.text = old = dedent( """\ feature mark { markClass tildecomb @TOP_MARKS; pos base a mark @TOP_MARKS; pos base f mark @TOP_MARKS; pos ligature f_f_i mark @TOP_MARKS ligComponent mark @TOP_MARKS ligComponent mark @TOP_MARKS; } mark; """ ) newFea = self.writeGDEF(testufo) assert str(newFea) == old + "\n" + dedent( """\ table GDEF { GlyphClassDef [a f_f_i], [f_i], [acutecomb], [f.component tildecomb]; LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; } GDEF; """ ) def test_vertical_carets(self, testufo): vliga = testufo.newGlyph("vi_li_ga") vliga.appendAnchor({"name": "vcaret_1", "x": 0, "y": 100}) vliga.appendAnchor({"name": "vcaret_2", "x": 0, "y": 200}) vliga = testufo.newGlyph("vli_ga") vliga.appendAnchor({"name": "vcaret_", "x": 0, "y": 100}) newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; LigatureCaretByPos vi_li_ga 100 200; LigatureCaretByPos vli_ga 100; } GDEF; """ ) def test_floaty_carets(self, testufo): # Some Glyphs sources happen to contain fractional caret positions. # In the Adobe feature file syntax (and binary OpenType GDEF tables), # caret positions must be integers. 
liga = testufo.newGlyph("li_ga") liga.appendAnchor({"name": "vcaret_1", "x": 0, "y": 200.1111}) liga.appendAnchor({"name": "caret_1", "x": 499.9876, "y": 0}) newFea = self.writeGDEF(testufo) assert str(newFea) == dedent( """\ table GDEF { LigatureCaretByPos f_f_i 200 400; LigatureCaretByPos f_i 200; LigatureCaretByPos li_ga 200 500; } GDEF; """ ) def test_getOpenTypeCategories_invalid(self, testufo, caplog): caplog.set_level(logging.WARNING) testufo.lib["public.openTypeCategories"] = { "a": "base", "f.component": "component", "f_f_i": "base", "f_i": "ligature", "acutecomb": "mark", "tildecomb": "components", } logger = "ufo2ft.featureWriters.gdefFeatureWriter.GdefFeatureWriter" with caplog.at_level(logging.WARNING, logger=logger): self.writeGDEF(testufo) assert len(caplog.records) == 1 assert "The 'public.openTypeCategories' value of tildecomb in" in caplog.text assert "is 'components' when it should be" in caplog.text ufo2ft-2.30.0/tests/featureWriters/kernFeatureWriter_test.py000066400000000000000000001070451434012334300242430ustar00rootroot00000000000000import logging from textwrap import dedent import pytest from ufo2ft.errors import InvalidFeaturesData from ufo2ft.featureCompiler import parseLayoutFeatures from ufo2ft.featureWriters import KernFeatureWriter, ast from . import FeatureWriterTest def makeUFO(cls, glyphMap, groups=None, kerning=None, features=None): ufo = cls() for name, uni in glyphMap.items(): glyph = ufo.newGlyph(name) if uni is not None: glyph.unicode = uni if groups is not None: ufo.groups.update(groups) if kerning is not None: ufo.kerning.update(kerning) if features is not None: ufo.features.text = features return ufo def getClassDefs(feaFile): return [s for s in feaFile.statements if isinstance(s, ast.GlyphClassDefinition)] def getGlyphs(classDef): return [str(g) for g in classDef.glyphs.glyphSet()] def getLookups(feaFile): return [s for s in feaFile.statements if isinstance(s, ast.LookupBlock)] def getPairPosRules(lookup): return [s for s in lookup.statements if isinstance(s, ast.PairPosStatement)] class KernFeatureWriterTest(FeatureWriterTest): FeatureWriter = KernFeatureWriter def test_cleanup_missing_glyphs(self, FontClass): groups = { "public.kern1.A": ["A", "Aacute", "Abreve", "Acircumflex"], "public.kern2.B": ["B", "D", "E", "F"], "public.kern1.C": ["foobar"], } kerning = { ("public.kern1.A", "public.kern2.B"): 10, ("public.kern1.A", "baz"): -25, ("baz", "public.kern2.B"): -20, ("public.kern1.C", "public.kern2.B"): 20, } ufo = FontClass() exclude = {"Abreve", "D", "foobar"} for glyphs in groups.values(): for glyph in glyphs: if glyph in exclude: continue ufo.newGlyph(glyph) ufo.groups.update(groups) ufo.kerning.update(kerning) writer = KernFeatureWriter() feaFile = parseLayoutFeatures(ufo) writer.write(ufo, feaFile) classDefs = getClassDefs(feaFile) assert len(classDefs) == 2 assert classDefs[0].name == "kern1.A" assert classDefs[1].name == "kern2.B" assert getGlyphs(classDefs[0]) == ["A", "Aacute", "Acircumflex"] assert getGlyphs(classDefs[1]) == ["B", "E", "F"] lookups = getLookups(feaFile) assert len(lookups) == 1 kern_ltr = lookups[0] assert kern_ltr.name == "kern_ltr" rules = getPairPosRules(kern_ltr) assert len(rules) == 1 assert str(rules[0]) == "pos @kern1.A @kern2.B 10;" def test_ignoreMarks(self, FontClass): font = FontClass() for name in ("one", "four", "six"): font.newGlyph(name) font.kerning.update({("four", "six"): -55.0, ("one", "six"): -30.0}) # default is ignoreMarks=True writer = KernFeatureWriter() feaFile = ast.FeatureFile() assert 
writer.write(font, feaFile) assert str(feaFile) == dedent( """\ lookup kern_ltr { lookupflag IgnoreMarks; pos four six -55; pos one six -30; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) writer = KernFeatureWriter(ignoreMarks=False) feaFile = ast.FeatureFile() assert writer.write(font, feaFile) assert str(feaFile) == dedent( """\ lookup kern_ltr { pos four six -55; pos one six -30; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) def test_mark_to_base_kern(self, FontClass): font = FontClass() for name in ("A", "B", "C"): font.newGlyph(name) font.newGlyph("acutecomb").unicode = 0x0301 font.kerning.update({("A", "acutecomb"): -55.0, ("B", "C"): -30.0}) font.features.text = dedent( """\ @Bases = [A B C]; @Marks = [acutecomb]; table GDEF { GlyphClassDef @Bases, [], @Marks, ; } GDEF; """ ) # default is ignoreMarks=True feaFile = self.writeFeatures(font) assert str(feaFile) == dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos B C -30; } kern_ltr; lookup kern_ltr_marks { pos A acutecomb -55; } kern_ltr_marks; feature kern { lookup kern_ltr; lookup kern_ltr_marks; } kern; """ ) feaFile = self.writeFeatures(font, ignoreMarks=False) assert str(feaFile) == dedent( """ lookup kern_ltr { pos A acutecomb -55; pos B C -30; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) def test_mark_to_base_only(self, FontClass): font = FontClass() for name in ("A", "B", "C"): font.newGlyph(name) font.newGlyph("acutecomb").unicode = 0x0301 font.kerning.update({("A", "acutecomb"): -55.0}) font.features.text = dedent( """\ @Bases = [A B C]; @Marks = [acutecomb]; table GDEF { GlyphClassDef @Bases, [], @Marks, ; } GDEF; """ ) # default is ignoreMarks=True feaFile = self.writeFeatures(font) assert str(feaFile) == dedent( """ lookup kern_ltr_marks { pos A acutecomb -55; } kern_ltr_marks; feature kern { lookup kern_ltr_marks; } kern; """ ) def test_mode(self, FontClass): ufo = FontClass() for name in ("one", "four", "six", "seven"): ufo.newGlyph(name) existing = dedent( """\ feature kern { pos one four' -50 six; } kern; """ ) ufo.features.text = existing ufo.kerning.update({("seven", "six"): 25.0}) writer = KernFeatureWriter() # default mode="skip" feaFile = parseLayoutFeatures(ufo) assert not writer.write(ufo, feaFile) assert str(feaFile) == existing # pass optional "append" mode writer = KernFeatureWriter(mode="append") feaFile = parseLayoutFeatures(ufo) assert writer.write(ufo, feaFile) expected = existing + dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) assert str(feaFile) == expected # pass "skip" mode explicitly writer = KernFeatureWriter(mode="skip") feaFile = parseLayoutFeatures(ufo) assert not writer.write(ufo, feaFile) assert str(feaFile) == existing def test_insert_comment_before(self, FontClass): ufo = FontClass() for name in ("one", "four", "six", "seven"): ufo.newGlyph(name) existing = dedent( """\ feature kern { # # Automatic Code # pos one four' -50 six; } kern; """ ) ufo.features.text = existing ufo.kerning.update({("seven", "six"): 25.0}) writer = KernFeatureWriter() feaFile = parseLayoutFeatures(ufo) assert writer.write(ufo, feaFile) expected = dedent( """\ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; feature kern { # # pos one four' -50 six; } kern; """ ) assert str(feaFile).strip() == expected.strip() # test append mode ignores insert marker generated = self.writeFeatures(ufo, mode="append") assert str(generated) == 
dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) def test_insert_comment_before_extended(self, FontClass): ufo = FontClass() for name in ("one", "four", "six", "seven"): ufo.newGlyph(name) existing = dedent( """\ feature kern { # # Automatic Code End # pos one four' -50 six; } kern; """ ) ufo.features.text = existing ufo.kerning.update({("seven", "six"): 25.0}) writer = KernFeatureWriter() feaFile = parseLayoutFeatures(ufo) assert writer.write(ufo, feaFile) expected = dedent( """\ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; feature kern { # # pos one four' -50 six; } kern; """ ) assert str(feaFile).strip() == expected.strip() def test_insert_comment_after(self, FontClass): ufo = FontClass() for name in ("one", "four", "six", "seven"): ufo.newGlyph(name) existing = dedent( """\ feature kern { pos one four' -50 six; # # Automatic Code # } kern; """ ) ufo.features.text = existing ufo.kerning.update({("seven", "six"): 25.0}) writer = KernFeatureWriter() feaFile = parseLayoutFeatures(ufo) assert writer.write(ufo, feaFile) expected = dedent( """\ feature kern { pos one four' -50 six; # # } kern; lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) assert str(feaFile) == expected # test append mode ignores insert marker generated = self.writeFeatures(ufo, mode="append") assert str(generated) == dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) def test_insert_comment_middle(self, FontClass): ufo = FontClass() for name in ("one", "four", "six", "seven"): ufo.newGlyph(name) existing = dedent( """\ feature kern { pos one four' -50 six; # # Automatic Code # pos one six' -50 six; } kern; """ ) ufo.features.text = existing ufo.kerning.update({("seven", "six"): 25.0}) writer = KernFeatureWriter() feaFile = parseLayoutFeatures(ufo) with pytest.raises( InvalidFeaturesData, match="Insert marker has rules before and after, feature kern " "cannot be inserted.", ): writer.write(ufo, feaFile) # test append mode ignores insert marker generated = self.writeFeatures(ufo, mode="append") assert str(generated) == dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos seven six 25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) def test_arabic_numerals(self, FontClass): """Test that arabic numerals (with bidi type AN) are kerned LTR. 
https://github.com/googlei18n/ufo2ft/issues/198 https://github.com/googlei18n/ufo2ft/pull/200 """ ufo = FontClass() for name, code in [("four-ar", 0x664), ("seven-ar", 0x667)]: glyph = ufo.newGlyph(name) glyph.unicode = code ufo.kerning.update({("four-ar", "seven-ar"): -30}) ufo.features.text = dedent( """ languagesystem DFLT dflt; languagesystem arab dflt; """ ) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """ lookup kern_rtl { lookupflag IgnoreMarks; pos four-ar seven-ar -30; } kern_rtl; feature kern { lookup kern_rtl; } kern; """ ) def test__groupScriptsByTagAndDirection(self, FontClass): font = FontClass() font.features.text = dedent( """ languagesystem DFLT dflt; languagesystem latn dflt; languagesystem latn TRK; languagesystem arab dflt; languagesystem arab URD; languagesystem deva dflt; languagesystem dev2 dflt; languagesystem math dflt; """ ) feaFile = parseLayoutFeatures(font) scripts = ast.getScriptLanguageSystems(feaFile) scriptGroups = KernFeatureWriter._groupScriptsByTagAndDirection(scripts) assert "kern" in scriptGroups assert list(scriptGroups["kern"]["LTR"]) == [ ("latn", ["dflt", "TRK "]), ("math", ["dflt"]), ] assert list(scriptGroups["kern"]["RTL"]) == [("arab", ["dflt", "URD "])] assert "dist" in scriptGroups assert list(scriptGroups["dist"]["LTR"]) == [ ("deva", ["dflt"]), ("dev2", ["dflt"]), ] def test_getKerningClasses(self, FontClass): font = FontClass() for i in range(65, 65 + 6): # A..F font.newGlyph(chr(i)) font.groups.update({"public.kern1.A": ["A", "B"], "public.kern2.C": ["C", "D"]}) # simulate a name clash between pre-existing class definitions in # feature file, and those generated by the feature writer font.features.text = "@kern1.A = [E F];" feaFile = parseLayoutFeatures(font) side1Classes, side2Classes = KernFeatureWriter.getKerningClasses(font, feaFile) assert "public.kern1.A" in side1Classes # the new class gets a unique name assert side1Classes["public.kern1.A"].name == "kern1.A_1" assert getGlyphs(side1Classes["public.kern1.A"]) == ["A", "B"] assert "public.kern2.C" in side2Classes assert side2Classes["public.kern2.C"].name == "kern2.C" assert getGlyphs(side2Classes["public.kern2.C"]) == ["C", "D"] def test_correct_invalid_class_names(self, FontClass): font = FontClass() for i in range(65, 65 + 12): # A..L font.newGlyph(chr(i)) font.groups.update( { "public.kern1.foo$": ["A", "B", "C"], "public.kern1.foo@": ["D", "E", "F"], "@public.kern2.bar": ["G", "H", "I"], "public.kern2.bar&": ["J", "K", "L"], } ) font.kerning.update( { ("public.kern1.foo$", "@public.kern2.bar"): 10, ("public.kern1.foo@", "public.kern2.bar&"): -10, } ) side1Classes, side2Classes = KernFeatureWriter.getKerningClasses(font) assert side1Classes["public.kern1.foo$"].name == "kern1.foo" assert side1Classes["public.kern1.foo@"].name == "kern1.foo_1" # no valid 'public.kern{1,2}.' 
prefix, skipped assert "@public.kern2.bar" not in side2Classes assert side2Classes["public.kern2.bar&"].name == "kern2.bar" def test_getKerningPairs(self, FontClass): font = FontClass() for i in range(65, 65 + 8): # A..H font.newGlyph(chr(i)) font.groups.update( { "public.kern1.foo": ["A", "B"], "public.kern2.bar": ["C", "D"], "public.kern1.baz": ["E", "F"], "public.kern2.nul": ["G", "H"], } ) font.kerning.update( { ("public.kern1.foo", "public.kern2.bar"): 10, ("public.kern1.baz", "public.kern2.bar"): -10, ("public.kern1.foo", "D"): 15, ("A", "public.kern2.bar"): 5, ("G", "H"): -5, # class-class zero-value pairs are skipped ("public.kern1.foo", "public.kern2.nul"): 0, } ) s1c, s2c = KernFeatureWriter.getKerningClasses(font) pairs = KernFeatureWriter.getKerningPairs(font, s1c, s2c) assert len(pairs) == 5 assert "G H -5" in repr(pairs[0]) assert (pairs[0].firstIsClass, pairs[0].secondIsClass) == (False, False) assert pairs[0].glyphs == {"G", "H"} assert "A @kern2.bar 5" in repr(pairs[1]) assert (pairs[1].firstIsClass, pairs[1].secondIsClass) == (False, True) assert pairs[1].glyphs == {"A", "C", "D"} assert "@kern1.foo D 15" in repr(pairs[2]) assert (pairs[2].firstIsClass, pairs[2].secondIsClass) == (True, False) assert pairs[2].glyphs == {"A", "B", "D"} assert "@kern1.baz @kern2.bar -10" in repr(pairs[3]) assert (pairs[3].firstIsClass, pairs[3].secondIsClass) == (True, True) assert pairs[3].glyphs == {"C", "D", "E", "F"} assert "@kern1.foo @kern2.bar 10" in repr(pairs[4]) assert (pairs[4].firstIsClass, pairs[4].secondIsClass) == (True, True) assert pairs[4].glyphs == {"A", "B", "C", "D"} def test_kern_LTR_and_RTL(self, FontClass): glyphs = { ".notdef": None, "four": 0x34, "seven": 0x37, "A": 0x41, "V": 0x56, "Aacute": 0xC1, "alef-ar": 0x627, "reh-ar": 0x631, "zain-ar": 0x632, "lam-ar": 0x644, "four-ar": 0x664, "seven-ar": 0x667, # # we also add glyphs without unicode codepoint, but linked to # # an encoded 'character' glyph by some GSUB rule "alef-ar.isol": None, "lam-ar.init": None, "reh-ar.fina": None, } groups = { "public.kern1.A": ["A", "Aacute"], "public.kern1.reh": ["reh-ar", "zain-ar", "reh-ar.fina"], "public.kern2.alef": ["alef-ar", "alef-ar.isol"], } kerning = { ("public.kern1.A", "V"): -40, ("seven", "four"): -25, ("reh-ar.fina", "lam-ar.init"): -80, ("public.kern1.reh", "public.kern2.alef"): -100, ("four-ar", "seven-ar"): -30, } features = dedent( """\ languagesystem DFLT dflt; languagesystem latn dflt; languagesystem latn TRK; languagesystem arab dflt; languagesystem arab URD; feature init { script arab; sub lam-ar by lam-ar.init; language URD; } init; feature fina { script arab; sub reh-ar by reh-ar.fina; language URD; } fina; """ ) ufo = makeUFO(FontClass, glyphs, groups, kerning, features) newFeatures = self.writeFeatures(ufo, ignoreMarks=False) assert str(newFeatures) == dedent( """\ @kern1.A = [A Aacute]; @kern1.reh = [reh-ar zain-ar reh-ar.fina]; @kern2.alef = [alef-ar alef-ar.isol]; lookup kern_dflt { pos seven four -25; } kern_dflt; lookup kern_ltr { enum pos @kern1.A V -40; } kern_ltr; lookup kern_rtl { pos four-ar seven-ar -30; pos reh-ar.fina lam-ar.init <-80 0 -80 0>; pos @kern1.reh @kern2.alef <-100 0 -100 0>; } kern_rtl; feature kern { lookup kern_dflt; script latn; language dflt; lookup kern_ltr; language TRK; script arab; language dflt; lookup kern_rtl; language URD; } kern; """ ) def test_kern_LTR_and_RTL_with_marks(self, FontClass): glyphs = { ".notdef": None, "four": 0x34, "seven": 0x37, "A": 0x41, "V": 0x56, "Aacute": 0xC1, "acutecomb": 0x301, "alef-ar": 
0x627, "reh-ar": 0x631, "zain-ar": 0x632, "lam-ar": 0x644, "four-ar": 0x664, "seven-ar": 0x667, "fatha-ar": 0x64E, # # we also add glyphs without unicode codepoint, but linked to # # an encoded 'character' glyph by some GSUB rule "alef-ar.isol": None, "lam-ar.init": None, "reh-ar.fina": None, } groups = { "public.kern1.A": ["A", "Aacute"], "public.kern1.reh": ["reh-ar", "zain-ar", "reh-ar.fina"], "public.kern2.alef": ["alef-ar", "alef-ar.isol"], } kerning = { ("public.kern1.A", "V"): -40, ("seven", "four"): -25, ("reh-ar.fina", "lam-ar.init"): -80, ("public.kern1.reh", "public.kern2.alef"): -100, ("four-ar", "seven-ar"): -30, ("V", "acutecomb"): 70, ("reh-ar", "fatha-ar"): 80, } features = dedent( """\ languagesystem DFLT dflt; languagesystem latn dflt; languagesystem latn TRK; languagesystem arab dflt; languagesystem arab URD; feature init { script arab; sub lam-ar by lam-ar.init; language URD; } init; feature fina { script arab; sub reh-ar by reh-ar.fina; language URD; } fina; @Bases = [A V Aacute alef-ar reh-ar zain-ar lam-ar alef-ar.isol lam-ar.init reh-ar.fina]; @Marks = [acutecomb fatha-ar]; table GDEF { GlyphClassDef @Bases, [], @Marks, ; } GDEF; """ ) ufo = makeUFO(FontClass, glyphs, groups, kerning, features) newFeatures = self.writeFeatures(ufo) assert str(newFeatures) == dedent( """\ @kern1.A = [A Aacute]; @kern1.reh = [reh-ar zain-ar reh-ar.fina]; @kern2.alef = [alef-ar alef-ar.isol]; lookup kern_dflt { lookupflag IgnoreMarks; pos seven four -25; } kern_dflt; lookup kern_ltr { lookupflag IgnoreMarks; enum pos @kern1.A V -40; } kern_ltr; lookup kern_ltr_marks { pos V acutecomb 70; } kern_ltr_marks; lookup kern_rtl { lookupflag IgnoreMarks; pos four-ar seven-ar -30; pos reh-ar.fina lam-ar.init <-80 0 -80 0>; pos @kern1.reh @kern2.alef <-100 0 -100 0>; } kern_rtl; lookup kern_rtl_marks { pos reh-ar fatha-ar <80 0 80 0>; } kern_rtl_marks; feature kern { lookup kern_dflt; script latn; language dflt; lookup kern_ltr; lookup kern_ltr_marks; language TRK; script arab; language dflt; lookup kern_rtl; lookup kern_rtl_marks; language URD; } kern; """ ) def test_kern_RTL_with_marks(self, FontClass): glyphs = { ".notdef": None, "alef-ar": 0x627, "reh-ar": 0x631, "zain-ar": 0x632, "lam-ar": 0x644, "four-ar": 0x664, "seven-ar": 0x667, "fatha-ar": 0x64E, # # we also add glyphs without unicode codepoint, but linked to # # an encoded 'character' glyph by some GSUB rule "alef-ar.isol": None, "lam-ar.init": None, "reh-ar.fina": None, } groups = { "public.kern1.reh": ["reh-ar", "zain-ar", "reh-ar.fina"], "public.kern2.alef": ["alef-ar", "alef-ar.isol"], } kerning = { ("reh-ar.fina", "lam-ar.init"): -80, ("public.kern1.reh", "public.kern2.alef"): -100, ("reh-ar", "fatha-ar"): 80, } features = dedent( """\ languagesystem arab dflt; languagesystem arab ARA; feature init { script arab; sub lam-ar by lam-ar.init; } init; feature fina { script arab; sub reh-ar by reh-ar.fina; } fina; @Bases = [alef-ar reh-ar zain-ar lam-ar alef-ar.isol lam-ar.init reh-ar.fina]; @Marks = [fatha-ar]; table GDEF { GlyphClassDef @Bases, [], @Marks, ; } GDEF; """ ) ufo = makeUFO(FontClass, glyphs, groups, kerning, features) newFeatures = self.writeFeatures(ufo) assert str(newFeatures) == dedent( """\ @kern1.reh = [reh-ar zain-ar reh-ar.fina]; @kern2.alef = [alef-ar alef-ar.isol]; lookup kern_rtl { lookupflag IgnoreMarks; pos reh-ar.fina lam-ar.init <-80 0 -80 0>; pos @kern1.reh @kern2.alef <-100 0 -100 0>; } kern_rtl; lookup kern_rtl_marks { pos reh-ar fatha-ar <80 0 80 0>; } kern_rtl_marks; feature kern { lookup kern_rtl; 
lookup kern_rtl_marks; } kern; """ ) def test_kern_LTR_and_RTL_one_uses_DFLT(self, FontClass): glyphs = {"A": 0x41, "V": 0x56, "reh-ar": 0x631, "alef-ar": 0x627} kerning = {("A", "V"): -40, ("reh-ar", "alef-ar"): -100} features = "languagesystem latn dflt;" ufo = makeUFO(FontClass, glyphs, kerning=kerning, features=features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos A V -40; } kern_ltr; lookup kern_rtl { lookupflag IgnoreMarks; pos reh-ar alef-ar <-100 0 -100 0>; } kern_rtl; feature kern { script DFLT; language dflt; lookup kern_rtl; script latn; language dflt; lookup kern_ltr; } kern; """ ) features = dedent("languagesystem arab dflt;") ufo = makeUFO(FontClass, glyphs, kerning=kerning, features=features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """ lookup kern_ltr { lookupflag IgnoreMarks; pos A V -40; } kern_ltr; lookup kern_rtl { lookupflag IgnoreMarks; pos reh-ar alef-ar <-100 0 -100 0>; } kern_rtl; feature kern { script DFLT; language dflt; lookup kern_ltr; script arab; language dflt; lookup kern_rtl; } kern; """ ) def test_kern_LTR_and_RTL_cannot_use_DFLT(self, FontClass): glyphs = {"A": 0x41, "V": 0x56, "reh-ar": 0x631, "alef-ar": 0x627} kerning = {("A", "V"): -40, ("reh-ar", "alef-ar"): -100} ufo = makeUFO(FontClass, glyphs, kerning=kerning) with pytest.raises(ValueError, match="cannot use DFLT script"): self.writeFeatures(ufo) def test_dist_LTR(self, FontClass): glyphs = {"aaMatra_kannada": 0x0CBE, "ailength_kannada": 0xCD6} groups = { "public.kern1.KND_aaMatra_R": ["aaMatra_kannada"], "public.kern2.KND_ailength_L": ["aaMatra_kannada"], } kerning = {("public.kern1.KND_aaMatra_R", "public.kern2.KND_ailength_L"): 34} features = dedent( """\ languagesystem DFLT dflt; languagesystem latn dflt; languagesystem knda dflt; languagesystem knd2 dflt; """ ) ufo = makeUFO(FontClass, glyphs, groups, kerning, features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """\ @kern1.KND_aaMatra_R = [aaMatra_kannada]; @kern2.KND_ailength_L = [aaMatra_kannada]; lookup kern_ltr { lookupflag IgnoreMarks; pos @kern1.KND_aaMatra_R @kern2.KND_ailength_L 34; } kern_ltr; feature kern { script DFLT; language dflt; lookup kern_ltr; script latn; language dflt; lookup kern_ltr; } kern; feature dist { script knda; language dflt; lookup kern_ltr; script knd2; language dflt; lookup kern_ltr; } dist; """ ) def test_dist_RTL(self, FontClass): glyphs = {"u10A06": 0x10A06, "u10A1E": 0x10A1E} kerning = {("u10A1E", "u10A06"): 117} features = dedent( """\ languagesystem DFLT dflt; languagesystem arab dflt; languagesystem khar dflt; """ ) ufo = makeUFO(FontClass, glyphs, kerning=kerning, features=features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """ lookup kern_rtl { lookupflag IgnoreMarks; pos u10A1E u10A06 <117 0 117 0>; } kern_rtl; feature kern { script DFLT; language dflt; lookup kern_rtl; script arab; language dflt; lookup kern_rtl; } kern; feature dist { script khar; language dflt; lookup kern_rtl; } dist; """ ) def test_dist_LTR_and_RTL(self, FontClass): glyphs = { "aaMatra_kannada": 0x0CBE, "ailength_kannada": 0xCD6, "u10A06": 0x10A06, "u10A1E": 0x10A1E, } groups = { "public.kern1.KND_aaMatra_R": ["aaMatra_kannada"], "public.kern2.KND_ailength_L": ["aaMatra_kannada"], } kerning = { ("public.kern1.KND_aaMatra_R", "public.kern2.KND_ailength_L"): 34, ("u10A1E", "u10A06"): 117, } features = dedent( """\ languagesystem DFLT dflt; languagesystem knda dflt; 
languagesystem knd2 dflt; languagesystem khar dflt; """ ) ufo = makeUFO(FontClass, glyphs, groups, kerning, features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """\ @kern1.KND_aaMatra_R = [aaMatra_kannada]; @kern2.KND_ailength_L = [aaMatra_kannada]; lookup kern_ltr { lookupflag IgnoreMarks; pos @kern1.KND_aaMatra_R @kern2.KND_ailength_L 34; } kern_ltr; lookup kern_rtl { lookupflag IgnoreMarks; pos u10A1E u10A06 <117 0 117 0>; } kern_rtl; feature dist { script knda; language dflt; lookup kern_ltr; script knd2; language dflt; lookup kern_ltr; script khar; language dflt; lookup kern_rtl; } dist; """ ) def test_skip_ambiguous_direction_pair(self, FontClass, caplog): caplog.set_level(logging.ERROR) ufo = FontClass() ufo.newGlyph("A").unicode = 0x41 ufo.newGlyph("one").unicode = 0x31 ufo.newGlyph("yod-hb").unicode = 0x5D9 ufo.newGlyph("reh-ar").unicode = 0x631 ufo.newGlyph("one-ar").unicode = 0x661 ufo.newGlyph("bar").unicodes = [0x73, 0x627] ufo.kerning.update( { ("bar", "bar"): 1, ("bar", "A"): 2, ("reh-ar", "A"): 3, ("reh-ar", "one-ar"): 4, ("yod-hb", "one"): 5, } ) ufo.features.text = dedent( """\ languagesystem DFLT dflt; languagesystem latn dflt; languagesystem arab dflt; """ ) logger = "ufo2ft.featureWriters.kernFeatureWriter.KernFeatureWriter" with caplog.at_level(logging.WARNING, logger=logger): generated = self.writeFeatures(ufo) assert not generated assert len(caplog.records) == 5 assert "skipped kern pair with ambiguous direction" in caplog.text def test_kern_RTL_and_DFLT_numbers(self, FontClass): glyphs = {"four": 0x34, "seven": 0x37, "bet-hb": 0x5D1, "yod-hb": 0x5D9} kerning = {("seven", "four"): -25, ("yod-hb", "bet-hb"): -100} features = dedent( """\ languagesystem DFLT dflt; languagesystem hebr dflt; """ ) ufo = makeUFO(FontClass, glyphs, kerning=kerning, features=features) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """ lookup kern_dflt { lookupflag IgnoreMarks; pos seven four -25; } kern_dflt; lookup kern_rtl { lookupflag IgnoreMarks; pos yod-hb bet-hb <-100 0 -100 0>; } kern_rtl; feature kern { lookup kern_dflt; lookup kern_rtl; } kern; """ ) def test_quantize(self, FontClass): font = FontClass() for name in ("one", "four", "six"): font.newGlyph(name) font.kerning.update({("four", "six"): -57.0, ("one", "six"): -24.0}) writer = KernFeatureWriter(quantization=5) feaFile = ast.FeatureFile() assert writer.write(font, feaFile) assert str(feaFile) == dedent( """\ lookup kern_ltr { lookupflag IgnoreMarks; pos four six -55; pos one six -25; } kern_ltr; feature kern { lookup kern_ltr; } kern; """ ) if __name__ == "__main__": import sys sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/featureWriters/markFeatureWriter_test.py000066400000000000000000001354621434012334300242420ustar00rootroot00000000000000import logging import os import re from textwrap import dedent import pytest from ufo2ft.errors import InvalidFeaturesData from ufo2ft.featureCompiler import parseLayoutFeatures from ufo2ft.featureWriters import ast from ufo2ft.featureWriters.markFeatureWriter import ( MarkFeatureWriter, NamedAnchor, parseAnchorName, ) from . 
import FeatureWriterTest @pytest.fixture def testufo(FontClass): ufo = FontClass() ufo.newGlyph("a").appendAnchor({"name": "top", "x": 100, "y": 200}) liga = ufo.newGlyph("f_i") liga.appendAnchor({"name": "top_1", "x": 100, "y": 500}) liga.appendAnchor({"name": "top_2", "x": 600, "y": 500}) ufo.newGlyph("acutecomb").appendAnchor({"name": "_top", "x": 100, "y": 200}) accent = ufo.newGlyph("tildecomb") accent.appendAnchor({"name": "_top", "x": 100, "y": 200}) accent.appendAnchor({"name": "top", "x": 100, "y": 300}) return ufo @pytest.mark.parametrize( "input_expected", [ ("top", (False, "top", None)), ("top_", (False, "top_", None)), ("top1", (False, "top1", None)), ("_bottom", (True, "bottom", None)), ("bottom_2", (False, "bottom", 2)), ("top_right_1", (False, "top_right", 1)), ], ) def test_parseAnchorName(input_expected): anchorName, (isMark, key, number) = input_expected assert parseAnchorName(anchorName) == (isMark, key, number) def test_parseAnchorName_invalid(): with pytest.raises(ValueError, match="mark anchor cannot be numbered"): parseAnchorName("_top_2") with pytest.raises(ValueError, match="mark anchor key is nil"): parseAnchorName("_") def test_NamedAnchor_invalid(): with pytest.raises(ValueError, match="indexes must start from 1"): NamedAnchor("top_0", 1, 2) def test_NamedAnchor_repr(): expected = "NamedAnchor(name='top', x=1.0, y=2.0)" assert repr(NamedAnchor("top", 1.0, 2.0)) == expected class MarkFeatureWriterTest(FeatureWriterTest): FeatureWriter = MarkFeatureWriter def test__makeMarkClassDefinitions_empty(self, FontClass): ufo = FontClass() ufo.newGlyph("a").appendAnchor({"name": "top", "x": 250, "y": 500}) ufo.newGlyph("c").appendAnchor({"name": "bottom", "x": 250, "y": -100}) ufo.newGlyph("grave").appendAnchor({"name": "_top", "x": 100, "y": 200}) ufo.newGlyph("cedilla").appendAnchor({"name": "_bottom", "x": 100, "y": 0}) writer = MarkFeatureWriter() feaFile = ast.FeatureFile() writer.setContext(ufo, feaFile) markClassDefs = writer._makeMarkClassDefinitions() assert len(feaFile.markClasses) == 2 assert [str(mcd) for mcd in markClassDefs] == [ "markClass cedilla @MC_bottom;", "markClass grave @MC_top;", ] def test__makeMarkClassDefinitions_non_empty(self, FontClass): ufo = FontClass() ufo.newGlyph("a").appendAnchor({"name": "top", "x": 250, "y": 500}) ufo.newGlyph("c").appendAnchor({"name": "bottom", "x": 250, "y": -100}) ufo.newGlyph("grave").appendAnchor({"name": "_top", "x": 100, "y": 200}) ufo.newGlyph("cedilla").appendAnchor({"name": "_bottom", "x": 100, "y": 0}) ufo.features.text = dedent( """\ markClass cedilla @MC_bottom; markClass grave @MC_top; """ ) writer = MarkFeatureWriter() feaFile = parseLayoutFeatures(ufo) writer.setContext(ufo, feaFile) markClassDefs = writer._makeMarkClassDefinitions() assert len(markClassDefs) == 1 assert len(feaFile.markClasses) == 3 assert "MC_bottom" in feaFile.markClasses assert "MC_top" in feaFile.markClasses assert [str(mcd) for mcd in markClassDefs] == [ "markClass cedilla @MC_bottom_1;" ] def test_skip_empty_feature(self, FontClass): ufo = FontClass() assert not self.writeFeatures(ufo) ufo.newGlyph("a").appendAnchor({"name": "top", "x": 100, "y": 200}) ufo.newGlyph("acutecomb").appendAnchor({"name": "_top", "x": 100, "y": 200}) fea = str(self.writeFeatures(ufo)) assert "feature mark" in fea assert "feature mkmk" not in fea def test_skip_unnamed_anchors(self, FontClass, caplog): caplog.set_level(logging.ERROR) ufo = FontClass() ufo.newGlyph("a").appendAnchor({"x": 100, "y": 200}) writer = MarkFeatureWriter() feaFile = 
ast.FeatureFile() logger = "ufo2ft.featureWriters.markFeatureWriter.MarkFeatureWriter" with caplog.at_level(logging.WARNING, logger=logger): writer.setContext(ufo, feaFile) assert len(caplog.records) == 1 assert "unnamed anchor discarded in glyph 'a'" in caplog.text def test_warn_duplicate_anchor_names(self, FontClass, caplog): caplog.set_level(logging.ERROR) ufo = FontClass() ufo.newGlyph("a").anchors = [ {"name": "top", "x": 100, "y": 200}, {"name": "top", "x": 200, "y": 300}, ] writer = MarkFeatureWriter() feaFile = ast.FeatureFile() logger = "ufo2ft.featureWriters.markFeatureWriter.MarkFeatureWriter" with caplog.at_level(logging.WARNING, logger=logger): writer.setContext(ufo, feaFile) assert len(caplog.records) == 1 assert "duplicate anchor 'top' in glyph 'a'" in caplog.text def test_warn_liga_anchor_in_mark_glyph(self, testufo, caplog): caplog.set_level(logging.ERROR) testufo.newGlyph("ogonekcomb").anchors = [ {"name": "_top", "x": 200, "y": -40}, {"name": "top_1", "x": 200, "y": 450}, # should not be there! ] logger = "ufo2ft.featureWriters.markFeatureWriter.MarkFeatureWriter" with caplog.at_level(logging.WARNING, logger=logger): _ = self.writeFeatures(testufo) assert len(caplog.records) == 1 assert "invalid ligature anchor 'top_1' in mark glyph" in caplog.text def test_ligature_NULL_anchor(self, testufo): testufo.newGlyph("f_f_foo").anchors = [ {"name": "top_1", "x": 250, "y": 600}, {"name": "top_2", "x": 500, "y": 600}, {"name": "_3", "x": 0, "y": 0}, # this becomes ] generated = self.writeFeatures(testufo) assert re.search(r"ligComponent\s+", str(generated)) def test_skip_existing_feature(self, testufo): testufo.features.text = dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; } mark; """ ) generated = self.writeFeatures(testufo) # only mkmk is generated, mark was already present assert str(generated) == dedent( """\ markClass tildecomb @MC_top; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_append_feature(self, testufo): testufo.features.text = dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; } mark; """ ) generated = self.writeFeatures(testufo, mode="append") assert str(generated) == dedent( """\ markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_insert_comment_before(self, testufo): writer = MarkFeatureWriter() testufo.features.text = dedent( """\ markClass acutecomb @MC_top; feature mark { # # Automatic Code # lookup mark1 { pos base a mark @MC_top; } mark1; } mark; """ ) feaFile = parseLayoutFeatures(testufo) assert writer.write(testufo, feaFile) assert str(feaFile) == dedent( """\ markClass acutecomb @MC_top; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mark { # # lookup mark1 { pos base a mark @MC_top; } mark1; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; 
lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) # test append mode ignores insert marker generated = self.writeFeatures(testufo, mode="append") assert str(generated) == dedent( """\ markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_insert_comment_after(self, testufo): writer = MarkFeatureWriter() testufo.features.text = dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; # # Automatic Code # } mark; """ ) feaFile = parseLayoutFeatures(testufo) assert writer.write(testufo, feaFile) assert str(feaFile) == dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; # # } mark; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) # test append mode ignores insert marker generated = self.writeFeatures(testufo, mode="append") assert str(generated) == dedent( """\ markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_insert_comment_middle(self, testufo): writer = MarkFeatureWriter() testufo.features.text = dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; # # Automatic Code # lookup mark2 { pos base a mark @MC_top; } mark2; } mark; """ ) feaFile = parseLayoutFeatures(testufo) with pytest.raises( InvalidFeaturesData, match="Insert marker has rules before and after, feature mark " "cannot be inserted.", ): writer.write(testufo, feaFile) # test append mode ignores insert marker generated = self.writeFeatures(testufo, mode="append") assert str(generated) == dedent( """\ markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_insert_comment_outside_block(self, testufo): writer = MarkFeatureWriter() testufo.features.text = dedent( """\ # # Automatic Code # """ ) feaFile = parseLayoutFeatures(testufo) assert writer.write(testufo, feaFile) testufo.features.text = dedent( """\ # # Automatic Code # markClass acutecomb @MC_top; feature mark { lookup mark1 { pos base a mark @MC_top; } mark1; } mark; """ ) feaFile = parseLayoutFeatures(testufo) assert writer.write(testufo, feaFile) # test append mode writer = 
MarkFeatureWriter(mode="append") assert writer.write(testufo, feaFile) def test_defs_and_lookups_first(self, testufo): testufo.newGlyph("circumflexcomb") writer = MarkFeatureWriter() testufo.features.text = dedent( """\ feature mkmk { # Automatic Code # Move acutecomb down and right if preceded by circumflexcomb lookup move_acutecomb { lookupflag UseMarkFilteringSet [acutecomb circumflexcomb]; pos circumflexcomb acutecomb' <0 20 0 20>; } move_acutecomb; } mkmk; """ ) feaFile = parseLayoutFeatures(testufo) assert writer.write(testufo, feaFile) assert str(feaFile) == dedent( """\ markClass acutecomb @MC_top; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; feature mkmk { # Move acutecomb down and right if preceded by circumflexcomb lookup move_acutecomb { lookupflag UseMarkFilteringSet [acutecomb circumflexcomb]; pos circumflexcomb acutecomb' <0 20 0 20>; } move_acutecomb; } mkmk; """ ) def test_mark_mkmk_features(self, testufo): writer = MarkFeatureWriter() # by default both mark + mkmk are built feaFile = ast.FeatureFile() assert writer.write(testufo, feaFile) assert str(feaFile) == dedent( """\ markClass acutecomb @MC_top; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_write_only_one(self, testufo): writer = MarkFeatureWriter(features=["mkmk"]) # only builds "mkmk" feaFile = ast.FeatureFile() assert writer.write(testufo, feaFile) fea = str(feaFile) assert "feature mark" not in fea assert "feature mkmk" in fea writer = MarkFeatureWriter(features=["mark"]) # only builds "mark" feaFile = ast.FeatureFile() assert writer.write(testufo, feaFile) fea = str(feaFile) assert "feature mark" in fea assert "feature mkmk" not in fea def test_predefined_anchor_lists(self, FontClass): """Roboto uses some weird anchor naming scheme, see: https://github.com/google/roboto/blob/ 5700de83856781fa0c097a349e46dbaae5792cb0/ scripts/lib/fontbuild/markFeature.py#L41-L47 """ class RobotoMarkFeatureWriter(MarkFeatureWriter): class NamedAnchor(NamedAnchor): markPrefix = "_mark" ignoreRE = "(^mkmk|_acc$)" ufo = FontClass() a = ufo.newGlyph("a") a.anchors = [ {"name": "top", "x": 250, "y": 600}, {"name": "bottom", "x": 250, "y": -100}, ] f_i = ufo.newGlyph("f_i") f_i.anchors = [ {"name": "top_1", "x": 200, "y": 700}, {"name": "top_2", "x": 500, "y": 700}, ] gravecomb = ufo.newGlyph("gravecomb") gravecomb.anchors = [ {"name": "_marktop", "x": 160, "y": 780}, {"name": "mkmktop", "x": 150, "y": 800}, {"name": "mkmkbottom_acc", "x": 150, "y": 600}, ] ufo.newGlyph("cedillacomb").appendAnchor( {"name": "_markbottom", "x": 200, "y": 0} ) ufo.newGlyph("ogonekcomb").appendAnchor({"name": "_bottom", "x": 180, "y": -10}) writer = RobotoMarkFeatureWriter() feaFile = ast.FeatureFile() writer.write(ufo, feaFile) assert str(feaFile) == dedent( """\ markClass cedillacomb @MC_markbottom; markClass gravecomb @MC_marktop; feature mark { lookup mark2base 
{ pos base a mark @MC_markbottom mark @MC_marktop; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_marktop ligComponent mark @MC_marktop; } mark2liga; } mark; feature mkmk { lookup mark2mark_bottom { @MFS_mark2mark_bottom = [cedillacomb gravecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_bottom; pos mark gravecomb mark @MC_markbottom; } mark2mark_bottom; lookup mark2mark_top { @MFS_mark2mark_top = [gravecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark gravecomb mark @MC_marktop; } mark2mark_top; } mkmk; """ # noqa: B950 ) def test_abvm_blwm_features(self, FontClass): ufo = FontClass() ufo.info.unitsPerEm = 1000 dottedCircle = ufo.newGlyph("dottedCircle") dottedCircle.unicode = 0x25CC dottedCircle.anchors = [ {"name": "top", "x": 297, "y": 552}, {"name": "topright", "x": 491, "y": 458}, {"name": "bottom", "x": 297, "y": 0}, ] nukta = ufo.newGlyph("nukta-kannada") nukta.unicode = 0x0CBC nukta.appendAnchor({"name": "_bottom", "x": 0, "y": 0}) nukta = ufo.newGlyph("candrabindu-kannada") nukta.unicode = 0x0C81 nukta.appendAnchor({"name": "_top", "x": 0, "y": 547}) halant = ufo.newGlyph("halant-kannada") halant.unicode = 0x0CCD halant.appendAnchor({"name": "_topright", "x": -456, "y": 460}) ka = ufo.newGlyph("ka-kannada") ka.unicode = 0x0C95 ka.appendAnchor({"name": "bottom", "x": 290, "y": 0}) ka_base = ufo.newGlyph("ka-kannada.base") ka_base.appendAnchor({"name": "top", "x": 291, "y": 547}) ka_base.appendAnchor({"name": "topright", "x": 391, "y": 460}) ka_base.appendAnchor({"name": "bottom", "x": 290, "y": 0}) ufo.features.text = dedent( """\ languagesystem DFLT dflt; languagesystem knda dflt; languagesystem knd2 dflt; feature psts { sub ka-kannada' halant-kannada by ka-kannada.base; } psts; """ ) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """\ markClass nukta-kannada @MC_bottom; markClass candrabindu-kannada @MC_top; markClass halant-kannada @MC_topright; feature abvm { lookup abvm_mark2base { pos base ka-kannada.base mark @MC_top mark @MC_topright; } abvm_mark2base; } abvm; feature blwm { lookup blwm_mark2base { pos base ka-kannada mark @MC_bottom; pos base ka-kannada.base mark @MC_bottom; } blwm_mark2base; } blwm; feature mark { lookup mark2base { pos base dottedCircle mark @MC_bottom mark @MC_top mark @MC_topright; } mark2base; } mark; """ # noqa: B950 ) def test_shared_script_char(self, FontClass): ufo = FontClass() ufo.info.unitsPerEm = 1000 dottedCircle = ufo.newGlyph("kashida-ar") dottedCircle.unicode = 0x0640 dottedCircle.anchors = [ {"name": "top", "x": 100, "y": 100}, {"name": "bottom", "x": 100, "y": -100}, ] nukta = ufo.newGlyph("fatha-ar") nukta.unicode = 0x064E nukta.appendAnchor({"name": "_top", "x": 0, "y": 0}) nukta = ufo.newGlyph("kasra-ar") nukta.unicode = 0x0650 nukta.appendAnchor({"name": "_bottom", "x": 0, "y": 547}) ufo.features.text = dedent( """\ languagesystem DFLT dflt; languagesystem arab dflt; """ ) generated = self.writeFeatures(ufo) assert str(generated) == dedent( """\ markClass kasra-ar @MC_bottom; markClass fatha-ar @MC_top; feature mark { lookup mark2base { pos base kashida-ar mark @MC_bottom mark @MC_top; } mark2base; } mark; """ # noqa: B950 ) expected = dedent( """\ markClass kasra-ar @MC_bottom; markClass fatha-ar @MC_top; feature abvm { lookup abvm_mark2base { pos base kashida-ar mark @MC_top; } abvm_mark2base; } abvm; feature blwm { lookup blwm_mark2base { pos base kashida-ar mark @MC_bottom; } blwm_mark2base; } blwm; feature mark { lookup mark2base { pos base kashida-ar mark @MC_bottom mark 
@MC_top; } mark2base; } mark; """ # noqa: B950 ) ufo.features.text = "" generated = self.writeFeatures(ufo) assert str(generated) == expected ufo.features.text = dedent( """\ languagesystem DFLT dflt; languagesystem arab dflt; languagesystem adlm dflt; """ ) generated = self.writeFeatures(ufo) assert str(generated) == expected def test_all_features(self, testufo): ufo = testufo ufo.info.unitsPerEm = 1000 ufo.newGlyph("cedillacomb").anchors = [ {"name": "_bottom", "x": 10, "y": -5}, {"name": "bottom", "x": 20, "y": -309}, ] ufo.newGlyph("c").appendAnchor({"name": "bottom", "x": 240, "y": 0}) dottedCircle = ufo.newGlyph("dottedCircle") dottedCircle.unicode = 0x25CC dottedCircle.anchors = [ {"name": "top", "x": 297, "y": 552}, {"name": "bottom", "x": 297, "y": 0}, {"name": "bar", "x": 491, "y": 458}, ] # too lazy, couldn't come up with a real-word example :/ foocomb = ufo.newGlyph("foocomb") foocomb.unicode = 0x0B85 foocomb.anchors = [ {"name": "_top", "x": 100, "y": 40}, {"name": "top", "x": 100, "y": 190}, ] barcomb = ufo.newGlyph("barcomb") barcomb.unicode = 0x0B86 barcomb.anchors = [ {"name": "_bar", "x": 100, "y": 40}, {"name": "bar", "x": 100, "y": 440.1}, ] bazcomb = ufo.newGlyph("bazcomb") bazcomb.unicode = 0x0B87 bazcomb.anchors = [ {"name": "_bottom", "x": 90, "y": 320}, {"name": "bottom", "x": 100, "y": -34}, ] foo_bar_baz = ufo.newGlyph("foo_bar_baz") foo_bar_baz.unicode = 0x0B88 foo_bar_baz.anchors = [ {"name": "top_1", "x": 100, "y": 500}, {"name": "bottom_1", "x": 100, "y": 10}, {"name": "_2", "x": 600, "y": 500}, {"name": "top_3", "x": 1000, "y": 500}, {"name": "bar_3", "x": 1100, "y": 499}, # below half UPEM ] bar_foo = ufo.newGlyph("bar_foo") bar_foo.unicode = 0x0B89 # sequence doesn't start from 1, the first is implied NULL anchor bar_foo.anchors = [{"name": "top_2", "x": 600, "y": 501}] testufo.glyphOrder = [ "a", "f_i", "acutecomb", "tildecomb", "cedillacomb", "c", "dottedCircle", "foocomb", "barcomb", "bazcomb", "foo_bar_baz", "bar_foo", ] ufo.features.text = dedent( """\ languagesystem DFLT dflt; languagesystem taml dflt; """ ) generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ markClass barcomb @MC_bar; markClass cedillacomb @MC_bottom; markClass bazcomb @MC_bottom; markClass acutecomb @MC_top; markClass tildecomb @MC_top; markClass foocomb @MC_top; feature abvm { lookup abvm_mark2liga { pos ligature foo_bar_baz mark @MC_top ligComponent ligComponent mark @MC_bar mark @MC_top; pos ligature bar_foo ligComponent mark @MC_top; } abvm_mark2liga; lookup abvm_mark2mark_bar { @MFS_abvm_mark2mark_bar = [barcomb]; lookupflag UseMarkFilteringSet @MFS_abvm_mark2mark_bar; pos mark barcomb mark @MC_bar; } abvm_mark2mark_bar; lookup abvm_mark2mark_top { @MFS_abvm_mark2mark_top = [foocomb]; lookupflag UseMarkFilteringSet @MFS_abvm_mark2mark_top; pos mark foocomb mark @MC_top; } abvm_mark2mark_top; } abvm; feature blwm { lookup blwm_mark2liga { pos ligature foo_bar_baz mark @MC_bottom ligComponent ligComponent ; } blwm_mark2liga; lookup blwm_mark2mark_bottom { @MFS_blwm_mark2mark_bottom = [bazcomb]; lookupflag UseMarkFilteringSet @MFS_blwm_mark2mark_bottom; pos mark bazcomb mark @MC_bottom; } blwm_mark2mark_bottom; } blwm; feature mark { lookup mark2base { pos base a mark @MC_top; pos base c mark @MC_bottom; pos base dottedCircle mark @MC_bar mark @MC_bottom mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_bottom { @MFS_mark2mark_bottom = [cedillacomb]; 
lookupflag UseMarkFilteringSet @MFS_mark2mark_bottom; pos mark cedillacomb mark @MC_bottom; } mark2mark_bottom; lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ # noqa: B950 ) def test_mark_mkmk_features_with_GDEF(self, testufo): D = testufo.newGlyph("D") D.anchors = [ {"name": "top", "x": 300, "y": 700}, {"name": "center", "x": 320, "y": 360}, ] # these glyphs have compatible anchors but since they are not listed in # the GDEF groups, they won't be included in the mark/mkmk feature testufo.newGlyph("Alpha").appendAnchor({"name": "topleft", "x": -10, "y": 400}) testufo.newGlyph("psili").appendAnchor({"name": "_topleft", "x": 0, "y": 50}) dotaccentcomb = testufo.newGlyph("dotaccentcomb") # this mark glyph has more than one mark anchor, and both will be # generated. Since the two mark anchors cannot cohabit in the same # mark lookup, two lookups will be generated. dotaccentcomb.anchors = [ {"name": "_center", "x": 0, "y": 0}, {"name": "_top", "x": 0, "y": 0}, {"name": "top", "x": 0, "y": 300}, ] testufo.features.text = dedent( """\ @Bases = [a D]; @Marks = [acutecomb tildecomb dotaccentcomb]; table GDEF { GlyphClassDef @Bases, [f_i], @Marks, ; } GDEF; """ ) testufo.glyphOrder = [ "Alpha", "D", "a", "acutecomb", "dotaccentcomb", "f_i", "psili", "tildecomb", ] generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ markClass dotaccentcomb @MC_center; markClass acutecomb @MC_top; markClass dotaccentcomb @MC_top; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base D mark @MC_center; } mark2base; lookup mark2base_1 { pos base D mark @MC_top; pos base a mark @MC_top; } mark2base_1; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb dotaccentcomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark dotaccentcomb mark @MC_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_mark_mkmk_features_with_GDEF_and_openTypeCategories(self, testufo): # this glyph has compatible anchors and has an openTypeCategories "base" # value D = testufo.newGlyph("D") D.anchors = [ {"name": "top", "x": 300, "y": 700}, {"name": "center", "x": 320, "y": 360}, ] # these glyphs have compatible anchors but since they are not listed in # the GDEF groups, they won't be included in the mark/mkmk feature testufo.newGlyph("Alpha").appendAnchor({"name": "topleft", "x": -10, "y": 400}) testufo.newGlyph("psili").appendAnchor({"name": "_topleft", "x": 0, "y": 50}) dotaccentcomb = testufo.newGlyph("dotaccentcomb") # this mark glyph has more than one mark anchor, and both will be # generated. Since the two mark anchors cannot cohabit in the same # mark lookup, two lookups will be generated.
dotaccentcomb.anchors = [ {"name": "_center", "x": 0, "y": 0}, {"name": "_top", "x": 0, "y": 0}, {"name": "top", "x": 0, "y": 300}, ] # will be ignored because in GDEF table below testufo.lib["public.openTypeCategories"] = { "D": "base", "dotaccentcomb": "mark", "tildecomb": "base", } testufo.features.text = dedent( """\ @Bases = [a]; @Marks = [acutecomb tildecomb]; table GDEF { GlyphClassDef @Bases, [f_i], @Marks, ; } GDEF; """ ) testufo.glyphOrder = [ "Alpha", "D", "a", "acutecomb", "dotaccentcomb", "f_i", "psili", "tildecomb", ] generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ markClass acutecomb @MC_top; markClass tildecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; lookup mark2liga { pos ligature f_i mark @MC_top ligComponent mark @MC_top; } mark2liga; } mark; feature mkmk { lookup mark2mark_top { @MFS_mark2mark_top = [acutecomb tildecomb]; lookupflag UseMarkFilteringSet @MFS_mark2mark_top; pos mark tildecomb mark @MC_top; } mark2mark_top; } mkmk; """ ) def test_multiple_anchor_classes_base(self, FontClass): dirname = os.path.dirname(os.path.dirname(__file__)) fontPath = os.path.join(dirname, "data", "MultipleAnchorClasses.ufo") testufo = FontClass(fontPath) generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ markClass acutecomb @MC_topA; markClass acutecomb @MC_topE; feature mark { lookup mark2base { pos base a mark @MC_topA; } mark2base; lookup mark2base_1 { pos base e mark @MC_topE; } mark2base_1; } mark; """ ) def test_multiple_anchor_classes_liga(self, FontClass): ufo = FontClass() liga = ufo.newGlyph("f_i") liga.appendAnchor({"name": "top_1", "x": 100, "y": 500}) liga.appendAnchor({"name": "top_2", "x": 600, "y": 500}) ligaOther = ufo.newGlyph("f_f") ligaOther.appendAnchor({"name": "topOther_1", "x": 101, "y": 501}) ligaOther.appendAnchor({"name": "topOther_2", "x": 601, "y": 501}) ligaMix = ufo.newGlyph("f_l") ligaMix.appendAnchor({"name": "top_1", "x": 102, "y": 502}) ligaMix.appendAnchor({"name": "topOther_2", "x": 602, "y": 502}) acutecomb = ufo.newGlyph("acutecomb") acutecomb.appendAnchor({"name": "_top", "x": 100, "y": 200}) acutecomb.appendAnchor({"name": "_topOther", "x": 150, "y": 250}) generated = self.writeFeatures(ufo) # MC_top should be last thanks to the anchorSortKey assert str(generated) == dedent( """\ markClass acutecomb @MC_top; markClass acutecomb @MC_topOther; feature mark { lookup mark2liga { pos ligature f_f mark @MC_topOther ligComponent mark @MC_topOther; pos ligature f_l <anchor NULL> ligComponent mark @MC_topOther; } mark2liga; lookup mark2liga_1 { pos ligature f_i mark @MC_top ligComponent mark @MC_top; pos ligature f_l mark @MC_top ligComponent <anchor NULL>; } mark2liga_1; } mark; """ ) def test_multiple_anchor_classes_conflict_warning(self, FontClass, caplog): """Check that when there is an ambiguity in the form of one base glyph and one mark glyph being able to be linked through two different anchor pairs, the mark feature writer emits a warning about the situation but still outputs a valid feature declaration. The last lookup in that feature declaration will "win" and determine the outcome of mark positioning.
See this comment for more information: https://github.com/googlefonts/ufo2ft/pull/416#issuecomment-721693266 """ caplog.set_level(logging.INFO) ufo = FontClass() liga = ufo.newGlyph("a") liga.appendAnchor({"name": "top", "x": 100, "y": 500}) liga.appendAnchor({"name": "topOther", "x": 150, "y": 550}) acutecomb = ufo.newGlyph("acutecomb") acutecomb.appendAnchor({"name": "_top", "x": 100, "y": 200}) acutecomb.appendAnchor({"name": "_topOther", "x": 150, "y": 250}) generated = self.writeFeatures(ufo) assert ( "The base glyph a and mark glyph acutecomb are ambiguously " "connected by several anchor classes: MC_topOther, MC_top. " "The last one will prevail." in caplog.text ) # MC_top should be last thanks to the anchorSortKey assert str(generated) == dedent( """\ markClass acutecomb @MC_top; markClass acutecomb @MC_topOther; feature mark { lookup mark2base { pos base a mark @MC_topOther; } mark2base; lookup mark2base_1 { pos base a mark @MC_top; } mark2base_1; } mark; """ ) def test_skipExportGlyphs(self, testufo): testufo.lib["public.skipExportGlyphs"] = ["f_i", "tildecomb"] testufo.glyphOrder = ["a", "f_i", "acutecomb", "tildcomb"] generated = self.writeFeatures(testufo) assert str(generated) == dedent( """\ markClass acutecomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; } mark; """ ) def test_quantize(self, testufo): testufo.newGlyph("ogonekcomb").anchors = [ {"name": "_top", "x": 236, "y": 188}, ] testufo.lib["public.skipExportGlyphs"] = ["f_i", "tildecomb"] generated = self.writeFeatures(testufo, quantization=50) assert str(generated) == dedent( """\ markClass acutecomb @MC_top; markClass ogonekcomb @MC_top; feature mark { lookup mark2base { pos base a mark @MC_top; } mark2base; } mark; """ ) if __name__ == "__main__": import sys sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/filters/000077500000000000000000000000001434012334300156105ustar00rootroot00000000000000ufo2ft-2.30.0/tests/filters/__init__.py000066400000000000000000000000001434012334300177070ustar00rootroot00000000000000ufo2ft-2.30.0/tests/filters/decomposeComponents_test.py000066400000000000000000000033471434012334300232540ustar00rootroot00000000000000import logging from ufo2ft.filters.decomposeComponents import DecomposeComponentsFilter from ufo2ft.util import logger def test_missing_component_is_dropped(FontClass, caplog): ufo = FontClass() a = ufo.newGlyph("a") a.width = 100 pen = a.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((300, 300)) pen.lineTo((0, 300)) pen.closePath() aacute = ufo.newGlyph("aacute") aacute.width = 100 pen = aacute.getPen() pen.addComponent("a", (1, 0, 0, 1, 0, 0)) pen.addComponent("acute", (1, 0, 0, 1, 350, 0)) # missing assert len(ufo["aacute"]) == 0 assert len(ufo["aacute"].components) == 2 with caplog.at_level(logging.WARNING, logger=logger.name): filter_ = DecomposeComponentsFilter() assert filter_(ufo) assert len(ufo["aacute"]) == 1 assert len(ufo["aacute"].components) == 0 assert len(caplog.records) == 1 assert "dropping non-existent component" in caplog.text def test_nested_components(FontClass): ufo = FontClass() a = ufo.newGlyph("six.lf") a.width = 100 pen = a.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((300, 300)) pen.lineTo((0, 300)) pen.closePath() b = ufo.newGlyph("nine.lf") b.width = 100 pen = b.getPen() pen.addComponent("six.lf", (-1, 0, 0, -1, 0, 0)) c = ufo.newGlyph("nine") c.width = 100 pen = c.getPen() pen.addComponent("nine.lf", (1, 0, 0, 1, 0, 0)) filter_ = DecomposeComponentsFilter() assert filter_(ufo) 
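# after decomposition, each glyph in the nested chain ("six.lf", "nine.lf", "nine") is expected to end up with exactly one contour of its own and no remaining components, as the assertions below check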
assert len(ufo["six.lf"]) == 1 assert not ufo["six.lf"].components assert len(ufo["nine.lf"]) == 1 assert not ufo["nine.lf"].components assert len(ufo["nine"]) == 1 assert not ufo["nine"].components ufo2ft-2.30.0/tests/filters/decomposeTransformedComponents_test.py000066400000000000000000000166761434012334300254720ustar00rootroot00000000000000from ufo2ft.filters.decomposeTransformedComponents import ( DecomposeTransformedComponentsFilter, ) from ufo2ft.preProcessor import TTFInterpolatablePreProcessor class DecomposeTransformedComponentsFilterTest: def test_transformed_components(self, FontClass): ufo = FontClass() a = ufo.newGlyph("six.lf") a.width = 300 pen = a.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((150, 300)) pen.closePath() # six has one component c = ufo.newGlyph("six") c.width = 300 pen = c.getPen() pen.addComponent("six.lf", (1, 0, 0, 1, 0, 0)) # nine.lf has one transformed component of a component b = ufo.newGlyph("nine.lf") b.width = 300 pen = b.getPen() pen.addComponent("six.lf", (-1, 0, 0, -1, 0, 0)) # nine has one transformed component c = ufo.newGlyph("nine") c.width = 300 pen = c.getPen() pen.addComponent("six", (-1, 0, 0, -1, 0, 0)) # nine.of has one component of a transformed component d = ufo.newGlyph("nine.of") d.width = 300 pen = d.getPen() pen.addComponent("nine", (1, 0, 0, 1, 0, -80)) filter_ = DecomposeTransformedComponentsFilter() assert filter_(ufo) # six.lf has one outline and no component assert len(ufo["six.lf"]) == 1 assert not ufo["six.lf"].components # six has no outline and one component assert len(ufo["six"]) == 0 assert len(ufo["six"].components) == 1 # nine.lf has one outline and no component, it was decomposed assert len(ufo["nine.lf"]) == 1 assert not ufo["nine.lf"].components # nine has one outline and no component, it was decomposed assert len(ufo["nine"]) == 1 assert not ufo["nine"].components # nine.of has no outline and one component, it was not decomposed assert len(ufo["nine.of"]) == 0 assert len(ufo["nine.of"].components) == 1 def test_decompose_compatibly(self, FontClass): ufo1 = FontClass() c = ufo1.newGlyph("comp") c.width = 300 pen = c.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((150, 300)) pen.closePath() b = ufo1.newGlyph("base") b.width = 300 pen = b.getPen() pen.addComponent("comp", (0.5, 0, 0, 0.5, 0, 0)) ufo2 = FontClass() c = ufo2.newGlyph("comp") c.width = 600 pen = c.getPen() pen.moveTo((0, 0)) pen.lineTo((600, 0)) pen.lineTo((300, 600)) pen.closePath() b = ufo2.newGlyph("base") b.width = 600 pen = b.getPen() pen.addComponent("comp", (1, 0, 0, 1, 0, 0)) # Because ufo1.base needs decomposing, so should ufo2.base glyphsets = TTFInterpolatablePreProcessor( [ufo1, ufo2], filters=[DecomposeTransformedComponentsFilter(pre=True)] ).process() assert len(glyphsets[0]["base"]) == 1 assert len(glyphsets[1]["base"]) == 1 def test_decompose_compatibly_nested_transformed_components(self, FontClass): # This replicates three glyphs from the 'changa.zip' test fonts at # https://github.com/googlefonts/ufo2ft/issues/621 # In both fonts, the "exclam" glyph is made of one simple contour and one # component ("period"); "exclamdown" in turn is made of one "exclam" component # that is flipped vertically and horizontally; "period" is a single contour. # But only in the Bold.ufo, the "exclam" contains a scaled down "period"; in # the Regular.ufo, the "period" component only has an offset. 
# This would previously trigger a situation whereby after "exclamdown" was # decomposed, its points were no longer interpolation compatible across masters # because the order in which the contours were decomposed was different. # This is because filters used to modify glyphs in-place in alphabetical order, # so 'exclam' comes before 'exclamdown', and in the Bold.ufo, 'exclam' has # a 2x3 transform so is decomposed (with the period appended at the end), but # then 'exclamdown' needs decomposing as well (for it's flipped) and the # already decomposed 'exclam' contours are drawn onto it in the same order; # whereas in Regular.ufo, the 'exclam' does not contain transformed components # so it's kept as composite (for the time being, it will be decomposed later on # because it's mixed), but when it's the turn of 'exclamdown', the period's # contour gets appended to it before the rest of the rest of the 'exclam' # (deepCopyContours follows a post-order depth-first traversal so the children # get decomposed before the parent) -- leading to cu2qu crashing... Pfew! regular_ufo = FontClass() period = regular_ufo.newGlyph("period") period.width = 230 pen = period.getPen() pen.moveTo((50, 62)) pen.curveTo((50, 13), (61, -6), (115, -6)) pen.curveTo((168, -6), (180, 13), (180, 62)) pen.curveTo((180, 110), (168, 131), (115, 131)) pen.curveTo((61, 131), (50, 110), (50, 62)) pen.closePath() exclam = regular_ufo.newGlyph("exclam") exclam.width = 250 pen = exclam.getPen() pen.moveTo((93, 196)) pen.lineTo((156, 196)) pen.lineTo((186, 627)) pen.curveTo((186, 637), (181, 645), (161, 645)) pen.lineTo((87, 645)) pen.curveTo((67, 645), (63, 637), (63, 627)) pen.closePath() pen.addComponent("period", (1, 0, 0, 1, 10, 0)) exclamdown = regular_ufo.newGlyph("exclamdown") exclamdown.width = 250 pen = exclamdown.getPen() pen.addComponent("exclam", (-1, 0, 0, -1, 250, 509)) bold_ufo = FontClass() period = bold_ufo.newGlyph("period") period.width = 277 pen = period.getPen() pen.moveTo((30, 99)) pen.curveTo((30, 23), (50, -6), (139, -6)) pen.curveTo((227, -6), (247, 23), (247, 99)) pen.curveTo((247, 175), (227, 206), (139, 206)) pen.curveTo((50, 206), (30, 175), (30, 99)) pen.closePath() exclam = bold_ufo.newGlyph("exclam") exclam.width = 297 pen = exclam.getPen() pen.moveTo((84, 230)) pen.lineTo((214, 230)) pen.lineTo((254, 618)) pen.curveTo((254, 633), (247, 645), (217, 645)) pen.lineTo((81, 645)) pen.curveTo((51, 645), (44, 633), (44, 618)) pen.closePath() pen.addComponent("period", (0.87, 0, 0, 0.87, 28, -1)) exclamdown = bold_ufo.newGlyph("exclamdown") exclamdown.width = 297 pen = exclamdown.getPen() pen.addComponent("exclam", (-1, 0, 0, -1, 298, 509)) # We test that, even with DecomposeTransformedComponentsFilter(pre=True) and # the above nested/transformed/mixed component setup, we don't crash cu2qu # with errors about masters with inconsistent contour order after decomposition # of "exclamdown". 
glyphsets = TTFInterpolatablePreProcessor( [regular_ufo, bold_ufo], filters=[DecomposeTransformedComponentsFilter(pre=True)], ).process() assert len(glyphsets[0]["exclam"]) == 2 assert len(glyphsets[0]["exclamdown"]) == 2 assert len(glyphsets[1]["exclam"]) == 2 assert len(glyphsets[1]["exclamdown"]) == 2 ufo2ft-2.30.0/tests/filters/dottedCircle_test.py000066400000000000000000000022361434012334300216310ustar00rootroot00000000000000from ufo2ft.filters.dottedCircleFilter import DottedCircleFilter from ufo2ft.util import _GlyphSet def test_dotted_circle_filter(FontClass, datadir): ufo_path = datadir.join("DottedCircleTest.ufo") font = FontClass(ufo_path) assert "uni25CC" not in font philter = DottedCircleFilter() glyphset = _GlyphSet.from_layer(font) modified = philter(font, glyphset) assert "uni25CC" in modified dotted_circle = glyphset["uni25CC"] # check the Glyph's module is the same as the Font's (both ufoLib2 or defcon, # not mixed): https://github.com/googlefonts/ufo2ft/issues/644 font_ufo_module = type(font).__module__.split(".")[0] glyph_ufo_module = type(dotted_circle).__module__.split(".")[0] assert glyph_ufo_module == font_ufo_module anchors = list(sorted(dotted_circle.anchors, key=lambda x: x.name)) assert anchors[0].x == 464 assert anchors[0].y == -17 assert anchors[0].name == "bottom" assert anchors[1].x == 563 assert anchors[1].y == 546 assert anchors[1].name == "top" assert len(dotted_circle) == 12 assert int(dotted_circle.width) == 688 assert dotted_circle.unicodes == [0x25CC] ufo2ft-2.30.0/tests/filters/filters_test.py000066400000000000000000000154711434012334300207010ustar00rootroot00000000000000from types import SimpleNamespace import pytest from fontTools.misc.loggingTools import CapturingLogHandler from ufo2ft.filters import ( FILTERS_KEY, BaseFilter, getFilterClass, loadFilterFromString, loadFilters, logger, ) from ..testSupport import _TempModule class FooBarFilter(BaseFilter): """A filter that does nothing.""" _args = ("a", "b") _kwargs = {"c": 0} def filter(self, glyph): return False @pytest.fixture(scope="module", autouse=True) def fooBar(): """Make a temporary 'ufo2ft.filters.fooBar' module containing a 'FooBarFilter' class for testing the filter loading machinery. 
""" with _TempModule("ufo2ft.filters.fooBar") as temp_module: temp_module.module.__dict__["FooBarFilter"] = FooBarFilter yield def test_getFilterClass(): assert getFilterClass("Foo Bar") == FooBarFilter assert getFilterClass("FooBar") == FooBarFilter assert getFilterClass("fooBar") == FooBarFilter with pytest.raises(ImportError): getFilterClass("Baz") with _TempModule("myfilters"), _TempModule("myfilters.fooBar") as temp_module: with pytest.raises(AttributeError): # this fails because `myfilters.fooBar` module does not # have a `FooBarFilter` class getFilterClass("Foo Bar", pkg="myfilters") temp_module.module.__dict__["FooBarFilter"] = FooBarFilter # this will attempt to import the `FooBarFilter` class from the # `myfilters.fooBar` module assert getFilterClass("Foo Bar", pkg="myfilters") == FooBarFilter class MockFont(SimpleNamespace): pass class MockGlyph(SimpleNamespace): pass def test_loadFilters_empty(): ufo = MockFont(lib={}) assert FILTERS_KEY not in ufo.lib assert loadFilters(ufo) == ([], []) @pytest.fixture def ufo(): ufo = MockFont(lib={}) ufo.lib[FILTERS_KEY] = [{"name": "Foo Bar", "args": ["foo", "bar"]}] return ufo def test_loadFilters_pre(ufo): ufo.lib[FILTERS_KEY][0]["pre"] = True pre, post = loadFilters(ufo) assert len(pre) == 1 assert not post assert isinstance(pre[0], FooBarFilter) def test_loadFilters_custom_namespace(ufo): ufo.lib[FILTERS_KEY][0]["name"] = "Self Destruct" ufo.lib[FILTERS_KEY][0]["namespace"] = "my_dangerous_filters" class SelfDestructFilter(FooBarFilter): def filter(self, glyph): # Don't try this at home!!! LOL :) # shutil.rmtree(os.path.expanduser("~")) return True with _TempModule("my_dangerous_filters"), _TempModule( "my_dangerous_filters.selfDestruct" ) as temp: temp.module.__dict__["SelfDestructFilter"] = SelfDestructFilter _, [filter_obj] = loadFilters(ufo) assert isinstance(filter_obj, SelfDestructFilter) def test_loadFilters_args_missing(ufo): del ufo.lib[FILTERS_KEY][0]["args"] with pytest.raises(TypeError) as exc_info: loadFilters(ufo) assert exc_info.match("missing") def test_loadFilters_args_unsupported(ufo): ufo.lib[FILTERS_KEY][0]["args"].append("baz") with pytest.raises(TypeError) as exc_info: loadFilters(ufo) assert exc_info.match("unsupported") def test_loadFilters_args_as_keywords(ufo): del ufo.lib[FILTERS_KEY][0]["args"] ufo.lib[FILTERS_KEY][0]["kwargs"] = {"a": "foo", "b": "bar"} _, [filter_obj] = loadFilters(ufo) assert filter_obj.options.a == "foo" assert filter_obj.options.b == "bar" def test_loadFilters_args_as_duplicated_keywords(ufo): ufo.lib[FILTERS_KEY][0]["args"] = ["foo"] ufo.lib[FILTERS_KEY][0]["kwargs"] = {"a": "foo", "b": "bar"} with pytest.raises(TypeError) as exc_info: loadFilters(ufo) assert exc_info.match("duplicated") def test_loadFilters_include_all(ufo): _, [filter_obj] = loadFilters(ufo) assert filter_obj.include(MockGlyph(name="hello")) assert filter_obj.include(MockGlyph(name="world")) def test_loadFilters_include_list(ufo): ufo.lib[FILTERS_KEY][0]["include"] = ["a", "b"] _, [filter_obj] = loadFilters(ufo) assert filter_obj.include(MockGlyph(name="a")) assert filter_obj.include(MockGlyph(name="b")) assert not filter_obj.include(MockGlyph(name="c")) def test_loadFilters_exclude_list(ufo): ufo.lib[FILTERS_KEY][0]["exclude"] = ["a", "b"] _, [filter_obj] = loadFilters(ufo) assert not filter_obj.include(MockGlyph(name="a")) assert not filter_obj.include(MockGlyph(name="b")) assert filter_obj.include(MockGlyph(name="c")) def test_loadFilters_both_include_exclude(ufo): ufo.lib[FILTERS_KEY][0]["include"] = ["a", "b"] 
ufo.lib[FILTERS_KEY][0]["exclude"] = ["c", "d"] with pytest.raises(ValueError) as exc_info: loadFilters(ufo) assert exc_info.match("arguments are mutually exclusive") def test_loadFilters_failed(ufo): ufo.lib[FILTERS_KEY].append(dict(name="Non Existent")) with CapturingLogHandler(logger, level="ERROR") as captor: loadFilters(ufo) captor.assertRegex("Failed to load filter") def test_loadFilters_kwargs_unsupported(ufo): ufo.lib[FILTERS_KEY][0]["kwargs"] = {} ufo.lib[FILTERS_KEY][0]["kwargs"]["c"] = 1 ufo.lib[FILTERS_KEY][0]["kwargs"]["d"] = 2 # unknown with pytest.raises(TypeError) as exc_info: loadFilters(ufo) assert exc_info.match("got an unsupported keyword") VALID_SPEC_STRINGS = [ "RemoveOverlapsFilter", "PropagateAnchorsFilter(include=['a', 'b', 'c'])", "ufo2ft.filters.fooBar::FooBarFilter(a='a', b='b', c=1)", ] @pytest.mark.parametrize("spec", VALID_SPEC_STRINGS) def test_loadFilterFromString(spec, ufo): philter = loadFilterFromString(spec) assert callable(philter) def test_loadFilterFromString_args_missing(ufo): with pytest.raises(TypeError) as info: loadFilterFromString( "ufo2ft.filters.fooBar::FooBarFilter(a='a', c=1)", ) assert info.match("missing 1 required positional argument: 'b'") with pytest.raises(TypeError) as info: loadFilterFromString( "ufo2ft.filters.fooBar::FooBarFilter(c=1)", ) assert info.match("missing 2 required positional arguments: 'a', 'b'") def test_BaseFilter_repr(): class NoArgFilter(BaseFilter): pass assert repr(NoArgFilter()) == "NoArgFilter()" assert repr(FooBarFilter("a", "b", c=1)) == ("FooBarFilter('a', 'b', c=1)") assert ( repr(FooBarFilter("c", "d", include=["x", "y"])) == "FooBarFilter('c', 'd', c=0, include=['x', 'y'])" ) assert ( repr(FooBarFilter("e", "f", c=2.0, exclude=("z",))) == "FooBarFilter('e', 'f', c=2.0, exclude=('z',))" ) def f(g): return False assert repr( FooBarFilter("g", "h", include=f) ) == "FooBarFilter('g', 'h', c=0, include={})".format(repr(f)) if __name__ == "__main__": import sys sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/filters/flattenComponents_test.py000066400000000000000000000204121434012334300227230ustar00rootroot00000000000000import pytest from fontTools.misc.loggingTools import CapturingLogHandler from ufo2ft.filters.flattenComponents import FlattenComponentsFilter, logger @pytest.fixture( params=[ { "glyphs": [ {"name": "space", "width": 500}, { "name": "contourGlyph", "width": 350, "outline": [ ("moveTo", ((0, 0),)), ("lineTo", ((300, 0),)), ("lineTo", ((300, 300),)), ("lineTo", ((0, 300),)), ("closePath", ()), ], }, { "name": "componentGlyph", "width": 350, "outline": [("addComponent", ("contourGlyph", (1, 0, 0, 1, 0, 0)))], }, { "name": "nestedComponentGlyph", "width": 350, "outline": [ ("addComponent", ("componentGlyph", (1, 0, 0, 1, 0, 0))) ], }, { "name": "componentAndNestedComponentsGlyph", "width": 700, "outline": [ ("addComponent", ("contourGlyph", (1, 0, 0, 1, 0, 0))), ("addComponent", ("componentGlyph", (1, 0, 0, 1, 350, 0))), ( "addComponent", ("nestedComponentGlyph", (1, 0, 0, 1, 700, 0)), ), ], }, { "name": "contourAndComponentGlyph", "width": 600, "outline": [ ("moveTo", ((400, 0),)), ("lineTo", ((400, 100),)), ("lineTo", ((500, 100),)), ("lineTo", ((500, 0),)), ("closePath", ()), ("addComponent", ("contourGlyph", (1, 0, 0, 1, 0, 0))), ], }, { "name": "nestedContourAndComponentGlyph", "width": 600, "outline": [ ( "addComponent", ("contourAndComponentGlyph", (1, 0, 0, 1, 50, 0)), ), ], }, { "name": "nestedNestedContourAndComponentGlyph", "width": 600, "outline": [ ( "addComponent", 
("nestedContourAndComponentGlyph", (1, 0, 0, 1, 45, 0)), ), ], }, { "name": "scaledComponentGlyph", "width": 600, "outline": [ ( "addComponent", ("contourGlyph", (0.5, 0, 0, 0.5, 50, 50)), ), ], }, { "name": "nestedScaledComponentGlyph", "width": 600, "outline": [ ( "addComponent", ("scaledComponentGlyph", (1, 0, 0, 1, 40, 40)), ), ], }, { "name": "scaledNestedComponentGlyph", "width": 600, "outline": [ ( "addComponent", ("scaledComponentGlyph", (1.2, 0, 0, 1.2, 40, 40)), ), ], }, ] } ] ) def font(request, FontClass): font = FontClass() for param in request.param["glyphs"]: glyph = font.newGlyph(param["name"]) glyph.width = param.get("width", 0) pen = glyph.getPen() for operator, operands in param.get("outline", []): getattr(pen, operator)(*operands) return font class FlattenComponentsFilterTest: def test_empty_glyph(self, font): philter = FlattenComponentsFilter(include={"space"}) assert not philter(font) def test_contour_glyph(self, font): philter = FlattenComponentsFilter(include={"contourGlyph"}) assert not philter(font) def test_component_glyph(self, font): philter = FlattenComponentsFilter(include={"componentGlyph"}) assert not philter(font) def test_nested_components_glyph(self, font): philter = FlattenComponentsFilter(include={"nestedComponentGlyph"}) modified = philter(font) assert modified == {"nestedComponentGlyph"} assert [ (c.baseGlyph, c.transformation) for c in font["nestedComponentGlyph"].components ] == [("contourGlyph", (1, 0, 0, 1, 0, 0))] def test_nested_contour_and_component_glyph(self, font): philter = FlattenComponentsFilter( include={ "nestedContourAndComponentGlyph", "nestedNestedContourAndComponentGlyph", } ) modified = philter(font) assert modified == {"nestedNestedContourAndComponentGlyph"} assert [ (c.baseGlyph, c.transformation) for c in font["nestedNestedContourAndComponentGlyph"].components ] == [("contourAndComponentGlyph", (1, 0, 0, 1, 95, 0))] def test_scaled_component_glyph(self, font): philter = FlattenComponentsFilter( include={ "scaledComponentGlyph", "nestedScaledComponentGlyph", "scaledNestedComponentGlyph", } ) modified = philter(font) assert modified == { "nestedScaledComponentGlyph", "scaledNestedComponentGlyph", } assert [ (c.baseGlyph, c.transformation) for c in font["nestedScaledComponentGlyph"].components ] == [("contourGlyph", (0.5, 0, 0, 0.5, 90, 90))] assert [ (c.baseGlyph, c.transformation) for c in font["scaledNestedComponentGlyph"].components ] == [("contourGlyph", (0.6, 0, 0, 0.6, 100, 100))] def test_whole_font(self, font): philter = FlattenComponentsFilter() modified = philter(font) assert modified == { "nestedComponentGlyph", "componentAndNestedComponentsGlyph", "nestedNestedContourAndComponentGlyph", "nestedScaledComponentGlyph", "scaledNestedComponentGlyph", } assert [ (c.baseGlyph, c.transformation) for c in font["nestedComponentGlyph"].components ] == [("contourGlyph", (1, 0, 0, 1, 0, 0))] assert [ (c.baseGlyph, c.transformation) for c in font["componentAndNestedComponentsGlyph"].components ] == [ ("contourGlyph", (1, 0, 0, 1, 0, 0)), ("contourGlyph", (1, 0, 0, 1, 350, 0)), ("contourGlyph", (1, 0, 0, 1, 700, 0)), ] assert [ (c.baseGlyph, c.transformation) for c in font["nestedContourAndComponentGlyph"].components ] == [ ("contourAndComponentGlyph", (1, 0, 0, 1, 50, 0)), ] assert [ (c.baseGlyph, c.transformation) for c in font["nestedNestedContourAndComponentGlyph"].components ] == [("contourAndComponentGlyph", (1, 0, 0, 1, 95, 0))] assert [ (c.baseGlyph, c.transformation) for c in font["nestedScaledComponentGlyph"].components ] 
== [("contourGlyph", (0.5, 0, 0, 0.5, 90, 90))] assert [ (c.baseGlyph, c.transformation) for c in font["scaledNestedComponentGlyph"].components ] == [("contourGlyph", (0.6, 0, 0, 0.6, 100, 100))] def test_logger(self, font): with CapturingLogHandler(logger, level="INFO") as captor: philter = FlattenComponentsFilter() _ = philter(font) captor.assertRegex("Flattened composite glyphs: 5") ufo2ft-2.30.0/tests/filters/propagateAnchors_test.py000066400000000000000000000204201434012334300225170ustar00rootroot00000000000000import pytest from fontTools.misc.loggingTools import CapturingLogHandler import ufo2ft.filters from ufo2ft.filters.propagateAnchors import PropagateAnchorsFilter, logger @pytest.fixture( params=[ { "glyphs": [ {"name": "space", "width": 500}, { "name": "a", "width": 350, "outline": [ ("moveTo", ((0, 0),)), ("lineTo", ((300, 0),)), ("lineTo", ((300, 300),)), ("lineTo", ((0, 300),)), ("closePath", ()), ], "anchors": [(175, 300, "top"), (175, 0, "bottom")], }, { "name": "dieresiscomb", "width": 0, "outline": [ ("moveTo", ((-120, 320),)), ("lineTo", ((-60, 320),)), ("lineTo", ((-60, 360),)), ("lineTo", ((-120, 360),)), ("closePath", ()), ("moveTo", ((120, 320),)), ("lineTo", ((60, 320),)), ("lineTo", ((60, 360),)), ("lineTo", ((120, 360),)), ("closePath", ()), ], "anchors": [(0, 300, "_top"), (0, 480, "top")], }, { "name": "macroncomb", "width": 0, "outline": [ ("moveTo", ((-120, 330),)), ("lineTo", ((120, 330),)), ("lineTo", ((120, 350),)), ("lineTo", ((-120, 350),)), ("closePath", ()), ], "anchors": [(0, 300, "_top"), (0, 480, "top")], }, { "name": "a-cyr", "width": 350, "outline": [("addComponent", ("a", (1, 0, 0, 1, 0, 0)))], }, { "name": "amacron", "width": 350, "outline": [ ("addComponent", ("a", (1, 0, 0, 1, 0, 0))), ("addComponent", ("macroncomb", (1, 0, 0, 1, 175, 0))), ], "anchors": [(176, 481, "top")], }, { "name": "adieresis", "width": 350, "outline": [ ("addComponent", ("a", (1, 0, 0, 1, 0, 0))), ("addComponent", ("dieresiscomb", (1, 0, 0, 1, 175, 0))), ], }, { "name": "amacrondieresis", "width": 350, "outline": [ ("addComponent", ("amacron", (1, 0, 0, 1, 0, 0))), ("addComponent", ("dieresiscomb", (1, 0, 0, 1, 175, 180))), ], }, { "name": "adieresismacron", "width": 350, "outline": [ ("addComponent", ("a", (1, 0, 0, 1, 0, 0))), ("addComponent", ("dieresiscomb", (1, 0, 0, 1, 175, 0))), ("addComponent", ("macroncomb", (1, 0, 0, 1, 175, 180))), ], }, { "name": "a_a", "width": 700, "outline": [ ("addComponent", ("a", (1, 0, 0, 1, 0, 0))), ("addComponent", ("a", (1, 0, 0, 1, 350, 0))), ], }, { "name": "emacron", "width": 350, "outline": [ ("addComponent", ("e", (1, 0, 0, 1, 0, 0))), ("addComponent", ("macroncomb", (1, 0, 0, 1, 175, 0))), ], }, ] } ] ) def font(request, FontClass): font = FontClass() for param in request.param["glyphs"]: glyph = font.newGlyph(param["name"]) glyph.width = param.get("width", 0) pen = glyph.getPen() for operator, operands in param.get("outline", []): getattr(pen, operator)(*operands) for x, y, name in param.get("anchors", []): glyph.appendAnchor(dict(x=x, y=y, name=name)) return font EXPECTED = { # single component glyph "a-cyr": ([("bottom", 175, 0), ("top", 175, 300)], {"a-cyr"}), # two component glyph "adieresis": ([("bottom", 175, 0), ("top", 175, 480)], {"adieresis"}), # one anchor, two component glyph "amacron": ([("top", 176, 481), ("bottom", 175, 0)], {"amacron"}), # three component glyph "adieresismacron": ([("bottom", 175, 0), ("top", 175, 660)], {"adieresismacron"}), # nested component glyph "amacrondieresis": ( [("bottom", 175, 0), 
("top", 175, 660)], # 'amacron' is used as component by 'amacrondieresis' hence it is modified # as well... {"amacrondieresis", "amacron"}, ), # ligature glyph "a_a": ( [ ("bottom_1", 175, 0), ("bottom_2", 525, 0), ("top_1", 175, 300), ("top_2", 525, 300), ], {"a_a"}, ), } class PropagateAnchorsFilterTest: def test_empty_glyph(self, font): philter = PropagateAnchorsFilter(include={"space"}) assert not philter(font) def test_contour_glyph(self, font): philter = PropagateAnchorsFilter(include={"a"}) assert not philter(font) @pytest.mark.parametrize("name", list(EXPECTED)) def test_include_one_glyph_at_a_time(self, font, name): philter = PropagateAnchorsFilter(include={name}) modified = philter(font) expected_anchors, expected_modified = EXPECTED[name] assert modified == expected_modified assert [(a.name, a.x, a.y) for a in font[name].anchors] == expected_anchors def test_whole_font(self, font): philter = PropagateAnchorsFilter() modified = philter(font) assert modified == set(EXPECTED) for name, (expected_anchors, _) in EXPECTED.items(): assert [(a.name, a.x, a.y) for a in font[name].anchors] == expected_anchors def test_fail_during_anchor_propagation(self, font): name = "emacron" with CapturingLogHandler(logger, level="WARNING") as captor: philter = PropagateAnchorsFilter(include={name}) philter(font) captor.assertRegex( "Anchors not propagated for inexistent component e " "in glyph emacron" ) def test_logger(self, font): with CapturingLogHandler(logger, level="INFO") as captor: philter = PropagateAnchorsFilter() philter(font) captor.assertRegex("Glyphs with propagated anchors: 6") def test_CantarellAnchorPropagation(FontClass, datadir): ufo_path = datadir.join("CantarellAnchorPropagation.ufo") ufo = FontClass(ufo_path) pre_filters, _ = ufo2ft.filters.loadFilters(ufo) philter = pre_filters[0] philter(ufo) anchors_combined = { (a.name, a.x, a.y) for a in ufo["circumflexcomb_tildecomb"].anchors } assert ("top", 214.0, 730.0) in anchors_combined assert ("_top", 213.0, 482.0) in anchors_combined anchors_o = {(a.name, a.x, a.y) for a in ufo["ocircumflextilde"].anchors} assert ("top", 284.0, 730.0) in anchors_o def test_CantarellAnchorPropagation_reduced_filter(FontClass, datadir): ufo_path = datadir.join("CantarellAnchorPropagation.ufo") ufo = FontClass(ufo_path) ufo.lib["com.github.googlei18n.ufo2ft.filters"][0]["include"] = ["ocircumflextilde"] pre_filters, _ = ufo2ft.filters.loadFilters(ufo) philter = pre_filters[0] philter(ufo) anchors_combined = { (a.name, a.x, a.y) for a in ufo["circumflexcomb_tildecomb"].anchors } assert ("top", 214.0, 730.0) in anchors_combined assert ("_top", 213.0, 482.0) in anchors_combined anchors_o = {(a.name, a.x, a.y) for a in ufo["ocircumflextilde"].anchors} assert ("top", 284.0, 730.0) in anchors_o ufo2ft-2.30.0/tests/filters/sortContours_test.py000066400000000000000000000333111434012334300217460ustar00rootroot00000000000000import logging import pytest import ufo2ft import ufo2ft.filters.sortContours @pytest.fixture def font(request, datadir, FontClass): font = FontClass(datadir.join("ContourOrderTest.ufo")) return font def test_sort_contour_order(font, FontClass): test_ufo = FontClass() font_compiled = ufo2ft.compileTTF(font, inplace=True) font_glyf = font_compiled["glyf"] glyph_uniFFFC = font_glyf["uniFFFC"] glyph_test1 = test_ufo.newGlyph("test1") glyph_uniFFFC.draw(glyph_test1.getPen(), font_glyf) assert [ [(p.x, p.y, p.segmentType, p.smooth) for p in c] for c in glyph_test1 ] == EXPECTED_glyph_uniFFFC glyph_graphemejoinercomb = 
font_glyf["graphemejoinercomb"] glyph_test2 = test_ufo.newGlyph("test2") glyph_graphemejoinercomb.draw(glyph_test2.getPen(), font_glyf) assert [ [(p.x, p.y, p.segmentType, p.smooth) for p in c] for c in glyph_test2 ] == EXPECTED_glyph_graphemejoinercomb def test_no_sort_contour_order(font, FontClass): test_ufo = FontClass() del font.lib["com.github.googlei18n.ufo2ft.filters"] font_compiled = ufo2ft.compileTTF(font, inplace=True) font_glyf = font_compiled["glyf"] glyph_uniFFFC = font_glyf["uniFFFC"] glyph_test1 = test_ufo.newGlyph("test1") glyph_uniFFFC.draw(glyph_test1.getPen(), font_glyf) assert [ [(p.x, p.y, p.segmentType, p.smooth) for p in c] for c in glyph_test1 ] != EXPECTED_glyph_uniFFFC glyph_graphemejoinercomb = font_glyf["graphemejoinercomb"] glyph_test2 = test_ufo.newGlyph("test2") glyph_graphemejoinercomb.draw(glyph_test2.getPen(), font_glyf) assert [ [(p.x, p.y, p.segmentType, p.smooth) for p in c] for c in glyph_test2 ] != EXPECTED_glyph_graphemejoinercomb def test_warn_pre_filter(font, caplog): font.lib["com.github.googlei18n.ufo2ft.filters"][0]["pre"] = True font.lib["com.github.googlei18n.ufo2ft.filters"][0]["include"].append("xxx") with caplog.at_level( logging.WARNING, logger=ufo2ft.filters.sortContours.logger.name ): _ = ufo2ft.compileTTF(font, inplace=True) assert len(caplog.records) == 1 assert "contains components which will not be sorted" in caplog.text def test_no_warn_post_filter(font, caplog): font.lib["com.github.googlei18n.ufo2ft.filters"][0]["include"].append("xxx") with caplog.at_level( logging.WARNING, logger=ufo2ft.filters.sortContours.logger.name ): _ = ufo2ft.compileTTF(font, inplace=True) assert len(caplog.records) == 0 EXPECTED_glyph_uniFFFC = [ [ (41, -187, "line", False), (41, -39, "line", False), (95, -39, "line", False), (95, -134, "line", False), (189, -134, "line", False), (189, -187, "line", False), ], [ (95, 19, "line", False), (41, 19, "line", False), (41, 151, "line", False), (95, 151, "line", False), ], [ (95, 210, "line", False), (41, 210, "line", False), (41, 343, "line", False), (95, 343, "line", False), ], [ (95, 402, "line", False), (41, 402, "line", False), (41, 534, "line", False), (95, 534, "line", False), ], [ (41, 593, "line", False), (41, 741, "line", False), (189, 741, "line", False), (189, 687, "line", False), (95, 687, "line", False), (95, 593, "line", False), ], [ (422, 307, "qcurve", True), (422, 241, None, False), (360, 160, None, False), (294, 160, "qcurve", True), (228, 160, None, False), (166, 241, None, False), (166, 307, "qcurve", True), (166, 374, None, False), (228, 454, None, False), (294, 454, "qcurve", True), (360, 454, None, False), (422, 374, None, False), ], [ (228, 307, "qcurve", True), (228, 262, None, False), (260, 211, None, False), (294, 211, "qcurve", True), (329, 211, None, False), (360, 262, None, False), (360, 307, "qcurve", True), (360, 352, None, False), (329, 403, None, False), (294, 403, "qcurve", True), (260, 403, None, False), (228, 352, None, False), ], [ (248, -187, "line", False), (248, -134, "line", False), (380, -134, "line", False), (380, -187, "line", False), ], [ (248, 687, "line", False), (248, 741, "line", False), (380, 741, "line", False), (380, 687, "line", False), ], [ (439, -187, "line", False), (439, -134, "line", False), (572, -134, "line", False), (572, -187, "line", False), ], [ (439, 687, "line", False), (439, 741, "line", False), (572, 741, "line", False), (572, 687, "line", False), ], [ (463, 450, "line", False), (547, 450, "line", True), (600, 450, None, False), (655, 418, 
None, False), (655, 377, "qcurve", True), (655, 353, None, False), (632, 321, None, False), (611, 317, "qcurve", False), (611, 313, "line", False), (633, 309, None, False), (663, 281, None, False), (663, 247, "qcurve", True), (663, 208, None, False), (610, 164, None, False), (564, 164, "qcurve", True), (463, 164, "line", False), ], [ (523, 289, "line", False), (523, 214, "line", False), (559, 214, "line", True), (583, 214, None, False), (601, 235, None, False), (601, 253, "qcurve", True), (601, 269, None, False), (583, 289, None, False), (557, 289, "qcurve", True), ], [ (523, 337, "line", False), (555, 337, "line", True), (578, 337, None, False), (595, 353, None, False), (595, 369, "qcurve", True), (595, 400, None, False), (552, 400, "qcurve", True), (523, 400, "line", False), ], [ (630, -187, "line", False), (630, -134, "line", False), (763, -134, "line", False), (763, -187, "line", False), ], [ (630, 687, "line", False), (630, 741, "line", False), (763, 741, "line", False), (763, 687, "line", False), ], [ (728, 161, "qcurve", True), (704, 161, None, False), (689, 166, "qcurve", False), (689, 216, "line", False), (697, 215, None, False), (712, 212, None, False), (722, 212, "qcurve", True), (740, 212, None, False), (764, 229, None, False), (764, 254, "qcurve", True), (764, 450, "line", False), (825, 450, "line", False), (825, 256, "line", True), (825, 207, None, False), (771, 161, None, False), ], [ (821, -187, "line", False), (821, -134, "line", False), (916, -134, "line", False), (916, -39, "line", False), (969, -39, "line", False), (969, -187, "line", False), ], [ (821, 687, "line", False), (821, 741, "line", False), (969, 741, "line", False), (969, 593, "line", False), (916, 593, "line", False), (916, 687, "line", False), ], [ (969, 19, "line", False), (916, 19, "line", False), (916, 151, "line", False), (969, 151, "line", False), ], [ (969, 210, "line", False), (916, 210, "line", False), (916, 343, "line", False), (969, 343, "line", False), ], [ (969, 402, "line", False), (916, 402, "line", False), (916, 534, "line", False), (969, 534, "line", False), ], ] EXPECTED_glyph_graphemejoinercomb = [ [ (-357, 0, "line", False), (-357, 157, "line", False), (-303, 157, "line", False), (-303, 54, "line", False), (-201, 54, "line", False), (-201, 0, "line", False), ], [ (-357, 279, "line", False), (-357, 436, "line", False), (-303, 436, "line", False), (-303, 279, "line", False), ], [ (-357, 558, "line", False), (-357, 714, "line", False), (-201, 714, "line", False), (-201, 660, "line", False), (-303, 660, "line", False), (-303, 558, "line", False), ], [ (-218, 330, "qcurve", True), (-245, 330, None, False), (-245, 357, "qcurve", True), (-245, 384, None, False), (-218, 384, "qcurve", True), (-191, 384, None, False), (-191, 357, "qcurve", True), (-191, 330, None, False), ], [ (-200, 244, "qcurve", True), (-227, 244, None, False), (-227, 271, "qcurve", True), (-227, 298, None, False), (-200, 298, "qcurve", True), (-173, 298, None, False), (-173, 271, "qcurve", True), (-173, 244, None, False), ], [ (-200, 416, "qcurve", True), (-227, 416, None, False), (-227, 443, "qcurve", True), (-227, 470, None, False), (-200, 470, "qcurve", True), (-173, 470, None, False), (-173, 443, "qcurve", True), (-173, 416, None, False), ], [ (-157, 174, "qcurve", True), (-184, 174, None, False), (-184, 201, "qcurve", True), (-184, 228, None, False), (-157, 228, "qcurve", True), (-130, 228, None, False), (-130, 201, "qcurve", True), (-130, 174, None, False), ], [ (-157, 486, "qcurve", True), (-184, 486, None, False), 
(-184, 513, "qcurve", True), (-184, 540, None, False), (-157, 540, "qcurve", True), (-130, 540, None, False), (-130, 513, "qcurve", True), (-130, 486, None, False), ], [ (-86, 128, "qcurve", True), (-113, 128, None, False), (-113, 155, "qcurve", True), (-113, 182, None, False), (-86, 182, "qcurve", True), (-59, 182, None, False), (-59, 155, "qcurve", True), (-59, 128, None, False), ], [ (-86, 532, "qcurve", True), (-113, 532, None, False), (-113, 559, "qcurve", True), (-113, 586, None, False), (-86, 586, "qcurve", True), (-59, 586, None, False), (-59, 559, "qcurve", True), (-59, 532, None, False), ], [ (-79, 0, "line", False), (-79, 54, "line", False), (79, 54, "line", False), (79, 0, "line", False), ], [ (-79, 660, "line", False), (-79, 714, "line", False), (79, 714, "line", False), (79, 660, "line", False), ], [ (0, 112, "qcurve", True), (-27, 112, None, False), (-27, 139, "qcurve", True), (-27, 166, None, False), (0, 166, "qcurve", True), (27, 166, None, False), (27, 139, "qcurve", True), (27, 112, None, False), ], [ (0, 548, "qcurve", True), (-27, 548, None, False), (-27, 575, "qcurve", True), (-27, 602, None, False), (0, 602, "qcurve", True), (27, 602, None, False), (27, 575, "qcurve", True), (27, 548, None, False), ], [ (86, 128, "qcurve", True), (59, 128, None, False), (59, 155, "qcurve", True), (59, 182, None, False), (86, 182, "qcurve", True), (113, 182, None, False), (113, 155, "qcurve", True), (113, 128, None, False), ], [ (86, 532, "qcurve", True), (59, 532, None, False), (59, 559, "qcurve", True), (59, 586, None, False), (86, 586, "qcurve", True), (113, 586, None, False), (113, 559, "qcurve", True), (113, 532, None, False), ], [ (157, 174, "qcurve", True), (130, 174, None, False), (130, 201, "qcurve", True), (130, 228, None, False), (157, 228, "qcurve", True), (184, 228, None, False), (184, 201, "qcurve", True), (184, 174, None, False), ], [ (157, 486, "qcurve", True), (130, 486, None, False), (130, 513, "qcurve", True), (130, 540, None, False), (157, 540, "qcurve", True), (184, 540, None, False), (184, 513, "qcurve", True), (184, 486, None, False), ], [ (204, 244, "qcurve", True), (177, 244, None, False), (177, 271, "qcurve", True), (177, 298, None, False), (204, 298, "qcurve", True), (231, 298, None, False), (231, 271, "qcurve", True), (231, 244, None, False), ], [ (204, 416, "qcurve", True), (177, 416, None, False), (177, 443, "qcurve", True), (177, 470, None, False), (204, 470, "qcurve", True), (231, 470, None, False), (231, 443, "qcurve", True), (231, 416, None, False), ], [ (223, 330, "qcurve", True), (196, 330, None, False), (196, 357, "qcurve", True), (196, 384, None, False), (223, 384, "qcurve", True), (250, 384, None, False), (250, 357, "qcurve", True), (250, 330, None, False), ], [ (201, 0, "line", False), (201, 54, "line", False), (304, 54, "line", False), (304, 157, "line", False), (357, 157, "line", False), (357, 0, "line", False), ], [ (304, 558, "line", False), (304, 660, "line", False), (201, 660, "line", False), (201, 714, "line", False), (357, 714, "line", False), (357, 558, "line", False), ], [ (304, 279, "line", False), (304, 436, "line", False), (357, 436, "line", False), (357, 279, "line", False), ], ] ufo2ft-2.30.0/tests/filters/transformations_test.py000066400000000000000000000162141434012334300224560ustar00rootroot00000000000000from math import isclose import pytest from ufo2ft.filters.transformations import TransformationsFilter @pytest.fixture( params=[ { "capHeight": 700, "xHeight": 500, "glyphs": [ {"name": "space", "width": 500}, { "name": "a", 
"width": 350, "outline": [ ("moveTo", ((0, 0),)), ("lineTo", ((300, 0),)), ("lineTo", ((300, 300),)), ("lineTo", ((0, 300),)), ("closePath", ()), ], "anchors": [(100, 200, "top"), (100, -200, "bottom")], }, { "name": "b", "width": 450, "outline": [ ("addComponent", ("a", (1, 0, 0, 1, 0, 0))), ("addComponent", ("c", (1, 0, 0, 1, 0, 0))), ("addComponent", ("a", (1, 0, 0, 1, 10, -10))), ], }, { "name": "c", "outline": [ ("moveTo", ((0, 0),)), ("lineTo", ((300, 0),)), ("lineTo", ((150, 300),)), ("closePath", ()), ], }, { "name": "d", "outline": [("addComponent", ("b", (1, 0, 0, -1, 0, 0)))], }, ], } ] ) def font(request, FontClass): font = FontClass() font.info.capHeight = request.param["capHeight"] font.info.xHeight = request.param["xHeight"] for param in request.param["glyphs"]: glyph = font.newGlyph(param["name"]) glyph.width = param.get("width", 0) pen = glyph.getPen() for operator, operands in param.get("outline", []): getattr(pen, operator)(*operands) for x, y, name in param.get("anchors", []): glyph.appendAnchor(dict(x=x, y=y, name=name)) return font @pytest.fixture( params=TransformationsFilter.Origin, ids=[e.name for e in TransformationsFilter.Origin], ) def origin(request): return request.param class TransformationsFilterTest: def test_invalid_origin_value(self): with pytest.raises(ValueError) as excinfo: TransformationsFilter(Origin=5) excinfo.match(r"is not a valid (TransformationsFilter\.)?Origin") def test_empty_glyph(self, font): filter_ = TransformationsFilter(OffsetY=51, include={"space"}) assert not filter_(font) def test_Identity(self, font): filter_ = TransformationsFilter() assert not filter_(font) def test_OffsetX(self, font): filter_ = TransformationsFilter(OffsetX=-10) assert filter_(font) a = font["a"] assert (a[0][0].x, a[0][0].y) == (-10, 0) assert (a.anchors[1].x, a.anchors[1].y) == (90, -200) # base glyph was already transformed, component didn't change assert font["b"].components[0].transformation[-2:] == (0, 0) def test_OffsetY(self, font): filter_ = TransformationsFilter(OffsetY=51) assert filter_(font) a = font["a"] assert (a[0][0].x, a[0][0].y) == (0, 51) assert (a.anchors[1].x, a.anchors[1].y) == (100, -149) assert font["b"].components[0].transformation[-2:] == (0, 0) def test_OffsetXY(self, font): filter_ = TransformationsFilter(OffsetX=-10, OffsetY=51) assert filter_(font) a = font["a"] assert (a[0][0].x, a[0][0].y) == (-10, 51) assert (a.anchors[1].x, a.anchors[1].y) == (90, -149) assert font["b"].components[0].transformation[-2:] == (0, 0) def test_ScaleX(self, font, origin): # different Origin heights should not affect horizontal scale filter_ = TransformationsFilter(ScaleX=50, Origin=origin) assert filter_(font) a = font["a"] assert (a[0][0].x, a[0][0].y) == (0, 0) assert (a[0][2].x, a[0][2].y) == (150, 300) assert a.width == 350 * 0.50 def test_ScaleY(self, font, origin): percent = 50 filter_ = TransformationsFilter(ScaleY=percent, Origin=origin) assert filter_(font) factor = percent / 100 origin_height = filter_.get_origin_height(font, origin) bottom = origin_height * factor top = bottom + 300 * factor a = font["a"] # only y coords change assert (a[0][0].x, a[0][0].y) == (0, bottom) assert (a[0][2].x, a[0][2].y) == (300, top) def test_ScaleXY(self, font, origin): percent = 50 filter_ = TransformationsFilter(ScaleX=percent, ScaleY=percent, Origin=origin) assert filter_(font) factor = percent / 100 origin_height = filter_.get_origin_height(font, origin) bottom = origin_height * factor top = bottom + 300 * factor a = font["a"] # both x and y change 
assert (a[0][0].x, a[0][0].y) == (0, bottom) assert (a[0][2].x, a[0][2].y) == (150, top) assert a.width == 350 * factor def test_Slant(self, font, origin): filter_ = TransformationsFilter(Slant=45, Origin=origin) assert filter_(font) origin_height = filter_.get_origin_height(font, origin) a = font["a"] assert isclose(a[0][0].x, -origin_height) assert a[0][0].y == 0 def test_composite_glyphs(self, font): filter_ = TransformationsFilter( OffsetX=-10, OffsetY=51, ScaleX=50, ScaleY=50, exclude={"c"} ) assert filter_(font) b = font["b"] # component 'a' #1 was not transformed, because the base glyph was already # transformed, and the component's own transformation is identity assert b.components[0].transformation == (1, 0, 0, 1, 0, 0) # component 'c' was transformed, because base glyph was not included assert b.components[1].transformation == (0.5, 0, 0, 0.5, -10, 51) # component 'a' #2 was partly transformed: the base glyph was transformed, but # the component's original transformation was not identity; thus # it was modified to compensate for the transformation already applied to # the base glyph (scale stays same, offsets are scaled) assert b.components[2].transformation == (1, 0, 0, 1, 5, -5) d = font["d"] # component 'b' was transformed as well as its base glyph, because # its original transform had a scale, so it was necessary to # compensate for the transformation applied on the base glyph assert d.components[0].transformation == (1, 0, 0, -1, 0, 102) def test_ScaleOffset_width(self, font, origin): percent = 50 filter_ = TransformationsFilter( OffsetX=-100, ScaleX=percent, ScaleY=percent, Origin=origin ) assert filter_(font) factor = percent / 100 a = font["a"] # The offset value here should not change the fact that the glyph # bounding box is scaled by 50%. 
assert a.width == 350 * factor ufo2ft-2.30.0/tests/fontInfoData_test.py000066400000000000000000000207341434012334300201330ustar00rootroot00000000000000import os import random import time import pytest from ufo2ft.fontInfoData import ( dateStringToTimeValue, getAttrWithFallback, normalizeStringForPostscript, ) @pytest.fixture def info(InfoClass): self = InfoClass() self.familyName = "Family Name" self.styleName = "Style Name" self.unitsPerEm = 1000 self.descender = -250 self.xHeight = 450 self.capHeight = 600 self.ascender = 650 self.italicAngle = 0 return self class GetAttrWithFallbackTest: @pytest.mark.parametrize( "infoDict,expected", [ # no styleMapFamilyName, no styleMapStyleName ( {}, { "familyName": "Family Name", "styleName": "Style Name", "styleMapFamilyName": "Family Name Style Name", "styleMapStyleName": "regular", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Style Name", }, ), # no styleMapStyleName ( {"styleMapFamilyName": "Style Map Family Name"}, { "styleMapFamilyName": "Style Map Family Name", "styleMapStyleName": "regular", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Style Name", }, ), # no styleMapFamilyName, no styleMapStyleName but styleName="Regular" ( {"styleName": "Regular"}, { "familyName": "Family Name", "styleName": "Regular", "styleMapFamilyName": "Family Name", "styleMapStyleName": "regular", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Regular", }, ), # no styleMapFamilyName but styleName="Regular" ( {"styleName": "Regular", "styleMapStyleName": "regular"}, { "styleMapFamilyName": "Family Name", "styleMapStyleName": "regular", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Regular", }, ), # no styleMapStyleName but styleName="Regular" ( {"styleName": "Regular", "styleMapFamilyName": "Style Map Family Name"}, { "styleMapFamilyName": "Style Map Family Name", "styleMapStyleName": "regular", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Regular", }, ), # no styleMapFamilyName, no styleMapStyleName but styleName="Bold" ( {"styleName": "Bold"}, { "familyName": "Family Name", "styleName": "Bold", "styleMapFamilyName": "Family Name", "styleMapStyleName": "bold", "openTypeNamePreferredFamilyName": "Family Name", "openTypeNamePreferredSubfamilyName": "Bold", }, ), ], ) def test_family_and_style_names(self, info, infoDict, expected): for key, value in infoDict.items(): setattr(info, key, value) for key, value in expected.items(): assert getAttrWithFallback(info, key) == value def test_redundant_metadata(self, info): assert getAttrWithFallback(info, "openTypeNameVersion") == "Version 0.000" info.versionMinor = 1 info.versionMajor = 1 assert getAttrWithFallback(info, "openTypeNameVersion") == "Version 1.001" assert ( getAttrWithFallback(info, "openTypeNameUniqueID") == "1.001;NONE;FamilyName-StyleName" ) assert getAttrWithFallback(info, "postscriptSlantAngle") == 0 def test_unecessary_metadata(self, info): assert getAttrWithFallback(info, "postscriptWeightName") is None info.postscriptWeightName = "Normal" assert getAttrWithFallback(info, "postscriptWeightName") == "Normal" def test_vertical_metrics(self, info): assert getAttrWithFallback(info, "openTypeHheaAscender") == 950 assert getAttrWithFallback(info, "openTypeHheaDescender") == -250 assert getAttrWithFallback(info, "openTypeOS2TypoAscender") == 650 assert getAttrWithFallback(info, 
"openTypeOS2TypoDescender") == -250 assert getAttrWithFallback(info, "openTypeOS2WinAscent") == 950 assert getAttrWithFallback(info, "openTypeOS2WinDescent") == 250 def test_caret_slope(self, info): assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 0 info.italicAngle = -12 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1000 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 213 info.italicAngle = 12 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1000 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == -213 info.openTypeHheaCaretSlopeRise = 2048 assert info.openTypeHheaCaretSlopeRun is None assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 2048 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == -435 info.openTypeHheaCaretSlopeRise = None info.openTypeHheaCaretSlopeRun = 200 assert info.openTypeHheaCaretSlopeRise is None assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == -941 assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 200 def test_head_created(self, info): os.environ["SOURCE_DATE_EPOCH"] = "1514485183" try: assert ( getAttrWithFallback(info, "openTypeHeadCreated") == "2017/12/28 18:19:43" ) finally: del os.environ["SOURCE_DATE_EPOCH"] assert getAttrWithFallback(info, "openTypeHeadCreated") != "2017/12/28 18:19:43" def test_empty_info(self, InfoClass): info = InfoClass() assert getAttrWithFallback(info, "familyName") == "New Font" assert getAttrWithFallback(info, "styleName") == "Regular" assert getAttrWithFallback(info, "unitsPerEm") == 1000 assert getAttrWithFallback(info, "ascender") == 800 assert getAttrWithFallback(info, "capHeight") == 700 assert getAttrWithFallback(info, "xHeight") == 500 assert getAttrWithFallback(info, "descender") == -200 def test_empty_info_2048(self, InfoClass): info = InfoClass() info.unitsPerEm = 2048 assert getAttrWithFallback(info, "unitsPerEm") == 2048 assert getAttrWithFallback(info, "ascender") == 1638 assert getAttrWithFallback(info, "capHeight") == 1434 assert getAttrWithFallback(info, "xHeight") == 1024 assert getAttrWithFallback(info, "descender") == -410 class PostscriptBlueScaleFallbackTest: def test_without_blue_zones(self, info): postscriptBlueScale = getAttrWithFallback(info, "postscriptBlueScale") assert postscriptBlueScale == 0.039625 def test_with_blue_zones(self, info): info.postscriptBlueValues = [ -13, 0, 470, 483, 534, 547, 556, 569, 654, 667, 677, 690, 738, 758, ] info.postscriptOtherBlues = [-255, -245] postscriptBlueScale = getAttrWithFallback(info, "postscriptBlueScale") assert postscriptBlueScale == 0.0375 class NormalizeStringForPostscriptTest: def test_no_change(self): assert ( normalizeStringForPostscript("Sample copyright notice.") == "Sample copyright notice." 
) class DateStringToTimeValueTest: def test_roundtrip_random_timestamp(self): timestamp = random.randint(0, 10**9) ds = time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(timestamp)) assert dateStringToTimeValue(ds) == timestamp if __name__ == "__main__": import sys sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/integration_test.py000066400000000000000000000353661434012334300201050ustar00rootroot00000000000000import difflib import io import os import sys from pathlib import Path import pytest from fontTools.pens.boundsPen import BoundsPen from ufo2ft import ( compileInterpolatableTTFs, compileOTF, compileTTF, compileVariableCFF2, compileVariableCFF2s, compileVariableTTF, compileVariableTTFs, ) from ufo2ft.constants import KEEP_GLYPH_NAMES from ufo2ft.filters import TransformationsFilter def getpath(filename): dirname = os.path.dirname(__file__) return os.path.join(dirname, "data", filename) @pytest.fixture def testufo(FontClass): return FontClass(getpath("TestFont.ufo")) def readLines(f): f.seek(0) lines = [] for line in f.readlines(): # Elide ttLibVersion because it frequently changes. # Use os-native line separators so we can run difflib. if line.startswith("<ttFont "): lines.append("<ttFont>" + os.linesep) else: lines.append(line.rstrip() + os.linesep) return lines def expectTTX(font, expectedTTX, tables=None): with open(getpath(expectedTTX), encoding="utf-8") as f: expected = readLines(f) font.recalcTimestamp = False font["head"].created, font["head"].modified = 3570196637, 3601822698 font["head"].checkSumAdjustment = 0x12345678 f = io.StringIO() font.saveXML(f, tables=tables) actual = readLines(f) if actual != expected: for line in difflib.unified_diff( expected, actual, fromfile=expectedTTX, tofile="<generated>" ): sys.stderr.write(line) pytest.fail("TTX output is different from expected") @pytest.fixture(params=[None, True, False]) def useProductionNames(request): return request.param class IntegrationTest: _layoutTables = ["GDEF", "GSUB", "GPOS", "BASE"] # We have specific unit tests for CFF vs TrueType output, but we run # an integration test here to make sure things work end-to-end. # No need to test both formats for every single test case. def test_TestFont_TTF(self, testufo): ttf = compileTTF(testufo) expectTTX(ttf, "TestFont.ttx") def test_TestFont_CFF(self, testufo): otf = compileOTF(testufo) expectTTX(otf, "TestFont-CFF.ttx") def test_included_features(self, FontClass): """Checks how the compiler handles include statements in features.fea. The compiler should detect which features are defined by the features.fea inside the compiled UFO, or by feature files that are included from there. https://github.com/googlei18n/ufo2ft/issues/108 Relative paths should be resolved taking the UFO path as reference, not the embedded features.fea file.
https://github.com/unified-font-object/ufo-spec/issues/55 """ ufo = FontClass(getpath("Bug108.ufo")) ttf = compileTTF(ufo) expectTTX(ttf, "Bug108.ttx", tables=self._layoutTables) def test_included_features_with_custom_include_dir(self, FontClass, tmp_path): ufo = FontClass(getpath("Bug108.ufo")) features_dir = tmp_path / "features" features_dir.mkdir() (features_dir / "foobarbaz.fea").write_text( Path(getpath("Bug108_included.fea")).read_text() ) ufo.features.text = "include(features/foobarbaz.fea);" ttf = compileTTF(ufo, feaIncludeDir=tmp_path) expectTTX(ttf, "Bug108.ttx", tables=self._layoutTables) def test_mti_features(self, FontClass): """Checks handling of UFOs with embdedded MTI/Monotype feature files https://github.com/googlei18n/fontmake/issues/289 """ ufo = FontClass(getpath("MTIFeatures.ufo")) ttf = compileTTF(ufo) expectTTX(ttf, "MTIFeatures.ttx", tables=self._layoutTables) def test_removeOverlaps_CFF(self, testufo): otf = compileOTF(testufo, removeOverlaps=True) expectTTX(otf, "TestFont-NoOverlaps-CFF.ttx") def test_removeOverlaps_CFF_pathops(self, testufo): otf = compileOTF(testufo, removeOverlaps=True, overlapsBackend="pathops") expectTTX(otf, "TestFont-NoOverlaps-CFF-pathops.ttx") def test_removeOverlaps(self, testufo): ttf = compileTTF(testufo, removeOverlaps=True) expectTTX(ttf, "TestFont-NoOverlaps-TTF.ttx") def test_removeOverlaps_pathops(self, testufo): ttf = compileTTF(testufo, removeOverlaps=True, overlapsBackend="pathops") expectTTX(ttf, "TestFont-NoOverlaps-TTF-pathops.ttx") def test_nestedComponents(self, FontClass): ufo = FontClass(getpath("NestedComponents-Regular.ufo")) ttf = compileTTF(ufo) assert ttf["maxp"].maxComponentDepth != 1 ttf = compileTTF(ufo, flattenComponents=True) assert ttf["maxp"].maxComponentDepth == 1 def test_nestedComponents_interpolatable(self, FontClass): ufos = [ FontClass(getpath("NestedComponents-Regular.ufo")), FontClass(getpath("NestedComponents-Bold.ufo")), ] ttfs = compileInterpolatableTTFs(ufos) for ttf in ttfs: assert ttf["maxp"].maxComponentDepth != 1 ttfs = compileInterpolatableTTFs(ufos, flattenComponents=True) for ttf in ttfs: assert ttf["maxp"].maxComponentDepth == 1 def test_interpolatableTTFs_lazy(self, FontClass): # two same UFOs **must** be interpolatable ufos = [FontClass(getpath("TestFont.ufo")) for _ in range(2)] ttfs = list(compileInterpolatableTTFs(ufos)) expectTTX(ttfs[0], "TestFont.ttx") expectTTX(ttfs[1], "TestFont.ttx") @pytest.mark.parametrize( "cff_version, expected_ttx", [(1, "TestFont-NoOptimize-CFF.ttx"), (2, "TestFont-NoOptimize-CFF2.ttx")], ids=["cff1", "cff2"], ) def test_optimizeCFF_none(self, testufo, cff_version, expected_ttx): otf = compileOTF(testufo, optimizeCFF=0, cffVersion=cff_version) expectTTX(otf, expected_ttx) @pytest.mark.parametrize( "cff_version, expected_ttx", [(1, "TestFont-Specialized-CFF.ttx"), (2, "TestFont-Specialized-CFF2.ttx")], ids=["cff1", "cff2"], ) def test_optimizeCFF_specialize(self, testufo, cff_version, expected_ttx): otf = compileOTF(testufo, optimizeCFF=1, cffVersion=cff_version) expectTTX(otf, expected_ttx) @pytest.mark.parametrize( "subroutinizer, cff_version, expected_ttx", [ (None, 1, "TestFont-CFF.ttx"), ("compreffor", 1, "TestFont-CFF-compreffor.ttx"), ("cffsubr", 1, "TestFont-CFF.ttx"), (None, 2, "TestFont-CFF2-cffsubr.ttx"), # ("compreffor", 2, "TestFont-CFF2-compreffor.ttx"), ("cffsubr", 2, "TestFont-CFF2-cffsubr.ttx"), ], ids=[ "default-cff1", "compreffor-cff1", "cffsubr-cff1", "default-cff2", # "compreffor-cff2", "cffsubr-cff2", ], ) def 
test_optimizeCFF_subroutinize( self, testufo, cff_version, subroutinizer, expected_ttx ): otf = compileOTF( testufo, optimizeCFF=2, cffVersion=cff_version, subroutinizer=subroutinizer ) expectTTX(otf, expected_ttx) def test_compileVariableTTF(self, designspace, useProductionNames): varfont = compileVariableTTF(designspace, useProductionNames=useProductionNames) expectTTX( varfont, "TestVariableFont-TTF{}.ttx".format( "-useProductionNames" if useProductionNames else "" ), ) def test_compileVariableCFF2(self, designspace, useProductionNames): varfont = compileVariableCFF2( designspace, useProductionNames=useProductionNames ) expectTTX( varfont, "TestVariableFont-CFF2{}.ttx".format( "-useProductionNames" if useProductionNames else "" ), ) def test_compileVariableCFF2_subroutinized(self, designspace): varfont = compileVariableCFF2(designspace, optimizeCFF=2) expectTTX(varfont, "TestVariableFont-CFF2-cffsubr.ttx") def test_debugFeatureFile(self, designspace): tmp = io.StringIO() _ = compileVariableTTF(designspace, debugFeatureFile=tmp) assert "### LayerFont-Regular ###" in tmp.getvalue() assert "### LayerFont-Bold ###" in tmp.getvalue() @pytest.mark.parametrize( "output_format, options, expected_ttx", [ ("TTF", {}, "TestFont-TTF-post3.ttx"), ("OTF", {"cffVersion": 2}, "TestFont-CFF2-post3.ttx"), ], ) def test_drop_glyph_names(self, testufo, output_format, options, expected_ttx): testufo.lib[KEEP_GLYPH_NAMES] = False compile_func = globals()[f"compile{output_format}"] ttf = compile_func(testufo, **options) expectTTX(ttf, expected_ttx) @pytest.mark.parametrize( "output_format, options, expected_ttx", [ ("VariableTTF", {}, "TestVariableFont-TTF-post3.ttx"), ("VariableCFF2", {}, "TestVariableFont-CFF2-post3.ttx"), ], ) def test_drop_glyph_names_variable( self, designspace, output_format, options, expected_ttx ): # set keepGlyphNames in the default UFO.lib where postProcessor finds it designspace.findDefault().font.lib[KEEP_GLYPH_NAMES] = False compile_func = globals()[f"compile{output_format}"] ttf = compile_func(designspace, **options) expectTTX(ttf, expected_ttx) @pytest.mark.parametrize( "compileFunc", [ compileOTF, compileTTF, ], ) def test_compile_filters(self, compileFunc, FontClass): ufo = FontClass(getpath("LayerFont-Regular.ufo")) filters = [TransformationsFilter(OffsetY=10)] ttf = compileFunc(ufo, filters=filters) pen1 = BoundsPen(ufo) glyph = ufo["a"] glyph.draw(pen1) glyphSet = ttf.getGlyphSet() tt_glyph = glyphSet["a"] pen2 = BoundsPen(glyphSet) tt_glyph.draw(pen2) assert pen1.bounds[0] == pen2.bounds[0] assert pen1.bounds[1] + 10 == pen2.bounds[1] assert pen1.bounds[2] == pen2.bounds[2] assert pen1.bounds[3] + 10 == pen2.bounds[3] @pytest.mark.parametrize( "compileFunc", [ compileVariableTTF, compileVariableCFF2, ], ) def test_compileVariable_filters(self, designspace, compileFunc): filters = [TransformationsFilter(OffsetY=10)] varfont = compileFunc(designspace, filters=filters) ufo = designspace.sources[0].font pen1 = BoundsPen(ufo) glyph = ufo["a"] glyph.draw(pen1) glyphSet = varfont.getGlyphSet() tt_glyph = glyphSet["a"] pen2 = BoundsPen(glyphSet) tt_glyph.draw(pen2) assert pen1.bounds[0] == pen2.bounds[0] assert pen1.bounds[1] + 10 == pen2.bounds[1] assert pen1.bounds[2] == pen2.bounds[2] assert pen1.bounds[3] + 10 == pen2.bounds[3] def test_compileInterpolatableTTFs(self, FontClass): ufos = [ FontClass(getpath("NestedComponents-Regular.ufo")), FontClass(getpath("NestedComponents-Bold.ufo")), ] filters = [TransformationsFilter(OffsetY=10)] ttfs = compileInterpolatableTTFs(ufos, 
filters=filters) for i, ttf in enumerate(ttfs): glyph = ufos[i]["a"] pen1 = BoundsPen(ufos[i]) glyph.draw(pen1) glyphSet = ttf.getGlyphSet() tt_glyph = glyphSet["uni0061"] pen2 = BoundsPen(glyphSet) tt_glyph.draw(pen2) assert pen1.bounds[0] == pen2.bounds[0] assert pen1.bounds[1] + 10 == pen2.bounds[1] assert pen1.bounds[2] == pen2.bounds[2] assert pen1.bounds[3] + 10 == pen2.bounds[3] def test_compileVariableTTFs(self, designspace_v5): fonts = compileVariableTTFs(designspace_v5) # NOTE: Test dumps were generated like this: # for k, font in fonts.items(): # font.recalcTimestamp = False # font["head"].created, font["head"].modified = 3570196637, 3601822698 # font["head"].checkSumAdjustment = 0x12345678 # font.saveXML(f"tests/data/DSv5/{k}-TTF.ttx") assert set(fonts.keys()) == { "MutatorSansVariable_Weight_Width", "MutatorSansVariable_Weight", "MutatorSansVariable_Width", "MutatorSerifVariable_Width", } # The STAT table is set to [SRIF=0, wght=[300, 700], wdth=[50, 200]] + S1 + S2 expectTTX( fonts["MutatorSansVariable_Weight_Width"], "DSv5/MutatorSansVariable_Weight_Width-TTF.ttx", ) # The STAT table is set to [SRIF=0, wght=[300, 700], wdth=50] expectTTX( fonts["MutatorSansVariable_Weight"], "DSv5/MutatorSansVariable_Weight-TTF.ttx", ) # The STAT table is set to [SRIF=0, wght=300, wdth=[50, 200]] expectTTX( fonts["MutatorSansVariable_Width"], "DSv5/MutatorSansVariable_Width-TTF.ttx", ) # The STAT table is set to [SRIF=1, wght=300, wdth=[50, 200]] expectTTX( fonts["MutatorSerifVariable_Width"], "DSv5/MutatorSerifVariable_Width-TTF.ttx", ) def test_compileVariableCFF2s(self, designspace_v5): fonts = compileVariableCFF2s(designspace_v5) # NOTE: Test dumps were generated like this: # for k, font in fonts.items(): # font.recalcTimestamp = False # font["head"].created, font["head"].modified = 3570196637, 3601822698 # font["head"].checkSumAdjustment = 0x12345678 # font.saveXML(f"tests/data/DSv5/{k}-CFF2.ttx") assert set(fonts.keys()) == { "MutatorSansVariable_Weight_Width", "MutatorSansVariable_Weight", "MutatorSansVariable_Width", "MutatorSerifVariable_Width", } # The STAT table is set to [SRIF=0, wght=[300, 700], wdth=[50, 200]] + S1 + S2 expectTTX( fonts["MutatorSansVariable_Weight_Width"], "DSv5/MutatorSansVariable_Weight_Width-CFF2.ttx", ) # The STAT table is set to [SRIF=0, wght=[300, 700], wdth=50] expectTTX( fonts["MutatorSansVariable_Weight"], "DSv5/MutatorSansVariable_Weight-CFF2.ttx", ) # The STAT table is set to [SRIF=0, wght=300, wdth=[50, 200]] expectTTX( fonts["MutatorSansVariable_Width"], "DSv5/MutatorSansVariable_Width-CFF2.ttx", ) # The STAT table is set to [SRIF=1, wght=300, wdth=[50, 200]] expectTTX( fonts["MutatorSerifVariable_Width"], "DSv5/MutatorSerifVariable_Width-CFF2.ttx", ) if __name__ == "__main__": sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/outlineCompiler_test.py000066400000000000000000001133701434012334300207300ustar00rootroot00000000000000import logging import os import pytest from cu2qu.ufo import font_to_quadratic from fontTools.ttLib import TTFont from fontTools.ttLib.tables._g_l_y_f import USE_MY_METRICS from ufo2ft import ( compileInterpolatableOTFsFromDS, compileInterpolatableTTFs, compileInterpolatableTTFsFromDS, compileOTF, compileTTF, ) from ufo2ft.constants import ( GLYPHS_DONT_USE_PRODUCTION_NAMES, SPARSE_OTF_MASTER_TABLES, SPARSE_TTF_MASTER_TABLES, USE_PRODUCTION_NAMES, ) from ufo2ft.fontInfoData import intListToNum from ufo2ft.outlineCompiler import OutlineOTFCompiler, OutlineTTFCompiler def getpath(filename): dirname = 
os.path.dirname(__file__) return os.path.join(dirname, "data", filename) @pytest.fixture def testufo(FontClass): font = FontClass(getpath("TestFont.ufo")) del font.lib["public.postscriptNames"] return font @pytest.fixture def quadufo(FontClass): font = FontClass(getpath("TestFont.ufo")) font_to_quadratic(font) return font @pytest.fixture def use_my_metrics_ufo(FontClass): return FontClass(getpath("UseMyMetrics.ufo")) @pytest.fixture def emptyufo(FontClass): font = FontClass() font.info.unitsPerEm = 1000 font.info.familyName = "Test Font" font.info.styleName = "Regular" font.info.ascender = 750 font.info.descender = -250 font.info.xHeight = 500 font.info.capHeight = 750 return font class OutlineTTFCompilerTest: def test_setupTable_gasp(self, testufo): compiler = OutlineTTFCompiler(testufo) compiler.otf = TTFont() compiler.setupTable_gasp() assert "gasp" in compiler.otf assert compiler.otf["gasp"].gaspRange == {7: 10, 65535: 15} def test_compile_with_gasp(self, testufo): compiler = OutlineTTFCompiler(testufo) compiler.compile() assert "gasp" in compiler.otf assert compiler.otf["gasp"].gaspRange == {7: 10, 65535: 15} def test_compile_without_gasp(self, testufo): testufo.info.openTypeGaspRangeRecords = None compiler = OutlineTTFCompiler(testufo) compiler.compile() assert "gasp" not in compiler.otf def test_compile_empty_gasp(self, testufo): # ignore empty gasp testufo.info.openTypeGaspRangeRecords = [] compiler = OutlineTTFCompiler(testufo) compiler.compile() assert "gasp" not in compiler.otf def test_makeGlyphsBoundingBoxes(self, quadufo): compiler = OutlineTTFCompiler(quadufo) assert compiler.glyphBoundingBoxes[".notdef"] == (50, 0, 450, 750) # no outline data assert compiler.glyphBoundingBoxes["space"] is None # float coordinates are rounded, so is the bbox assert compiler.glyphBoundingBoxes["d"] == (90, 77, 211, 197) def test_autoUseMyMetrics(self, use_my_metrics_ufo): compiler = OutlineTTFCompiler(use_my_metrics_ufo) ttf = compiler.compile() # the first component in the 'Iacute' composite glyph ('acute') # does _not_ have the USE_MY_METRICS flag assert not (ttf["glyf"]["Iacute"].components[0].flags & USE_MY_METRICS) # the second component in the 'Iacute' composite glyph ('I') # has the USE_MY_METRICS flag set assert ttf["glyf"]["Iacute"].components[1].flags & USE_MY_METRICS # none of the 'I' components of the 'romanthree' glyph has # the USE_MY_METRICS flag set, because the composite glyph has a # different width for component in ttf["glyf"]["romanthree"].components: assert not (component.flags & USE_MY_METRICS) def test_autoUseMyMetrics_None(self, use_my_metrics_ufo): compiler = OutlineTTFCompiler(use_my_metrics_ufo) # setting 'autoUseMyMetrics' attribute to None disables the feature compiler.autoUseMyMetrics = None ttf = compiler.compile() assert not (ttf["glyf"]["Iacute"].components[1].flags & USE_MY_METRICS) def test_importTTX(self, testufo): compiler = OutlineTTFCompiler(testufo) otf = compiler.otf = TTFont() compiler.importTTX() assert "CUST" in otf assert otf["CUST"].data == b"\x00\x01\xbe\xef" assert otf.sfntVersion == "\x00\x01\x00\x00" def test_no_contour_glyphs(self, testufo): for glyph in testufo: glyph.clearContours() compiler = OutlineTTFCompiler(testufo) compiler.compile() assert compiler.otf["hhea"].advanceWidthMax == 600 assert compiler.otf["hhea"].minLeftSideBearing == 0 assert compiler.otf["hhea"].minRightSideBearing == 0 assert compiler.otf["hhea"].xMaxExtent == 0 def test_os2_no_widths(self, testufo): for glyph in testufo: glyph.width = 0 compiler = 
OutlineTTFCompiler(testufo) compiler.compile() assert compiler.otf["OS/2"].xAvgCharWidth == 0 def test_missing_component(self, emptyufo): ufo = emptyufo a = ufo.newGlyph("a") pen = a.getPen() pen.moveTo((0, 0)) pen.lineTo((100, 0)) pen.lineTo((100, 100)) pen.lineTo((0, 100)) pen.closePath() # a mixed contour/component glyph, which is decomposed by the # TTGlyphPen; one of the components does not exist thus should # be dropped b = ufo.newGlyph("b") pen = b.getPen() pen.moveTo((0, 200)) pen.lineTo((100, 200)) pen.lineTo((50, 300)) pen.closePath() pen.addComponent("a", (1, 0, 0, 1, 0, 0)) pen.addComponent("c", (1, 0, 0, 1, 0, 0)) # missing d = ufo.newGlyph("d") pen = d.getPen() pen.addComponent("c", (1, 0, 0, 1, 0, 0)) # missing e = ufo.newGlyph("e") pen = e.getPen() pen.addComponent("a", (1, 0, 0, 1, 0, 0)) pen.addComponent("c", (1, 0, 0, 1, 0, 0)) # missing compiler = OutlineTTFCompiler(ufo) ttFont = compiler.compile() glyf = ttFont["glyf"] assert glyf["a"].numberOfContours == 1 assert glyf["b"].numberOfContours == 2 assert glyf["d"].numberOfContours == 0 assert glyf["e"].numberOfContours == -1 # composite glyph assert len(glyf["e"].components) == 1 def test_contour_starts_with_offcurve_point(self, emptyufo): ufo = emptyufo a = ufo.newGlyph("a") pen = a.getPointPen() pen.beginPath() pen.addPoint((0, 0), None) pen.addPoint((0, 10), None) pen.addPoint((10, 10), None) pen.addPoint((10, 0), None) pen.addPoint((5, 0), "qcurve") pen.endPath() compiler = OutlineTTFCompiler(ufo) ttFont = compiler.compile() glyf = ttFont["glyf"] assert glyf["a"].numberOfContours == 1 coords, endPts, flags = glyf["a"].getCoordinates(glyf) assert list(coords) == [(0, 0), (0, 10), (10, 10), (10, 0), (5, 0)] assert endPts == [4] assert list(flags) == [0, 0, 0, 0, 1] def test_setupTable_meta(self, testufo): testufo.lib["public.openTypeMeta"] = { "appl": b"BEEF", "bild": b"AAAA", "dlng": ["en-Latn", "nl-Latn"], "slng": ["Latn"], "PRIB": b"Some private bytes", "PRIA": "Some private ascii string", "PRIU": "Some private unicode string…", } compiler = OutlineTTFCompiler(testufo) ttFont = compiler.compile() meta = ttFont["meta"] assert meta.data["appl"] == b"BEEF" assert meta.data["bild"] == b"AAAA" assert meta.data["dlng"] == "en-Latn,nl-Latn" assert meta.data["slng"] == "Latn" assert meta.data["PRIB"] == b"Some private bytes" assert meta.data["PRIA"] == b"Some private ascii string" assert meta.data["PRIU"] == "Some private unicode string…".encode("utf-8") class OutlineOTFCompilerTest: def test_setupTable_CFF_all_blues_defined(self, testufo): testufo.info.postscriptBlueFuzz = 2 testufo.info.postscriptBlueShift = 8 testufo.info.postscriptBlueScale = 0.049736 testufo.info.postscriptForceBold = False testufo.info.postscriptBlueValues = [-12, 0, 486, 498, 712, 724] testufo.info.postscriptOtherBlues = [-217, -205] testufo.info.postscriptFamilyBlues = [-12, 0, 486, 498, 712, 724] testufo.info.postscriptFamilyOtherBlues = [-217, -205] compiler = OutlineOTFCompiler(testufo) compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() cff = compiler.otf["CFF "].cff private = cff[list(cff.keys())[0]].Private assert private.BlueFuzz == 2 assert private.BlueShift == 8 assert private.BlueScale == 0.049736 assert private.ForceBold == 0 assert private.BlueValues == [-12, 0, 486, 498, 712, 724] assert private.OtherBlues == [-217, -205] assert private.FamilyBlues == [-12, 0, 486, 498, 712, 724] assert private.FamilyOtherBlues == [-217, -205] def test_setupTable_CFF_no_blues_defined(self, testufo): # no blue values defined 
testufo.info.postscriptBlueValues = [] testufo.info.postscriptOtherBlues = [] testufo.info.postscriptFamilyBlues = [] testufo.info.postscriptFamilyOtherBlues = [] # the following attributes have no effect testufo.info.postscriptBlueFuzz = 2 testufo.info.postscriptBlueShift = 8 testufo.info.postscriptBlueScale = 0.049736 testufo.info.postscriptForceBold = False compiler = OutlineOTFCompiler(testufo) compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() cff = compiler.otf["CFF "].cff private = cff[list(cff.keys())[0]].Private # expect default values as defined in fontTools' cffLib.py assert private.BlueFuzz == 1 assert private.BlueShift == 7 assert private.BlueScale == 0.039625 assert private.ForceBold == 0 # CFF PrivateDict has no blues attributes assert not hasattr(private, "BlueValues") assert not hasattr(private, "OtherBlues") assert not hasattr(private, "FamilyBlues") assert not hasattr(private, "FamilyOtherBlues") def test_setupTable_CFF_some_blues_defined(self, testufo): testufo.info.postscriptBlueFuzz = 2 testufo.info.postscriptForceBold = True testufo.info.postscriptBlueValues = [] testufo.info.postscriptOtherBlues = [-217, -205] testufo.info.postscriptFamilyBlues = [] testufo.info.postscriptFamilyOtherBlues = [] compiler = OutlineOTFCompiler(testufo) compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() cff = compiler.otf["CFF "].cff private = cff[list(cff.keys())[0]].Private assert private.BlueFuzz == 2 assert private.BlueShift == 7 # default assert private.BlueScale == 0.039625 # default assert private.ForceBold is True assert not hasattr(private, "BlueValues") assert private.OtherBlues == [-217, -205] assert not hasattr(private, "FamilyBlues") assert not hasattr(private, "FamilyOtherBlues") @staticmethod def get_charstring_program(ttFont, glyphName): cff = ttFont["CFF "].cff charstrings = cff[list(cff.keys())[0]].CharStrings c, _ = charstrings.getItemAndSelector(glyphName) c.decompile() return c.program def assertProgramEqual(self, expected, actual): assert len(expected) == len(actual) for exp_token, act_token in zip(expected, actual): if isinstance(exp_token, str): assert exp_token == act_token else: assert not isinstance(act_token, str) assert exp_token == pytest.approx(act_token) def test_setupTable_CFF_round_all(self, testufo): # by default all floats are rounded to integer compiler = OutlineOTFCompiler(testufo) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() # glyph 'd' in TestFont.ufo contains float coordinates program = self.get_charstring_program(otf, "d") self.assertProgramEqual( program, [ -26, 151, 197, "rmoveto", -34, -27, -27, -33, -33, 27, -27, 34, 33, 27, 27, 33, 33, -27, 27, -33, "hvcurveto", "endchar", ], ) def test_setupTable_CFF_round_none(self, testufo): # roundTolerance=0 means 'don't round, keep all floats' compiler = OutlineOTFCompiler(testufo, roundTolerance=0) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() program = self.get_charstring_program(otf, "d") self.assertProgramEqual( program, [ -26, 150.66, 197.32, "rmoveto", -33.66, -26.67, -26.99, -33.33, -33.33, 26.67, -26.66, 33.66, 33.33, 26.66, 26.66, 33.33, 33.33, -26.66, 26.99, -33.33, "hvcurveto", "endchar", ], ) def test_setupTable_CFF_round_some(self, testufo): # only floats 'close enough' are rounded to integer compiler = OutlineOTFCompiler(testufo, roundTolerance=0.34) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() program = self.get_charstring_program(otf, "d") self.assertProgramEqual( 
program, [ -26, 150.66, 197, "rmoveto", -33.66, -27, -27, -33, -33, 27, -27, 33.66, 33.34, 26.65, 27, 33, 33, -26.65, 27, -33.34, "hvcurveto", "endchar", ], ) def test_setupTable_CFF_optimize(self, testufo): compiler = OutlineOTFCompiler(testufo, optimizeCFF=True) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() program = self.get_charstring_program(otf, "a") self.assertProgramEqual( program, [-12, 66, "hmoveto", 256, "hlineto", -128, 510, "rlineto", "endchar"], ) def test_setupTable_CFF_no_optimize(self, testufo): compiler = OutlineOTFCompiler(testufo, optimizeCFF=False) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_CFF() program = self.get_charstring_program(otf, "a") self.assertProgramEqual( program, [-12, 66, 0, "rmoveto", 256, 0, "rlineto", -128, 510, "rlineto", "endchar"], ) def test_makeGlyphsBoundingBoxes(self, testufo): compiler = OutlineOTFCompiler(testufo) # with default roundTolerance, all coordinates and hence the bounding # box values are rounded with otRound() assert compiler.glyphBoundingBoxes["d"] == (90, 77, 211, 197) def test_makeGlyphsBoundingBoxes_floats(self, testufo): # specifying a custom roundTolerance affects which coordinates are # rounded; in this case, the top-most Y coordinate stays a float # (197.32), hence the bbox.yMax (198) is rounded using math.ceiling() compiler = OutlineOTFCompiler(testufo, roundTolerance=0.1) assert compiler.glyphBoundingBoxes["d"] == (90, 77, 211, 198) def test_importTTX(self, testufo): compiler = OutlineOTFCompiler(testufo) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.importTTX() assert "CUST" in otf assert otf["CUST"].data == b"\x00\x01\xbe\xef" assert otf.sfntVersion == "OTTO" def test_no_contour_glyphs(self, testufo): for glyph in testufo: glyph.clearContours() compiler = OutlineOTFCompiler(testufo) compiler.compile() assert compiler.otf["hhea"].advanceWidthMax == 600 assert compiler.otf["hhea"].minLeftSideBearing == 0 assert compiler.otf["hhea"].minRightSideBearing == 0 assert compiler.otf["hhea"].xMaxExtent == 0 def test_optimized_default_and_nominal_widths(self, FontClass): ufo = FontClass() ufo.info.unitsPerEm = 1000 for glyphName, width in ( (".notdef", 500), ("space", 250), ("a", 388), ("b", 410), ("c", 374), ("d", 374), ("e", 388), ("f", 410), ("g", 388), ("h", 410), ("i", 600), ("j", 600), ("k", 600), ("l", 600), ): glyph = ufo.newGlyph(glyphName) glyph.width = width compiler = OutlineOTFCompiler(ufo) compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_hmtx() compiler.setupTable_CFF() cff = compiler.otf["CFF "].cff topDict = cff[list(cff.keys())[0]] private = topDict.Private assert private.defaultWidthX == 600 assert private.nominalWidthX == 303 charStrings = topDict.CharStrings # the following have width == defaultWidthX, so it's omitted for g in ("i", "j", "k", "l"): assert charStrings.getItemAndSelector(g)[0].program == ["endchar"] # 'space' has width 250, so the width encoded in its charstring is: # 250 - nominalWidthX assert charStrings.getItemAndSelector("space")[0].program == [-53, "endchar"] def test_optimized_default_but_no_nominal_widths(self, FontClass): ufo = FontClass() ufo.info.familyName = "Test" ufo.info.styleName = "R" ufo.info.ascender = 1 ufo.info.descender = 1 ufo.info.capHeight = 1 ufo.info.xHeight = 1 ufo.info.unitsPerEm = 1000 ufo.info.postscriptDefaultWidthX = 500 for glyphName, width in ( (".notdef", 500), ("space", 500), ("a", 500), ): glyph = ufo.newGlyph(glyphName) glyph.width = width font = compileOTF(ufo) cff = font["CFF 
"].cff private = cff[list(cff.keys())[0]].Private assert private.defaultWidthX == 500 assert private.nominalWidthX == 0 class GlyphOrderTest: def test_compile_original_glyph_order(self, testufo): DEFAULT_ORDER = [ ".notdef", "space", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", ] compiler = OutlineTTFCompiler(testufo) compiler.compile() assert compiler.otf.getGlyphOrder() == DEFAULT_ORDER def test_compile_tweaked_glyph_order(self, testufo): NEW_ORDER = [ ".notdef", "space", "b", "a", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", ] testufo.lib["public.glyphOrder"] = NEW_ORDER compiler = OutlineTTFCompiler(testufo) compiler.compile() assert compiler.otf.getGlyphOrder() == NEW_ORDER def test_compile_strange_glyph_order(self, testufo): """Move space and .notdef to end of glyph ids ufo2ft always puts .notdef first. """ NEW_ORDER = ["b", "a", "c", "d", "space", ".notdef"] EXPECTED_ORDER = [ ".notdef", "b", "a", "c", "d", "space", "e", "f", "g", "h", "i", "j", "k", "l", ] testufo.lib["public.glyphOrder"] = NEW_ORDER compiler = OutlineTTFCompiler(testufo) compiler.compile() assert compiler.otf.getGlyphOrder() == EXPECTED_ORDER class NamesTest: @pytest.mark.parametrize( "prod_names_key, prod_names_value", [(USE_PRODUCTION_NAMES, False), (GLYPHS_DONT_USE_PRODUCTION_NAMES, True)], ids=["useProductionNames", "Don't use Production Names"], ) def test_compile_without_production_names( self, testufo, prod_names_key, prod_names_value ): expected = [ ".notdef", "space", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", ] result = compileTTF(testufo, useProductionNames=False) assert result.getGlyphOrder() == expected testufo.lib[prod_names_key] = prod_names_value result = compileTTF(testufo) assert result.getGlyphOrder() == expected def test_compile_with_production_names(self, testufo): original = [ ".notdef", "space", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", ] modified = [ ".notdef", "uni0020", "uni0061", "uni0062", "uni0063", "uni0064", "uni0065", "uni0066", "uni0067", "uni0068", "uni0069", "uni006A", "uni006B", "uni006C", ] result = compileTTF(testufo) assert result.getGlyphOrder() == original result = compileTTF(testufo, useProductionNames=True) assert result.getGlyphOrder() == modified testufo.lib[USE_PRODUCTION_NAMES] = True result = compileTTF(testufo) assert result.getGlyphOrder() == modified def test_postprocess_production_names_no_notdef(self, testufo): import ufo2ft del testufo[".notdef"] assert ".notdef" not in testufo result = compileTTF(testufo, useProductionNames=False) assert ".notdef" in result.getGlyphOrder() pp = ufo2ft.postProcessor.PostProcessor(result, testufo, glyphSet=None) try: f = pp.process(useProductionNames=True) except Exception as e: pytest.xfail("Unexpected exception: " + str(e)) assert ".notdef" in f.getGlyphOrder() CUSTOM_POSTSCRIPT_NAMES = { ".notdef": ".notdef", "space": "foo", "a": "bar", "b": "baz", "c": "meh", "d": "doh", "e": "bim", "f": "bum", "g": "bam", "h": "bib", "i": "bob", "j": "bub", "k": "kkk", "l": "lll", } @pytest.mark.parametrize("use_production_names", [None, True]) def test_compile_with_custom_postscript_names(self, testufo, use_production_names): testufo.lib["public.postscriptNames"] = self.CUSTOM_POSTSCRIPT_NAMES result = compileTTF(testufo, useProductionNames=use_production_names) assert sorted(result.getGlyphOrder()) == sorted( self.CUSTOM_POSTSCRIPT_NAMES.values() ) @pytest.mark.parametrize("use_production_names", [None, True]) def test_compile_with_custom_postscript_names_notdef_preserved( self, 
testufo, use_production_names ): custom_names = dict(self.CUSTOM_POSTSCRIPT_NAMES) del custom_names[".notdef"] testufo.lib["public.postscriptNames"] = custom_names result = compileTTF(testufo, useProductionNames=use_production_names) assert result.getGlyphOrder() == [ ".notdef", "foo", "bar", "baz", "meh", "doh", "bim", "bum", "bam", "bib", "bob", "bub", "kkk", "lll", ] def test_warn_name_exceeds_max_length(self, testufo, caplog): long_name = 64 * "a" testufo.newGlyph(long_name) with caplog.at_level(logging.WARNING, logger="ufo2ft.postProcessor"): result = compileTTF(testufo, useProductionNames=True) assert "length exceeds 63 characters" in caplog.text assert long_name in result.getGlyphOrder() def test_duplicate_glyph_names(self, testufo): order = ["ab", "ab.1", "a-b", "a/b", "ba"] testufo.lib["public.glyphOrder"] = order testufo.lib["public.postscriptNames"] = {"ba": "ab"} for name in order: if name not in testufo: testufo.newGlyph(name) result = compileTTF(testufo, useProductionNames=True).getGlyphOrder() assert result[1] == "ab" assert result[2] == "ab.1" assert result[3] == "ab.2" assert result[4] == "ab.3" assert result[5] == "ab.4" def test_too_long_production_name(self, testufo): name = "_".join(("a",) * 16) testufo.newGlyph(name) result = compileTTF(testufo, useProductionNames=True).getGlyphOrder() # the production name uniXXXX would exceed the max length so the # original name is used assert name in result class ColrCpalTest: def test_colr_cpal(self, FontClass): testufo = FontClass(getpath("ColorTest.ufo")) assert "com.github.googlei18n.ufo2ft.colorLayerMapping" in testufo.lib assert "com.github.googlei18n.ufo2ft.colorPalettes" in testufo.lib result = compileTTF(testufo) assert "COLR" in result assert "CPAL" in result layers = { gn: [(layer.name, layer.colorID) for layer in layers] for gn, layers in result["COLR"].ColorLayers.items() } assert layers == { "a": [("a.color1", 0), ("a.color2", 1)], "b": [("b.color1", 1), ("b.color2", 0)], "c": [("c.color2", 1), ("c.color1", 0)], } def test_colr_cpal_raw(self, FontClass): testufo = FontClass(getpath("ColorTestRaw.ufo")) assert "com.github.googlei18n.ufo2ft.colorLayers" in testufo.lib assert "com.github.googlei18n.ufo2ft.colorPalettes" in testufo.lib result = compileTTF(testufo) palettes = [ [(c.red, c.green, c.blue, c.alpha) for c in p] for p in result["CPAL"].palettes ] assert palettes == [[(255, 76, 26, 255), (0, 102, 204, 255)]] layers = { gn: [(layer.name, layer.colorID) for layer in layers] for gn, layers in result["COLR"].ColorLayers.items() } assert layers == {"a": [("a.color1", 0), ("a.color2", 1)]} def test_colr_cpal_otf(self, FontClass): testufo = FontClass(getpath("ColorTest.ufo")) assert "com.github.googlei18n.ufo2ft.colorLayerMapping" in testufo.lib assert "com.github.googlei18n.ufo2ft.colorPalettes" in testufo.lib result = compileOTF(testufo) assert "COLR" in result assert "CPAL" in result layers = { gn: [(layer.name, layer.colorID) for layer in layers] for gn, layers in result["COLR"].ColorLayers.items() } assert layers == { "a": [("a.color1", 0), ("a.color2", 1)], "b": [("b.color1", 1), ("b.color2", 0)], "c": [("c.color2", 1), ("c.color1", 0)], } def test_colr_cpal_interpolatable_ttf(self, FontClass): testufo = FontClass(getpath("ColorTest.ufo")) assert "com.github.googlei18n.ufo2ft.colorLayerMapping" in testufo.lib assert "com.github.googlei18n.ufo2ft.colorPalettes" in testufo.lib result = list(compileInterpolatableTTFs([testufo]))[0] assert "COLR" in result assert "CPAL" in result layers = { gn: [(layer.name, 
layer.colorID) for layer in layers] for gn, layers in result["COLR"].ColorLayers.items() } assert layers == { "a": [("a.color1", 0), ("a.color2", 1)], "b": [("b.color1", 1), ("b.color2", 0)], "c": [("c.color2", 1), ("c.color1", 0)], } class CmapTest: def test_cmap_BMP(self, testufo): compiler = OutlineOTFCompiler(testufo) otf = compiler.otf = TTFont(sfntVersion="OTTO") compiler.setupTable_cmap() assert "cmap" in otf cmap = otf["cmap"] assert len(cmap.tables) == 2 cmap4_0_3 = cmap.tables[0] cmap4_3_1 = cmap.tables[1] assert (cmap4_0_3.platformID, cmap4_0_3.platEncID) == (0, 3) assert (cmap4_3_1.platformID, cmap4_3_1.platEncID) == (3, 1) assert cmap4_0_3.language == cmap4_3_1.language assert cmap4_0_3.language == 0 mapping = {c: chr(c) for c in range(0x61, 0x6D)} mapping[0x20] = "space" assert cmap4_0_3.cmap == cmap4_3_1.cmap assert cmap4_0_3.cmap == mapping def test_cmap_nonBMP_with_UVS(self, testufo): u1F170 = testufo.newGlyph("u1F170") u1F170.unicode = 0x1F170 testufo.newGlyph("u1F170.text") testufo.lib["public.unicodeVariationSequences"] = { "FE0E": { "1F170": "u1F170.text", }, "FE0F": { "1F170": "u1F170", }, } compiler = OutlineOTFCompiler(testufo) otf = compiler.compile() assert "cmap" in otf cmap = otf["cmap"] cmap.compile(otf) assert len(cmap.tables) == 5 cmap4_0_3 = cmap.tables[0] cmap12_0_4 = cmap.tables[1] cmap14_0_5 = cmap.tables[2] cmap4_3_1 = cmap.tables[3] cmap12_3_10 = cmap.tables[4] assert (cmap4_0_3.platformID, cmap4_0_3.platEncID) == (0, 3) assert (cmap4_3_1.platformID, cmap4_3_1.platEncID) == (3, 1) assert cmap4_0_3.language == cmap4_3_1.language assert cmap4_0_3.language == 0 mapping = {c: chr(c) for c in range(0x61, 0x6D)} mapping[0x20] = "space" assert cmap4_0_3.cmap == cmap4_3_1.cmap assert cmap4_0_3.cmap == mapping assert (cmap12_0_4.platformID, cmap12_0_4.platEncID) == (0, 4) assert (cmap12_3_10.platformID, cmap12_3_10.platEncID) == (3, 10) assert cmap12_0_4.language == cmap12_3_10.language assert cmap12_0_4.language == 0 mapping[0x1F170] = "u1F170" assert cmap12_0_4.cmap == cmap12_3_10.cmap assert cmap12_0_4.cmap == mapping assert (cmap14_0_5.platformID, cmap14_0_5.platEncID) == (0, 5) assert cmap14_0_5.language == 0 assert cmap14_0_5.uvsDict == { 0xFE0E: [(0x1F170, "u1F170.text")], 0xFE0F: [(0x1F170, None)], } ASCII = [chr(c) for c in range(0x20, 0x7E)] @pytest.mark.parametrize( "unicodes, expected", [ [ASCII + ["Þ"], {0}], # Latin 1 [ASCII + ["Ľ"], {1}], # Latin 2: Eastern Europe [ASCII + ["Ľ", "┤"], {1, 58}], # Latin 2 [["Б"], {2}], # Cyrillic [["Б", "Ѕ", "┤"], {2, 57}], # IBM Cyrillic [["Б", "╜", "┤"], {2, 49}], # MS-DOS Russian [["Ά"], {3}], # Greek [["Ά", "½", "┤"], {3, 48}], # IBM Greek [["Ά", "√", "┤"], {3, 60}], # Greek, former 437 G [ASCII + ["İ"], {4}], # Turkish [ASCII + ["İ", "┤"], {4, 56}], # IBM turkish [["א"], {5}], # Hebrew [["א", "√", "┤"], {5, 53}], # Hebrew [["ر"], {6}], # Arabic [["ر", "√"], {6, 51}], # Arabic [["ر", "√", "┤"], {6, 51, 61}], # Arabic; ASMO 708 [ASCII + ["ŗ"], {7}], # Windows Baltic [ASCII + ["ŗ", "┤"], {7, 59}], # MS-DOS Baltic [ASCII + ["₫"], {8}], # Vietnamese [["ๅ"], {16}], # Thai [["エ"], {17}], # JIS/Japan [["ㄅ"], {18}], # Chinese: Simplified chars [["ㄱ"], {19}], # Korean wansung [["央"], {20}], # Chinese: Traditional chars [["곴"], {21}], # Korean Johab [ASCII + ["♥"], {30}], # OEM Character Set [ASCII + ["þ", "┤"], {54}], # MS-DOS Icelandic [ASCII + ["╚"], {62, 63}], # WE/Latin 1 [ASCII + ["┤", "√", "Å"], {50}], # MS-DOS Nordic [ASCII + ["┤", "√", "é"], {52}], # MS-DOS Canadian French [ASCII + ["┤", "√", "õ"], {55}], # 
MS-DOS Portuguese [ASCII + ["‰", "∑"], {29}], # Macintosh Character Set (US Roman) [[" ", "0", "1", "2", "අ"], {0}], # always fallback to Latin 1 ], ) def test_calcCodePageRanges(emptyufo, unicodes, expected): font = emptyufo for i, c in enumerate(unicodes): font.newGlyph("glyph%d" % i).unicode = ord(c) compiler = OutlineOTFCompiler(font) compiler.compile() assert compiler.otf["OS/2"].ulCodePageRange1 == intListToNum( expected, start=0, length=32 ) assert compiler.otf["OS/2"].ulCodePageRange2 == intListToNum( expected, start=32, length=32 ) def test_custom_layer_compilation(layertestrgufo): ufo = layertestrgufo font_otf = compileOTF(ufo, layerName="Medium") assert font_otf.getGlyphOrder() == [".notdef", "e"] font_ttf = compileTTF(ufo, layerName="Medium") assert font_ttf.getGlyphOrder() == [".notdef", "e"] def test_custom_layer_compilation_interpolatable(layertestrgufo, layertestbdufo): ufo1 = layertestrgufo ufo2 = layertestbdufo master_ttfs = list( compileInterpolatableTTFs([ufo1, ufo1, ufo2], layerNames=[None, "Medium", None]) ) assert master_ttfs[0].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] assert master_ttfs[1].getGlyphOrder() == [".notdef", "e"] assert master_ttfs[2].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] sparse_tables = [tag for tag in master_ttfs[1].keys() if tag != "GlyphOrder"] assert SPARSE_TTF_MASTER_TABLES.issuperset(sparse_tables) @pytest.mark.parametrize("inplace", [False, True], ids=["not inplace", "inplace"]) def test_custom_layer_compilation_interpolatable_from_ds(designspace, inplace): result = compileInterpolatableTTFsFromDS(designspace, inplace=inplace) assert (designspace is result) == inplace master_ttfs = [s.font for s in result.sources] assert master_ttfs[0].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] assert master_ttfs[1].getGlyphOrder() == [".notdef", "e"] assert master_ttfs[2].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] sparse_tables = [tag for tag in master_ttfs[1].keys() if tag != "GlyphOrder"] assert SPARSE_TTF_MASTER_TABLES.issuperset(sparse_tables) # sentinel value used by varLib to ignore the post table for this sparse # master when building the MVAR table assert master_ttfs[1]["post"].underlinePosition == -0x8000 assert master_ttfs[1]["post"].underlineThickness == -0x8000 @pytest.mark.parametrize("inplace", [False, True], ids=["not inplace", "inplace"]) def test_custom_layer_compilation_interpolatable_otf_from_ds(designspace, inplace): result = compileInterpolatableOTFsFromDS(designspace, inplace=inplace) assert (designspace is result) == inplace master_otfs = [s.font for s in result.sources] assert master_otfs[0].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] assert master_otfs[1].getGlyphOrder() == [".notdef", "e"] assert master_otfs[2].getGlyphOrder() == [ ".notdef", "a", "e", "s", "dotabovecomb", "edotabove", ] sparse_tables = [tag for tag in master_otfs[1].keys() if tag != "GlyphOrder"] assert SPARSE_OTF_MASTER_TABLES.issuperset(sparse_tables) def test_compilation_from_ds_missing_source_font(designspace): designspace.sources[0].font = None with pytest.raises(AttributeError, match="missing required 'font'"): compileInterpolatableTTFsFromDS(designspace) def test_compile_empty_ufo(FontClass): ufo = FontClass() font = compileTTF(ufo) assert font["name"].getName(1, 3, 1).toUnicode() == "New Font" assert font["name"].getName(2, 3, 1).toUnicode() == "Regular" assert 
font["name"].getName(4, 3, 1).toUnicode() == "New Font Regular" assert font["head"].unitsPerEm == 1000 assert font["OS/2"].sTypoAscender == 800 assert font["OS/2"].sCapHeight == 700 assert font["OS/2"].sxHeight == 500 assert font["OS/2"].sTypoDescender == -200 def test_pass_on_conversion_error(FontClass): ufo = FontClass() ufo.info.unitsPerEm = 2000 # Draw quarter circle glyph = ufo.newGlyph("test") pen = glyph.getPointPen() pen.beginPath() pen.addPoint((0, 43), segmentType="line") pen.addPoint((25, 43)) pen.addPoint((43, 25)) pen.addPoint((43, 0), segmentType="curve") pen.addPoint((0, 0), segmentType="line") pen.endPath() font1 = compileTTF(ufo) # Default error: 0.001 font2 = compileTTF(ufo, cubicConversionError=0.0005) # One off-curve: font1_coords = list(font1["glyf"]["test"].coordinates) assert font1_coords == [(0, 43), (0, 0), (43, 0), (43, 43)] # Two off-curves: font2_coords = list(font2["glyf"]["test"].coordinates) assert font2_coords == [(0, 43), (0, 0), (43, 0), (43, 19), (19, 43)] if __name__ == "__main__": import sys sys.exit(pytest.main(sys.argv)) ufo2ft-2.30.0/tests/preProcessor_test.py000066400000000000000000000404131434012334300202410ustar00rootroot00000000000000import logging import os import pytest from cu2qu.ufo import CURVE_TYPE_LIB_KEY from fontTools import designspaceLib import ufo2ft from ufo2ft.constants import ( COLOR_LAYER_MAPPING_KEY, COLOR_LAYERS_KEY, COLOR_PALETTES_KEY, ) from ufo2ft.filters import FILTERS_KEY, loadFilterFromString from ufo2ft.filters.explodeColorLayerGlyphs import ExplodeColorLayerGlyphsFilter from ufo2ft.preProcessor import ( TTFInterpolatablePreProcessor, TTFPreProcessor, _init_explode_color_layer_glyphs_filter, ) def getpath(filename): dirname = os.path.dirname(__file__) return os.path.join(dirname, "data", filename) def glyph_has_qcurve(ufo, glyph_name): return any( s.segmentType == "qcurve" for contour in ufo[glyph_name] for s in contour ) class TTFPreProcessorTest: def test_no_inplace(self, FontClass): ufo = FontClass(getpath("TestFont.ufo")) glyphSet = TTFPreProcessor(ufo, inplace=False).process() assert not glyph_has_qcurve(ufo, "c") assert glyph_has_qcurve(glyphSet, "c") assert CURVE_TYPE_LIB_KEY not in ufo.layers.defaultLayer.lib def test_inplace_remember_curve_type(self, FontClass, caplog): caplog.set_level(logging.ERROR) ufo = FontClass(getpath("TestFont.ufo")) assert CURVE_TYPE_LIB_KEY not in ufo.lib assert CURVE_TYPE_LIB_KEY not in ufo.layers.defaultLayer.lib assert not glyph_has_qcurve(ufo, "c") TTFPreProcessor(ufo, inplace=True, rememberCurveType=True).process() assert CURVE_TYPE_LIB_KEY not in ufo.lib assert ufo.layers.defaultLayer.lib[CURVE_TYPE_LIB_KEY] == "quadratic" assert glyph_has_qcurve(ufo, "c") logger = "ufo2ft.filters.cubicToQuadratic" with caplog.at_level(logging.INFO, logger=logger): TTFPreProcessor(ufo, inplace=True, rememberCurveType=True).process() assert len(caplog.records) == 1 assert "Curves already converted to quadratic" in caplog.text assert glyph_has_qcurve(ufo, "c") def test_inplace_no_remember_curve_type(self, FontClass): ufo = FontClass(getpath("TestFont.ufo")) assert CURVE_TYPE_LIB_KEY not in ufo.lib assert CURVE_TYPE_LIB_KEY not in ufo.layers.defaultLayer.lib for _ in range(2): TTFPreProcessor(ufo, inplace=True, rememberCurveType=False).process() assert CURVE_TYPE_LIB_KEY not in ufo.lib assert CURVE_TYPE_LIB_KEY not in ufo.layers.defaultLayer.lib assert glyph_has_qcurve(ufo, "c") def test_custom_filters(self, FontClass): ufo1 = FontClass(getpath("TestFont.ufo")) ufo1.lib[FILTERS_KEY] = [ {"name": 
"transformations", "kwargs": {"OffsetX": -40}, "pre": True} ] ufo2 = FontClass(getpath("TestFont.ufo")) ufo2.lib[FILTERS_KEY] = [{"name": "transformations", "kwargs": {"OffsetY": 10}}] glyphSets0 = TTFPreProcessor(ufo1).process() glyphSets1 = TTFPreProcessor(ufo2).process() assert (glyphSets0["a"][0][0].x - glyphSets1["a"][0][0].x) == -40 assert (glyphSets1["a"][0][0].y - glyphSets0["a"][0][0].y) == 10 def test_custom_filters_as_argument(self, FontClass): from ufo2ft.filters import RemoveOverlapsFilter, TransformationsFilter ufo1 = FontClass(getpath("TestFont.ufo")) ufo2 = FontClass(getpath("TestFont.ufo")) filter1 = RemoveOverlapsFilter(backend="pathops") filter2 = TransformationsFilter(include=["d"], pre=True, OffsetY=-200) filter3 = TransformationsFilter(OffsetX=10) glyphSets0 = TTFPreProcessor( ufo1, filters=[filter1, filter2, filter3] ).process() glyphSets1 = TTFPreProcessor( ufo2, filters=[filter1, filter2, filter3] ).process() # Both UFOs have the same filters applied assert (glyphSets0["a"][0][0].x - glyphSets1["a"][0][0].x) == 0 # "a" has initially its starting point at (66, 0) assert (glyphSets0["a"][0][0].x, glyphSets0["a"][0][0].y) == (76, 0) assert (glyphSets1["a"][0][0].x, glyphSets1["a"][0][0].y) == (76, 0) # A component was shifted to overlap with another in a pre-filter # filter2, before overlaps were removed in a post-filter filter1 assert len(glyphSets0["d"].components) == 0 def test_custom_filters_in_both_lib_and_argument_with_ellipsis(self, FontClass): from ufo2ft.filters import TransformationsFilter ufo = FontClass(getpath("TestFont.ufo")) ufo.lib[FILTERS_KEY] = [ {"name": "transformations", "kwargs": {"OffsetX": 10}, "pre": True} ] glyphSet = TTFPreProcessor( ufo, filters=[..., TransformationsFilter(OffsetY=-10)] ).process() a = glyphSet["a"] assert (a[0][0].x, a[0][0].y) == (ufo["a"][0][0].x + 10, ufo["a"][0][0].y - 10) class TTFInterpolatablePreProcessorTest: def test_no_inplace(self, FontClass): ufo1 = FontClass(getpath("TestFont.ufo")) ufo2 = FontClass(getpath("TestFont.ufo")) ufos = [ufo1, ufo2] assert CURVE_TYPE_LIB_KEY not in ufo1.lib assert CURVE_TYPE_LIB_KEY not in ufo1.layers.defaultLayer.lib assert not glyph_has_qcurve(ufo1, "c") glyphSets = TTFInterpolatablePreProcessor(ufos, inplace=False).process() for i in range(2): assert glyph_has_qcurve(glyphSets[i], "c") assert CURVE_TYPE_LIB_KEY not in ufos[i].lib assert CURVE_TYPE_LIB_KEY not in ufos[i].layers.defaultLayer.lib def test_inplace_remember_curve_type(self, FontClass): ufo1 = FontClass(getpath("TestFont.ufo")) ufo2 = FontClass(getpath("TestFont.ufo")) ufos = [ufo1, ufo2] assert CURVE_TYPE_LIB_KEY not in ufo1.lib assert CURVE_TYPE_LIB_KEY not in ufo1.layers.defaultLayer.lib assert not glyph_has_qcurve(ufo1, "c") TTFInterpolatablePreProcessor( ufos, inplace=True, rememberCurveType=True ).process() assert ufo1.layers.defaultLayer.lib[CURVE_TYPE_LIB_KEY] == "quadratic" assert glyph_has_qcurve(ufo1, "c") assert ufo2.layers.defaultLayer.lib[CURVE_TYPE_LIB_KEY] == "quadratic" assert glyph_has_qcurve(ufo2, "c") def test_inplace_no_remember_curve_type(self, FontClass): ufo1 = FontClass(getpath("TestFont.ufo")) ufo2 = FontClass(getpath("TestFont.ufo")) ufos = [ufo1, ufo2] for _ in range(2): TTFInterpolatablePreProcessor( ufos, inplace=True, rememberCurveType=False ).process() assert CURVE_TYPE_LIB_KEY not in ufo1.layers.defaultLayer.lib assert CURVE_TYPE_LIB_KEY not in ufo2.layers.defaultLayer.lib assert glyph_has_qcurve(ufo1, "c") assert glyph_has_qcurve(ufo2, "c") def test_custom_filters(self, FontClass): 
ufo1 = FontClass(getpath("TestFont.ufo")) ufo1.lib[FILTERS_KEY] = [ {"name": "transformations", "kwargs": {"OffsetX": -40}, "pre": True} ] ufo2 = FontClass(getpath("TestFont.ufo")) ufo2.lib[FILTERS_KEY] = [{"name": "transformations", "kwargs": {"OffsetY": 10}}] ufos = [ufo1, ufo2] glyphSets = TTFInterpolatablePreProcessor(ufos).process() assert (glyphSets[0]["a"][0][0].x - glyphSets[1]["a"][0][0].x) == -40 assert (glyphSets[1]["a"][0][0].y - glyphSets[0]["a"][0][0].y) == 10 def test_custom_filters_as_argument(self, FontClass): ufo1 = FontClass(getpath("TestFont.ufo")) ufo2 = FontClass(getpath("TestFont.ufo")) filter1 = loadFilterFromString("RemoveOverlapsFilter(backend='pathops')") filter2 = loadFilterFromString( "TransformationsFilter(OffsetY=-200, include=['d'], pre=True)" ) filter3 = loadFilterFromString("TransformationsFilter(OffsetX=10)") ufos = [ufo1, ufo2] glyphSets = TTFInterpolatablePreProcessor( ufos, filters=[filter1, filter2, filter3], ).process() # Both UFOs have the same filters applied assert (glyphSets[0]["a"][0][0].x - glyphSets[1]["a"][0][0].x) == 0 # "a" has initially its starting point at (66, 0) assert (glyphSets[0]["a"][0][0].x, glyphSets[0]["a"][0][0].y) == (76, 0) assert (glyphSets[1]["a"][0][0].x, glyphSets[1]["a"][0][0].y) == (76, 0) # A component was shifted to overlap with another in a pre-filter # filter2, before overlaps were removed in a post-filter filter1 assert len(glyphSets[0]["d"].components) == 0 def test_custom_filters_in_both_lib_and_argument_with_ellipsis(self, FontClass): from ufo2ft.filters import TransformationsFilter ufo1 = FontClass(getpath("TestFont.ufo")) ufo1.lib[FILTERS_KEY] = [ {"name": "transformations", "kwargs": {"OffsetX": 10}, "pre": True} ] ufo2 = FontClass(getpath("TestFont.ufo")) ufo2.lib[FILTERS_KEY] = [ {"name": "transformations", "kwargs": {"OffsetX": 20}, "pre": True} ] glyphSets = TTFInterpolatablePreProcessor( [ufo1, ufo2], filters=[..., TransformationsFilter(OffsetY=-10)] ).process() a1 = glyphSets[0]["a"] assert (a1[0][0].x, a1[0][0].y) == ( ufo1["a"][0][0].x + 10, ufo1["a"][0][0].y - 10, ) a2 = glyphSets[1]["a"] assert (a2[0][0].x, a2[0][0].y) == ( ufo2["a"][0][0].x + 20, ufo2["a"][0][0].y - 10, ) class SkipExportGlyphsTest: def test_skip_export_glyphs_filter(self, FontClass): from ufo2ft.util import _GlyphSet ufo = FontClass(getpath("IncompatibleMasters/NewFont-Regular.ufo")) skipExportGlyphs = ["b", "d"] glyphSet = _GlyphSet.from_layer(ufo, skipExportGlyphs=skipExportGlyphs) assert set(glyphSet.keys()) == {"a", "c", "e", "f"} assert len(glyphSet["a"]) == 1 assert not glyphSet["a"].components assert len(glyphSet["c"]) == 5 # 4 "d" components decomposed plus 1 outline assert list(c.baseGlyph for c in glyphSet["c"].components) == ["a"] assert len(glyphSet["e"]) == 1 assert list(c.baseGlyph for c in glyphSet["e"].components) == ["c", "c"] assert not glyphSet["f"] assert list(c.baseGlyph for c in glyphSet["f"].components) == ["a", "a"] def test_skip_export_glyphs_filter_nested(self, FontClass): from ufo2ft.util import _GlyphSet ufo = FontClass() glyph_N = ufo.newGlyph("N") glyph_N.width = 100 pen = glyph_N.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((300, 400)) pen.lineTo((0, 400)) pen.closePath() glyph_o = ufo.newGlyph("o") glyph_o.width = 100 pen = glyph_o.getPen() pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((300, 300)) pen.lineTo((0, 300)) pen.closePath() glyph_onumero = ufo.newGlyph("_o.numero") glyph_onumero.width = 100 pen = glyph_onumero.getPen() pen.addComponent("o", (-1, 0, 0, -1, 0, 100)) 
pen.moveTo((0, 0)) pen.lineTo((300, 0)) pen.lineTo((300, 50)) pen.lineTo((0, 50)) pen.closePath() glyph_numero = ufo.newGlyph("numero") glyph_numero.width = 200 pen = glyph_numero.getPen() pen.addComponent("N", (1, 0, 0, 1, 0, 0)) pen.addComponent("_o.numero", (1, 0, 0, 1, 400, 0)) skipExportGlyphs = ["_o.numero"] glyphSet = _GlyphSet.from_layer(ufo, skipExportGlyphs=skipExportGlyphs) assert len(glyphSet["numero"].components) == 1 # The "N" component assert len(glyphSet["numero"]) == 2 # The two contours of "o" and "_o.numero" def test_skip_export_glyphs_designspace(self, FontClass): # Designspace has a public.skipExportGlyphs lib key excluding "b" and "d". designspace = designspaceLib.DesignSpaceDocument.fromfile( getpath("IncompatibleMasters/IncompatibleMasters.designspace") ) for source in designspace.sources: source.font = FontClass( getpath(os.path.join("IncompatibleMasters", source.filename)) ) ufo2ft.compileInterpolatableTTFsFromDS(designspace, inplace=True) for source in designspace.sources: assert source.font.getGlyphOrder() == [".notdef", "a", "c", "e", "f"] gpos_table = source.font["GPOS"].table assert gpos_table.LookupList.Lookup[0].SubTable[0].Coverage.glyphs == [ "a", "e", "f", ] glyphs = source.font["glyf"].glyphs for g in glyphs.values(): g.expand(source.font["glyf"]) assert glyphs["a"].numberOfContours == 1 assert not hasattr(glyphs["a"], "components") assert glyphs["c"].numberOfContours == 6 assert not hasattr(glyphs["c"], "components") assert glyphs["e"].numberOfContours == 13 assert not hasattr(glyphs["e"], "components") assert glyphs["f"].isComposite() def test_skip_export_glyphs_multi_ufo(self, FontClass): # Bold has a public.skipExportGlyphs lib key excluding "b", "d" and "f". ufo1 = FontClass(getpath("IncompatibleMasters/NewFont-Regular.ufo")) ufo2 = FontClass(getpath("IncompatibleMasters/NewFont-Bold.ufo")) fonts = ufo2ft.compileInterpolatableTTFs([ufo1, ufo2], inplace=True) for font in fonts: assert set(font.getGlyphOrder()) == {".notdef", "a", "c", "e"} gpos_table = font["GPOS"].table assert gpos_table.LookupList.Lookup[0].SubTable[0].Coverage.glyphs == ["a"] glyphs = font["glyf"].glyphs for g in glyphs.values(): g.expand(font["glyf"]) assert glyphs["a"].numberOfContours == 1 assert not hasattr(glyphs["a"], "components") assert glyphs["c"].numberOfContours == 6 assert not hasattr(glyphs["c"], "components") assert glyphs["e"].numberOfContours == 13 assert not hasattr(glyphs["e"], "components") def test_skip_export_glyphs_single_ufo(self, FontClass): # UFO has a public.skipExportGlyphs lib key excluding "b", "d" and "f". 
        ufo = FontClass(getpath("IncompatibleMasters/NewFont-Bold.ufo"))
        font = ufo2ft.compileTTF(ufo, inplace=True)

        assert set(font.getGlyphOrder()) == {".notdef", "a", "c", "e"}
        gpos_table = font["GPOS"].table
        assert gpos_table.LookupList.Lookup[0].SubTable[0].Coverage.glyphs == ["a"]

        glyphs = font["glyf"].glyphs
        for g in glyphs.values():
            g.expand(font["glyf"])
        assert glyphs["a"].numberOfContours == 1
        assert not hasattr(glyphs["a"], "components")
        assert glyphs["c"].numberOfContours == 6
        assert not hasattr(glyphs["c"], "components")
        assert glyphs["e"].numberOfContours == 13
        assert not hasattr(glyphs["e"], "components")


@pytest.fixture
def color_ufo(FontClass):
    ufo = FontClass()
    ufo.lib[COLOR_PALETTES_KEY] = [[(1, 0.3, 0.1, 1), (0, 0.4, 0.8, 1)]]
    return ufo


class InitExplodeColorLayerGlyphsFilterTest:
    def test_no_color_palettes(self, FontClass):
        ufo = FontClass()
        filters = []
        _init_explode_color_layer_glyphs_filter(ufo, filters)
        assert not filters

    def test_no_color_layer_mapping(self, color_ufo):
        filters = []
        _init_explode_color_layer_glyphs_filter(color_ufo, filters)
        assert not filters

    def test_explicit_color_layers(self, color_ufo):
        color_ufo.lib[COLOR_LAYERS_KEY] = {"a": [("a.z_0", 1), ("a.z_1", 0)]}
        filters = []
        _init_explode_color_layer_glyphs_filter(color_ufo, filters)
        assert not filters

    def test_font_color_layer_mapping(self, color_ufo):
        color_ufo.lib[COLOR_LAYER_MAPPING_KEY] = [("z_0", 1), ("z_1", 0)]
        filters = []
        _init_explode_color_layer_glyphs_filter(color_ufo, filters)
        assert isinstance(filters[0], ExplodeColorLayerGlyphsFilter)

    def test_glyph_color_layer_mapping(self, color_ufo):
        color_ufo.newGlyph("a").lib[COLOR_LAYER_MAPPING_KEY] = [("z_0", 0), ("z_1", 1)]
        filters = []
        _init_explode_color_layer_glyphs_filter(color_ufo, filters)
        assert isinstance(filters[0], ExplodeColorLayerGlyphsFilter)

ufo2ft-2.30.0/tests/testSupport.py

import contextlib
import os
import sys
import types


class _TempModule:
    """Temporarily replace a module in sys.modules with an empty namespace"""

    def __init__(self, mod_name):
        mod_name = str(mod_name)
        self.mod_name = mod_name
        self.module = types.ModuleType(mod_name)
        self._saved_module = []

    def __enter__(self):
        mod_name = self.mod_name
        try:
            self._saved_module.append(sys.modules[mod_name])
        except KeyError:
            pass
        sys.modules[mod_name] = self.module
        return self

    def __exit__(self, *args):
        if self._saved_module:
            sys.modules[self.mod_name] = self._saved_module[0]
        else:
            del sys.modules[self.mod_name]
        self._saved_module = []


@contextlib.contextmanager
def pushd(target):
    saved = os.getcwd()
    os.chdir(target)
    try:
        yield saved
    finally:
        os.chdir(saved)

ufo2ft-2.30.0/tox.ini

[tox]
envlist = lint, py3{7,8,9}-cov, htmlcov
skip_missing_interpreters = true

[testenv]
deps =
    -r requirements.txt
    -r dev-requirements.txt
; download the latest pip, setuptools and wheel when creating the venv
download = true
commands =
    # run the test suite against the package installed inside tox env.
    # We use parallel mode and then combine later so that coverage.py will take
    # paths like .tox/py37/lib/python3.7/site-packages/fontTools and collapse
    # them into Lib/fontTools.
    cov: coverage run --parallel-mode -m pytest {posargs}
    !cov: pytest {posargs}

[testenv:lint]
skip_install = true
deps =
    -r dev-requirements.txt
commands =
    black --check --diff .
    isort --check-only --diff .
    flake8

[testenv:htmlcov]
deps =
    coverage
skip_install = true
commands =
    coverage combine
    coverage xml
    coverage report
    coverage html

[testenv:codecov]
passenv = *
deps =
    coverage
    codecov
skip_install = true
ignore_outcome = true
commands =
    coverage combine
    codecov --env TOXENV

[flake8]
select = C, E, F, W, B, B9
ignore = E203, E266, E501, W503
max-line-length = 88
exclude =
    .git,
    __pycache__,
    build,
    dist,
    .eggs,
    .tox,
    venv,
    venv*,
    .venv,
    .venv*

[isort]
profile = black