pax_global_header00006660000000000000000000000064146653142700014522gustar00rootroot0000000000000052 comment=b6982a193d0898135bb6d34da6fa3b23cdd152d5 glean_parser-15.0.1/000077500000000000000000000000001466531427000142505ustar00rootroot00000000000000glean_parser-15.0.1/.circleci/000077500000000000000000000000001466531427000161035ustar00rootroot00000000000000glean_parser-15.0.1/.circleci/config.yml000066400000000000000000000151771466531427000201060ustar00rootroot00000000000000--- version: 2.1 commands: test-start: steps: - checkout - run: name: environment command: | echo 'export PATH=.:$HOME/.local/bin:$PATH' >> $BASH_ENV - run: name: Upgrade pip command: | pip install --upgrade --user pip test-min-requirements: steps: - run: name: install minimum requirements command: | # Use requirements-builder to determine the minimum versions of # all requirements and test those # We install requirements-builder itself into its own venv, since # otherwise its dependencies might install newer versions of # glean_parser's dependencies. python3 -m venv .rb .rb/bin/pip install requirements-builder .rb/bin/requirements-builder --level=min setup.py > min_requirements.txt pip install --progress-bar off --user -U -r min_requirements.txt test-python-version: parameters: requirements-file: type: string default: "requirements_dev.txt" steps: - run: name: Install required native dependencies command: | pip install --progress-bar off --user -U -r <> sudo apt update -q sudo apt upgrade -q sudo apt install \ --yes --no-install-recommends \ openjdk-11-jdk-headless \ ruby \ golang-go make install-kotlin-linters - run: name: Install glean_parser # Set CC to something that isn't a working compiler so we # can detect if any of the dependencies require a compiler # to be installed. We can't count on a working compiler # being available to pip on all of the platforms we need to # support, so we need to make sure the dependencies are all # pure Python or provide pre-built wheels. 
command: CC=broken_compiler pip install . --user - run: name: test command: make test-full lint: parameters: requirements-file: type: string default: "requirements_dev.txt" steps: - run: name: install command: | pip install --progress-bar off --user -U -r <> - run: name: install # Set CC to something that isn't a working compiler so we # can detect if any of the dependencies require a compiler # to be installed. We can't count on a working compiler # being available to pip on all of the platforms we need to # support, so we need to make sure the dependencies are all # pure Python or provide pre-built wheels. command: CC=broken_compiler pip install . --user - run: name: lint command: make lint license-check: steps: - run: name: install command: | pip install --user pip-licenses CC=broken_compiler pip install . --user - run: name: license-check command: | pip-licenses --fail-on 'GNU General Public License v3 (GPLv3)' jobs: build-38: docker: - image: cimg/python:3.8-node steps: - test-start - test-python-version build-38-min: docker: - image: cimg/python:3.8-node steps: - test-start - test-min-requirements - test-python-version build-39: docker: - image: cimg/python:3.9-node steps: - test-start - test-python-version build-310: docker: - image: cimg/python:3.10-node steps: - test-start - test-python-version - run: name: make-docs command: | make docs touch docs/_build/html/.nojekyll - persist_to_workspace: root: docs/_build paths: html build-311: docker: - image: cimg/python:3.11-node steps: - test-start - test-python-version build-312: docker: - image: cimg/python:3.12-node steps: - test-start - test-python-version lint: docker: - image: cimg/python:3.12 steps: - test-start - lint license-check: docker: - image: cimg/python:3.12 steps: - test-start - license-check docs-deploy: docker: - image: node:8.10.0 steps: - checkout - add_ssh_keys: fingerprints: - "9b:25:aa:bf:39:b6:4a:e7:c3:52:cf:ab:23:81:3d:52" - attach_workspace: at: docs/_build - run: name: install gh-pages 
command: | npm install -g --silent gh-pages@2.0.1 git config user.email "glean-ci@nowhere.com" git config user.name "glean-ci" - run: name: deploy command: | gh-pages --dotfiles --message "[ci skip] updates" --dist docs/_build/html pypi-deploy: docker: - image: cimg/python:3.8 steps: - checkout - run: name: environment command: | echo 'export PATH=.:$HOME/.local/bin:$PATH' >> $BASH_ENV - run: name: Upgrade pip command: | pip install --upgrade --user pip - run: name: install Python dependencies command: | pip install --user -U -r requirements_dev.txt - run: name: deploy # Requires that the TWINE_USERNAME and TWINE_PASSWORD environment # variables are configured in CircleCI's environment variables. command: | make release workflows: version: 2 build: jobs: - lint: filters: tags: only: /.*/ - license-check: filters: tags: only: /.*/ - build-38: filters: tags: only: /.*/ - build-38-min: filters: tags: only: /.*/ - build-39: filters: tags: only: /.*/ - build-310: filters: tags: only: /.*/ - build-311: filters: tags: only: /.*/ - build-312: filters: tags: only: /.*/ - docs-deploy: requires: - build-310 filters: branches: only: main - pypi-deploy: requires: - build-38 filters: branches: ignore: /.*/ tags: only: /v[0-9]+(\.[0-9]+)*/ glean_parser-15.0.1/.editorconfig000066400000000000000000000004441466531427000167270ustar00rootroot00000000000000# http://editorconfig.org root = true [*] indent_style = space indent_size = 4 trim_trailing_whitespace = true insert_final_newline = true charset = utf-8 end_of_line = lf [*.bat] indent_style = tab end_of_line = crlf [LICENSE] insert_final_newline = false [Makefile] indent_style = tab glean_parser-15.0.1/.github/000077500000000000000000000000001466531427000156105ustar00rootroot00000000000000glean_parser-15.0.1/.github/CODEOWNERS000066400000000000000000000007411466531427000172050ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # This CODEOWNERS file defines individuals or teams that are responsible # for code in this repository. # See https://help.github.com/articles/about-codeowners/ for details. * @mozilla/glean glean_parser/*_server.py @akkomar tests/*_server.py @akkomar server_telemetry/* @akkomar glean_parser-15.0.1/.github/ISSUE_TEMPLATE.md000066400000000000000000000005031466531427000203130ustar00rootroot00000000000000* Glean Parser version: * Python version: * Operating System: ### Description Describe what you were trying to get done. Tell us what happened, what went wrong, and what you expected to happen. ### What I Did ``` Paste the command(s) you ran and the output. If there was a crash, please include the traceback here. ``` glean_parser-15.0.1/.github/dependabot.yml000066400000000000000000000001521466531427000204360ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" glean_parser-15.0.1/.github/pull_request_template.md000066400000000000000000000010141466531427000225450ustar00rootroot00000000000000### Pull Request checklist ### - [ ] **Quality**: This PR builds and tests run cleanly - `make test` runs without emitting any warnings - `make lint` runs without emitting any errors - [ ] **Tests**: This PR includes thorough tests or an explanation of why it does not - [ ] **Changelog**: This PR includes a changelog entry to `CHANGELOG.md` or an explanation of why it does not need one - Any breaking changes to language binding APIs are noted explicitly glean_parser-15.0.1/.gitignore000066400000000000000000000023321466531427000162400ustar00rootroot00000000000000docs/glean_parser.rst docs/modules.rst # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ 
var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .vscode/ detekt*.jar ktlint glean_parser-15.0.1/.swiftlint.yml000066400000000000000000000002511466531427000170720ustar00rootroot00000000000000identifier_name: # Turn off it complaining about `id` or `let t = title`, etc, but keep # warnings around e.g. enum names. min_length: warning: 0 error: 0 glean_parser-15.0.1/AUTHORS.md000066400000000000000000000007071466531427000157230ustar00rootroot00000000000000# Credits ## Development Lead - Jan-Erik Rediger - Alessio Placitelli ## Contributors See [the full list of contributors](https://github.com/mozilla/glean_parser/graphs/contributors). ## Acknowledgements This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [audreyr/cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage) project template. 
glean_parser-15.0.1/CHANGELOG.md000066400000000000000000000756331466531427000160770ustar00rootroot00000000000000# Changelog ## Unreleased ## 15.0.1 - Rust codegen: use correctly named parameter for events without extras ([#750](https://github.com/mozilla/glean_parser/pull/750)) ## 15.0.0 - **Breaking change:** Updating Kotlin template import statement as part of removing `service-glean` from Android Components. ([bug 1906941](https://bugzilla.mozilla.org/show_bug.cgi?id=1906941)) - **Breaking change:** Do not generate Geckoview Streaming helper code ([#743](https://github.com/mozilla/glean_parser/pull/743)) ## 14.5.2 - Revert updating Kotlin template import statement as part of removing `service-glean` from Android Components. This change was BREAKING. It will be reintroduced in a major release afterwards ([#744](https://github.com/mozilla/glean_parser/pull/744)) ## 14.5.1 - BUGFIX: Rust object metrics: Accept `null` in place of empty arrays ## 14.5.0 - BUGFIX: Fix Rust codegen to properly handle events with multiple types of extras ([bug 1911165](https://bugzilla.mozilla.org/show_bug.cgi?id=1911165)) - Updating Kotlin template import statement as part of removing `service-glean` from Android Components. 
([bug 1906941](https://bugzilla.mozilla.org/show_bug.cgi?id=1906941)) ## 14.4.0 - Fix JS and Ruby server templates to correctly send event extra values as strings ([DENG-4405](https://mozilla-hub.atlassian.net/browse/DENG-4405)) - ENHANCEMENT: Extra keys in `extra_keys:` fields may now contain any printable ASCII characters ([bug 1910976](https://bugzilla.mozilla.org/show_bug.cgi?id=1910976)) ## 14.3.0 - Add the `module_spec` option to the javascript_server outputter ([#726](https://github.com/mozilla/glean_parser/pull/726)) - BUGFIX: Fix the Rust codegen for changes to how `labeled_*` metrics are constructed ([bug 1909244](https://bugzilla.mozilla.org/show_bug.cgi?id=1909244)) - Generate a serializer for array wrappers ([bug 1908157](https://bugzilla.mozilla.org/show_bug.cgi?id=1908157)) ## 14.2.0 - New Metric Types: `labeled_{custom|memory|timing}_distribution` ([bug 1657947](https://bugzilla.mozilla.org/show_bug.cgi?id=1657947)) ## 14.1.3 - Fix Kotlin/Swift code generation for object metrics, now generating top-level typealiases where needed ([#722](https://github.com/mozilla/glean_parser/pull/722)) ## 14.1.2 - ping schedule: Gracefully handle missing ping ([#705](https://github.com/mozilla/glean_parser/pull/705)) ## 14.1.1 - Replace deprecated methods and package ([#699](https://github.com/mozilla/glean_parser/pull/699)) ## 14.1.0 - Add Go log outputter support for datetime (`go_server`) ([#693](https://github.com/mozilla/glean_parser/pull/693)) ## 14.0.1 - BUGFIX: Fix missing `ping_arg` in util.py ([#687](https://github.com/mozilla/glean_parser/pull/687)) ## 14.0.0 - BREAKING CHANGE: Expose the optional `enabled` property on pings, defaulting to `enabled: true` ([#681](https://github.com/mozilla/glean_parser/pull/681)) - BREAKING CHANGE: Support metadata field `ping_schedule` for pings ([bug 1804711](https://bugzilla.mozilla.org/show_bug.cgi?id=1804711)) - Add support for event metric type in server JavaScript outputter 
([DENG-2407](https://mozilla-hub.atlassian.net/browse/DENG-2407)) - Add Swift and Kotlin codegen support for the object metric type object ([#685](https://github.com/mozilla/glean_parser/pull/685)) ## 13.0.1 - Use faster C yaml parser if available ([#677](https://github.com/mozilla/glean_parser/pull/677)) ## 13.0.0 - BREAKING CHANGE: Support metadata field `include_info_sections` ([bug 1866559](https://bugzilla.mozilla.org/show_bug.cgi?id=1866559)) ## 12.0.1 - Fix Rust codegen for object metric type ([#662](https://github.com/mozilla/glean_parser/pull/662)) ## 12.0.0 - Add new metric type object (only Rust codegen support right now) ([#587](https://github.com/mozilla/glean_parser/pull/587)) ## 11.1.0 - Add Go log outputter (`go_server`) ([#645](https://github.com/mozilla/glean_parser/pull/645)) - Add Python log outputter (`python_server`) ([MPP-3642](https://mozilla-hub.atlassian.net/browse/MPP-3642)) ## 11.0.1 - Fix javascript_server template to include non-event metric parameters in #record call for event metrics ([#643](https://github.com/mozilla/glean_parser/pull/643)) - events: Increase extra key limit to 50 ([Bug 1869429](https://bugzilla.mozilla.org/show_bug.cgi?id=1869429)) ## 11.0.0 - Add updated logging logic for Ruby Server ([#642](https://github.com/mozilla/glean_parser/pull/642)) - Add support for event metric type in server-side JavaScript outputter ([DENG-1736](https://mozilla-hub.atlassian.net/browse/DENG-1736)) - BREAKING CHANGE: Dropped support for Python 3.7 ([#638](https://github.com/mozilla/glean_parser/pull/638)) - Add official support for Python 3.11+ ([#638](https://github.com/mozilla/glean_parser/pull/638)) ## 10.0.3 - Warn about empty or TODO-tagged data reviews in the list ([#634](https://github.com/mozilla/glean_parser/pull/634)) - Allow `unit` field on all metrics, but warn for all but quantity and custom distribution ([#636](https://github.com/mozilla/glean_parser/pull/636)) ## 10.0.2 - Allow `unit` field for string again, but warn 
about it in the linter ([#634](https://github.com/mozilla/glean_parser/pull/634)) ## 10.0.1 - Allow `unit` field for custom distribution again ([#633](https://github.com/mozilla/glean_parser/pull/633)) ## 10.0.0 - Add Ruby log outputter (`ruby_server`) ([#620](https://github.com/mozilla/glean_parser/pull/620)) - BREAKING CHANE: `ping` lifetime metrics on the events ping are now disallowed ([#625](https://github.com/mozilla/glean_parser/pull/625)) - Disallow `unit` field for anything but quantity ([#630](https://github.com/mozilla/glean_parser/pull/630)). Note that this was already considered the case, now the code enforces it. ## 9.0.0 - BREAKING CHANGE: Dropped support for Python 3.6 ([#615](https://github.com/mozilla/glean_parser/issues/615)) - Allow metadata to configure precise timestamps in pings ([#592](https://github.com/mozilla/glean_parser/pull/592)) ## 8.1.1 - Small updates to the `javascript_server` tempalte to address lint warnings ([#598](https://github.com/mozilla/glean_parser/pull/598)) ## 8.1.0 - Increased the maximum metric name length in version 2.0.0 schema ([#596](https://github.com/mozilla/glean_parser/pull/596)) ## 8.0.0 - BREAKING CHANGE: Remove exposed `lint_yaml_files` function ([#580](https://github.com/mozilla/glean_parser/pull/580)) - Rust: Removed `__glean_metric_maps` from the Rust Jinja template. 
This functionality is better placed downstream ([Bug 1816526](https://bugzilla.mozilla.org/show_bug.cgi?id=1816526)) - New lint: check that all referenced pings are known ([#584](https://github.com/mozilla/glean_parser/pull/584)) - Add experimental server-side JavaScript outputter ([FXA-7922](https://mozilla-hub.atlassian.net/browse/FXA-7922)) ## 7.2.1 - Unbreak last minor release ([#579](https://github.com/mozilla/glean_parser/pull/579)) ## 7.2.0 - Remove yamllint integration ([#578](https://github.com/mozilla/glean_parser/pull/578)) ## 7.1.0 - ENHANCEMENT: Labels in `labels:` fields may now contain any printable ASCII characters ([bug 1672273](https://bugzilla.mozilla.org/show_bug.cgi?id=1672273)) - BUGFIX: Enforce ordering of generation of Pings, Metrics and Tags such that order is deterministic ([bug 1820334](https://bugzilla.mozilla.org/show_bug.cgi?id=1820334)) ## 7.0.0 - BUGFIX: Remove internal-only fields from serialized metrics data ([#550](https://github.com/mozilla/glean_parser/pull/550)) - FEATURE: New subcommand: `dump` to dump the metrics data as JSON ([#550](https://github.com/mozilla/glean_parser/pull/550)) - BUGFIX: Kotlin: Generate enums with the right generic bound for ping reason codes ([#551](https://github.com/mozilla/glean_parser/pull/551)). - **BREAKING CHANGE:** Fully remove support for the old events API ([#549](https://github.com/mozilla/glean_parser/pull/549)) Adds a new lint `OLD_EVENT_API` to warn about missing `type` attributes on event extra keys. Note that the Glean SDK already dropped support for the old events API. 
## 6.4.0 - BUGFIX: Correct code generation for labeled metrics in Rust ([#533](https://github.com/mozilla/glean_parser/pull/533)) - BUGFIX: Correctly serialize `Rates` for Rust code ([#530](https://github.com/mozilla/glean_parser/pull/530)) - Feature: Wrap labeled metric's static labels list as CoW strings (requires updated Glean support) ([#534](https://github.com/mozilla/glean_parser/pull/534)) ## 6.3.0 - events: Increase extras limit to 15 ([bug 1798713](https://bugzilla.mozilla.org/show_bug.cgi?id=1798713)) ## 6.2.1 - Add support for Rate, Denominator and Numerator metrics for JavaScript. ([bug 1793777](https://bugzilla.mozilla.org/show_bug.cgi?id=1793777)) ## 6.2.0 - [data-review] Use a template to generate the Data Review Request template ([bug 1772605](https://bugzilla.mozilla.org/show_bug.cgi?id=1772605)) - Make tag and no\_lint order deterministic ([#518](https://github.com/mozilla/glean_parser/pull/518)) ## 6.1.2 - Swift: Add a conditional `import Foundation` to support generating metrics when Glean is delivered via the AppServices iOS megazord ## 6.1.1 - Rust: Use correct name for a ping in generated code. ## 6.1.0 - [data-review] Include extra keys' names and descriptions in data review template ([bug 1767027](https://bugzilla.mozilla.org/show_bug.cgi?id=1767027)) - Raise limit on number of statically-defined labels to 4096. ([bug 1772163](https://bugzilla.mozilla.org/show_bug.cgi?id=1772163)) - Fix Rust code generation for new UniFFI interface ([#491](https://github.com/mozilla/glean_parser/pull/491), [#494](https://github.com/mozilla/glean_parser/pull/494), [#495](https://github.com/mozilla/glean_parser/pull/495)) ## 6.0.1 - Relax version requirement for MarkupSafe. Now works with MarkupSafe v1.1.1 to v2.0.1 inclusive again. 
## 6.0.0 - BUGFIX: Add missing `extra_args` to Rust constructor generation ([bug 1765855](https://bugzilla.mozilla.org/show_bug.cgi?id=1765855)) - **Breaking change:** `glean_parser` now generates metrics compatible with the UniFFI-powered Glean SDK. This is not backwards-compatible with previous versions. - Generate Rate, Denominator and Numerator metrics for Kotlin and Swift - Explicitly skip Rate, Denominator and Numerator metrics for JavaScript. These will cause a build failure by default, but can be turned into warnings on request. Use `-s fail_rates=false` to enable warning-only mode. ## 5.1.2 - BUGFIX: Revert changes made on v5.1.1. - The issues addressed by those changes, were non-issues and result of misuse of the APIs. ## 5.1.1 - BUGFIX: Fix issues with Swift templates ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494)) - Make metrics and pings all `public` - Make pings `static` ## 5.1.0 - Add support for build info generation for JavaScript and Typescript targets ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494)) ## 5.0.1 - Fix the logic for the metric expiration by version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194)) ## 5.0.0 - Remove C# support ([#436](https://github.com/mozilla/glean_parser/pull/436)). 
- Add support for Rust code generation ([bug 1677434](https://bugzilla.mozilla.org/show_bug.cgi?id=1677434)) - Report an error if no files are passed ([bug 1751730](https://bugzilla.mozilla.org/show_bug.cgi?id=1751730)) - [data-review] Report an error if no metrics match provided bug number ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576)) - [data-review] Include notification_emails in list of those responsible ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576)) - Add support for expiring metrics by the provided major version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194)) ## 4.4.0 - Support global file-level tags in metrics.yaml ([bug 1745283](https://bugzilla.mozilla.org/show_bug.cgi?id=1745283)) - Glinter: Reject metric files if they use `unit` by mistake. It should be `time_unit` ([#432](https://github.com/mozilla/glean_parser/pull/432)). - Automatically generate a build date when generating build info ([#431](https://github.com/mozilla/glean_parser/pull/431)). Enabled for Kotlin and Swift. This can be changed with the `build_date` command line option. `build_date=0` will use a static unix epoch time. `build_date=2022-01-03T17:30:00` will parse the ISO8601 string to use (as a UTC timestamp). Other values will throw an error. Example: glean_parser translate --format kotlin --option build_date=2021-11-01T01:00:00 path/to/metrics.yaml ## 4.3.1 - BUGFIX: Skip tags for code generation ([#409](https://github.com/mozilla/glean_parser/pull/409)) ## 4.3.0 - Support tags in glean parser ([bug 1734011](https://bugzilla.mozilla.org/show_bug.cgi?id=1734011)) ## 4.2.0 - Improve the schema validation error messages. They will no longer include `OrderedDict(...)` on Python 3.7 and later ([bug 1733395](https://bugzilla.mozilla.org/show_bug.cgi?id=1733395)) - Officially support Python 3.10 ## 4.1.1 (2021-09-28) - Update private import paths on Javascript / Typescript templates. 
([bug 1702468](https://bugzilla.mozilla.org/show_bug.cgi?id=1702468)) ## 4.1.0 (2021-09-16) - Add support for Node.js platform on Javascript / Typescript templates. ([bug 1728982](https://bugzilla.mozilla.org/show_bug.cgi?id=1728982)) ## 4.0.0 (2021-08-20) - Add support for Text metric type ([#374](https://github.com/mozilla/glean_parser/pull/374)) - Reserve the `default` ping name. It can't be used as a ping name, but it can be used in `send_in_pings` ([#376](https://github.com/mozilla/glean_parser/pull/376)) ## 3.8.0 (2021-08-18) - Expose ping reasons enum on JavaScript / TypeScript templates. ([bug 1719136](https://bugzilla.mozilla.org/show_bug.cgi?id=1719136)) - Define an interface with the allowed extras for each event on the TypeScript template. ([bug 1693487](https://bugzilla.mozilla.org/show_bug.cgi?id=1693487)) ## 3.7.0 (2021-07-13) - New lint: Check for redundant words in ping names ([#355](https://github.com/mozilla/glean_parser/pull/355)) - Add support for URL metric type ([#361](https://github.com/mozilla/glean_parser/pull/361)) ## 3.6.0 (2021-06-11) - Add a command `data-review` to generate a skeleton Data Review Request for all metrics matching a supplied bug number. ([bug 1704541](https://bugzilla.mozilla.org/show_bug.cgi?id=1704541)) - Enable custom distribution outside of GeckoView (`gecko_datapoint` becomes optional) ## 3.5.0 (2021-06-03) - Transform generated folder into QML Module when building Javascript templates for the Qt platform. ([bug 1707896](https://bugzilla.mozilla.org/show_bug.cgi?id=1707896) - Import the Glean QML module from inside each generated file, removing the requirement to import Glean before importing any of the generated files; - Prodive a `qmldir` file exposing all generated files; - Drop the `namespace` option for Javascript templates; - Add a new `version` option for Javascript templates, required when building for Qt, which expected the Glean QML module version. 
## 3.4.0 (2021-05-28) - Add missing import for Kotlin code ([#339](https://github.com/mozilla/glean_parser/pull/339)) - Use a plain Kotlin type in the generated interface implementation ([#339](https://github.com/mozilla/glean_parser/pull/339)) - Generate additional generics for event metrics ([#339](https://github.com/mozilla/glean_parser/pull/339)) - For Kotlin skip generating `GleanBuildInfo.kt` when requested (with `with_buildinfo=false`) ([#341](https://github.com/mozilla/glean_parser/pull/341)) ## 3.3.2 (2021-05-18) - Fix another bug in the Swift code generation when generating extra keys ([#334](https://github.com/mozilla/glean_parser/pull/334)) ## 3.3.1 (2021-05-18) - Fix Swift code generation bug for pings ([#333](https://github.com/mozilla/glean_parser/pull/333)) ## 3.3.0 (2021-05-18) - Generate new event API construct ([#321](https://github.com/mozilla/glean_parser/pull/321)) ## 3.2.0 (2021-04-28) - Add option to add extra introductory text to generated markdown ([#298](https://github.com/mozilla/glean_parser/pull/298)) - Add support for Qt in Javascript templates ([bug 1706252](https://bugzilla.mozilla.org/show_bug.cgi?id=1706252)) - Javascript templates will now accept the `platform` option. If this option is set to `qt` the generated templates will be Qt compatible. Default value is `webext`. ## 3.1.2 (2021-04-21) - BUGFIX: Remove the "DO NOT COMMIT" notice from the documentation. ## 3.1.1 (2021-04-19) - Recommend to not commit as well as to not edit the generated files. ([bug 1706042](https://bugzilla.mozilla.org/show_bug.cgi?id=1706042)) - BUGFIX: Include import statement for labeled metric subtypes in Javascript and Typescript templates. ## 3.1.0 (2021-04-16) - Add support for labeled metric types in Javascript and Typescript templates. ## 3.0.0 (2021-04-13) - Raise limit on number of statically-defined lables to 100. 
([bug 1702263](https://bugzilla.mozilla.org/show_bug.cgi?id=1702263)) - BUGFIX: Version 2.0.0 of the schema now allows the "special" `glean_.*` ping names for Glean-internal use again. - Remove support for JWE metric types. ## 2.5.0 (2021-02-23) - Add parser and object model support for `rate` metric type. ([bug 1645166](https://bugzilla.mozilla.org/show_bug.cgi?id=1645166)) - Add parser and object model support for telemetry_mirror property. ([bug 1685406](https://bugzilla.mozilla.org/show_bug.cgi?id=1685406)) - Update the Javascript template to match Glean.js expectations. ([bug 1693516](https://bugzilla.mozilla.org/show_bug.cgi?id=1693516)) - Glean.js has updated it's export strategy. It will now export each metric type as an independent module; - Glean.js has dropped support for non ES6 modules. - Add support for generating Typescript code. ([bug 1692157](https://bugzilla.mozilla.org/show_bug.cgi?id=1692157)) - The templates added generate metrics and pings code for Glean.js. ## 2.4.0 (2021-02-18) - **Experimental:** `glean_parser` has a new subcommand `coverage` to convert raw coverage reports into something consumable by coverage tools, such as codecov.io - The path to the file that each metric is defined in is now stored on the `Metric` object in `defined_in["filepath"]`. ## 2.3.0 (2021-02-17) - Leverage the `glean_namespace` to provide correct import when building for Javascript. ## 2.2.0 (2021-02-11) - The Kotlin generator now generates static build information that can be passed into `Glean.initialize` to avoid calling the package manager at runtime. ## 2.1.0 (2021-02-10) - Add support for generating Javascript code. - The templates added generate metrics and pings code for Glean.js. ## 2.0.0 (2021-02-05) - New versions 2.0.0 of the `metrics.yaml` and `pings.yaml` schemas now ship with `glean_parser`. These schemas are different from version 1.0.0 in the following ways: - Bugs must be specified as URLs. Bug numbers are disallowed. 
- The legacy ping names containing underscores are no longer allowed. These included `deletion_request`, `bookmarks_sync`, `history_sync`, `session_end`, `all_pings`, `glean_*`). In these cases, the `_` should be replaced with `-`. To upgrade your app or library to use the new schema, replace the version in the `$schema` value with `2-0-0`. - **Breaking change:** It is now an error to use bug numbers (rather than URLs) in ping definitions. - Add the line number that metrics and pings were originally defined in the yaml files. ## 1.29.1 (2020-12-17) - BUGFIX: Linter output can now be redirected correctly (1675771). ## 1.29.0 (2020-10-07) - **Breaking change:** `glean_parser` will now return an error code when any of the input files do not exist (unless the `--allow-missing-files` flag is passed). - Generated code now includes a comment next to each metric containing the name of the metric in its original `snake_case` form. - When metrics don't provide a `unit` parameter, it is not included in the output (as provided by probe-scraper). ## 1.28.6 (2020-09-24) - BUGFIX: Ensure Kotlin arguments are deterministically ordered ## 1.28.5 (2020-09-14) - Fix deploy step to update pip before deploying to pypi. ## 1.28.4 (2020-09-14) - The `SUPERFLUOUS_NO_LINT` warning has been removed from the glinter. It likely did more harm than good, and makes it hard to make `metrics.yaml` files that pass across different versions of `glean_parser`. - Expired metrics will now produce a linter warning, `EXPIRED_METRIC`. - Expiry dates that are more than 730 days (\~2 years) in the future will produce a linter warning, `EXPIRATION_DATE_TOO_FAR`. - Allow using the Quantity metric type outside of Gecko. - New parser configs `custom_is_expired` and `custom_validate_expires` added. These are both functions that take the `expires` value of the metric and return a bool. (See `Metric.is_expired` and `Metric.validate_expires`). 
These will allow FOG to provide custom validation for its version-based `expires` values. ## 1.28.3 (2020-07-28) - BUGFIX: Support HashSet and Dictionary in the C\## generated code. ## 1.28.2 (2020-07-28) - BUGFIX: Generate valid C\## code when using Labeled metric types. ## 1.28.1 (2020-07-24) - BUGFIX: Add missing column to correctly render markdown tables in generated documentation. ## 1.28.0 (2020-07-23) - **Breaking change:** The internal ping `deletion-request` was misnamed in pings.py causing the linter to not allow use of the correctly named ping for adding legacy ids to. Consuming apps will need to update their metrics.yaml if they are using `deletion_request` in any `send_in_pings` to `deletion-request` after updating. ## 1.27.0 (2020-07-21) - Rename the `data_category` field to `data_sensitivity` to be clearer. ## 1.26.0 (2020-07-21) - Add support for JWE metric types. - Add a `data_sensitivity` field to all metrics for specifying the type of data collected in the field. ## 1.25.0 (2020-07-17) - Add support for generating C\## code. - BUGFIX: The memory unit is now correctly passed to the MemoryDistribution metric type in Swift. ## 1.24.0 (2020-06-30) - BUGFIX: look for metrics in send\_if\_empty pings. Metrics for these kinds of pings were being ignored. ## 1.23.0 (2020-06-27) - Support for Python 3.5 has been dropped. - BUGFIX: The ordering of event extra keys will now match with their enum, fixing a serious bug where keys of extras may not match the correct values in the data payload. See . ## 1.22.0 (2020-05-28) - **Breaking change:** (Swift only) Combine all metrics and pings into a single generated file `Metrics.swift`. ## 1.21.0 (2020-05-25) - `glinter` messages have been improved with more details and to be more actionable. - A maximum of 10 `extra_keys` is now enforced for `event` metric types. - BUGFIX: the `Lifetime` enum values now match the values of the implementation in mozilla/glean. 
## 1.20.4 (2020-05-07) - BUGFIX: yamllint errors are now reported using the correct file name. ## 1.20.3 (2020-05-06) - Support for using `timing_distribution`'s `time_unit` parameter to control the range of acceptable values is documented. The default unit for this use case is `nanosecond` to avoid creating a breaking change. See [bug 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more information. ## 1.20.2 (2020-04-24) - Dependencies that depend on the version of Python being used are now specified using the [Declaring platform specific dependencies syntax in setuptools](https://setuptools.readthedocs.io/en/latest/setuptools.html##declaring-platform-specific-dependencies). This means that more recent versions of dependencies are likely to be installed on Python 3.6 and later, and unnecessary backport libraries won't be installed on more recent Python versions. ## 1.20.1 (2020-04-21) - The minimum version of the runtime dependencies has been lowered to increase compatibility with other tools. These minimum versions are now tested in CI, in addition to testing the latest versions of the dependencies that was already happening in CI. ## 1.20.0 (2020-04-15) - **Breaking change:** glinter errors found during the `translate` command will now return an error code. glinter warnings will be displayed, but not return an error code. - `glean_parser` now produces a linter warning when `user` lifetime metrics are set to expire. See [bug 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854) for additional context. ## 1.19.0 (2020-03-18) - **Breaking change:** The regular expression used to validate labels is stricter and more correct. - Add more information about pings to markdown documentation: - State whether the ping includes client id; - Add list of data review links; - Add list of related bugs links. - `glean_parser` now makes it easier to write external translation functions for different language targets. 
- BUGFIX: `glean_parser` now works on 32-bit Windows. ## 1.18.3 (2020-02-24) - Dropped the `inflection` dependency. - Constrained the `zipp` and `MarkupSafe` transitive dependencies to versions that support Python 3.5. ## 1.18.2 (2020-02-14) - BUGFIX: Fix rendering of first element of reason list. ## 1.18.1 (2020-02-14) - BUGFIX: Reason codes are displayed in markdown output for built-in pings as well. - BUGFIX: Reason descriptions are indented correctly in markdown output. - BUGFIX: To avoid a compiler error, the `@JvmName` annotation isn't added to private members. ## 1.18.0 (2020-02-13) - **Breaking Change (Java API)** Have the metrics names in Java match the names in Kotlin. See [Bug 1588060](https://bugzilla.mozilla.org/show_bug.cgi?id=1588060). - The reasons a ping are sent are now included in the generated markdown documentation. ## 1.17.3 (2020-02-05) - BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no longer supports Python 3.5. ## 1.17.2 (2020-02-05) - BUGFIX: Fixes an import error in generated Kotlin code. ## 1.17.1 (2020-02-05) - BUGFIX: Generated Swift code now includes `import Glean`, unless generating for a Glean-internal build. ## 1.17.0 (2020-02-03) - Remove default schema URL from `validate_ping` - Make `schema` argument required for CLI - BUGFIX: Avoid default import in Swift code for Glean itself - BUGFIX: Restore order of fields in generated Swift code ## 1.16.0 (2020-01-15) - Support for `reason` codes on pings was added. ## 1.15.6 (2020-02-06) - BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no longer supports Python 3.5 (backported from 1.17.3). ## 1.15.5 (2019-12-19) - BUGFIX: Also allow the legacy name `all_pings` for `send_in_pings` parameter on metrics ## 1.15.4 (2019-12-19) - BUGFIX: Also allow the legacy name `all_pings` ## 1.15.3 (2019-12-13) - Add project title to markdown template. - Remove "Sorry about that" from markdown template. 
- BUGFIX: Replace dashes in variable names to force proper naming ## 1.15.2 (2019-12-12) - BUGFIX: Use a pure Python library for iso8601 so there is no compilation required. ## 1.15.1 (2019-12-12) - BUGFIX: Add some additional ping names to the non-kebab-case allow list. ## 1.15.0 (2019-12-12) - Restrict new pings names to be kebab-case and change `all_pings` to `all-pings` ## 1.14.0 (2019-12-06) - `glean_parser` now supports Python versions 3.5, 3.6, 3.7 and 3.8. ## 1.13.0 (2019-12-04) - The `translate` command will no longer clear extra files in the output directory. - BUGFIX: Ensure all newlines in comments are prefixed with comment markers - BUGFIX: Escape Swift keywords in variable names in generated code - Generate documentation for pings that are sent if empty ## 1.12.0 (2019-11-27) - Reserve the `deletion_request` ping name - Added a new flag `send_if_empty` for pings ## 1.11.0 (2019-11-13) - The `glinter` command now performs `yamllint` validation on registry files. ## 1.10.0 (2019-11-11) - The Kotlin linter `detekt` is now run during CI, and for local testing if installed. - Python 3.8 is now tested in CI (in addition to Python 3.7). Using `tox` for this doesn't work in modern versions of CircleCI, so the `tox` configuration has been removed. - `yamllint` has been added to test the YAML files on CI. - ⚠ Metric types that don't yet have implementations in glean-core have been removed. This includes `enumeration`, `rate`, `usage`, and `use_counter`, as well as many labeled metrics that don't exist. ## 1.9.5 (2019-10-22) - Allow a Swift lint for generated code - New lint: Restrict what metric can go into the `baseline` ping - New lint: Warn for slight misspellings in ping names - BUGFIX: change Labeled types labels from lists to sets. ## 1.9.4 (2019-10-16) - Use lists instead of sets in Labeled types labels to ensure that the order of the labels passed to the `metrics.yaml` is kept. - `glinter` will now check for duplicate labels and error if there are any. 
## 1.9.3 (2019-10-09) - Add labels from Labeled types to the Extra column in the Markdown template. ## 1.9.2 (2019-10-08) - BUGFIX: Don't call `is_internal_metric` on `Ping` objects. ## 1.9.1 (2019-10-07) - Don't include Glean internal metrics in the generated markdown. ## 1.9.0 (2019-10-04) - Glinter now warns when bug numbers (rather than URLs) are used. - BUGFIX: add `HistogramType` and `MemoryUnit` imports in Kotlin generated code. ## 1.8.4 (2019-10-02) - Removed unsupported labeled metric types. ## 1.8.3 (2019-10-02) - Fix indentation for generated Swift code ## 1.8.2 (2019-10-01) - Created labeled metrics and events in Swift code and wrap it in a configured namespace ## 1.8.1 (2019-09-27) - BUGFIX: `memory_unit` is now passed to the Kotlin generator. ## 1.8.0 (2019-09-26) - A new parser config, `do_not_disable_expired`, was added to turn off the feature that expired metrics are automatically disabled. This is useful if you want to retain the disabled value that is explicitly in the `metrics.yaml` file. - `glinter` will now report about superfluous `no_lint` entries. ## 1.7.0 (2019-09-24) - A `glinter` tool is now included to find common mistakes in metric naming and setup. This check is run during `translate` and warnings will be displayed. ⚠ These warnings will be treated as errors in a future revision. ## 1.6.1 (2019-09-17) - BUGFIX: `GleanGeckoMetricsMapping` must include `LabeledMetricType` and `CounterMetricType`. ## 1.6.0 (2019-09-17) - NEW: Support for outputting metrics in Swift. - BUGFIX: Provides a helpful error message when `geckoview_datapoint` is used on an metric type that doesn't support GeckoView exfiltration. - Generate a lookup table for Gecko categorical histograms in `GleanGeckoMetricsMapping`. - Introduce a 'Swift' output generator. ## 1.4.1 (2019-08-28) - Documentation only. ## 1.4.0 (2019-08-27) - Added support for generating markdown documentation from `metrics.yaml` files. ## 1.3.0 (2019-08-22) - `quantity` metric type has been added. 
## 1.2.1 (2019-08-13) - BUGFIX: `includeClientId` was not being output for PingType. ## 1.2.0 (2019-08-13) - `memory_distribution` metric type has been added. - `custom_distribution` metric type has been added. - `labeled_timespan` is no longer an allowed metric type. ## 1.1.0 (2019-08-05) - Add a special `all_pings` value to `send_in_pings`. ## 1.0.0 (2019-07-29) - First release to start following strict semver. ## 0.1.0 (2018-10-15) - First release on PyPI. glean_parser-15.0.1/CODE_OF_CONDUCT.md000066400000000000000000000012631466531427000170510ustar00rootroot00000000000000# Community Participation Guidelines This repository is governed by Mozilla's code of conduct and etiquette guidelines. For more details, please read the [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). ## How to Report For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. glean_parser-15.0.1/CONTRIBUTING.md000066400000000000000000000135661466531427000165140ustar00rootroot00000000000000# Contributing Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. 
You can contribute in many ways: ## Types of Contributions ### Report Bugs Report bugs at [bugzilla](https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=normal&bug_status=NEW&cf_fission_milestone=---&cf_fx_iteration=---&cf_fx_points=---&cf_status_firefox65=---&cf_status_firefox66=---&cf_status_firefox67=---&cf_status_firefox_esr60=---&cf_status_thunderbird_esr60=---&cf_tracking_firefox65=---&cf_tracking_firefox66=---&cf_tracking_firefox67=---&cf_tracking_firefox_esr60=---&cf_tracking_firefox_relnote=---&cf_tracking_thunderbird_esr60=---&product=Data%20Platform%20and%20Tools&component=Glean%3A%20SDK&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&flag_type-203=X&flag_type-37=X&flag_type-41=X&flag_type-607=X&flag_type-721=X&flag_type-737=X&flag_type-787=X&flag_type-799=X&flag_type-800=X&flag_type-803=X&flag_type-835=X&flag_type-846=X&flag_type-855=X&flag_type-864=X&flag_type-916=X&flag_type-929=X&flag_type-930=X&flag_type-935=X&flag_type-936=X&flag_type-937=X&form_name=enter_bug&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=P3&&rep_platform=Unspecified&status_whiteboard=%5Btelemetry%3Aglean-rs%3Am%3F%5D&target_milestone=---&version=unspecified). If you are reporting a bug, please include: - Your operating system name and version. - Any details about your local setup that might be helpful in troubleshooting. - Detailed steps to reproduce the bug. ### Fix Bugs Look through the GitHub issues for bugs. Anything tagged with "bug" and "help wanted" is open to whoever wants to implement it. ### Implement Features Look through the GitHub issues for features. Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. ### Write Documentation `glean_parser` could always use more documentation, whether as part of the official `glean_parser` docs, in docstrings, or even on the web in blog posts, articles, and such. 
### Submit Feedback The best way to send feedback is to file an issue at TODO If you are proposing a feature: - Explain in detail how it would work. - Keep the scope as narrow as possible, to make it easier to implement. - Remember that this is a volunteer-driven project, and that contributions are welcome :) ## Get Started! Ready to contribute? Here's how to set up `glean_parser` for local development. 1. Fork the `glean_parser` repo on GitHub. 2. Clone your fork locally: ```sh $ git clone git@github.com:your_name_here/glean_parser.git ``` 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development: ```sh $ mkvirtualenv glean_parser $ cd glean_parser/ $ pip install --editable . ``` 4. Create a branch for local development: ```sh $ git checkout -b name-of-your-bugfix-or-feature ``` Now you can make your changes locally. 5. To test your changes to `glean_parser`: Install the testing dependencies: ```sh $ pip install -r requirements_dev.txt ``` Optionally, if you want to ensure that the generated Kotlin code lints correctly, install a Java SDK, and then run: ```sh $ make install-kotlin-linters ``` Then make sure that all lints and tests are passing: ```sh $ make lint $ make test ``` 6. Commit your changes and push your branch to GitHub: ```sh $ git add . $ git commit -m "Your detailed description of your changes." $ git push origin name-of-your-bugfix-or-feature ``` 7. Submit a pull request through the GitHub website. ## Pull Request Guidelines Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and describe public-facing features in the docs. 3. The pull request should work for Python 3.8+ (The CI system will take care of testing all of these Python versions). 4. 
The pull request should update the changelog in `CHANGELOG.md`. ## Tips To run a subset of tests: ```sh $ py.test tests.test_glean_parser ``` ## Deploying A reminder for the maintainers on how to deploy. Get a clean main branch with all of the changes from `upstream`: ```sh $ git checkout main $ git fetch upstream $ git rebase upstream/main ``` - Update the header with the new version and date in `CHANGELOG.md`. - (By using the setuptools-scm package, there is no need to update the version anywhere else). - Make sure all your changes are committed. - Push the changes upstream. (Normally pushing directly without review is frowned upon, but the `main` branch is protected from force pushes and release tagging requires the same permissions as pushing to `main`): ```sh $ git push upstream main ``` - Wait for [continuous integration to pass](https://circleci.com/gh/mozilla/glean_parser/tree/main) on main. - Make the release on GitHub using [this link](https://github.com/mozilla/glean_parser/releases/new) - Both the tag and the release title should be in the form `vX.Y.Z`. - Copy and paste the relevant part of the `CHANGELOG.md` file into the description. - Tagging the release will trigger a CI workflow which will build the distribution of `glean_parser` and publish it to PyPI. The continuous integration system will then automatically deploy to PyPI. See also: - The [instructions for updating the version of `glean_parser` used by the Glean SDK](https://mozilla.github.io/glean/dev/upgrading-glean-parser.html). - The [instructions for updating the version of `glean_parser` used by Glean.js](https://github.com/mozilla/glean.js/blob/main/docs/guides/update_glean_parser.md). glean_parser-15.0.1/LICENSE000066400000000000000000000405251466531427000152630ustar00rootroot00000000000000Mozilla Public License Version 2.0 ================================== 1. Definitions -------------- 1.1. 
"Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. 
"Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. 
Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. 
Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. 
Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. 
However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. 
* * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. 
Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. 
If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. glean_parser-15.0.1/MANIFEST.in000066400000000000000000000005361466531427000160120ustar00rootroot00000000000000include AUTHORS.md include CONTRIBUTING.md include CHANGELOG.md include LICENSE include README.md recursive-include tests * recursive-exclude * __pycache__ recursive-exclude * *.py[co] recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif recursive-include glean_parser/schemas *.yaml recursive-include glean_parser/templates * glean_parser-15.0.1/Makefile000066400000000000000000000047421466531427000157170ustar00rootroot00000000000000.PHONY: clean clean-test clean-pyc clean-build docs help define PRINT_HELP_PYSCRIPT import re, sys for line in sys.stdin: match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) if match: target, help = match.groups() print("%-20s %s" % (target, help)) endef export PRINT_HELP_PYSCRIPT help: @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts clean-build: ## remove build artifacts rm -fr build/ rm -fr dist/ rm -fr .eggs/ find . -name '*.egg-info' -exec rm -fr {} + find . -name '*.egg' -exec rm -fr {} + clean-pyc: ## remove Python file artifacts find . -name '*.pyc' -exec rm -f {} + find . -name '*.pyo' -exec rm -f {} + find . -name '*~' -exec rm -f {} + find . 
-name '__pycache__' -exec rm -fr {} + clean-test: ## remove test and coverage artifacts rm -f .coverage rm -fr htmlcov/ rm -fr .pytest_cache lint: ## check style python3 -m ruff check glean_parser tests setup.py python3 -m yamllint glean_parser tests python3 -m mypy glean_parser fmt: ## autoformat files python3 -m ruff format glean_parser tests setup.py test: ## run tests quickly with the default Python py.test test-full: ## run tests, including those with additional dependencies py.test --run-web-tests --run-node-tests --run-ruby-tests --run-go-tests coverage: ## check code coverage quickly with the default Python coverage run --source glean_parser -m pytest coverage report -m coverage html docs: ## generate Sphinx HTML documentation, including API docs rm -f docs/glean_parser.rst rm -f docs/modules.rst sphinx-apidoc -o docs/ glean_parser $(MAKE) -C docs clean $(MAKE) -C docs html release: dist ## package and upload a release twine upload dist/* dist: clean ## builds source and wheel package python setup.py sdist python setup.py bdist_wheel ls -l dist install: clean ## install the package to the active Python's site-packages pip install . install-kotlin-linters: ## install ktlint and detekt for linting Kotlin output test -f ktlint || curl -sSLO https://github.com/shyiko/ktlint/releases/download/0.29.0/ktlint echo "03c9f9f78f80bcdb44c292d95e4d9abf221daf5e377673c1b6675a8003eab94d *ktlint" | shasum -a256 -c - chmod a+x ktlint test -f detekt-cli.jar || curl -sSL --output "detekt-cli.jar" https://github.com/detekt/detekt/releases/download/v1.23.6/detekt-cli-1.23.6-all.jar echo "898dcf810e891f449e4e3f9f4a4e2dc75aecf8e1089df41a42a69adb2cbbcffa *detekt-cli.jar" | shasum -a256 -c - glean_parser-15.0.1/README.md000066400000000000000000000043521466531427000155330ustar00rootroot00000000000000# Glean Parser Parser tools for Mozilla's Glean telemetry. 
## Features Contains various utilities for handling `metrics.yaml` and `pings.yaml` for [the Glean SDKs](https://mozilla.github.io/glean). This includes producing generated code for various integrations, linting and coverage testing. ## Documentation - [How to Contribute](https://github.com/mozilla/glean_parser/blob/main/CONTRIBUTING.md). Please file bugs in [bugzilla](https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=normal&bug_status=NEW&cf_fission_milestone=---&cf_fx_iteration=---&cf_fx_points=---&cf_status_firefox65=---&cf_status_firefox66=---&cf_status_firefox67=---&cf_status_firefox_esr60=---&cf_status_thunderbird_esr60=---&cf_tracking_firefox65=---&cf_tracking_firefox66=---&cf_tracking_firefox67=---&cf_tracking_firefox_esr60=---&cf_tracking_firefox_relnote=---&cf_tracking_thunderbird_esr60=---&product=Data%20Platform%20and%20Tools&component=Glean%3A%20SDK&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&flag_type-203=X&flag_type-37=X&flag_type-41=X&flag_type-607=X&flag_type-721=X&flag_type-737=X&flag_type-787=X&flag_type-799=X&flag_type-800=X&flag_type-803=X&flag_type-835=X&flag_type-846=X&flag_type-855=X&flag_type-864=X&flag_type-916=X&flag_type-929=X&flag_type-930=X&flag_type-935=X&flag_type-936=X&flag_type-937=X&form_name=enter_bug&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=P3&&rep_platform=Unspecified&status_whiteboard=%5Btelemetry%3Aglean-rs%3Am%3F%5D&target_milestone=---&version=unspecified). - [User documentation for Glean](https://mozilla.github.io/glean/). - [`glean_parser` developer documentation](https://mozilla.github.io/glean_parser/). ## Requirements - Python 3.8 (or later) The following library requirements are installed automatically when `glean_parser` is installed by `pip`. 
- appdirs - Click - diskcache - Jinja2 - jsonschema - PyYAML ## Usage ```sh $ glean_parser --help ``` Read in `metrics.yaml`, translate to Kotlin format, and output to `output_dir`: ```sh $ glean_parser translate -o output_dir -f kotlin metrics.yaml ``` Check a Glean ping against the ping schema: ```sh $ glean_parser check < ping.json ``` glean_parser-15.0.1/docs/000077500000000000000000000000001466531427000152005ustar00rootroot00000000000000glean_parser-15.0.1/docs/Makefile000066400000000000000000000011451466531427000166410ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = python -msphinx SPHINXPROJ = glean_parser SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) glean_parser-15.0.1/docs/_static/000077500000000000000000000000001466531427000166265ustar00rootroot00000000000000glean_parser-15.0.1/docs/_static/glean.jpeg000066400000000000000000003424721466531427000205770ustar00rootroot00000000000000JFIFC       C @" u   !1AW "QVXa2Tq#367BUr$45RSbstu%&8CDEHcdv⣴'9Gwef ] !1"AQaqr24RST#5Bcd36bCs7U$&D%tE ?P                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                              E8u^[iؒ^31'x Y3$6+l2"?rV};iz8}=j٢/i'c(d"$9M9l?fR|<훀.,3Ҵ49G8+1E( Z)i/PβıKbх -cO+,PmI5#Njoph>]e4qA Œ )ݖ]|&B8ȉcB \fI\#Q5U*JHτ2/cSږuE=c?E'q;&3]GfEӋael,㒭Q,6BOţYƓVSC:64Nؿ-_a`|bZtZډ*f_3p- Jr$*bߪV<7aFXb)jeIpBqvHft2jKDcP^Fx<%Hpy$r|*u$ҫ Zrr"&)WL?tI^"<vOU{JL>~NϾO*0GW7a$@S?2v}UG"<Dy%zwʪ2IPQDW36b/8~GEw T6Xb2 JS5mߩf4<mɶ"<)$?tI^"<vOU{JL>~NϾO*0GW7a$@S?2v}UG"<Dy%zwʪ>L> |#+-kO̝|UQa$@otI^n;]~d_򪏽Dy%z{J p?S'g'T%HQi4'c#+=y`sZ9& 6<r      `dFP1Gd@(dDR)L-M'V-kLx"$%$DE]۰kM4VI0Qz6ĈAºiAtȪFL> |#+- kO̰=|UQa$@otI^n;]~d_򪏽Dy%z{J p?S'g'T}|#+?.AFqGR2"ufr&dRHڊelg=sXK&*i̥Nr25(6NvH_)#D+em+]JArs 󉽮L>{3q>n?\%!j´ͭ8˨&-W4ҹԯiU{JL>w~]>ϾO*0GW7a$@S?2v}UG"<Dy%zwʪ>L>RdDB܏íkeҶIIQ\GF\d>a/2_/Ī%ttr˹ g/#.;ԬY(Η(Sis[ \*]s  -œmKQJcQ㟫X疃 W|pE -+ XL> |#+- ~PϾO*0GW7a$@S?2v}UG"<Dy%zwʪ>L> |#+-kO̝|UQa$@otI^n;]~d_򪏽Dy%z[K6BJBߊߋl9l`os)MvXWQR̠u]6De6ָ֔%Y]WipR;Y?d>țµV­d-FyJ׺vl1IbM2\bBl4q4}9IT|Yȱ0̩dbKg 
X}faFg/^[yK!ŦʥrhT吐°OĔЬm u$!#8uDt%jtWm6nFՇSMDSpH%MIGf݉nw;[.NHxt4&;p쀈ZwڶFl4qK"XgIÆ4g0*H&1d#/t,1 x A.fQӤ=sxDz#%cîy;_W9.ţ]'}py'%> 6?vv^4p+ضxݗYӣp-;ݻٚ2V:7A#uRg*a5f)ܯ.hĆȭ,`"Gp{ l ;HDl35 !gDѰ1ا ]hII?nUQ: ᦰźB֏ooW pcYx'Ao)|"襁$91$2b)V\D[㱝1WKŕ`ikU<)b,UAr&oX)IǴỺX{m{"i2ܫb#Иxrnm 惁ARd@tsZi#}t9 ^{o |'ze='IJn6ͷ ܻb?ܾ!F8`'zhVRp0d͌R%/-#hGԾڸh:l*IJhAS ,0Bj4&h5v_W0/<#,QfI蚳4;'We۳ƻ0:7،řdt9X1@&Y C`ڏ&\@n~;yo)oOp|v'IF>n~jH}nE?Wy؝%│G }W]btoG$w[S=\%wQ{>n~;yo)oOp|v'IF{>n~j│Hqu3DSq0.żI5nJ/Ŵ}n\kfZI} 9<ѥִT%9EL>;C     pW cnB&0B+\Ԥ p \bLS2{jMFpdI> bYq&a<[vqtj.(jBj1ٓ89dI4Ye~0:r-fX{2)S+u(q\?R[VXN-2gjٹ,k!#$\b%+Op>ᐯ/DI6i; øA C-hkPhFСT܇C[?ror/X_bTm)R/hE{n6!!:7؊ѹĆrᯑ䔴GA4 Tr4>'N$ Q2 c |%!䎳=#0/ږ5{DW-wN eŠAJ73˷7▗;IW~p[ Hv>ezEfGYfGYkKx <%`8c-^xGXva$uo va$uoޙ@qy ;Z]?UNђ(6b$8Dprn>Ec<ċEme~cb;h#Y;&Xp)3ʒ+ N5Z|lղ1}cs 2VP8pw㶫 ZLl!YS1n+(}sR77KwRni>BÏ 1~dOzΓ߮ܪIH%ˤM I5hV{{p\}va$uolEۛB Z s NʗVḍbFsCtWIpE=8q{?;oڏZ ix-0!0Rm&úEfdRvc}y#c}-#TFİl0P_^{ \6^vmfaaل<G5ل<GI{ye~9W[ ׬awyٯXX}va$uo va$uoYA".|_UL0/aL1o; !䎳=#0:7؏Ep? }su|s"݂ayݯXrI N)kfYDBc2I3$)>x[3)V!!CSzerQ2ݐQ)FULQa+RYi-{ )CdMCu{dA"8%!aOHF]Mc!jiJ14;[5A[s6 +~.}~xM/~Da*eh                       OM3jCFpIƵwNdv;÷ )XNI-+v3e\I;ˣKepoEĬePk[>"-b>"nur̉<3VyԟڠɅ/ID,H"u;?EGfeqje4tg7t-% @|M"o ; utd 9Zaɶ\6$q縹Ƥl&0P """"""" v[Z*绶^||>1ضwf;et(d;hp ńz$!klXgaAiӺ֝|2[6,XHݨRk݊e-]o ʲW&#%4*JȲ[ceT߰>vl&8 ҿĴBǒ(y;]@4КS 0 iUeS2׈cQh$͖O3>21lZ+*U1 l2ϋjLZΓ6OX>,W=w\霶FDd{w|0 BNE&3? 9HR0/Q]A=ʶ+l,E -6ܫn f~h3OrcW)hWFb$ڐM ϭ\4Ǘo([™ai[k7)viGcN3Na*Ѯ6&w"Z*w23kZ1\55V8"DY]Nff+t2*n҂T\eN12K1̶ۈ̆f͗\-.|qz)Tồ##n;[^1~cw;%ްnZ\Fazis"3/Xt/+#"gsYMjcǝrX5 u˽asE&^iu#3/X5/+#"gsYMj㿂ǝrX5 vK`^ ? ?!2QRr?&|o '5S&mWI&f۟tK[? /#2Mb14Z/QڛRWsVmv3+Zַ2/U0˃qcwQC74i hUkPJu1S~GEw TVe OYoRTЦԅmFB+Rw7Ոe6~VK?j+]        ýp)]Q!I\ԣ;_ TY|:mܾ] m6˰3]!Q58=1:\;n )[c+c߉)b!*lQDFdf^-w-.~^Fe ,a@{ ;0] %es yƊ; qrsBF'*((D23mΫ;;!Q$iT,x8T{법5QVyw3qY/-~1Qm|Al05Vp6K]J-JAy.:w@ 5J*\?3+9c;18GֶSGuawXڎy/K5Lb[e?Z%m)pԎ_OJФKaddjȄHdEs;\LT-huc{; 2.7%!7H'~\1sE#SWg Tm9J؝Mgŵ tS t7T ? 
q0ΩZQ- IҢ2,>6b\sL`7[ڟb} F޲.[Kh~G=S撧rZO*.21SMntHM{8Oo-l8#qi5P u˽`֦;/y.!iQRK[HWr4- .gcGis"3/X`-) ŏ*؍!tSZ1\ jcǝrX7-.~^Fe84r**c 2MkZٙ%%ad+#"E波=+qm%2CNŹS$ J2+QO'Q μظ$rkb u[٥hCg3ڤ%|\C92~^Fǒ麖}Z`E~ҭq Ij8 KQ' e+grr&b-MZh%5lʷs!ח7]Hs#S<-,;* dWB$&,n{7\xE7yuʢa(AӦoùGv2/=H2x7oc}_?FLn\SNw"Hz*6V"""h e_j['3~ ~aQSD};އkڦ*)1L|͇ںҏu=/|C2SEv/\m1--g.%drM˺]JfS[EBWAkL3i媶/mTȣdSPx u˽`֦;0Ly.OMU Aۆ"bSZIfEr+3.is#3/XaJqdH2?*Z<)hS8i覵q~cw:ްiu"/X|ܴ^ 8/+#"ύD波:ްkW&Z]|/7-.~Be%|YNk:)\w_]1~cw>Z\3 Kzx_9>7ΊLw_]1~cιw>Z]|37-.~^Fe%|LNk:)Lw_]v:UPaZ֛N0QYol-ybv3K#T҈xB![˶.$+X7[SKZԥ(fgfc%ɖuuih@aBF; !Da]g6v} cEwMP7* 1r6:!o-ŝF<o~xtg9J@hˁ=h)"eo%@=>^J ÛaŐ>so?P؁ap"p~2Pg=Zg .4re{$jπ^nn LL;5'z6jFJbҘdK<擠 Jم (QuzyP[;Dq).?z1N0 `55\*Q~BX-/K5;N@"6BԣRG5޹1luvllw"9q%2vhm."?*RPZIJJfv".|&Ge㐎m᥄Ok&UiX>IS݈QĻⷽIq_ Y6ٵ|8Y;.֏iB-]EY[\b/Sy]YKZJQDW% m}$Rr$sZ3F S?~c}t/(#5&ܗ;a_Td1?x[k09c/cڕ4޷U2On ۊI31.#7> ->*ü\=u[H]qZIt T=E"Je;-TfQQd#!Rvp-{ ߇m s&e5Ҥj#w<I^ ",$lgk<>;؉e$OM\A7I PDv>FDV=Gc#+݂iVFޑ*=*Ʋml1$Z@ĂsHd: QvÇa EBSNz'نiKpgb?wxM3bk+7kD^3Ncwor̉>ma>ZZWBMK@s U-i²b_!{d="fBWR"^$On0_;"Ü)Nc:ͬȆ9yfvJD2W>gFaKbjpkZ%êhNmhŀͅIlBs%/NVX' $gfoLÚTqkeJXљc6:.`FFFDĔ.)ȗrKUu^yv\}ġkZv$g"V+ebamQ A]ӲVZnjc4RI!z©dMbHvocuOgDbQs؈Q;\\D2Egy2kܻ^5wTb^tYϹ3csMՊ,l[;H0-6{lwfϒ(Ui"ܫc"zc}Lj>ޭ s[|,LE*r-|FGQp_HY]#,M&Y|S)S0UW3Q(D[{87ai>x%F"45%Z: fC "K:kY(Jjkءƅ„hhn2+Z sWKɬ /KDfR2%Xc.I~UO%~U0&*}%ny1VDSeͦA(j,ֹ؊ X}#Gt bQ5jf8'ӅyJ!d|g&2Jj&2J51UH 51UH bz7 jG3׺%ŒZMZLU&23"ʊqi!FT/+~iZZj6Q65$mfemHM $b`f+$?i6D\ECP5/k9؈CȘTTmfFgV270߫9%u;iѲmۛ9|!p*  ԅҝ~GEw Tl_]0UA<VexԀ+<@@Ant)#Z;f*0:v-R_m<~Ko?nV<@_m% &)GbI tcʴY(G/5'7JBĉ0HU|x0`5@N$P`-kǬ:1 lvsG@wN#yضM\>kǬ5ڶ[;b6p{Όz\>jo(w4{St7}d:1p{Όz[ eNӾHq-kǬ:1 lvsG;wN#yضM\>i'TY][Kb"CAơ֔DI2[Z{SݵƟts¬)u4eO+mᡫFUK(x$.](W^J)kZM)P.z1aA" ңvthf-Rb߄SI_ Rb߄SI_&0_ʿ?0_; n|=pb&4'x?A1/|ݗDʥ6 Jd[SZ AFO& GfQ͘X.9 2<-%v#3Gc">!U[xg{lcaP9fnlԦֻ썓l[~ {ٜ1s#[)#Ԁ:˰-z-@5/" Ȼ4#4ȸ;e$WᘎX.ѭ*W;+t/Kõx2|Hζ3bEF{4 aolZ6 L!%1CH#oDMt37bͨڭc+\c,%<G4 "feM|v1ഃ(A FBgqMi;C3 QBNZ7|,Go/qJִؒ?X NGطNޛn&&cIɓf2"p˸!L@d'AܸwxKH#|*n2u BУJ Vɢb]v݈QGW;q 9*)G"&Y9 *.""h e_j['3~ ~aQSD};އk[RMqrq j"2wi\eu5 RS__TTf~ڟGql=TT6ҚS 
aJǕ)'222=8/m!bliK۰Z59ͮѻG*D$CZw0Mۇ!JТ;TF)r)-2cj$.ěD͹yoclcR [KS8qN,ȝQ]JQff|cJ39qs@nDW5~^T5ʉE.ť%Q̩-4T-sX\4>jۮ1oj$1oj$ BüuO$*:"X5 $I7SS/a/>  Ў Xg^ީ1oj$1oj$S/aS/a֤|={zžtP +cͰg=:Q"5pDfJmR"X^j X}#t^tBwM[3eyϋ3n~ZՔ=&Q6AFԉXQ-0װF1@XkI!re|~%!O@tqVPͱQd6b j?U"HabZSo26V4G#bgzYUa*dֈ9h-ɰ0(k^ˋdM:Z.ƃ6u]U Ֆl=#h_Sꈚ_ft,7 qgbVep&-MUdG4}W?]WUsi"!,S /ʍQ%N#twϺ.6_ʿ; L~|MvQeJZmHȸĒYd|ڂjd@c9g˘漫̢ڀiTTB`N8⏁)I7s?6.u[&1p 9Qu 23_jl4[e[i1\JUt)FEöĆzav 6({k,;)Km~; a83FԂhw+FyG1wFxWo7@N_ϴ"k7P$V\aͿCb ߋl9l@0qS{ߑ8?xXx Z(ݡ TT\LZ쥫bl?म}+ $Ê^4D6\CEm=n ""-C [*I1hTC7J^QmC*3ddPWbc司󝷼4]Mɕg6~N!jV"O'zfU!Tq qʡtڇD״c?%FaXN9~W1̟Ɛ #.؈bt nw+]:7tlܽ0aZSn \;Ib&%mfْ><Dz1|"ًwk':w!u(QpnlnHiG\'HHFaH""%kYѵ;2HhN5ƛ;+vFgLF;AUk^⎛óY3OFv%ϵQHEj/)ATPbHuCҏĎZ\b6}qgL{pAx<Ϙh_hFĆӰ8דּP,I-qcϣ>\eYـm(LLuj$ O.WXߺ;|Ƿtw,n#-n)tI)@}m OIWY5cR!f˔EMiRY~2Sse!M~F_ ._;|Ƿtw,n;rߒ!0iMAۄ(taG|P=N YՆ̞تCiz#4'$ʛD9*e6l24yeq8&=zAF(*LDǷ),Ȑ#?Uv"6nV𺓒uKS#*c1[.8L0DjUTfSٌb}O|4=p!-m\RRI K;\eLRSQ=#K"K)iU8ixf=1{|qgL{qײzcd6T$S1I].̃g0 zR-⡹`?_;|Ƿtw,n"9~ !R?4U!DT|qV;j:I#ôtpc)#23|D{ K٤[i.QItRͩ3t1sJTDj,c#e7 |iH=ڢ({.M=U؆;L^GI"s[Si:46 {kG,[!wHf؋Z3GHKI"ۄl\k2'v.1[^yؗ}85j;gs3u\vv\U\#hF_jZmUw4VE|[K6B!?g>gjS-5=R9lhk2(f ghWN}b%o^r|ڮ%a5cDST¦RUF8Pۃ%Js/s`-⡹`"M1J"̭+*Rvy[ap幩Jq&K,[v7yş11]}L_zg|1ꨝ! 
:hنI*?췩v[CsU~H8Y=q8&=M3Lk%H[pӺK*@22P/W3f"0>_O79dPj6/.c6ј}DZA>bcۈHI~%aeL,~Neá붢ʃK ͂7{=ô0ˁaDo ;iMԥҝ~GEw Tl_]0UA<VexԀ+<@@An4)b'w|[ Oթ 2}h+ %R[nPV*Rtk]F̪JUc;KSPh[&Lwްs|+f3r8脬4AeQ7tK]u2gohuB#41%)Ԟb+ yş11g{m+V41&CC3&30ʨ/,8eZl$N_c\;e|}q=qgL{pAx<Ϙ;kg9iO>v =S+,|{>bcۇ;|Ƿ[>6o'SJx0.WX_c\O~8Y=q8&=uNoA:SO@u2gv ={Ax<Ϙyş11îύsz Ҟ /2} >{=a)u=dyTS$ iL)k?f$tw,n8Ypƞ&[N>/ C5`7-⡹`?P_7|Ƿtw,n:~ !-~i%TQ 453=`*}pl/i\xq.'0cdG4+6AZ^Qb{LG1ǞCOsZnSwePyd\KvF%lڦ[~ {ٜ1s#[)#Ԁ:˰aD%i B[]Yo2tUz%ᡙ[Cm&-Fv""-f|CckGT4`42Iw:F*ŶbWE::覟4b~U;Vٜ*;L[L!aXr&!Ķ(7Z+>I_ :YFK2J9Qf2SmxȷS?/ClKD*sv#; ،{ؓWUbD)Ja.HN.HJH!2?(~uTP+4"Gj1Ԛ7 KI7UJF5Bgt6TWiSh;L/oM2w0SU%+HQK@KH\[+l%6YVj"uax/LDBZbM^njYF"5 +TF'RJkuiCF%- pI$⌔N $eo+mz0iR .2wzp^4(4๮ *K@mHMb]]28=54:bYX}ZDz2!(I}hpitƾMKēxi ER3 hsp X *,zo7[t$&#gb e*l8jkkg<>#F6rߊqa4柜VWWP[A?ڹ|8o0/0-LQ6H37i7nEs6dyNDG [Fc߁Mډil]Ԩ8=&bB^UStDE-Wq?mK *̯};އk[_}TџP_}TџPS8}PԭEQSĩ"9X]H}N:t[r#-FDᙖR6<)ifDѢS@9Ihlv:$SSR}WKNiٺMƦZ"ĥ; TdYL{ 1wSGCsU~jlsE 6(<(d)$ #)! N 1%FDw(bmَO^g0i%ݴX?s3qi,ާ*oS e {qgL{pAx<ϘΛf>ԺtoS U*@) U {qgL{pAx<Ϙt߃1;G;Qp88VO_$~/EmyY"2QlS+(ddeb{qgL{q<]m;ra {NasWf3K8*e^d1 ECo ^i!GOB6z^db0Qk j[!_u4,*J>2!\:]!9`Q#L_t\%2fF,pZzDI0;}`)JC7k='G^j&b(Q ξ >^b'HԵAJ2h=n3JHDFfdD;o~,U4E*\t9[ [/|$T~"+0;DUQC*Nϩ6F\ ţy/Ur$Ĝ'1suqpkqFA/,ۧ&Sv o3BIT~Ruͥ%$GU4)PKi+EM]HF QJJEY.^/a5"z`v;/*8}H5ڷxMV#fn@7ht6 IW4Rva"%9f5 &y9澆i3ȝ!51)yQjEtwJ^T]0})5Ow᯽i3ȥ!50IDyKʎsR+cRԊK?jSkڞ_{0g:Ckaن".tπhA8]`tԱ/O狄Ɣ|w H~Fh`0h{\[ns5K?aSWNޛEk14p$9NJ(_#5q5'u]02(bn 5Ȁh?}ʶٳ4E -6ܫm3K?4P_鏙ڧ1z@5m" E%]j<56u m[~O~W0$Sz'D}@[Ñ&Xy[L64ԥ!51XguY҉qTO%fGSoudI˔c;bKʖsR+ .ȦNv9c;@ӚskڷPB)MNUǕ}IE)yfL'Hs} |wJ^T]0;җ,>WLJYR^Ք&aJ"V #2#3_3Ub%0}᜗WRMĸwJ2Yf>r,Ÿ ^XHi: L%6yfFVDBԺ;<wp7~Pv77S4wYcȫJ.?+b g:|AZ.{^"" sM6lV*0:߱Z ~C/YU-Xy5fbg&|EF̪JI&D{c>桬s䷽8MnNaloAze Ca?:0Q Ys's(w6 &y9澆+ރqhw*⒕NkIHxt= }iKʎR+ f .&Nyy٧6,u ,R5:qWU &y9澆i3ȥ!51)yRjEtwJ^Tp]0F)gJr{VK7kfL'G} ;0gJCkccRԊ5"z`Rڔo&2 ܻ3sgنWLtGYR^ن<_C4ԝ!51)yRjEtwJ^T]0)gJr{S7kfL'Hs} ;0gJCkccRԊ5"`Rڔo';Cm[Wa57/CI"b!*D:CJQ[8T>G}zJ,u FL$fu O?$qQ+$LF\w+jbʳffZ^ ԃȮnΪ Um8_:83x\To%|X{I_?!eώbGfStܘJ6!<ٲfQbpt@utGIg ~u^+@Ƽڲݜ+A4E=ү! 
hSxn%((+{÷t4x(}.;ƻSVíq-m֋ȱG6bDOVGll,,F=QKYlۚd%&Fb[Kju#&JI "S~Dۈ~1._]-a[^x3N*Rmmqۢ?q pv4SRnT}ĺISE M{\Ǵ{/b+s\`mZ7e$Z֒23"JOe qF)m(!XnpA!M'DXnwIm@«lS@4m5f<Z_Zevu@De~,3/B n^dnnPITA6WRWjHaJcC-0V۩%I;er2o(_QM`YE*?m}>!@8p]R9pޢո=iA)IMI.v$!i-6d,y%m0;5k]p@ruv[[Xl6"3N㊁}RTT%i=ҼЂYNaj&=p-lE'1"쎐m!9m苝i̟Q\lȌh7\7ǴT,۟ E'Zi>B0 )]H['3~ ~a?l86Q%GM?U{z l"֥}uQ[F~Bu:f.F"74DAJ .I"e5~ e.Gm cbXZi6={җcw[#䵗{. moؔ}W2s5٥pK';6WCWM(z:kEBi)A10T|<l-£A}S1PGuU 3O.!Eƅ$*.23Tnj\#J*YJZ%.NuuZOiFFFdzl1zPl6-rq?[e[[yƕ&\eFEҸwZHΜ$fhN;lu_m2%LBkp&O4= gMN2MmCa-%aĹX-ye!u>-IDdfF3.4NAT-z.Ofd%0I["75 sIX AJt0Ŭ9i/2TJj+I2v]k<@._5TWP)]jD:!x+ &y9澆Xi2g)_C;җ=WLJғpR+],NYj ;0gJCkc&F28 a)[]bJecI##K}ԊcRԊLsz5_CT j5juKSE:30xDK?Ɣ)4ԑpU) (ubZymÊVEZ|[NeU Svwlm8#)'Ɠ+B2+W=k hkx~u :e>L%mivp4n0^` WJhѢ;e$fyTCPmƜ|76oyLs>/fx'n4Ntjl{ aM œZҷcIęHyG\cR򥇼ԊΌI{T*ߩ3[ɱɣ:b|=c,W&-} |IDycR[׿z]0|wJ^T]0T9fw5 &y9澆(T6k3Zw\){(o)eKʎsR+ # h F#r9Z;khC{JJpwr2(/5kWhm6 y5^v~J2wrȒ3FxN3m%.Ry юlHLߐ8'֬{5eUAў_x6ߠc:3{ do|lAb-zf*Mo϶6,Ûa( 7mեfIJRW33" Ect:L2̤^+aĭ?TQTݟTs-b8OD#pOK") A_l/}ep{kfԉk[SƷ.X"Xpe)=yƇ($SXXiA> Dk^Uvp;F>ܳcZSL*nŎm84YFC p@TY=ꕹh"#ڃ+xsI2A 3r=3>hXfSuI||:*RRf;3#C< *1ֵJ:9fW)vҗRjNI_nbN[9shksTWdFZs=;7{gL8<@֍nV/t#o ƝzOQa߇dBd iWI!?]G-5@hyg Fa31c:L3ŌC66Qu>' /|OE`zwEs}$;+Z+k#q .l=ytTyGET^.# HTj2Jj:,f!q 6<*Doj&K\M3JVT\ ۺG cET^.!AkW>JLɨ b%q7ٌc a=.6e,Dme3(ݵj[If{iɻ-V\ /Í-;TnbŠOFcH|qS)W +yCQU>' ;eSle_!EIP6Zgqi;gԢFv1Ikܙv{W벉_'>p5QxC (yFT6Eb#Ȋǩ4j f9%s/iےO6!!\,GA-/w [CUE+Fϵ, wbxYxMυGکQU>' o s_G]-5Ana{g/*I܁?ndDn>=-&IU&eqe'mYrp_9怙H@ uL'EsYF+oú' 8X֒FƲhn(%ɦPJ4vٌ1l4jzU}{%C"-ڸљp)'>Ug]uV)QkQԣ;S9{s)LZRx ˀcunL;5>5.qր*ipoMu-bH`c7S,W=ʷ\Yz i2-p3E ;}C4L%ʵ򤸒>"C;ڛ1݃F Ct,K%D1ql0.h>_R拏>HVE.[wsK-~ ?1>T&?r?oV@]fJ2+r.ƱFΧIzpfŐƱEɒ9o> m[}aֱ+i TO2pht2(xsN2yk$ ;yE8ВўsXfг]CT p &MȺsnW,'`zwEs}$U$qx&] + @]BYFCtlp<`MET^.>j*AtQvW|W' !?]G-5@hy֫3/Y {LQu>' QV5A3Em?Zai*T5Ves_g ð=墹澒+MOfMTA-Ú&,W;Fm8DaUd:bsms+KKJͫ\؎vӀvy\=ްvy\=ްmYl'oQ}rj1NYT-ĺKCj]+%e[|B>N1Y+=}@V7Dl(fI6Ԏg=fc>͂<6J̵ӌ|~~]8g\a0nMbj|#ʳ-t+5pN1Y+h[Syؚ_?Vigd\$SjewQ.6j+)*Jd1xP!@Ѹ/ÞF~Pz5X^^_W7Jg,G)gډLΩS2vJz|i>4FF[m16,:,цzHY˷d:޵Ynhljk,A尒s$$ 0p!$DC5qs\RVCL(@ 
&)*fiSGXٖ¹47؋cZI\e7;EF%F3L^[B-]q6|,e ?hID;/\}Ig:u]nе,6W쳡l 'fUvz2)mPSJKPɲ\|n}ֱܕqqQxC ( 4(() (h< E3n4%أII"!)c2+l"/`zwEs}$RُlZ񦡺0ehԬp`(D :ْd7V>F'R/|OEΖ0ͧ/:'VL˜hpsWj6}t2"V"{G]\_I@Q*JGY_92ӛ'aN5cRC*"TT`iWbf^B!~ǏZF۲-7cJ9s"v{83Jn|&iu1K!gh.C ٣f(DbB&oMġ*2"]Mm>3#ZeaIf Pҕ;8mcrmlÇё+ XicJ5m"1W+ΤiuougƄ&=N{UD_.SN=KyHCR~(ni%yإc_ѸXqj6ʛRxk^"3";ԘD׎TlHUM3>+(ol"Kɣ^V\d!:iC0' u (z9oxk2Y 9S@g0b!gEg%eU' i;OE5MUΠJu0㊇NcR6ZAوEc==-5@hyټֵ֥ ڲQ#C Ԍi" 4pV2ز%-(3-fkUăE[B@$Lh\w?76I\ ᨷ"32%jBsw#=xzQuo, @DpvZAhvݏ1`DsN Fz"aZ=Q5_Դ¦J\[SVr+lLNz!?]VdSclMErN7LN#>MmwD]\_I-gm tP) dAzy0жS&KT7;`MFTYr; z(yDy]\_I HY?~"gwEHz!?]QuFԬ$}uQTꈓ^:ˎ2D" D=gqk;+Z+k"i&y-gN3 L\ZtOnbR*KpzJ$%ff%7kh͈~ U%}(Bjf2ft8h-: 3qw儤MqaXƶthem[4rg26'6!h^|W҃\؇]W҅wc;z+=yV193}G@΍̾c ͈ 5%}(t@VR{JJ^mDFybMGbHrq&^^]{ hI' $ҿf,X4 a8Mpx+2KƲQ(d(ȌއJlWJ8*aIHI[gb Qq)#3Iva\q'OqBrq5pۅhԈ4iFV̮37%9n|(vhꍄvh.}  TZQ&׺JIٲҠfIim2AS2hqN7Jn,%%3 >L*9y|ĔIp3oNa8jp uqo(_TRd3u8esiŠu޺V@/7`q?CM@%M֧㰋[w%GB7 O+i4VQq@qFbFhs\(AL{9-V2U4|Dv a?NRw~ ;NU^MHY2"5\V)f(W**[ -n-(XF0FI"-vff|#.]vMgV&sF\@&'3[-(ݡJ j      nUq~hVE|ru,Em{ڹOD\ .RIṱ} klxf,DQ+b k\,L}NZz‰ GcܟO3H ,;IZaĤ!^۶gFL_FAO@LxERL9Q<%!p幙>,䯥 J jn:=U 8UEx3kgFL_F5G&s/h2}sbMw_J sbMy_Jn)MY-TC̿ڱb+Q ffkiՎ8{JJ_b8hI8mv"SK}ga9#q J$UW(DZHW-[K؞eCIaf8QS<ᑗ"fF;g;SpלL(ѧaD/L63r?s~8^z@!t_]0[94W@LfP{ՙp6 O[ mصH\{FB5jB 2}h+ %R[nPV+95ȥ+_ZF̪2g&Dfe/;5.| -)w-Y^=0⋌ gD5FLh3NRݛai;5˸'lɜ?AlFɰba5YGˢ Pən<rl,&䯥 Jnu TxPp?q5G&s/Xѣ9ѴD>䯥F璾#]F-JPkkJP׎)MSTC̿ڱlɜ?themOlCj>y+AlDjy+Ac;z)yV%3@+L)!bӉJ+L)OBLD f S7eD"Q>^mG\؇]W҆-J\ҵ:Kv&XfdK%bh2GcȽ}ĵisPb n8}#@5 m!eA9y+BCY_RS?Ҏ1'q13vky;:MVd~!`\C,pFؙqv<N($g-8()źHW-`/6qr2IX lDKrߐ8'֬̉{5eUAўx7ߠc:3ôbv7@N_ϴ"k7P$V\aͿCb ߋl9l@0qS{ߑ8?xXx Z(/4E9h$ FklݸCf4FA y ɻh$lT&98hȤjLM8}+I@UwMJķOlBɜGm-7o+]1O*ڒUKJRF K;'uudWQ Id㦶efE)lQn= 0u,T%#B8n<[T[;i a0 ZTh ٦MݽT0o7[4+cI:>Rzc S %jxMlTwvV@['9/5aG3eTMIy,q$38Եh7ago νң.}Xi?xjH fVɟ_ ^t>uwUU0:.)4:q+LCRgvȈĥFhDFj}&; O5H2R^)K$yȌs>#2+/^#b&Q oĺUσap}(e>8r#-* 5'E&4pe^r#L[tC bap !5YiK֟#JĈò9 Qhhh(I]+a/0A%1q˼SڢrYEU,L([Ue+g ϚJ+s7N̑ZR+0 䟤5qCw'6 zGC^{'Y) U7ujOڃWޏ`_!X;'Y) SoQ*9 䟤5qCw'6 zGC^vOr1 ,[V5iŵl-"`Z(k\EZF#jO% S3Q 
w22͔$l!8; tWYhqA!C LlYWI @]mdnvC:`IVS8WlvC&K?S3S忾$/l,8s)bݑxiF a#F8XXi?xY1x&,y$, ئFJPh,MWgs 0_iQy? Xi?x <'9/}ZGJs& dcx1yK׺Twvv[aN -jZQ4l5aG3e+jC$K09ߣ*fu63+:7r3\{M:; Ip?DaHtG3e4lw;%?}ָ>!'lOF1\=zڰ~#~6^Lytuw3%)уlOE4V@xOs6^Lytuw39%)уlOE4V@xOs6^Lytuw,s+UU"e%Og=(,W^ENJ+d|}cN2e`zC=Xpi:N~+!՞*R0@m-mao!!{ac}Sض< \:Y/aOEi>n()Iẍ|$R\hʊ)޼iΰQLd I[$<:" E+%eFkK5(j1 _68{@kbb#\Wp.&pRDL)] bV^E&F{%]K{HBLceRs4+yf{$ԣ?ƶ+:*Tj2;3r#s͘Ij>2Jl?rkeUl! zyBrm̆uэO>SJbc&)!S cpLgDgl2#2JJgcۭ9/"SR[ ڗaYlK- I =.V5O7Ěf*Ue)޶BiJK+wPbH f+Vդ0ŃH"cci\NfQ!˙ Twv{ts 0V@xOs6^>jH fMHNd>lo/:Iv:J07J0uaG3e <'9/ ZGJs& 3cx1yKTwvY`a]80Ym"RBZBJU}ДMg+lV4lW=84{ɎG'L1Sjn//3'uM-Ȉ{6MK*d-Z\66mI;T83&]qakSL{<甈872bܝ2MJ/ar3# lUIoEn͵֑~Tj񨲯Ypp ')UD+Io9"ƈt}ni(ߊv) %bNxt̎2v-ô`ZR'5v:[< eYS]f9s8RtiWp])Z. [ujbȭEm/x 6ЁkIB5dVpzהV1cF"BsA&1H9%k넙{ρe|8aXm&V2-9&2%]8~lHMCo>CrgtQ+BMEcR8R&G8kn,x|@\q?mK _cq~/*gی$^5iZE<Ѻ~2)egTۍ$Mdw\yHP0]mM;BҤ  uioGݹN)GTsR6f^1}d̴XsSk@ž(N(M"C&r8IӳcB҇1NJ6F?>j*:tk%.=fbawYԸyTK:Bho+xA;Xw%$̍&FFFdch84tùu{Ni@Fn1j%\RHFnv,4] {cr2^!=I5'a‡H5adnuq=#uwuwH fV@xPDs6^*ZGJs& Iscx1yKTwvTwv{Ն͗4l5i)̙4͍C/6[N(\+NaL<5*v3Iks 0V@xOx <'lv83YpkZ d9hkG5*fBp0OuI'uvzJlcx_}o}G{]i~#V@xOx>2S3k67=}G{]i;$=_R0,YI(db_yovUfdjH /SNyKC!Cf!R wn#FVv+n۞b$1 mm ;yH;8>aw *ܤ_}fb#]"J<\*{ٛi T{Ց/{c ]:3{ wFyslvK[T  ߋl9lY[s6 P;*oq{' S+E{$3*ޟ0o4i9NVq|JVDi9%H2x{Mq#ض+"0d4r~U coŶ"3٭.Zگg(kzҖq1 $)KI|>09H^7 t鸧Rԛۂ~./tqKvF3rrTɹ7r7SPwA/nSRl%XR56< ZCd?-!5 K4 ;jGKfv>bPT7-xد\bSrIXK1۵_=oɆimyS}\APܵ_?bp ^%s++ &|z7ÆimyLei IHhFe}WO07_joQ1eăJJTfx,3`k Z<1O. Ba{2j6&ɷǢY rX紽5{B tFumpCE E4=Di^7#_!&RlQjf{8ԅĶ{C*|QJ@@VשDȫjTCPpQ)|Pi}DGHWQF^!n+jMn;<`l{V%vz 2;l>7Dk s\tpѱGo^Ϲd^\8{_Y9d~fssuH!;8s 4 # w&u:T>e:οy*`2x'֬ˇǩWxR_HwXhSj _2}h+ %R[nPV+9#3"9|A\F̪ܽN.5.| -)w-ET_p%VrXM"8IUTI4[DF|g'APܵW?bqX4== X)RoV}zqd}m{ 7SXG[{>m٣ ffAo9£v1eG;R6_C(APܵ_?bqPT7-xد\a? ? 
#9Mw'c*JWkĮ~z8`8`-0:i/<@.+5Cr׉|!ߔl"ώn9M3'c*KWkľ~z~8`~8aSmDs2lik޲RC_kSvf[H }4^|d#ZR높ofuw+I9#' 80m 4КkV4xrAWI'3HdLpz#"6Qt"|f$Pv,kĻ^wXM0z+XM{aFOZQ&ՀAPTփ_h/!԰wekľ~z*KW1ǩ^~L8n=OoⰛaSS<\ej 놠nZ+^ķXM0=oɆ Nۜ- ^%+i8s7EJxPb\m֓%%IR̔#22221=OoⰛaǩV~L85s];Q,EW'BV2=u,{*u%dMN'*"%NVc>qML(4ȾYħjEktܦRT=L-L$CJ;JE_D4][)9,Ckn#~,3+ӲۻkA?wuCr׉|PT7-xد\b;S&pz7Ím-0:jw/<APܵ_?bp ^%s+/ &7xM0[s`t:_yeڂkĮ~zǫ4WšGO8]a_+覒{n(fW!n=O_Ⱋa{\_a$#jx ӡu_ g4?K'O]h:M-{*";C͟T#1?ب*ZYJLCstg8Iڔ#ͼyLr+zP5Ea Y:O.|S,$]2dYhQ(22;kӇ|,r٨Ocx{ ZwMҺZA48i u6 '4;kpИ@#]i%hQ]*#%##6nZ+^ZWZ;_GR$}\{5lGr _ ϸS&ajXStYhs 4vS)i"0l&`= - ^%s+5Cr׉|'=oɆ7xM0 [s`to:_yeڂkľ~z*KW1ǩV~L8n=OoⰛaSso?PزmkH|{(>"!tV2b?&m8L,:Pn cpq Xl;Jڛ$>1 ݪqeI"Layz&JIQԩA/`cAP*4Zm{Z[ӁuWFFF e鎡b]POO!ŕj1XO)EL|ebQxe!#LYq oJs"Nvsޮ>S@~aW܆mM Bƕ5_(u5eYu}K$vGcm|ʑ;9ق̥xf, 6ܴGWT8yUZaXbbAb,+_x!:;X'*'+ίg mA#a JpVoD7ܧm[?agt5/BDE!5"" $(̊ӱC:K041++ƱEɒ9o> mW@ ER8C9Lp8D)e 8xrn$JL YwR܊bG2 1bC3JaO8c#2m޷#os[j3O5,ֲ4h76Ba9L6fƊBa} bĎe? wR܊bG2h7 cϨo`"}ECd.]_S-Lb:.,Ed$DEW—ChJ@hrq"r.팸ˈK%Gp{ 9 M]Q^X}p umWI %m3+ L<&a=þ\Kv>s.'Z?s~"W-E])4W@Lu1SY>f\>=HSDDB5jB FB+Ro[m<~Ko?nV<@^MXvg&dgi|FF̪2g&"~k4\-i}Ek[SX@F\b3QnD3ʮ+5L_qJ??ғZ9Eɸq%ǡњUT31 Xp8/>VlSܝ'#RsrtITti Bnoo-uا:OG?7ғvӱenoo-uا:Oo#RsrtIT;i Bv77btlSܝ'#RsrtIT;i Bv77btv:1š9S]k*93q͹nRwFuKr)"1:EU{,heĭط%MIjO %}diϘo`)i^[W`BinhnjlG 5R^5-7IhMwR܊bG2~E1#OXGx4@4h7ޢ!2Megn.(*IUҹ~V7zzkN;ň袶c㱊,oz[ /~_+9=)jI̢ӋJsmp\ZCdp0B2m-4JBV"".""WC+&s27A-i%:)'&Hʛ. 
`(ڝWhI[cv'Sf;buJ F8p[P9](+p q(W)m̖"%KgVˇ"j9fؐ4՜4gn՗׋R j*/|HQ'`9 *EDGE=~!:;kQ~333k@# Aլ&zK%ѰɅiG\Ie5)m"W(GfRIT2#f 2uP񚌈m ^/1"jv]kYFW=fUmA-d`i1 d#M 9 jڗ1fë .]+(u-Ȧ$s)@mi-K:ndW:)a;05qբ0kQA ڳ7y{]Է"̧[LHSFV}C{֚b>3ƎrX&C[èm%u*fv""X6cf"6 )v9 '!I3$_̘M/|Bē q.λnE1#O]Է"̧q+9)L| 0ތ*#7KnSelک;oc^h7ZD'ӲlXN-p3#ptle7/<x&nE1#O]Կ"̧h75wD>au鐺k?wwDzTN$8ÂӪZU 72؇COw;%6n"h±HZ3@X˳o`5YO'RY dT Z6+RBI-)ڋؽ $6aÍd״Z`֝e.s&'`ՎnCΉhKq޴<ʹ ?fAqj_E۱au鐺i?ww5Kr)ԱRؐ[x{ gh7 ch7!2M5gn.!u,GĎe?3M\9Tb5W0稙ӯ@1%)y*4-J۟eF <>⦒ӳ;PW0]ii4'23KlDփg%Cë FV>ҕmˆmrxZ{10[0]l֓#Jt~Ċ"TSZWsӋh״n~gc""Dc_u U{WRI{\'̉IZV%l#ړ2"Q, z1C' ,(vV<";wl"9k np 'Eփ7hvGm_]Կ"̧_LHSXXKK:CGlQ5d}9fI24J{HwD>(k%:Yv5$g!3!X/hsb;ؚĎe? wR܊bG2h7 cϨo`8]5՟̻ؾ뺗}Kb?aO>]Է"̧}# y XGx4@QA 7y{]Կ"̧uBjua^RrF CSSyJg,}q-JC 58nbO7(yЄv쵏,LKCxӚD8GLs i퐠Q')34-~ w`r4'1%kfmD*=}jȗ\1P\G1?1cb7@N_ϴ"k7P$V\aͿCb ߋl9l@0qS{ߑ8?xXx Z((ȵ@]+f4N`]ۼlj_+]hQ)'Ȇ!atB]ie(HSSDYh8rP`!0 )}XPW"QO> *(V'f4}uWxZ+cib >4AITŪ>eŪ>e텱Ծ xJ6PԾ xJ6Q-nݡ~v=%玒UwJ|7b]ҟ2j_<%kJ(j_<%kJ(v˷˿ ;WsIj+hh4ZvGE%m$5m3ޅ9.vE6gv)MVF{`mΘόgBi'EI֛Xk*(Oj@5n2"±Z nO,ESb Cňo~Bbp}!ox6rp[47tG"FP>GGw-:dY""htgaȕ֮cXgS5If %L]+hcXgfwrלğ!/ &XUs\TK[Ҧm$6^{v{ե~SKEW󙄓jzY'T$54C7Z(a-"jc<%kJ(ijo|hht9ڙ)QC!<&rޭ-žo:XoVaO7,s_<%kJ(j_<%kJ(uTK߆z_'<#TT,l-M*nyO;9xnYoV` *XʾDt49YKyir𭷀.9e~7ˑ;+ TFzJWDw[Jˆ?UJyDk3"sIuK~0"{ʝQʢڐ5-֜v+elF%K}i_EG&(2YXLjf\hu)YM!cQJE.̂]B΃] !ŀGih̦Z'|m`IV#O]0oO@ENA(Z)KN6d.;Y6mߦ~a/i1XG/2dKmDw#+22# ҡ@eLWꅘCќ4*FWôKIk ɶc&[E` vf ;EV$6çlؖc&$huk>mߦ~az y0cR+_|WCR+_|WFTK߆J/:Kz y0b8MJvRS-Zeo*A-ZuFh[.vkmwkw|@𕯾m+®tjkFkHϴ#pixYq!'D{6 ؼRw`ٰ\ZppCҴd">^Zf64;uvRxȆph=U +|!RIdHxWQc EqrMQ)lf™9 ڊƇ[QI?ZM keDKS5B(Pdgnj2KƳ[0Omk]++oѐh=-g CAuj$Űx*l&[h8.X?l86Q%u8?YGU4J[ {z l"֥}uQ[F~B:aqn+X:R/sܷ[oft; rrQu_-ĩ%֛ӺSpw>Ӗ˔}22GtG5{sݳK@5B4jCpW'z y0az[<ߘtϩ|@𕯾m+衩@𕯾m+VO%V%Z[w<ߘt޽-žo:XԾ xJ6PԾ xJ6P?(; 58.'mMGÆTf쫒\3UﴌBj_<%kJ(PNwIWԴ,Un?'q?4n e;"w+q#?2[]aƂ<"W b4j@#[C"=gmI)2%ʌĮ5ʏ~@Z2%| }W+Fy}o~66;~{ do|lAb-zf*Mo϶6,Ûa( 7b7?b^n֬5o2[;UV3CGSj_,z)px yұptWJh8M@JΖq+&:4EO4ͧ ԭ2HDqD^UǑ" pT"sTT<gCE9Zd.4lWIҞUQQq#6m6ȍ.*_anLm[qh,8M|)')[+F-">ʷm 
Jȭb5Y7ݙƭk/?SFh^g"QtU$$Qƛj0Ih$)W;3v4$JGwƼ[v|IRf88CqH+4xmH"TnFPXjFPX𵡡'zR?>kCBNGq} u)m|˒xkQCPehfi &=xr(Fj~F>ѺbJv5L@4pQTk2XVWYXV2>##qgjGYCdhڊBLh#o!*6i5X Ԟ|/f)nM/>67ǫԭJڭ '6'vwϭb#JDSꂡ vOםde[532.tm_(qH,j;#&Qt%SmYYƶC3%6Z2$vn)s^։7#+a ]jJʶf4WKI:vն|#G<5pA FYՕZŏWed('Zw_i y?(kR\TDc#JlJjR#;as&Y30Cqb8I;ޥlY!:qk/?k/? Zw#;a ;ґE1G,d#/wV5V5Dž ;ґE0ֆHR#G(Z* I- T\}Q7o-JSj3ΧgWݑ}G:@GR^k/?k/? Zw#:a ;ґ1G,d#/}5ѼA31s1e$,y~il-{}R3}LJWSM2 ]+8H}Eq(JFYi)q} GV-GN3i|S1CS/JMkFl;733uP}Q+e*+h6E˥:οy*`2x'֬ˇǩWxSnŪCV*0:v-R5 Fo gTck۔EդGJIGb9|EZF̪IR_)|M_f桬s䷽8MnNal'Bj3gx[˦3JEa."ArBW'[h_4KCzF 12 d\QK[D{d-$f!9kCBNGq} UZ|z7#Aͅə\n>2Q*`Hۆe=խD)$t;, ;TQ6Ddf{OiRN"îqI!VGlNiD#s^ 1luM#7IYt;/(\&c +,V@ I;@bJZ`VW0B b綠\e+ faM,$`Qq7n 0rE3u'zR?_e%#wq} )m|d#/}G`-;E'ta3BC²{1}[4Q$$eBJZJ333P?K`a'fD̖nHe32ԈoGw&l{ E1f\]HXk[JVе*V;BXE5 .> "_\:]5\]9!eͮef !ؤ8Gd; &pfeHDhF9ٷhWQ:D#VU(a\$]iX `sa>6󆶛$Ó(:(wHtԫwlirHlλ S6wu:~tum_(qH,5m_(qH,xZГ)_C hhIޔ/pG)d#/sV5V5LJ ;ґ0ֆHR#xgi$xȏX3.~18mfbˈQwb4G>8*R%?2sGJ7\5<_a')62Se͒;BT-H:NBf]8{!$.%$ÙڜZ+ 4iRM*Ičv<=hhIޔ/4$JGu–VO9B``u%SL4L)Mfc)rl8R̍E!ΛNP̗!qہI٥DFW#lt)Y-"DL\D:޸F$eXFd5VS*1D.)whBQL6Repq V{fC1[-/uNövvTےd}VZh\ӵȶsţ6+m(o,t/fM 6d3MV+^ŚjFPX֞XLV%BL+ZS>63.n-L4gIdvZi6ōbn&:˩%0-IZOiAˌTZ[+r$'a{kZ:jL l&`s+Ʋ-Zݓ:ŏmCFcֆГ)_C[_/rVO9Bum_(qH,Q>8ge+\B>zhhsvQgu >䌋CBNGq} S :j,!Zn#%pP-JֈHݔdflymryqiuSyp!6k#UHpHxIž bC]&hN-F\Ղ?bBC>idK(Wj2Y;ã=oo~ h,ELo%@==^IÛaŐ>so?P؁ap"p~2PN"UG$W'k`aYtBo^"xt{dQ2覢3I!iœwi{ЬiƴO+ g`BbTΉoBJ^;NLf)#h@q8Y0vq FY ^+0P[mx>.0Oki=60ob-#V՗2hɲnaQ҈1mܕERS\C;0uvH7si ǰ!w5̗ ~Slj!MЪhuZm)+qm蔧ާB[^_^( oL&}Έ&}ΈC-e5hNgZNk 1Qrd3;9k[O[V:;ps-eTD&nL-[1e#ؓ+(Y{~ x%0Vn~r4^uuGR[C$0r;3ٳm'ukbR/۝ 9]rs ׽W׊>B[^g^( oLp] 7IMp] D[ "j<8ʸKQޘ"]# qZRyմapCmq}ùpȋhQ>s:Ixm䛕TEp|D:Hqb:ijnRf\lbYd[VšyHByű;Csy4b+vsδiet6hzREҝ~GEw Tn&0닗BdKy> L UeW_+gMDŽhnpր`\Y81[14)SM5Y|.U/ʼA3 ;"j*\˯ EV_ yu S)7dzvDUeW_*Sog8([ Oթ [V/ʼAdV&e=<9 Dƶ EbE"\3ҷv4X\ւH waY9%$l81ZA?r@k:tEՇbF̪݊r|E5.| -)w-h3SKVS4s A7A bkެk!-Vk FRS)V_N߅xt#m+-}dxfP /81kxܠX"+Č C*9sWQޘGKz`'5tEt0'5tEt1o/%Ds?*׽V_k!-邚*&D.s6vQUv3!t7IMp] RQ fʌm qϛmF{DfEr؆!A6CmYq _\nh>VS877`h/4PF=}@V7Dl'ʟCLDJV$YD1 EV_ 
yukB=$(.-.4 oؖ> 8* ;"j*\˯ EV_ yu S+)7dzvDUeW_*Sog8(r.P5Y|.UX-O'4_,pQ QU^]~j*\˯ZNiNY=ࣰ5[1xЋLJԄn)FFE}I dc%0`VBxS,`9db-u T*y*YE9tb>$ܚR{鈟FZ$+`ۍfdgd[\ s$Ԯ إW륱:kS^*-ɵKB[^_^( oL&}Έ&}Έ(2H?:O'3{~ x%0~"bqXe&-i%"ϯ8vwIMp] y5d>590)&PɓaبZ!2̒JRFJM{x rϳ,pM^'9sZHuBMM$. p!TR8TTک޲O%i8wR.մĂI|/+;\AJ==2C,5%aIJpZƊ?sZ37^\M[&[jR!Dw#.#'';\AJ==>GWk{noIKv✁[lC0ִP"KD8 kiNgeJ~،. mMiCGR |佸 ݰ]WBywLKq,mh6hI+X65~hrQ-,ϷҒZdSs^.$Kb[jhQGkw $3*I>8K7ʿ$9L v NB\}SgŲt# :Ѓ9W\J؛7|kȲ bD_c7ysm!6fe{\JMuշmPupaּsI_vr5sws8 UWPĻr˸;C{b=yaMܭ{繲{wb(0Gk)5fTSsMK%Ŷ \{՞ x%05U5⏐ͿOrkaOrkcVz[ "5hNg\:GKz`kޫk!-郛èeL&ÉV+* g2ԳJ`(a/yn&\q9 i+VX&!&4\38IO$Pdj3՝m&ȉW#3-+czGFWq #ʦ%*i-3+r-1=efJk,LDwbywÊm֔KBv4d|GqܹelhCni Ipc9kŤ] ۾s6Cp8g4PE)νQޘx%0aeوP i:2dra<~!mD:DfG[f:"KzEV[.ҕȰ17A b K6؇k2vR+NJ8s׽WՊ>B[^_V( oL&}Έ&}Έ0C-ewhNg\:GKz`4gDҐJ4rIuY&U#!3"O dgP&}Έ&}Έ2v<ˬ)v9va=PxLAP@腧?*̽[NBдT >:R ZMZpdd.gIjGlguKfR,{M-q:c4Se8fG2v5 \{VyIFW,Fv5 *bޅu+48Ӊ24*-ddFF]qYYFsbgfDFsNhhEZwUvmAwߠ-ޫk!-遯z|F(1iOR4LtBMB{hb7$-$+G*+/otEt1֩@kq;%qcKuL1GptWQޘ(T2u-3**QpKCLjQF녗i߈_={\>DWC;*Dr(vS#e\ԥ٤DV + ̇5){x99v *4&dWQxτlDks.Ryўl #"^p@rPtgswFy}o~N_ϴ"k7P$V\aͿCb ߋl9l@0qS{ߑ8?xXx Z(hu_֛Lߒ8܋\UƑq 5fҺ:KIUhulq*L{Ȏ%)k.C𑌡A+mDFFBlG!dlw{3$ZO[trxհ*:(yNTJ^G{HIF\$c`-zI΀GCghwƺ6TJ@y'Ii;ccln燳^ B[\QJFJme׽w#.#!v#/-42Fp#g[?EU\%!Sv朻 DӲemreC U7V:DtU2hflD|$QTGv2222#!F_KLgF@y9a;済tz5hԇFH8.^b:CrKs-}4|GHnZi~e-A˖+yb>j \[댯$>j<[K!i M/̵è7ro?7CPo~o؇\e|!B'wwIvKSK)$dfW.M)bm[()'hD2N8b %YsƔ2j#,V~}m.g6j?RA|2:z2 YaB¸jp3sC:ùb%j<[K؎_Mtƚ[k飋P~o؏,V~:+0~j<[K!i*D0Bs7)2 t:[\iue{R2I]&~*)iT9TڸA)G'[0 ؂KDw2m43L||k~&&%qלQJRόɓE̶'7<6W0ѤR#kLN޵e QΦWվń/a6M=iMΧ͕C{ ˉG|vŁkcXZW.] 
~u'BL߉P@x6Dd}ܛaY'NawKϩgh _ k6ݝxXģ6RHF'JJ/ȐW/" i*^.p7jCa-$l!26 %ˏnctl, ג7YHcQW+ٻِ*X;F[h]-R>bLq5> "הPe~GL'zkXP(@](IccXgS%&Wt\%(2Y-'aOY)vtRi j7޶ˆncIe|wb= M/̵V8diә1*:I3D; 49RM]gw" ?7ro?7E?~ge)><6Y#ńTjƂHB!8lD rW؎ܴ_Mt妖Zh#b!7ro?7DO2S )Dn.)-!xK)$df]/i|CKI*WKJ]R.L-ҷ5\ԣ}ebxKBVTۺFFԸ^S /%2nñ 6]ռv:'D9f5#ZC[LNب`o=7mY%C( 8H88 =/Θ!=/Θ#mUc^P*r{pK={pK=mi{B; =/Θ!>9K=mi{B; =/Θ# -m>>g؇ 6ޚv~.s 0Rb" 0Bºv[3揽l9׏!djVJ\7C] k,գP `[9,,9ˡC4q'HkFڂě"2nGDf25‡ƩP$1е"˝-9${,٨k0lʬ,Ԭ72 \%5 f%1otw~p dz1(h7$rYqc 0tڣ\%2Rn}̻;,7bZBKs-}4W]pڻ!FJ7֒y,ؐm|gqa5.X/;6Gp2y,$6DCp" !>#7-42G߱ A˖+yb"q?DYB'wwIsv#/-42E)*i"K&: q(ȕtU̓s\CP~o؏0%"b;:[.-RgsQGh5{ִMPEkuv0s]8i*˴\iaq9tTa'L{}_1E*'ͷ…wAS{}_1C{}_1CU\O?oM;K? 9N@\oq}~tDZq}~tDZUq<m4/x(\t9qǸaޥ=Ǹaޥ?UcӴsT-j]tϱ)Im3VN냗4Jpf""Q]Z,E4f5\6rdպrs \њI5 ( Ā[J~HsHGeG*$ĵE,$n\ UFWS!!]FWܕB{?"+ vʥ,xs`hox--oK]%ܠS|hm 6I%"JHi5ĭ4P 1jnMD&R#3skgp3sԣ33f|b[r|?<1Mk;>;g;)'ܵziqVI|5t}ۋ$p3"3er6%\Ih]TVʽmk{#4oW@)FEbm/YI)կdmG,: nd rJsEg_kɧep4:_.ahIX)&v5.X;yN4G:Y!0AA\6tI9vA݀Ƒ4nt妖Zhv#7-42G߱ A˖+yb0q?D]B'wwIs#7-42A4djƊXȏivPo~o؏0!e8f#Z2?_)H}"xyt/b:CrK-}4|GHnZineAr~o؆߱LC/O:GHnZineb:CrKs-}4p\[#b!_)H}"xyt?b:CrKs-}4;[k飃Po~o؆߱LC&Żλ[k؎ܴ_M:,V~5.XuW`A}"xyt?bZB",h~FXN{4͵C\<+v!4jjRI9GuXx⭈Z|CH\[ٓH2f AH`#qœthf8)Z\22;X\+W'])T=Dp[;u&,/)sK^ dD\Tdj&11KJutiJHNE<].ñZVe 73Dm4!>Kawse5+M*c4M»Jׅ((AN3m\6/&ĭS&L@Ҝ}hld[ |nvd^vCu !Irviy W9w_(Bf︜MB4%ȕ#4qu1۫䯒ae_Sc3NJMTBTK2Jn=D-g(K2gу i/p9Qķl넌BuCiCƿ`(%ZGm oޔbXVrގ)7{L}l. mGm k\5޲ޛza|wVl;^ fW t C׳K `qZIdqvt;dhW_!>#7-42G߱ A˖+yb5o2S "xyt7b:CrKs-}4;飇Po~o؆߱LC&ŻλrY\\TJZ[q0RBݞ"Ans%6bآ+lU>]\@U*j#gqi}yiQ%6AeTDT=2ٮk1 7Z~mS_jt եGANP Ԍ*-Z~7ۺD{ -*74I#YHajOHQu$F]e{]BSlJQe of=nb]2Kn:JJIQ،Fgb1˾N[yn6j]"%sIn%R(j)'ɕu:,ɒ)Ž-y¸kvnineb:CrKs-}4p~5.X~LC-"xyt7b:CrKs-}4}GHnZi~e A˖+yb[댯$>j<[KyRɣi-R zRK+)&]{dcWzHRh]37(ls]u+6Ѷ.edRv4i>^߱F/hm+*>&K1:e0Bޕ*s2n"IRLBoqܵ۞͘a~k`x:hѳZ],=bĴ ՐxK$Z,[Vaml<}-,ŰcIj$V1E#+lDM2ߐ8'֬{5eUAў_x6?7Vc:3¼c1Md$>so?Pزmll) iNBTPeD iACP-½f3aN_a_ZXtS?簵%31\ Yu)JJJҤl4n.G@&U4 =)Jz"ŴjV~:[ff˙͖9n; kfRgaa,V{r/ ׵ú ¿>Fe&)s3RL"͘:1";U#F[ wu˄GVu ŏ >&Q? 
s\\'wSxWֽv^j3r?%w r?%w Vo)<ú ¿>C/ϭ{Q ->㽀k ->㽀j|LOuv^ju9~~}kڌ\Mua\NG5[kd?;r3 ׵ú/ۘUoֽ˄GbuZ2y:zi 1&DK)Ga\O^B̋ah ^!lЍ__c9ú ¿>Er{E ߣ3 ^6KryGmIGzA{Tc6 d,RN uZ/xGkEpDj2JHD\bWv]̟棗 u#šA-$`6 0ea`~>FbS5UU=2HBLTFdhI#+)\| sNG.MVYk+.R=ŰО$[&!؅ahV[eHCmLklV&㽀j|LOuvfju9~~}kڌ\Oma\OmaU[*fJ|O3/ϭ{P;+^g~Kk`~K`?S2Sy`u9~~}kڇaN_a_Z?.]X}{.ew Vo)<ú ¿>C5yj3r?%w<'$]WڿxU[*fJ|O3/ϭ{QbF9U? =H.5ĔxY5y3Vk\&䶰?+bZ1INCǒ\œ%ěk"$K2,D Ufgs;nh:hWMju-4 IMm 7 {#O荸˗D oIb);kѤE˽956eV+93#2)|A؊5.| -)w+͢=Ƙ)bMFF[Чyxlv_aN_a_Zh:ku P"4YARqS&iʹ_`˄GU׺%-ɡs3#f3Mm2p*eF\&亰?\'䶰?_-3%>'wSxWֽvfj3r?%w r?%w Vo)<^}kڈKahX*a jor)i˞'aȲ3#,ٸmŭN1:I,.w#N&ytTBS0PKuPVS+JT"h:̉SLTPPUUY^lbԳ+=t,8-uQ^=63- ֵĭ)IV" (4 2kvO\3Judg:c;l*ʿ'$2]В܈ꆃIW \XnoAS~6&mb ėCnq.< [:&& ȷe]uR$j3=DE{QCZ%pPΥ1唹mDfF~ː}; M^@'TUjClքMNfSG+sU>RcXOj~DK R˩9S!)Yi&[J+M*3.kJRq-ASRߤ;[7Ȣ]ў gcz@kf?#Wܭ]BŏX=/XlK~N_"OAvFx3a3 oH_r5w =`+cW{pc-C:y|/=͆~F7l31 u}/Xܭ]BŏX?ϔD (dk_g6ސ<+cW{qcr5w =`>Rߤ;7Ȣ]ў gcz@kf?#Wܭ]BŏX=/XlK~N_"OAvFx3a3 oH_r5w =`+cW{qc-C:y|/=͆~F7l31 u}/ܭ]BŏX?ϔD (d7x"!]-I.kRݓ&!*|EڬqddVX86/Xa:ido p'c+- m!&$#5T욓ʱ6iR,h.a`4ڨl7 k6=Ԥ)ɨֹD79jIx^*5ZadA>[^I ̷3T{6Սy6_Z-XQo94q-_(n5x7]ƀ#Z))ZM I)*+"o42ZQ 5pjo㱐uÆsgƤp2y:xe42*HZI^][[XFهpqAr-qN&OaB6Y[JK{k">!ߤ buQɩJI=umpU/5an"֥}uQ[F~B5'mٷUN"s̴g7 ^讕Gm W﬩:GXRӉ]V=/]Ϲ!{&׶lk1wm׌e_ݲu 0ӆ"i maRwt*4#bOaN__ZwSWֽ˄G5˄Gk~m|U:̔gXaN__ZwSxWֽ˄ܖG5˄G5[kd?-YUI .* 9,Zb^e%J"^Bj~ ф;H:8)5 J![؏-qNu Re$Zde}#JLSr;;S9{Jԫ/ e)(4 _`Ig_`Ig[pX (!RNӝkZ6ZRdo"q51ZYkJwT('Ƣ4bvfj5KԳz:(f>Y'$d{ 0JJ6_?FMܘ)&e+j*ݻ{-(}{SIƂ龸Dt8ÌZ "@( st2G^gaN__ZaN_a_Z@.[X}{.[X}{Y|ROuv^ju9~~}kڌ\Oma\OuaU[*fJ|O3}c 5X4v߶pAmRLDtnG\xNix]Xpڿ[Uᜎj?*)u!:wte*^8fR6RlRrksriyf8>+P-j*+ #xlM=U *x?hVR8Ҵڙ"(ujY'uA[.Ik`mʰUYS`+嫖-Q$LC{viR-)S躒[U^CS \B"XSjI$)&W##dfBiqj>S CIb3t5- Qlq;nJ^r,n՟zYhA"4&Oh E]#==`9m(suWosg GL<[[eV-~ma1Q'3QX-~ T{_}jȗ\1P\cўؤs 1Md4>so?PزmDdec +DHpp(!Ltk"OqWDopNM@,ˮIm?DJ=D9 K:JТ##F60 h,GhʨI$~+؉dW>WOYRc@ h;}R#$Xo:4FQ|8KIf{XdTeWm͟Ie k}T_',QjSK%$T*jgxb<Ȅ2f$ȸ.!Ԕ]YHD%ON\.aIJiWQx̄nʺWȮű}7U ݟxfcna8[(oW[sH7\?}2@Er/> 1,qƳwFTODQ(克n. 
%w<ͺ AHiFl(ƳηĝcU$5U$j6](i,KqvarDkM:NN4/;Xީd{YOU$5z9T^~ttⳘXl_C&L:lÄdfٗs*eo~dap.> aw1m^ҝ-\0ܥFD>"AQC3a; B]a~5c/1"32"+no{th1^}+Z> !YW!"*YMDQ{4v%]@񑑓c➉}F^yf5(fvbpm9(YuD]y]n$a0CfPhJDD55 ^%ůo%dbB'nʂe~.'񬅙t%"6O~zGZ(<21" m[ yմ'PtͪxeoI 2#lfDDw"l[nF-_ʢ<4T :dpno.=ڜ2"5P#U ^Y+_>xrYڵ#OC]pi͊ $֍zZOmYXQQCYXQQF}%k؟\=%k؟\aLEe&?v-nO8kI/GJ)3+bۮGYXQQF}%k؟\=%k؟\;LEe&?;-nO۵U$5G*Ҏ5#^Y+_>^Y+_>b'/1kvݬWr(!Wr(#R> u咵O u咵O"x2n' m*Ҏ~-/?J:Hԏyd|뇺yd|i $eۉ>v_ʣXH%{*~5Ej͟v"+ZְdboݜYX"E.:!iݨ8UF6Z;W&?dv"2n{#Dnf\b&PVOOY ^ /*3*ZrdW2XSP`ٝYi[_{-~ j>K{cQZ;t&C@S=8kA7NGB][,>_ʢ>Ә|Ԟ"ThN,%Y+SV}TKbӏ>NGo63[]⯤+e%֊aOK +m~ʢ_ʣH1זJϱ>{1זJϱ>vL~:vZݸ-k+r(!Wr(#R> u咵O u咵O"2n' m\eGrF>k+/?J:Hԏyd|뇺yd|i $eۉ>v_ʣ_ʣH1זJϱ>{1זJϱ>vL~:vZݸ-k+??J:Hk+/?J:Hԏyd|덒h8~}>-CNb&FFg<##_tjd^UQ,Cn:LcdltQL&:&gd~⒵*Ҏ*Ҏ:/ |*?Ÿ)1'SJxO;VgTE波)ĬV5+G:%}ҍR1>e4|D1岧y\`S[JRڙkVn3fb'&$f\vEOnȵ;촥7]qp}otkpl ;4a2=N>#!i{7kERu8@€< Ɍt<Ge:{\qq7EhւIVAb#`sm QIAH1qNǯ1vqm~22ⅆjF/ZEr_Cy{ܙFF M/UnEۇtnל^xI@*AH%Ğrlk]ꋹ%ìjXJ:jjY AV.REl1syT=v*5>Ũԣ?ť+/Uh`a!oPdC89@v?5eCEI-7'A?jvw 4j!b""fƉ6%z-VA().+? nQmN8oD|s_; Z)4R$]ʒ@ՏJ%%_mGݵofXS5mfM!fr[7vju؄kvCBE:w1nڍ^V14 q'd("vںxХw6>IJL;uRKmhUҤ\Hj^+UUY U"Y]cm$FҌC$ߌOe Xs]tr>r2/mq=&C- j9VbI>j%EqSƑ!iizBZt#ϚcUw24Dddv2> {(ޚCYݸ;o|EaKKZfkEhwA c]Rbʚ/siF¿M}uB§ޗ+_]G m%Y{W@ZԯΪ?h([KVS_&5_\oFqpm2Q6e[-k5}uQ[F~Bu8 XzM(={/{Gٿn1~e}Ҏ؁R KK\7(F4 I,E I@'CTWoYXQyQCYxQQGXեE>RcsiQ^IjSJxO;UEY]ebGGI ebGGIkQVI3iP갧L}Ҟ*ST"Qy餤P1-LE"RHZ'2pV3j{,exJG~uOzܷ67 칷b͟6{eT=2Zۏf'Aq$V_M&@t 4AW3Y3W3Y%sz"z{&DN4xc$$4^uonGN!Gr;m#R~-kJD`t7vGFF 1W%b4#G A[b#N尓FQ A'RidJJ}svU$Q tĹbp[3ɸ\Km[N hfVe [Tf=!wsiQùOz[˷>H%tsk˺:6N 5vuU$5U$uiQVI3iQV\#M)<]R/Ίk+Q^(-VQ.?Yv)*"Q|[nW!fқ-,+&6m*?Ÿ˘4)*״A밿13ⰱ)3YZļ40+y$Da Iq-[J+iR(M*Q{XlJ=)3i^iנe38[J;sY\dF>"G܅elͷq&!Dv42=_sD "e@P4u*+SJֳ_gG͡="Y.Rf}+_6"5݂9Iўp؈G pO_Yk>ڃ<ؿY/s| 1Md4>so?Pزm3Af|yk{;m"87@vwMp26̵t vöKv*0JՔk$bMF>E![P\)Y\W|qAp 1 jc@ps*5m82"7b%1mee!i%$˸dcKMFsCkAQGRQ0p,@Mk\ْLB>h[Cj9eW;l'"SqagoM$3`̺;UE=ݴNty6Wm4򶜪GhI1B 帇 M8B~7Aqeٔ+u ;oD=ya8o=y*elQ5lHK2Wc߀Ѝ*b*֋vͱ,$JS"ЀPEy_9)]}PLC0>llI3.%^=$Ģe3wG:Q,|2Kx2'ulʀa'/ >3f&Fh9IaG:RUpJ) "" v)eڰG75ۀ|'/aVefB1";`l 
t6I,%Ve֐}kE7d'`4is%wj*8fqw&Y#QqpO2#*jq? zU;.%FmCڴ6&2"b}xY;Cg8Hoﶲ6ewl|q9{qSBrSJpЇt.Ɨ]+Y% >G;Bљ. 6Hl!YN M[fl"YA-ʶ;,B!?g>gjS-56 ߃7 JlE9WqUߌɵ0^̷m 9Q3\4i`-d1֔pt#iW?qUߌɵqUߌɵ0Ok{#} y ?ss ٵmusUUG9mzUG9mzbvH9**6*6vmoyAo;[]o$sQ|^Q|^c;67؝sJg>M@{g>M@[PyN[9%h^TPډ;LekbO6Jn"ĩIQ)*22;#=sQ˜~{]W01{U^LmyGDEbr!nOL3oƀ`TJPi+|**kI8|q9&đNeL5Vq@HYp؂mJ),ˤQ $h WG+ zc5U|c:Ϫ7_ Vm; "-'0DO/}YE`LGLa RD\su<┥slwD3@; 3pUX[bATZo7Y& 5\aj%''!q.vWIG!yoa%D:^wAndG|aLEmaJUYcqHJuiۏsaӰǞ. uiX]M[Q v`–͉k:oy/Z$4z)}%mRb :I;R[ DJ; Kbʎjuֽq*׹\grN!fuв22@tsqsʒN4Jo+[椛 @Zhx)ʥ^ >DdHja|w:'LUmegq ;>kCCSBV.}՟ukq-̈gF,c J"]ο*HQm-7J+m;j$̄#M洚~lj.Z-•pTa0 k5y@{^x0^',lR'AZ wݼ&jo5ɚ ;ONSN@*yLk5y@v x<3HhUv\sJR],4RgqTy=&YE;re$j[rP]1Rsh6JIeZ3 @5-Ҧ4AR]W8jQ V%8ermQrfd<֢JxL$V fPPN٤,z؉{j3;è%[si˳]+TznۍNe W`M7ӀȾC87aKZܒW_p 4Ǧq"FMW0e 2 6!FiVď\tbAԮrY"̺HjWOf]$r_Knv Cnj8MU gJf=)PuCтr-pHG!G V.xuD/Ŕog BS1[I!N! j"BBM;xЦ!> jw1w]3.uA8r*tqK򂌓73mn2հݱX(fa^|}6p HGem4pnOy:ƍbE:wUӉߤmނds]: 6?O/W!aSK_EZ=իjUV (kXȼ0[xVJ Lm&gs$1"" |!V^mxMP[{Ded ]A% WBn+9jnOGD9&X)j5~ 0Bڃ7V@euY n_C{kmV͵+\H..S?YIl˙I؏iVRiTLnIòaDաhv؊q]S㏅EAi7GuO>5t+\H..S?YI>n 񞻚 n n:z.S?YI J),ˤU7ojgkP4UMVP5}W9+mGE@CB-H$C2#J7ne{IAR%YKm 䱷tĸnI:֤I#;Cܑ>Y6On PȥvKĠ,s,s޹Ka{=jUa""v"ĺ,Ҷ֓T22##!=q"=&RN,E;)tҌ'RTEcw#=kf O PK#f, J"ہ=L6b52$.=D{l"WL lMG(cC\(UlƹH-; o n n1jBeu?Xa ̐gO\Je#peFE`Dt(9:i kCA1¢洛 n:;}K_YGJ),ˤ>n Ԏ񞻺 n!Jϩ&*X꺢IcȒ0N|+!gfIffw33peCRE}g2#!g^ "!g:,Fѡ <`pq^}jz/=e88n2^bQʘQ{6)FG򐒫JiG:rĈ)f nSm=J),ˤ0[-xVTivr6ZoZw{Gi'XDDAm||_YAlV?},B!?g>gjS-5?' 
&u%FKC!u #.;C$b 7\QØQZ t eNyNa3"Vh,2Fb1 Q pY=Fo,,ח9^\DU g /q uPJ FDfv;g3L:IHVT΋3JaM;TW;;%2{I1Zu,vQ1$&dZXka HphUFMkodwwwO;J'|^:"ʖ;TR؎m-HJlFHevKZJbV7L% S>~23 tF>&DǚxBxv=ZZ#mG˼A-)D3"/L/+ɣ~jiGR17"RDDYp źF0OD,+Hwr8)2"3ܬwH`xXDcXsi$3B mD1xFnKA{ ǐ+Uᘘ5TCCAjSlh69{bFcEAR-)߹$5km杪/2Ea蘚w$u,{)}₁%%wFDYiOТ;XGfpG VLjv ׉iO'cF99tf9lQi42,Jz%Զ߂Q 4o1.*q&W(s-ܸ ۟dz2{$ZJp#E8tq[=}'/h(홌 )H,}8[,0"pX(7.WP"pJQJ™Oc| | iD[_Ȟ+ss`6 psGu6 28ن`s~)ct(CjyU̻+I*FԜ5]+%(fer;mWZTi!V%e'Ed7}TJKj"ix1hPRv0_6m&: vhG5)Df53dYۻ}#[H܉~uN:Zո4WQkR"ת7^{62TN̈́dԝUm+l7:3c'CE 1 lSmNRW3BNNh[F#=pZ?YC$ Nd \Gc{qmtIrXCu4m RtDDU=CrzChnq2 I8Dkn [AhA[Cf6ԺCiuJBe22j a%q\sDWdH2A6|Vl{ 4.-QJ5$IJv߸\e͝&E\@+LF F+t${юh'AhΨ\a&xXﺻ9*)!Gs4ܸ [z{0fe+ b^}'#6x'Hl6!}RֵlM:f2G*M0pr ٙ5$36esR3L8Enu5pxw/70pi!vaJLJ 粑vJl6!aSK:QLUFHwFšn&,ff,|^d5j6u=Z~vk֖s6vi*&.9]bLP4u9wK^\o{}*nj-8q qn2șyNCfZj2?_n knn2ق 1i[?YydȠ]ΩX|MOdCqmakϼq)KQJ3nf|&?R>œ2/L/i`bB&ӺK >ia | iO^5|7 9~^?4@Oa}oŸ41knn "xNx$υCN,/فOٌ_Ȟ+ss`knn BsL.&|/ wIUnXڮjnJRI!e͙(I!s헸#ӳ*.J*:]ӐT\w־3q),xiL(vʕIF`XťL[Le9ooޘ^&?;w0f3Wxmn6\5|7 L@S?KJ_ ‡X^<)L/ ;xS_f3M"xO^5 3(~i%Ÿ40>ic4'>D[PS?KI ‡X-UKQ(xKi֥Z\w# #Hb)4ТmDّ\ȈpK%m"xC#JGMs'jMƔpԁ#}fKÏ#3t £44(tg9:1M^)؉RH]O:Sy"epZT"㲭kp >~#aVhykmiqT#Jd}K&F兏EC&fFkqot]cQ[}ZچU9\̊ oRR[Rչv=K13ɠŇK~(~qo̚{$e ̡MY7hW V#f;;G|k_3JbJ2U0|;mdW L A2Hjjh&`"3-]ͪ\ḬJ8pp#+^@ 3-vC!ͧ)WHRLE+6A9Y{WmM:'=xD&.KlV1N?u>ЦRPTa;kzU CimϴCEsmp2 R;K#˸{Fv>IxzȗCt>'ꪛVƚ6q _gB {?LLEgjE&J[VSJ͵ǽφImHH#a;kxriIHiC(v*1Ch.AU؈Ɲi/֜3-ز`rnHkp 샵~ܘ-)VV#ZkBUP%^P^!},c|LkkazP%^PU59[N6Ml/_?5J;XY^6'+z)ۦ0y魅@yC0 W?@נk +oE;t۾&55|`((~*aex؜nw濦UP%^P^,n^Ny/ڞb<_T2K eF48UVRO{pS-xG\ĜLŸXd5̶73)DS +8 ™dԌ24׺qҡӅ1cٝqLAp S0;Ѐ ڊ    )F\M:s0nU7R\ݕhLJ"3O\HItHeJXMz>5;h]6-A  ap"c0RYKiH΄ri.I#|T/zG> VY0^v$ I_nϊ"C((F,%͚V9"#Z5wd)SۤicDѓW"e|Hh5#,Q(',w_1.-^$vԐPa3&%5{ٷ~lvHΣEkAc4N~{ ޽wÆDN?y/evolEQ wG+0kHsI|4{d^]ޥywz;;N/'~ _?b{Q(tg%E[aywz;};tA翤vWjxc{dZZ{m^i1?U4ScmC'[KNR%B2M̸̌tlnS%wnωs!: tGw$ų$q B4A4d1t訸R 2٭\Q(JKjfdDE4b8OLca+ G\ECA8,iJδl#;he#:ݚbi#7s,KInFW-#(wVfjls"Ns5aLq_B׏S4$ȎGo\ꈭ)TTTU Iz6^Ř$go/C<;MʰV9M8ʓkܖ&V"ūІZQ`̦*^6؆R2fǺ\qҸǥ8'"hu6mEJJcKJ? 
Xw8fu2v= D> uZbZ-(8W ?@U'ة^.2JlL&1 >ᩤ(yEX,)9̂1Dj(ywr?^0G;c ] L&bo9O*kIPK˺Ki]D̵$fDW>"^ YeCu]P$&Ipf9&s DnˇyDHy2JG #7S7)~3s>+rBSE4<5'99i#GGox+gʤةlB ʝu% #3?Ts9p%T85\:iJm{$1/vkȆh.}P!sf*Y4e#61:Ӭ:mMڍ BIQ:UMXELͥm˥[`a.ȃ<,ֹpwHk¬Pv+BQJJ'227Uc#hSSKMprwH5;L=p.LT=A/fۤDTË$fE`Kpo4e|*#y ; U%x<%3RTӔj)pP>&fE}0EʵO|U^f*g+c1JW[BC-lȒ3ǰt@eZ>M*3Dyr!J48iRGc##FGhb-Vi4\/ešq2JRFf~"i6ujiy<1n&)ЈsciO/+c''D/)Bðigiٵ)(VS'f|#^zOaS#{M< t2RfNCdFfl30.'l ^EQJĤ}oO;Iv%Xb1Ϫ==s9=HKD(HA)k3Rgy`$HM4ob,D\t-rsYme,I01I,Ty2j[Q%)I\gD\f2S&W}@E)*J*###𑌥8S+I-mT*+ ˻,0L%fQpI,XrQ&eTd:!YYaO:C%chaΫ,(22#ȢYv Yv!0KqIB5)GDE`AH "f U2_f[fVDխaͤyLU-eq.UMk#,\KUa$ЉJ|=i쓚b!ź#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@An4/p.^OuJ[ biq$%k{v $- }hmw=v6觊4˰y'SK"NT'emƼ`Lږ@̛r{;!+N9]Vph}[O }[&}6a>zyܑ,/9Lel$(ofZ3ؓ#W Y*'2>%2t̟z<$uRb5%H EȐ0 [aQA]SC\JK3C'b믨]|+HK(zMQWx EFguҘV(H]s,ٵeKJ?#goT'@:ihXqHiإÛHR[J}^.#1z_Q>[+a'|u8SiG뇬ktMgL3^Vas'jIj%yYY/VW?Y 2~cRpc.,e$KڧmG:EuT½1BR)cou2f)M0g|$+1fRNӽR Ad"xR&RiqQ$n)Y,!iΓI.wMk^c}}˰LPW= eK+JIݓ[H$$:J3k'ۆ{pĥPe th8Ƴ_kCBJ 4;)uDT%ex̑gI3LiubtVAb"q娄u _&lҲ+WUF>I(,pVQ.,o,* hIYis9#Yκ ćt:/Iy*"Ȧ@4i )EY0ElzZf:ApP\-Pci*5IՂUP%pك~wYPI\$.v-V[5[}"ZE2iK?cPf αp 52/JqWoSg:Oˠ"eOLc&1G%!I+5yZiW=̶^a:0̺M0]k}R8=%m2Plɶ{eb-+WSJ\z粺sh gZ8.Iπ1ToOK.qrтҋG$\剜"Se(I%2:[,cO&"`+*,!|2eEe ZNi!tmĽ !&#%){Za"NZ݇%Opk3FIaZIl"iQ0%6>%w. ~ *:j+~lָm8Cd(vq GrJϋf*ImSP*i4Qx A8jFgbs%'QTitɸz_cE4wmP2qę{qyd4n$#;$2`0lVjjt51(B /[vg;rϯo6-UK?rgP'c)#*"bQ/E,[jBټYTDJ33Q$XĞjjz`t{p1P%9D,q -&GFCD3 jSݓˋl`ٟQ̫U2.s]9Ct~֚BCI"b0iMn'i#"kAeb28%TLiF&)zrsC 6 >љ>Sm&5)V-n3QKD)F|&fⶋKԧM7rtCAa4uViuBj*$J>elǑH%Z; 4ŝ- q' .vV-\9DI3QkZxF? 
.'P?HF8ju%1:8 nB1jT֢/jGnT\)+|CT߈.ԏ}4$NҖy,L&_("雊ןE>xѤ"ϰit4C˦eΥ82ʳbaΨ/flX_a&1)'Nc#k:e ϭô[Sh"2QHKrb;MFJҴQw#ZyRit- utI%hlv#I(jHuŭFa9?ۮԏL,KĪ7y "x3u]5W-u*vNn$Ëf23%;T!HY]?;V0Pp@7IL>}MƜm+Eȏ1R5)Zmbe>]c< 47`t~҆)htHS3iL:L̘lJl-ڥ".&c{UQuU%+C3P.:PQtQ)lVӾ~Lh9, CǺ&y] Q"hl鵜2UYI&dyHc;;)sZMR^mDeRnKGL2:m/ݦ`T, k;LgRlӳJwreԎ]~L742a.$DȻ{":F qk8VgJM'ni3#Ii-ph˥)h9pFLH-Y9I."s>L9ì}Fj5%A&m:8PJș%ŭ%f"K[IdFhXm$IJDQ*"?|[v>#ՃҼ2Z.]'`-Iɢ fׂt%E,l?'@>zzSUHRe_Z8kqej$m3ֵu+j2C ;f("Z)a2זS%d#-W2t&> 1K QXRʧn@ Pц6h25̥nۇf=tIŬ)Oy&첪4i 6^L3̥-ŷhDڹg*a;፵32ҷeq9 mԫ9ln ˇlNaUF~cHU.QMץK$1+Cim'3 ڐIV4Z5sPT%k3~/ql7U+-w!g}LN8Efb2%l>Jdٳa+'Ci1S1#pƜ`}$4Jtqd#UFl/E,2S I\LMCU7äj[.5uf{6Iv3)K*c DAmlQET+gVzַQJ35(f|fQ),;Zlˎ3 ,}r6R5Ro¤Kj]HM#&88j~vNEL+uQnv2BLak*RKB*I܌Gz4QV񷏘 U^\G0tíQlu:`3Q,CI('ֵf;[JwC,e5+&Gټ%NI3sFv4+xv -Hꊖ)jsDt [D'9. Er3#۴2t&>5[C\dBd2⛁;M-}  W]uPI51Mfoœf4nV[Mb xLcFD;P3t(̒|AnOۄЕnn6cu6cSCָ/8KIm[ Y+p#>3=*`vMi,,CEG5b fnX5]&K.Vg<eLD0D!T\[2i>pE)JQFffw3>0Ye+ªNy8K͖lKuyDvPl܍.z%94H3 7w-˴xv "H*:=<LKyiVWd b9}"t tPzdIU2bV6Jry:bBtC+i"3)}Ġ{RiJM]%ϵ%*@kC.e sYf.ٲnV[MÔĆj2!aiq[Ym'rRL+0C>TIKGAS4fza7VIx۝ũө-2TZ6J D(f(FMn,YbJuΕMP i"No˸fic7̢k7""S:R֣3Q#R{- EG0f&h(eO35O3U6O4?SJ E8t{8a %d(;qggr_Kꪢꐩ&9P.[3ą{lV>2ѕ1 @4oY&z)⮊4 NQSW"70df3Jlw1rz"zyP m&LiIHmx`Ȧam7;_n^4RSL)t4GBVb4&etiQ [kKҤTGc#.2i V,pc}"Џ4hnĴJW?㥙&ز߄Z~vH77LɄ&1j<­ ݬ#`eSؕ\3/q !aݚbi6m,e)qj"UcЕc~4Hy&/V6ɘhXY[,p%KwTBiV'bD`"oי h_d_pmRHk 1Êk ,3Pq+$6SU8lk=·I\!N'ό5<"ERbc6ő%8S:Dd%Uc"=Dc[ $>)N5 %i9{ɨGuf:dG))Dv#3IX-pqYrʋk|IFJ!C Aj4ǜC7&䛧b K J $$x%6jKhA)"^"DC V]Y$g2qKǺZwfTK3Uvh"Q=_$Bd=4 ' K_:fn u24'ƭy J**a]aoos$V,>>3cO*TXͥD~:{(DֳJ#+ X 4I!EIIJHDDNl" Ea4wviY`R-pk07b-fo![۰Ԥ'IiM7a3I\і qy܇[n,c^.,g-jtKi2378Mcy3c/*bEMbc)’8T; dвJn3; +3XZT&IUa&:->r#]L:qP Ļ&J[d]niIoىRq^%e 3DD3>YIq>lM؈6 v9)DRaMI{5vϺ @}"]p6I'Vl4| c4o#-%wD,/6}v is.NU4ZdSHܙuihISs)¹\ƗV&&&UPR3Eť$DuԬ"JHbO b9}")m.'Fy9J2\'b0!8oYܷfWc˷`>otSцff" M,KN'3.]xF`:_R'0Ǻ:EfU;FO\1ѾX\J#tÌ a9Ƣ2-寰MD)]7(QFFj"{ K:QXzhܕS  i #i܈I-}}kʦbSʧOY(kOdfd(ՔFdWI{R3Y, f`g-2w*K E :f(B%YOrj.!jźˌ`_. .. 
toctree:: :maxdepth: 2 :caption: Contents: readme installation metrics-yaml pings-yaml tags-yaml modules contributing authors history Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` glean_parser-15.0.1/docs/installation.md000066400000000000000000000016411466531427000202250ustar00rootroot00000000000000# Installation ## Stable release To install `glean_parser`, run this command in your terminal: ```sh $ pip install glean_parser ``` This is the preferred method to install `glean_parser`, as it will always install the most recent stable release. If you don't have [pip](https://pip.pypa.io) installed, this [Python installation guide](http://docs.python-guide.org/en/latest/starting/installation/) can guide you through the process. ## From sources The sources for `glean_parser` can be downloaded from the [Github repo](https://github.com/mozilla/glean_parser). You can either clone the public repository: ```sh $ git clone https://github.com/mozilla/glean_parser.git ``` Or download the [tarball](https://github.com/mozilla/glean_parser/tarball/HEAD): ```sh $ curl -OL https://github.com/mozilla/glean_parser/tarball/HEAD ``` Once you have a copy of the source, you can install it with: ```sh $ pip install . ``` glean_parser-15.0.1/docs/make.bat000066400000000000000000000014061466531427000166060ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=python -msphinx ) set SOURCEDIR=. set BUILDDIR=_build set SPHINXPROJ=glean_parser if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The Sphinx module was not found. Make sure you have Sphinx installed, echo.then set the SPHINXBUILD environment variable to point to the full echo.path of the 'sphinx-build' executable. Alternatively you may add the echo.Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd glean_parser-15.0.1/docs/metrics-yaml.rst000066400000000000000000000006641466531427000203460ustar00rootroot00000000000000``metrics.yaml`` file ===================== Documentation for the ``metrics.yaml`` file has moved to `metrics parameters in the Glean user documentation `_. JSON Schema ----------- There is a formal schema for validating ``metrics.yaml`` files, included in its entirety below: .. literalinclude:: ../glean_parser/schemas/metrics.2-0-0.schema.yaml :language: yaml glean_parser-15.0.1/docs/pings-yaml.rst000066400000000000000000000006371466531427000200200ustar00rootroot00000000000000``pings.yaml`` file =================== Documentation for the ``pings.yaml`` file has moved to `custom pings in the Glean user documentation `_. JSON Schema ----------- There is a formal schema for validating ``pings.yaml`` files, included in its entirety below: .. literalinclude:: ../glean_parser/schemas/pings.2-0-0.schema.yaml :language: yaml glean_parser-15.0.1/docs/readme.md000077700000000000000000000000001466531427000204442../README.mdustar00rootroot00000000000000glean_parser-15.0.1/docs/tags-yaml.rst000066400000000000000000000006371466531427000176360ustar00rootroot00000000000000``tags.yaml`` file ===================== Documentation for the ``tags.yaml`` file is in the `tags parameters in the Glean user documentation `_. JSON Schema ----------- There is a formal schema for validating ``tags.yaml`` files, included in its entirety below: .. 
literalinclude:: ../glean_parser/schemas/tags.1-0-0.schema.yaml :language: yaml glean_parser-15.0.1/glean_parser/000077500000000000000000000000001466531427000167125ustar00rootroot00000000000000glean_parser-15.0.1/glean_parser/__init__.py000066400000000000000000000010031466531427000210150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Top-level package for Glean parser.""" import importlib.metadata try: __version__ = importlib.metadata.version(__name__) except importlib.metadata.PackageNotFoundError: # package is not installed pass __author__ = """The Glean Team""" __email__ = "glean-team@mozilla.com" glean_parser-15.0.1/glean_parser/__main__.py000077500000000000000000000206711466531427000210150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Console script for glean_parser.""" import datetime import io from pathlib import Path import sys import click import json import glean_parser from . import coverage as mod_coverage from . import data_review as mod_data_review from . import lint from . import translate as mod_translate from . import validate_ping from . 
import translation_options CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @click.command(context_settings=CONTEXT_SETTINGS) @click.argument( "input", type=click.Path(exists=False, dir_okay=False, file_okay=True, readable=True), nargs=-1, ) @click.option( "--output", "-o", type=click.Path(dir_okay=True, file_okay=False, writable=True), nargs=1, required=True, ) @click.option( "--format", "-f", type=click.Choice(list(mod_translate.OUTPUTTERS.keys())), required=True, ) @click.option( "--option", "-s", help="Backend-specific option. Must be of the form key=value.\ Pass 'help' for valid options", type=str, multiple=True, required=False, is_eager=True, callback=translation_options.translate_options, ) @click.option( "--allow-reserved", is_flag=True, help=( "If provided, allow the use of reserved fields. " "Should only be set when building the Glean library itself." ), ) @click.option( "--allow-missing-files", is_flag=True, help=("Do not treat missing input files as an error."), ) @click.option( "--require-tags", is_flag=True, help=("Require tags to be specified for metrics and pings."), ) @click.option( "--expire-by-version", help="Expire metrics by version, with the provided major version.", type=click.INT, required=False, ) def translate( input, format, output, option, allow_reserved, allow_missing_files, require_tags, expire_by_version, ): """ Translate metrics.yaml and pings.yaml files to other formats. """ option_dict = {} for opt in option: key, val = opt.split("=", 1) option_dict[key] = val sys.exit( mod_translate.translate( [Path(x) for x in input], format, Path(output), option_dict, { "allow_reserved": allow_reserved, "allow_missing_files": allow_missing_files, "require_tags": require_tags, "expire_by_version": expire_by_version, }, ) ) @click.command() @click.option( "--schema", "-s", type=str, nargs=1, required=True, help=("HTTP url or file path to Glean ping schema. 
If remote, will cache to disk."), ) def check(schema): """ Validate the contents of a Glean ping. The ping contents are read from stdin, and the validation errors are written to stdout. """ sys.exit( validate_ping.validate_ping( io.TextIOWrapper(sys.stdin.buffer, encoding="utf-8"), io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8"), schema_url=schema, ) ) @click.command() @click.argument( "input", type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True), nargs=-1, ) @click.option( "--allow-reserved", is_flag=True, help=( "If provided, allow the use of reserved fields. " "Should only be set when building the Glean library itself." ), ) @click.option( "--allow-missing-files", is_flag=True, help=("Do not treat missing input files as an error."), ) @click.option( "--require-tags", is_flag=True, help=("Require tags to be specified for metrics and pings."), ) def glinter(input, allow_reserved, allow_missing_files, require_tags): """ Runs a linter over the metrics. """ sys.exit( lint.glinter( [Path(x) for x in input], { "allow_reserved": allow_reserved, "allow_missing_files": allow_missing_files, "require_tags": require_tags, }, ) ) @click.command() @click.argument( "input", type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True), nargs=-1, ) @click.option( "--allow-reserved", is_flag=True, help=( "If provided, allow the use of reserved fields. " "Should only be set when building the Glean library itself." ), ) @click.option( "--allow-missing-files", is_flag=True, help=("Do not treat missing input files as an error."), ) @click.option( "--require-tags", is_flag=True, help=("Require tags to be specified for metrics and pings."), ) def dump(input, allow_reserved, allow_missing_files, require_tags): """ Dump the list of metrics/pings as JSON to stdout. 
""" results = glean_parser.parser.parse_objects( [Path(x) for x in input], { "allow_reserved": allow_reserved, "allow_missing_files": allow_missing_files, "require_tags": require_tags, }, ) errs = list(results) assert len(errs) == 0 metrics = { metric.identifier(): metric.serialize() for category, probes in results.value.items() for probe_name, metric in probes.items() } def date_serializer(o): if isinstance(o, datetime.datetime): return o.isoformat() print( json.dumps( metrics, sort_keys=True, indent=2, separators=(",", ": "), default=date_serializer, ) ) @click.command() @click.option( "-c", "--coverage_file", type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True), required=True, multiple=True, ) @click.argument( "metrics_files", type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True), nargs=-1, ) @click.option( "-o", "--output", type=click.Path(exists=False, dir_okay=False, file_okay=True, writable=True), required=True, ) @click.option( "--format", "-f", type=click.Choice(list(mod_coverage.OUTPUTTERS.keys())), required=True, ) @click.option( "--allow-reserved", is_flag=True, help=( "If provided, allow the use of reserved fields. " "Should only be set when building the Glean library itself." ), ) def coverage(coverage_file, metrics_files, format, output, allow_reserved): """ Produce a coverage analysis file given raw coverage output and a set of metrics.yaml files. """ sys.exit( mod_coverage.coverage( [Path(x) for x in coverage_file], [Path(x) for x in metrics_files], format, Path(output), { "allow_reserved": allow_reserved, }, ) ) @click.command() @click.argument("bug", type=str) @click.argument( "metrics_files", type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True), nargs=-1, ) def data_review_request(bug, metrics_files): """ Generate a skeleton Data Review Request for all metrics in METRICS_FILES whose bug_numbers fields contain the provided BUG string. 
For example, providing "1694739" matches "https://bugzilla.mozilla.org/show_bug.cgi?id=1694739". To ensure substrings don't match, the provided bug string will match only if it is bounded by non-word characters. Prints to stdout. """ sys.exit(mod_data_review.generate(bug, [Path(x) for x in metrics_files])) @click.group() @click.version_option(glean_parser.__version__, prog_name="glean_parser") def main(args=None): """Command line utility for glean_parser.""" pass main.add_command(translate) main.add_command(check) main.add_command(glinter) main.add_command(dump) main.add_command(coverage) main.add_command(data_review_request, "data-review") def main_wrapper(args=None): """ A simple wrapper around click's `main` to display the glean_parser version when there is an error. """ try: main(args=args) except SystemExit as e: if e.code != 0: print( f"ERROR running glean_parser v{glean_parser.__version__}", file=sys.stderr, ) raise if __name__ == "__main__": main_wrapper() # pragma: no cover glean_parser-15.0.1/glean_parser/coverage.py000066400000000000000000000104651466531427000210650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Produce coverage reports from the raw information produced by the `GLEAN_TEST_COVERAGE` feature. """ import json from .metrics import ObjectTree from pathlib import Path import sys from typing import Any, Dict, List, Optional, Sequence, Set from . import parser from . import util def _outputter_codecovio(metrics: ObjectTree, output_path: Path): """ Output coverage in codecov.io format as defined here: https://docs.codecov.io/docs/codecov-custom-coverage-format :param metrics: The tree of metrics, already annotated with coverage by `_annotate_coverage`. :param output_path: The file to output to. 
""" coverage: Dict[str, List] = {} for category in metrics.values(): for metric in category.values(): defined_in = metric.defined_in if defined_in is not None: path = defined_in["filepath"] if path not in coverage: with open(path) as fd: nlines = len(list(fd.readlines())) lines = [None] * nlines coverage[path] = lines file_section = coverage[path] file_section[int(defined_in["line"])] = getattr(metric, "covered", 0) with open(output_path, "w") as fd: json.dump({"coverage": coverage}, fd) OUTPUTTERS = {"codecovio": _outputter_codecovio} def _annotate_coverage(metrics, coverage_entries): """ Annotate each metric with whether it is covered. Sets the attribute `covered` to 1 on each metric that is covered. """ mapping = {} for category in metrics.values(): for metric in category.values(): mapping[metric.identifier()] = metric for entry in coverage_entries: metric_id = _coverage_entry_to_metric_id(entry) if metric_id in mapping: mapping[metric_id].covered = 1 def _coverage_entry_to_metric_id(entry: str) -> str: """ Convert a coverage entry to a metric id. Technically, the coverage entries are rkv database keys, so are not just the metric identifier. This extracts the metric identifier part out. """ # If getting a glean error count, report it as covering the metric the # error occurred in, not the `glean.error.*` metric itself. if entry.startswith("glean.error."): entry = entry.split("/")[-1] # If a labeled metric, strip off the label part return entry.split("/")[0] def _read_coverage_entries(coverage_reports: List[Path]) -> Set[str]: """ Read coverage entries from one or more files, and deduplicates them. 
""" entries = set() for coverage_report in coverage_reports: with open(coverage_report) as fd: for line in fd.readlines(): entries.add(line.strip()) return entries def coverage( coverage_reports: List[Path], metrics_files: Sequence[Path], output_format: str, output_file: Path, parser_config: Optional[Dict[str, Any]] = None, file=sys.stderr, ) -> int: """ Commandline helper for coverage. :param coverage_reports: List of coverage report files, output from the Glean SDK when the `GLEAN_TEST_COVERAGE` environment variable is set. :param metrics_files: List of Path objects to load metrics from. :param output_format: The coverage output format to produce. Must be one of `OUTPUTTERS.keys()`. :param output_file: Path to output coverage report to. :param parser_config: Parser configuration object, passed to `parser.parse_objects`. :return: Non-zero if there were any errors. """ if parser_config is None: parser_config = {} if output_format not in OUTPUTTERS: raise ValueError(f"Unknown outputter {output_format}") metrics_files = util.ensure_list(metrics_files) all_objects = parser.parse_objects(metrics_files, parser_config) if util.report_validation_errors(all_objects): return 1 entries = _read_coverage_entries(coverage_reports) _annotate_coverage(all_objects.value, entries) OUTPUTTERS[output_format](all_objects.value, output_file) return 0 glean_parser-15.0.1/glean_parser/data_review.py000066400000000000000000000041501466531427000215560ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Produce skeleton Data Review Requests. """ from pathlib import Path from typing import Sequence import re from . import parser from . import util def generate( bug: str, metrics_files: Sequence[Path], ) -> int: """ Commandline helper for Data Review Request template generation. 
:param bug: pattern to match in metrics' bug_numbers lists. :param metrics_files: List of Path objects to load metrics from. :return: Non-zero if there were any errors. """ metrics_files = util.ensure_list(metrics_files) # Accept any value of expires. parser_options = { "allow_reserved": True, "custom_is_expired": lambda expires: False, "custom_validate_expires": lambda expires: True, } all_objects = parser.parse_objects(metrics_files, parser_options) if util.report_validation_errors(all_objects): return 1 # I tried [\W\Z] but it complained. So `|` it is. reobj = re.compile(f"\\W{bug}\\W|\\W{bug}$") durations = set() responsible_emails = set() filtered_metrics = list() for metrics in all_objects.value.values(): for metric in metrics.values(): if not any([len(reobj.findall(bug)) == 1 for bug in metric.bugs]): continue filtered_metrics.append(metric) durations.add(metric.expires) if metric.expires == "never": responsible_emails.update(metric.notification_emails) if len(filtered_metrics) == 0: print(f"I'm sorry, I couldn't find metrics matching the bug number {bug}.") return 1 template = util.get_jinja2_template( "data_review.jinja2", filters=(("snake_case", util.snake_case),), ) print( template.render( metrics=filtered_metrics, durations=durations, responsible_emails=responsible_emails, ) ) return 0 glean_parser-15.0.1/glean_parser/go_server.py000066400000000000000000000124551466531427000212660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate server Go code for collecting events. This outputter is different from the rest of the outputters in that the code it generates does not use the Glean SDK. It is meant to be used to collect events in server-side environments. 
In these environments SDK assumptions to measurement window and connectivity don't hold. Generated code takes care of assembling pings with metrics, and serializing to messages conforming to Glean schema. Warning: this outputter supports limited set of metrics, see `SUPPORTED_METRIC_TYPES` below. The generated code creates the following: * Two methods for logging an Event metric one with and one without user request info specified """ from collections import defaultdict from pathlib import Path from typing import Any, Dict, Optional, List from . import __version__ from . import metrics from . import util # Adding a metric here will require updating the `generate_metric_type` function # and require adjustments to `metrics` variables the the template. SUPPORTED_METRIC_TYPES = ["string", "quantity", "event", "datetime"] def generate_event_type_name(metric: metrics.Metric) -> str: return f"Event{util.Camelize(metric.category)}{util.Camelize(metric.name)}" def generate_metric_name(metric: metrics.Metric) -> str: return f"{metric.category}.{metric.name}" def generate_extra_name(extra: str) -> str: return util.Camelize(extra) def generate_metric_argument_name(metric: metrics.Metric) -> str: return f"{util.Camelize(metric.category)}{util.Camelize(metric.name)}" def generate_metric_type(metric_type: str) -> str: if metric_type == "quantity": return "int64" elif metric_type == "string": return "string" elif metric_type == "boolean": return "bool" elif metric_type == "datetime": return "time.Time" else: print("❌ Unable to generate Go type from metric type: " + metric_type) exit return "NONE" def clean_string(s: str) -> str: return s.replace("\n", " ").rstrip() def output_go( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] ) -> None: """ Given a tree of objects, output Go code to `output_dir`. The output is a single file containing all the code for assembling pings with metrics, serializing, and submitting. 
:param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. """ template = util.get_jinja2_template( "go_server.jinja2", filters=( ("event_type_name", generate_event_type_name), ("event_extra_name", generate_extra_name), ("metric_name", generate_metric_name), ("metric_argument_name", generate_metric_argument_name), ("go_metric_type", generate_metric_type), ("clean_string", clean_string), ), ) PING_METRIC_ERROR_MSG = ( " Server-side environment is simplified and only supports the events ping type." + " You should not be including pings.yaml with your parser call" + " or referencing any other pings in your metric configuration." ) if "pings" in objs: print("❌ Ping definition found." + PING_METRIC_ERROR_MSG) return # Go through all metrics in objs and build a map of # ping->list of metric categories->list of metrics # for easier processing in the template. ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict) for _category_key, category_val in objs.items(): for _metric_name, metric in category_val.items(): if isinstance(metric, metrics.Metric): if metric.type not in SUPPORTED_METRIC_TYPES: print( "❌ Ignoring unsupported metric type: " + f"{metric.type}:{metric.name}." + " Reach out to Glean team to add support for this" + " metric type." ) continue for ping in metric.send_in_pings: if ping != "events": ( print( "❌ Non-events ping reference found." + PING_METRIC_ERROR_MSG + f"Ignoring the {ping} ping type." 
) ) continue metrics_by_type = ping_to_metrics[ping] metrics_list = metrics_by_type.setdefault(metric.type, []) metrics_list.append(metric) if "event" not in ping_to_metrics["events"]: print("❌ No event metrics found...at least one event metric is required") return extension = ".go" filepath = output_dir / ("server_events" + extension) with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, events_ping=ping_to_metrics["events"] ) ) glean_parser-15.0.1/glean_parser/javascript.py000066400000000000000000000257361466531427000214470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate Javascript code for metrics. """ import enum import json from pathlib import Path from typing import Any, Dict, Optional, Callable from . import __version__ from . import metrics from . import util def javascript_datatypes_filter(value: util.JSONType) -> str: """ A Jinja2 filter that renders Javascript literals. 
Based on Python's JSONEncoder, but overrides: - lists to use listOf - sets to use setOf - Rate objects to a CommonMetricData initializer (for external Denominators' Numerators lists) """ class JavascriptEncoder(json.JSONEncoder): def iterencode(self, value): if isinstance(value, enum.Enum): yield from super().iterencode(util.camelize(value.name)) elif isinstance(value, list): yield "[" first = True for subvalue in value: if not first: yield ", " yield from self.iterencode(subvalue) first = False yield "]" elif isinstance(value, set): yield "[" first = True for subvalue in sorted(list(value)): if not first: yield ", " yield from self.iterencode(subvalue) first = False yield "]" elif isinstance(value, metrics.Rate): yield "CommonMetricData(" first = True for arg_name in util.common_metric_args: if hasattr(value, arg_name): if not first: yield ", " yield f"{util.camelize(arg_name)} = " yield from self.iterencode(getattr(value, arg_name)) first = False yield ")" else: yield from super().iterencode(value) return "".join(JavascriptEncoder().iterencode(value)) def class_name_factory(platform: str) -> Callable[[str], str]: """ Returns a function that receives an obj_type and returns the correct class name for that type in the current platform. """ def class_name(obj_type: str) -> str: if obj_type == "ping": class_name = "PingType" else: if obj_type.startswith("labeled_"): obj_type = obj_type[8:] class_name = util.Camelize(obj_type) + "MetricType" if platform == "qt": return "Glean.Glean._private." + class_name return class_name return class_name def extra_type_name(extra_type: str) -> str: """ Returns the equivalent TypeScript type to an extra type. """ if extra_type == "quantity": return "number" return extra_type def import_path(obj_type: str) -> str: """ Returns the import path of the given object inside the @mozilla/glean package. 
""" if obj_type == "ping": import_path = "ping" else: if obj_type.startswith("labeled_"): obj_type = obj_type[8:] import_path = "metrics/" + obj_type return import_path def args(obj_type: str) -> Dict[str, object]: """ Returns the list of arguments for each object type. """ if obj_type == "ping": return {"common": util.ping_args, "extra": []} return {"common": util.common_metric_args, "extra": util.extra_metric_args} def generate_build_date(date: Optional[str]) -> str: """ Generate the build Date object. """ ts = util.build_date(date) data = [ str(ts.year), # In JavaScript the first month of the year in calendars is JANUARY which is 0. # In Python it's 1-based str(ts.month - 1), str(ts.day), str(ts.hour), str(ts.minute), str(ts.second), ] components = ", ".join(data) # DatetimeMetricType takes a `Date` instance. return f"new Date({components})" # noqa def output( lang: str, objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None, ) -> None: """ Given a tree of objects, output Javascript or Typescript code to `output_dir`. :param lang: Either "javascript" or "typescript"; :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. :param options: options dictionary, with the following optional keys: - `platform`: Which platform are we building for. Options are `webext` and `qt`. Default is `webext`. - `version`: The version of the Glean.js Qt library being used. This option is mandatory when targeting Qt. Note that the version string must only contain the major and minor version i.e. 0.14. - `with_buildinfo`: If "true" a `gleanBuildInfo.(js|ts)` file is generated. Otherwise generation of that file is skipped. Defaults to "false". - `build_date`: If set to `0` a static unix epoch time will be used. If set to a ISO8601 datetime string (e.g. `2022-01-03T17:30:00`) it will use that date. Other values will throw an error. 
If not set it will use the current date & time. """ if options is None: options = {} platform = options.get("platform", "webext") accepted_platforms = ["qt", "webext", "node"] if platform not in accepted_platforms: raise ValueError( f"Unknown platform: {platform}. Accepted platforms are: {accepted_platforms}." # noqa ) version = options.get("version") if platform == "qt" and version is None: raise ValueError( "'version' option is required when building for the 'qt' platform." ) template = util.get_jinja2_template( "javascript.jinja2", filters=( ("class_name", class_name_factory(platform)), ("extra_type_name", extra_type_name), ("import_path", import_path), ("js", javascript_datatypes_filter), ("args", args), ), ) for category_key, category_val in objs.items(): extension = ".js" if lang == "javascript" else ".ts" filename = util.camelize(category_key) + extension filepath = output_dir / filename types = set( [ # This takes care of the regular metric type imports # as well as the labeled metric subtype imports, # thus the removal of the `labeled_` substring. # # The actual LabeledMetricType import is conditioned after # the `has_labeled_metrics` boolean. 
obj.type if not obj.type.startswith("labeled_") else obj.type[8:] for obj in category_val.values() ] ) has_labeled_metrics = any( getattr(metric, "labeled", False) for metric in category_val.values() ) with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, category_name=category_key, objs=category_val, extra_args=util.extra_args, platform=platform, version=version, has_labeled_metrics=has_labeled_metrics, types=types, lang=lang, ) ) # Jinja2 squashes the final newline, so we explicitly add it fd.write("\n") with_buildinfo = options.get("with_buildinfo", "").lower() == "true" build_date = options.get("build_date", None) if with_buildinfo: # Write out the special "build info" file template = util.get_jinja2_template( "javascript.buildinfo.jinja2", ) # This filename needs to start with "glean" so it can never # clash with a metric category filename = "gleanBuildInfo" + extension filepath = output_dir / filename with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, platform=platform, build_date=generate_build_date(build_date), ) ) fd.write("\n") if platform == "qt": # Explicitly create a qmldir file when building for Qt template = util.get_jinja2_template("qmldir.jinja2") filepath = output_dir / "qmldir" with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, categories=objs.keys(), version=version ) ) # Jinja2 squashes the final newline, so we explicitly add it fd.write("\n") def output_javascript( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None ) -> None: """ Given a tree of objects, output Javascript code to `output_dir`. :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. 
:param options: options dictionary, with the following optional keys: - `namespace`: The identifier of the global variable to assign to. This will only have and effect for Qt and static web sites. Default is `Glean`. - `platform`: Which platform are we building for. Options are `webext` and `qt`. Default is `webext`. """ output("javascript", objs, output_dir, options) def output_typescript( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None ) -> None: """ Given a tree of objects, output Typescript code to `output_dir`. # Note The only difference between the typescript and javascript templates, currently is the file extension. :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. :param options: options dictionary, with the following optional keys: - `namespace`: The identifier of the global variable to assign to. This will only have and effect for Qt and static web sites. Default is `Glean`. - `platform`: Which platform are we building for. Options are `webext` and `qt`. Default is `webext`. """ output("typescript", objs, output_dir, options) glean_parser-15.0.1/glean_parser/javascript_server.py000066400000000000000000000215041466531427000230220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate server Javascript code for collecting events. This outputter is different from the rest of the outputters in that the code it generates does not use the Glean SDK. It is meant to be used to collect events in server-side environments. In these environments SDK assumptions to measurement window and connectivity don't hold. 
# Adding a metric here will require updating the `generate_js_metric_type`
# function and might require changes to the template.
SUPPORTED_METRIC_TYPES = ["string", "event"]


def event_class_name(
    ping_name: str, metrics_by_type: Dict[str, List["metrics.Metric"]]
) -> str:
    """
    Build the name of the per-ping event class.

    For compatibility with the FxA codebase the "Logger" suffix is only
    appended when the ping carries at least one `event` metric.
    """
    has_event_metrics = "event" in metrics_by_type
    return "{}ServerEvent{}".format(
        util.Camelize(ping_name), "Logger" if has_event_metrics else ""
    )


def generate_metric_name(metric: "metrics.Metric") -> str:
    """Return the fully qualified `category.name` identifier of a metric."""
    return f"{metric.category}.{metric.name}"


def generate_metric_argument_name(metric: "metrics.Metric") -> str:
    """Return the `category_name` form used for generated function arguments."""
    return f"{metric.category}_{metric.name}"


def generate_js_metric_type(metric: "metrics.Metric") -> str:
    """Return the JavaScript-side type of a metric (currently its Glean type)."""
    return metric.type


def generate_ping_factory_method(
    ping: str, metrics_by_type: Dict[str, List["metrics.Metric"]]
) -> str:
    """
    Build the name of the factory method generated for `ping`.

    `ServerEventLogger` better describes the role of the class this factory
    produces, but for compatibility with the existing FxA codebase the
    plain `Event` suffix is kept when no event metrics are defined.
    """
    has_event_metrics = "event" in metrics_by_type
    suffix = "ServerEventLogger" if has_event_metrics else "Event"
    return f"create{util.Camelize(ping)}{suffix}"


def generate_event_metric_record_function_name(metric: "metrics.Metric") -> str:
    """Build the `record<Category><Name>` method name for an event metric."""
    return f"record{util.Camelize(metric.category)}{util.Camelize(metric.name)}"


def clean_string(s: str) -> str:
    """Flatten newlines to spaces and drop trailing whitespace."""
    flattened = s.replace("\n", " ")
    return flattened.rstrip()
# noqa ) template = util.get_jinja2_template( "javascript_server.jinja2", filters=( ("event_class_name", event_class_name), ("metric_name", generate_metric_name), ("metric_argument_name", generate_metric_argument_name), ("js_metric_type", generate_js_metric_type), ("factory_method", generate_ping_factory_method), ( "event_metric_record_function_name", generate_event_metric_record_function_name, ), ("clean_string", clean_string), ), ) event_metric_exists = False # Go through all metrics in objs and build a map of # ping->list of metric categories->list of metrics # for easier processing in the template. ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict) for _category_key, category_val in objs.items(): for _metric_name, metric in category_val.items(): if isinstance(metric, metrics.Metric): if metric.type not in SUPPORTED_METRIC_TYPES: print( "❌ Ignoring unsupported metric type: " + f"{metric.type}:{metric.name}." + " Reach out to Glean team to add support for this" + " metric type." ) continue if metric.type == "event": # This is used in the template - generated code is slightly # different when event metric type is used. event_metric_exists = True for ping in metric.send_in_pings: metrics_by_type = ping_to_metrics[ping] metrics_list = metrics_by_type.setdefault(metric.type, []) metrics_list.append(metric) # Order pings_to_metrics for backwards compatibility with the existing FxA codebase. # Put pings without `event` type metrics first. ping_to_metrics = dict( sorted(ping_to_metrics.items(), key=lambda item: "event" in item[1]) ) PING_METRIC_ERROR_MSG = ( " Server-side environment is simplified and this" + " parser doesn't generate individual metric files. Make sure to pass all" + " your ping and metric definitions in a single invocation of the parser." ) if "pings" not in objs: # If events are meant to be sent in custom pings, we need to make sure they # are defined. 
Otherwise we won't have destination tables defined and # submissions won't pass validation at ingestion. if event_metric_exists: if "events" not in ping_to_metrics: # Event metrics can be sent in standard `events` ping # or in custom pings. print( "❌ " + PING_METRIC_ERROR_MSG + "\n You need to either send your event metrics in standard" + " `events` ping or define a custom one." ) return else: print("❌ No ping definition found." + PING_METRIC_ERROR_MSG) return if not ping_to_metrics: print("❌ No pings with metrics found." + PING_METRIC_ERROR_MSG) return extension = ".js" if lang == "javascript" else ".ts" filepath = output_dir / ("server_events" + extension) with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, pings=ping_to_metrics, event_metric_exists=event_metric_exists, module_spec=module_spec, lang=lang, ) ) def output_javascript( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None ) -> None: """ Given a tree of objects, output Javascript code to `output_dir`. :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. :param options: options dictionary, with the following optional keys: - `module_spec`: Module specification to use. Options are `es`, `commonjs`. Default is `es`. """ output("javascript", objs, output_dir, options) def output_typescript( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None ) -> None: """ Given a tree of objects, output Typescript code to `output_dir`. :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. 
""" output("typescript", objs, output_dir, options) glean_parser-15.0.1/glean_parser/kotlin.py000066400000000000000000000224571466531427000205760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate Kotlin code for metrics. """ import enum import json from pathlib import Path from typing import Any, Dict, List, Optional, Union # noqa from . import __version__ from . import metrics from . import pings from . import util def kotlin_datatypes_filter(value: util.JSONType) -> str: """ A Jinja2 filter that renders Kotlin literals. Based on Python's JSONEncoder, but overrides: - lists to use listOf - dicts to use mapOf - sets to use setOf - enums to use the like-named Kotlin enum - Rate objects to a CommonMetricData initializer (for external Denominators' Numerators lists) """ class KotlinEncoder(json.JSONEncoder): def iterencode(self, value): if isinstance(value, list): yield "listOf(" first = True for subvalue in value: if not first: yield ", " yield from self.iterencode(subvalue) first = False yield ")" elif isinstance(value, dict): yield "mapOf(" first = True for key, subvalue in value.items(): if not first: yield ", " yield from self.iterencode(key) yield " to " yield from self.iterencode(subvalue) first = False yield ")" elif isinstance(value, enum.Enum): # UniFFI generates SCREAMING_CASE enum variants. yield (value.__class__.__name__ + "." 
def type_name(obj: "Union[metrics.Metric, pings.Ping]") -> str:
    """
    Returns the Kotlin type to use for a given metric or ping object.
    """
    enum_specs = getattr(obj, "_generate_enums", [])
    if enum_specs:
        generic = None
        is_event = isinstance(obj, metrics.Event)
        # The last (member, suffix) pair wins, matching the original
        # iteration order over `_generate_enums`.
        for member, suffix in enum_specs:
            if getattr(obj, member):
                base = util.Camelize(obj.name) if is_event else util.camelize(obj.name)
                generic = base + suffix
            else:
                generic = "NoExtras" if is_event else "No" + suffix
        return "{}<{}>".format(class_name(obj.type), generic)

    if getattr(obj, "_generate_structure", []):
        return "{}<{}>".format(
            class_name(obj.type), util.Camelize(obj.name) + "Object"
        )

    return class_name(obj.type)


def extra_type_name(typ: str) -> str:
    """
    Returns the corresponding Kotlin type for event's extra key types.
    """
    mapping = {"boolean": "Boolean", "string": "String", "quantity": "Int"}
    return mapping.get(typ, "UNSUPPORTED")


def structure_type_name(typ: str) -> str:
    """
    Returns the corresponding Kotlin type for structure items.
    """
    mapping = {"boolean": "Boolean", "string": "String", "number": "Int"}
    return mapping.get(typ, "UNSUPPORTED")


def class_name(obj_type: str) -> str:
    """
    Returns the Kotlin class name for a given metric or ping type.
    """
    if obj_type == "ping":
        return "PingType"
    # Labeled metrics reuse the class of their underlying type.
    base = obj_type[8:] if obj_type.startswith("labeled_") else obj_type
    return util.Camelize(base) + "MetricType"


def generate_build_date(date: Optional[str]) -> str:
    """
    Generate the Kotlin expression for the build timestamp.
    """
    ts = util.build_date(date)

    components = ", ".join(
        [
            str(ts.year),
            # In Java the first month in Calendar is JANUARY which is 0,
            # while Python's months are 1-based.
            str(ts.month - 1),
            str(ts.day),
            str(ts.hour),
            str(ts.minute),
            str(ts.second),
        ]
    )

    # DatetimeMetricType takes a `Calendar` instance.
    return f'Calendar.getInstance(TimeZone.getTimeZone("GMT+0")).also {{ cal -> cal.set({components}) }}'  # noqa
""" if options is None: options = {} namespace = options.get("namespace", "GleanMetrics") glean_namespace = options.get("glean_namespace", "mozilla.components.service.glean") namespace_package = namespace[: namespace.rfind(".")] with_buildinfo = options.get("with_buildinfo", "true").lower() == "true" build_date = options.get("build_date", None) # Write out the special "build info" object template = util.get_jinja2_template( "kotlin.buildinfo.jinja2", ) if with_buildinfo: build_date = generate_build_date(build_date) # This filename needs to start with "Glean" so it can never clash with a # metric category with (output_dir / "GleanBuildInfo.kt").open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, namespace=namespace, namespace_package=namespace_package, glean_namespace=glean_namespace, build_date=build_date, ) ) fd.write("\n") template = util.get_jinja2_template( "kotlin.jinja2", filters=( ("kotlin", kotlin_datatypes_filter), ("type_name", type_name), ("extra_type_name", extra_type_name), ("class_name", class_name), ("structure_type_name", structure_type_name), ), ) for category_key, category_val in objs.items(): filename = util.Camelize(category_key) + ".kt" filepath = output_dir / filename obj_types = sorted( list(set(class_name(obj.type) for obj in category_val.values())) ) has_labeled_metrics = any( getattr(metric, "labeled", False) for metric in category_val.values() ) has_object_metrics = any( isinstance(metric, metrics.Object) for metric in category_val.values() ) with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, category_name=category_key, objs=category_val, obj_types=obj_types, common_metric_args=util.common_metric_args, extra_metric_args=util.extra_metric_args, ping_args=util.ping_args, namespace=namespace, has_labeled_metrics=has_labeled_metrics, has_object_metrics=has_object_metrics, glean_namespace=glean_namespace, ) ) # Jinja2 squashes the final newline, so we 
explicitly add it fd.write("\n") glean_parser-15.0.1/glean_parser/lint.py000066400000000000000000000467541466531427000202520ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import enum from pathlib import Path import re import sys from typing import ( Any, Callable, Dict, Generator, List, Iterable, Optional, Tuple, Union, ) # noqa from . import metrics from . import parser from . import pings from . import tags from . import util # Yield only an error message LintGenerator = Generator[str, None, None] # Yield fully constructed GlinterNits NitGenerator = Generator["GlinterNit", None, None] class CheckType(enum.Enum): warning = 0 error = 1 def _split_words(name: str) -> List[str]: """ Helper function to split words on either `.` or `_`. """ return re.split("[._-]", name) def _english_list(items: List[str]) -> str: """ Helper function to format a list [A, B, C] as "'A', 'B', or 'C'". """ if len(items) == 0: return "" elif len(items) == 1: return f"'{items[0]}'" else: return "{}, or '{}'".format( ", ".join([f"'{x}'" for x in items[:-1]]), items[-1] ) def _hamming_distance(str1: str, str2: str) -> int: """ Count the # of differences between strings str1 and str2, padding the shorter one with whitespace """ diffs = 0 if len(str1) < len(str2): str1, str2 = str2, str1 len_dist = len(str1) - len(str2) str2 += " " * len_dist for ch1, ch2 in zip(str1, str2): if ch1 != ch2: diffs += 1 return diffs def check_common_prefix( category_name: str, metrics: Iterable[metrics.Metric] ) -> LintGenerator: """ Check if all metrics begin with a common prefix. 
""" metric_words = sorted([_split_words(metric.name) for metric in metrics]) if len(metric_words) < 2: return first = metric_words[0] last = metric_words[-1] for i in range(min(len(first), len(last))): if first[i] != last[i]: break if i > 0: common_prefix = "_".join(first[:i]) yield ( f"Within category '{category_name}', all metrics begin with " f"prefix '{common_prefix}'." "Remove the prefixes on the metric names and (possibly) " "rename the category." ) def check_unit_in_name( metric: metrics.Metric, parser_config: Dict[str, Any] ) -> LintGenerator: """ The metric name ends in a unit. """ TIME_UNIT_ABBREV = { "nanosecond": "ns", "microsecond": "us", "millisecond": "ms", "second": "s", "minute": "m", "hour": "h", "day": "d", } MEMORY_UNIT_ABBREV = { "byte": "b", "kilobyte": "kb", "megabyte": "mb", "gigabyte": "gb", } name_words = _split_words(metric.name) unit_in_name = name_words[-1] time_unit = getattr(metric, "time_unit", None) memory_unit = getattr(metric, "memory_unit", None) unit = getattr(metric, "unit", None) if time_unit is not None: if ( unit_in_name == TIME_UNIT_ABBREV.get(time_unit.name) or unit_in_name == time_unit.name ): yield ( f"Suffix '{unit_in_name}' is redundant with time_unit " f"'{time_unit.name}'. Only include time_unit." ) elif ( unit_in_name in TIME_UNIT_ABBREV.keys() or unit_in_name in TIME_UNIT_ABBREV.values() ): yield ( f"Suffix '{unit_in_name}' doesn't match time_unit " f"'{time_unit.name}'. " "Confirm the unit is correct and only include time_unit." ) elif memory_unit is not None: if ( unit_in_name == MEMORY_UNIT_ABBREV.get(memory_unit.name) or unit_in_name == memory_unit.name ): yield ( f"Suffix '{unit_in_name}' is redundant with memory_unit " f"'{memory_unit.name}'. " "Only include memory_unit." ) elif ( unit_in_name in MEMORY_UNIT_ABBREV.keys() or unit_in_name in MEMORY_UNIT_ABBREV.values() ): yield ( f"Suffix '{unit_in_name}' doesn't match memory_unit " f"{memory_unit.name}'. 
" "Confirm the unit is correct and only include memory_unit." ) elif unit is not None: if unit_in_name == unit: yield ( f"Suffix '{unit_in_name}' is redundant with unit param " f"'{unit}'. " "Only include unit." ) def check_category_generic( category_name: str, metrics: Iterable[metrics.Metric] ) -> LintGenerator: """ The category name is too generic. """ GENERIC_CATEGORIES = ["metrics", "events"] if category_name in GENERIC_CATEGORIES: yield ( f"Category '{category_name}' is too generic. " f"Don't use {_english_list(GENERIC_CATEGORIES)} for category names" ) def check_bug_number( metric: Union[metrics.Metric, pings.Ping], parser_config: Dict[str, Any] ) -> LintGenerator: number_bugs = [str(bug) for bug in metric.bugs if isinstance(bug, int)] if len(number_bugs): yield ( f"For bugs {', '.join(number_bugs)}: " "Bug numbers are deprecated and should be changed to full URLs. " f"For example, use 'http://bugzilla.mozilla.org/{number_bugs[0]}' " f"instead of '{number_bugs[0]}'." ) def check_valid_in_baseline( metric: metrics.Metric, parser_config: Dict[str, Any] ) -> LintGenerator: allow_reserved = parser_config.get("allow_reserved", False) if not allow_reserved and "baseline" in metric.send_in_pings: yield ( "The baseline ping is Glean-internal. " "Remove 'baseline' from the send_in_pings array." ) def check_misspelled_pings( metric: metrics.Metric, parser_config: Dict[str, Any] ) -> LintGenerator: for ping in metric.send_in_pings: for builtin in pings.RESERVED_PING_NAMES: distance = _hamming_distance(ping, builtin) if distance == 1: yield f"Ping '{ping}' seems misspelled. Did you mean '{builtin}'?" 
def check_tags_required(
    metric_or_ping: "Union[metrics.Metric, pings.Ping]",
    parser_config: Dict[str, Any],
) -> "LintGenerator":
    """Yield an error when tags are required by config but none are set."""
    if parser_config.get("require_tags", False):
        if not metric_or_ping.metadata.get("tags", []):
            yield "Tags are required but no tags specified"


def check_user_lifetime_expiration(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """Warn when a 'user' lifetime metric also declares an expiration date."""
    if metric.lifetime != metrics.Lifetime.user:
        return
    if metric.expires == "never":
        return
    yield (
        "Metrics with 'user' lifetime cannot have an expiration date. "
        "They live as long as the user profile does. "
        "Set expires to 'never'."
    )


def check_expired_date(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """Surface any ValueError raised by the metric's expiry validation."""
    try:
        metric.validate_expires()
    except ValueError as err:
        yield str(err)


def check_expired_metric(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """Warn about metrics whose expiration has already passed."""
    if metric.is_expired():
        yield "Metric has expired. Please consider removing it."


def check_old_event_api(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """
    Warn when an event's extra keys omit a type.

    Glean v52.0.0 removed the old events API; the metrics-2-0-0 schema
    still accepts it, so warn until a 3-0-0 schema exists.
    """
    if not isinstance(metric, metrics.Event):
        return

    if any("type" not in extra for extra in metric.extra_keys.values()):
        yield "The old event API is gone. Extra keys require a type."
def check_unexpected_unit(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """
    `unit` was allowed on all metrics and recently disallowed.
    We now warn about its use on all but quantity and custom
    distribution metrics.
    """
    if not isinstance(metric, (metrics.Quantity, metrics.CustomDistribution)):
        if metric.unit:
            yield (
                "The `unit` property is only allowed for quantity "
                "and custom distribution metrics."
            )


def check_empty_datareview(
    metric: "metrics.Metric", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """Reject data_review entries that are empty or placeholder TODOs."""
    placeholders = ("", "todo")
    if any(review.lower() in placeholders for review in metric.data_reviews):
        yield "List of data reviews should not contain empty strings or TODO markers."


def check_redundant_ping(
    pings: "pings.Ping", parser_config: Dict[str, Any]
) -> "LintGenerator":
    """
    Check if the ping name contains 'ping' as the prefix or suffix, or
    the redundant words 'ping' or 'custom' anywhere.

    NOTE(review): the parameter is named `pings`, shadowing the module of
    the same name; it actually receives a single Ping object.
    """
    words = _split_words(pings.name)

    if not words:
        return

    if words[0] == "ping":
        yield "The prefix 'ping' is redundant."
    elif words[-1] == "ping":
        yield "The suffix 'ping' is redundant."
    elif "ping" in words:
        yield "The word 'ping' is redundant."
    elif "custom" in words:
        yield "The word 'custom' is redundant."
""" available_pings = [p for p in all_pings] for _, metric in metrics.items(): if check_name in metric.no_lint: continue send_in_pings = metric.send_in_pings for target_ping in send_in_pings: if target_ping in pings.RESERVED_PING_NAMES: continue if target_ping not in available_pings: msg = f"Ping `{target_ping} `in `send_in_pings` is unknown." name = ".".join([metric.category, metric.name]) nit = GlinterNit( check_name, name, msg, check_type, ) yield nit # The checks that operate on an entire category of metrics: # {NAME: (function, is_error)} CATEGORY_CHECKS: Dict[ str, Tuple[Callable[[str, Iterable[metrics.Metric]], LintGenerator], CheckType] ] = { "COMMON_PREFIX": (check_common_prefix, CheckType.error), "CATEGORY_GENERIC": (check_category_generic, CheckType.error), } # The checks that operate on individual metrics: # {NAME: (function, is_error)} METRIC_CHECKS: Dict[ str, Tuple[Callable[[metrics.Metric, dict], LintGenerator], CheckType] ] = { "UNIT_IN_NAME": (check_unit_in_name, CheckType.error), "BUG_NUMBER": (check_bug_number, CheckType.error), "BASELINE_PING": (check_valid_in_baseline, CheckType.error), "MISSPELLED_PING": (check_misspelled_pings, CheckType.error), "TAGS_REQUIRED": (check_tags_required, CheckType.error), "EXPIRATION_DATE_TOO_FAR": (check_expired_date, CheckType.warning), "USER_LIFETIME_EXPIRATION": (check_user_lifetime_expiration, CheckType.warning), "EXPIRED": (check_expired_metric, CheckType.warning), "OLD_EVENT_API": (check_old_event_api, CheckType.warning), "METRIC_ON_EVENTS_LIFETIME": (check_metric_on_events_lifetime, CheckType.error), "UNEXPECTED_UNIT": (check_unexpected_unit, CheckType.warning), "EMPTY_DATAREVIEW": (check_empty_datareview, CheckType.warning), } # The checks that operate on individual pings: # {NAME: (function, is_error)} PING_CHECKS: Dict[ str, Tuple[Callable[[pings.Ping, dict], LintGenerator], CheckType] ] = { "BUG_NUMBER": (check_bug_number, CheckType.error), "TAGS_REQUIRED": (check_tags_required, CheckType.error), 
"REDUNDANT_PING": (check_redundant_ping, CheckType.error), } ALL_OBJECT_CHECKS: Dict[ str, Tuple[ Callable[ # check name, check type, pings, metrics, config [str, CheckType, dict, dict, dict], NitGenerator, ], CheckType, ], ] = { "UNKNOWN_PING_REFERENCED": (check_unknown_ping, CheckType.error), } class GlinterNit: def __init__(self, check_name: str, name: str, msg: str, check_type: CheckType): self.check_name = check_name self.name = name self.msg = msg self.check_type = check_type def format(self): return ( f"{self.check_type.name.upper()}: {self.check_name}: " f"{self.name}: {self.msg}" ) def _lint_item_tags( item_name: str, item_type: str, item_tag_names: List[str], valid_tag_names: List[str], ) -> List[GlinterNit]: invalid_tags = [tag for tag in item_tag_names if tag not in valid_tag_names] return ( [ GlinterNit( "INVALID_TAGS", item_name, f"Invalid tags specified in {item_type}: {', '.join(invalid_tags)}", CheckType.error, ) ] if len(invalid_tags) else [] ) def _lint_pings( category: Dict[str, Union[metrics.Metric, pings.Ping, tags.Tag]], parser_config: Dict[str, Any], valid_tag_names: List[str], ) -> List[GlinterNit]: nits: List[GlinterNit] = [] for ping_name, ping in sorted(list(category.items())): assert isinstance(ping, pings.Ping) for check_name, (check_func, check_type) in PING_CHECKS.items(): new_nits = list(check_func(ping, parser_config)) if len(new_nits): if check_name not in ping.no_lint: nits.extend( GlinterNit( check_name, ping_name, msg, check_type, ) for msg in new_nits ) nits.extend( _lint_item_tags( ping_name, "ping", ping.metadata.get("tags", []), valid_tag_names, ) ) return nits def _lint_all_objects( objects: Dict[str, Dict[str, Union[metrics.Metric, pings.Ping, tags.Tag]]], parser_config: Dict[str, Any], ) -> List[GlinterNit]: nits: List[GlinterNit] = [] pings = objects.get("pings") if not pings: return [] metrics = objects.get("all_metrics") if not metrics: return [] for check_name, (check_func, check_type) in ALL_OBJECT_CHECKS.items(): 
new_nits = list( check_func(check_name, check_type, pings, metrics, parser_config) ) nits.extend(new_nits) return nits def lint_metrics( objs: metrics.ObjectTree, parser_config: Optional[Dict[str, Any]] = None, file=sys.stderr, ) -> List[GlinterNit]: """ Performs glinter checks on a set of metrics objects. :param objs: Tree of metric objects, as returns by `parser.parse_objects`. :param file: The stream to write errors to. :returns: List of nits. """ if parser_config is None: parser_config = {} nits: List[GlinterNit] = [] valid_tag_names = [tag for tag in objs.get("tags", [])] nits.extend(_lint_all_objects(objs, parser_config)) for category_name, category in sorted(list(objs.items())): if category_name == "pings": nits.extend(_lint_pings(category, parser_config, valid_tag_names)) continue if category_name == "tags": # currently we have no linting for tags continue # Make sure the category has only Metrics, not Pings or Tags category_metrics = dict( (name, metric) for (name, metric) in category.items() if isinstance(metric, metrics.Metric) ) for cat_check_name, (cat_check_func, check_type) in CATEGORY_CHECKS.items(): if any( cat_check_name in metric.no_lint for metric in category_metrics.values() ): continue nits.extend( GlinterNit(cat_check_name, category_name, msg, check_type) for msg in cat_check_func(category_name, category_metrics.values()) ) for _metric_name, metric in sorted(list(category_metrics.items())): for check_name, (check_func, check_type) in METRIC_CHECKS.items(): new_nits = list(check_func(metric, parser_config)) if len(new_nits): if check_name not in metric.no_lint: nits.extend( GlinterNit( check_name, ".".join([metric.category, metric.name]), msg, check_type, ) for msg in new_nits ) # also check that tags for metric are valid nits.extend( _lint_item_tags( ".".join([metric.category, metric.name]), "metric", metric.metadata.get("tags", []), valid_tag_names, ) ) if len(nits): print("Sorry, Glean found some glinter nits:", file=file) for nit in nits: 
def glinter(
    input_filepaths: Iterable[Path],
    parser_config: Optional[Dict[str, Any]] = None,
    file=sys.stderr,
) -> int:
    """
    Commandline helper for glinter.

    :param input_filepaths: List of Path objects to load metrics from.
    :param parser_config: Parser configuration object, passed to
        `parser.parse_objects`.
    :param file: The stream to write the errors to.
    :return: Non-zero if there were any glinter errors.
    """
    if parser_config is None:
        parser_config = {}

    errors = 0
    objs = parser.parse_objects(input_filepaths, parser_config)
    errors += util.report_validation_errors(objs)

    nits = lint_metrics(objs.value, parser_config=parser_config, file=file)
    errors += len([nit for nit in nits if nit.check_type == CheckType.error])

    if errors == 0:
        print("✨ Your metrics are Glean! ✨", file=file)
        return 0

    # Fix: route the summary through the caller-supplied stream like every
    # other message in this module, instead of unconditionally to stdout.
    print(f"❌ Found {errors} errors.", file=file)
    return 1
def metrics_docs(obj_name: str) -> str:
    """
    Return a link to the documentation entry for the Glean SDK metric of
    the requested type.
    """
    # The docs use the plural form for labeled types, while the metric
    # type names themselves are singular.
    page_name = obj_name + "s" if obj_name.startswith("labeled_") else obj_name
    return f"https://mozilla.github.io/glean/book/user/metrics/{page_name}.html"
def ping_review_title(data_url: str, index: int) -> str:
    """
    Return a title for a data review in human readable form.

    :param data_url: A url for data review.
    :param index: Position of the data review on list (e.g: 1, 2, 3...).
    """
    url_object = urlsplit(data_url)

    # Bugzilla urls like `https://bugzilla.mozilla.org/show_bug.cgi?id=1581647`
    params = parse_qs(url_object.query)

    # GitHub urls like `https://github.com/mozilla-mobile/fenix/pull/1707`
    short_url = url_object.path[1:].replace("/pull/", "#")

    # Bug fix: `params["id"]` used to raise a KeyError for any URL whose
    # query string exists but has no `id` parameter; use `.get` so such
    # URLs fall through to the generic title instead.
    bug_ids = params.get("id")
    if bug_ids:
        return f"Bug {bug_ids[0]}"
    elif url_object.netloc == "github.com":
        return short_url

    return f"Review {index}"
# } # # This also builds a dictionary of custom pings, if available. custom_pings_cache: Dict[str, pings.Ping] = defaultdict() metrics_by_pings: Dict[str, List[metrics.Metric]] = defaultdict(list) for _category_key, category_val in objs.items(): for obj in category_val.values(): # Filter out custom pings. We will need them for extracting # the description if isinstance(obj, pings.Ping): custom_pings_cache[obj.name] = obj # Pings that have `send_if_empty` set to true, # might not have any metrics. They need to at least have an # empty array of metrics to show up on the template. if obj.send_if_empty and not metrics_by_pings[obj.name]: metrics_by_pings[obj.name] = [] # If this is an internal Glean metric, and we don't # want docs for it. if isinstance(obj, metrics.Metric) and not obj.is_internal_metric(): # If we get here, obj is definitely a metric we want # docs for. for ping_name in obj.send_in_pings: metrics_by_pings[ping_name].append(obj) # Sort the metrics by their identifier, to make them show up nicely # in the docs and to make generated docs reproducible. 
for ping_name in metrics_by_pings: metrics_by_pings[ping_name] = sorted( metrics_by_pings[ping_name], key=lambda x: x.identifier() ) project_title = options.get("project_title", "this project") introduction_extra = options.get("introduction_extra") template = util.get_jinja2_template( "markdown.jinja2", filters=( ("extra_info", extra_info), ("metrics_docs", metrics_docs), ("ping_desc", lambda x: ping_desc(x, custom_pings_cache)), ("ping_send_if_empty", lambda x: if_empty(x, custom_pings_cache)), ("ping_docs", ping_docs), ("ping_reasons", lambda x: ping_reasons(x, custom_pings_cache)), ("ping_data_reviews", lambda x: ping_data_reviews(x, custom_pings_cache)), ("ping_review_title", ping_review_title), ("ping_bugs", lambda x: ping_bugs(x, custom_pings_cache)), ( "ping_include_client_id", lambda x: ping_include_client_id(x, custom_pings_cache), ), ("data_sensitivity_numbers", data_sensitivity_numbers), ), ) filename = "metrics.md" filepath = output_dir / filename with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, metrics_by_pings=metrics_by_pings, project_title=project_title, introduction_extra=introduction_extra, ) ) # Jinja2 squashes the final newline, so we explicitly add it fd.write("\n") glean_parser-15.0.1/glean_parser/metrics.py000066400000000000000000000351301466531427000207340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Classes for each of the high-level metric types. """ import enum from typing import Any, Dict, List, Optional, Type, Union # noqa from . import pings from . import tags from . import util # Important: if the values are ever changing here, make sure # to also fix mozilla/glean. Otherwise language bindings may # break there. 
class DataSensitivity(enum.Enum):
    """
    Data sensitivity categories attached to metrics.

    Note: the numeric values are part of the wire/serialized format (see the
    warning above about keeping them in sync with mozilla/glean).
    """

    technical = 1
    interaction = 2
    stored_content = 3
    # `web_activity` deliberately reuses value 3: in `enum.Enum`, a member
    # assigned an already-used value becomes an *alias* for the first member
    # with that value (`stored_content`), which keeps the deprecated name
    # accepted in metrics.yaml without introducing a distinct category.
    web_activity = 3  # Old, deprecated name
    highly_sensitive = 4
if not _validated: data = { "$schema": parser.METRICS_ID, self.category: {self.name: self._serialize_input()}, } # type: Dict[str, util.JSONType] for error in parser.validate(data): raise ValueError(error) # Store the config, but only after validation. if _config is None: _config = {} self._config = _config # Metrics in the special category "glean.internal.metrics" need to have # an empty category string when identifying the metrics in the ping. if self.category == Metric.glean_internal_metric_cat: self.category = "" def __init_subclass__(cls, **kwargs): # Create a mapping of all of the subclasses of this class if cls not in Metric.metric_types and hasattr(cls, "typename"): Metric.metric_types[cls.typename] = cls super().__init_subclass__(**kwargs) @classmethod def make_metric( cls, category: str, name: str, metric_info: Dict[str, util.JSONType], config: Optional[Dict[str, Any]] = None, validated: bool = False, ): """ Given a metric_info dictionary from metrics.yaml, return a metric instance. :param: category The category the metric lives in :param: name The name of the metric :param: metric_info A dictionary of the remaining metric parameters :param: config A dictionary containing commandline configuration parameters :param: validated True if the metric has already gone through jsonschema validation :return: A new Metric instance. """ if config is None: config = {} metric_type = metric_info["type"] if not isinstance(metric_type, str): raise TypeError(f"Unknown metric type {metric_type}") return cls.metric_types[metric_type]( category=category, name=name, defined_in=getattr(metric_info, "defined_in", None), _validated=validated, _config=config, **metric_info, ) def serialize(self) -> Dict[str, util.JSONType]: """ Serialize the metric back to JSON object model. 
""" d = self.__dict__.copy() # Convert enum fields back to strings for key, val in d.items(): if isinstance(val, enum.Enum): d[key] = d[key].name if isinstance(val, set): d[key] = sorted(list(val)) if isinstance(val, list) and len(val) and isinstance(val[0], enum.Enum): d[key] = [x.name for x in val] del d["name"] del d["category"] if not d["unit"]: d.pop("unit") d.pop("_config", None) d.pop("_generate_enums", None) d.pop("_generate_structure", None) return d def _serialize_input(self) -> Dict[str, util.JSONType]: d = self.serialize() modified_dict = util.remove_output_params(d, "defined_in") return modified_dict def identifier(self) -> str: """ Create an identifier unique for this metric. Generally, category.name; however, Glean internal metrics only use name. """ if not self.category: return self.name return ".".join((self.category, self.name)) def is_disabled(self) -> bool: return self.disabled or self.is_expired() def is_expired(self) -> bool: def default_handler(expires) -> bool: return util.is_expired(expires, self._config.get("expire_by_version")) return self._config.get("custom_is_expired", default_handler)(self.expires) def validate_expires(self): def default_handler(expires): return util.validate_expires(expires, self._config.get("expire_by_version")) return self._config.get("custom_validate_expires", default_handler)( self.expires ) def is_internal_metric(self) -> bool: return self.category in (Metric.glean_internal_metric_cat, "") class Boolean(Metric): typename = "boolean" class String(Metric): typename = "string" class StringList(Metric): typename = "string_list" class Counter(Metric): typename = "counter" class Quantity(Metric): typename = "quantity" class TimeUnit(enum.Enum): nanosecond = 0 microsecond = 1 millisecond = 2 second = 3 minute = 4 hour = 5 day = 6 class TimeBase(Metric): def __init__(self, *args, **kwargs): self.time_unit = getattr(TimeUnit, kwargs.pop("time_unit", "millisecond")) super().__init__(*args, **kwargs) class 
class TimingDistribution(TimeBase):
    typename = "timing_distribution"

    def __init__(self, *args, **kwargs):
        # Timing distributions default to nanosecond precision, unlike the
        # other time-based metrics, which default to milliseconds.
        self.time_unit = getattr(TimeUnit, kwargs.pop("time_unit", "nanosecond"))
        # Deliberately call Metric.__init__ rather than super().__init__:
        # TimeBase.__init__ would overwrite `time_unit` with its own
        # millisecond default (the kwarg has already been popped above).
        Metric.__init__(self, *args, **kwargs)
class CowString(str):
    """
    Wrapper class for strings that should be represented as a
    `Cow<'static, str>` in Rust, or `String` in other target languages.

    This wraps `str`, so unless `CowString` is specifically handled it
    acts (and serializes) as a string.
    """

    def __init__(self, val: str):
        # Explicit handle on the payload for code that inspects `.inner`;
        # str.__new__ has already stored the value itself.
        self.inner: str = val

    def __eq__(self, other):
        # Bug fix: comparing against a plain `str` previously raised
        # AttributeError (`other.inner`), breaking the documented "acts as
        # a string" contract; fall back to comparing the raw value.
        if isinstance(other, CowString):
            return self.inner == other.inner
        return self.inner == other

    def __hash__(self):
        return self.inner.__hash__()

    def __lt__(self, other):
        # Same fallback as __eq__ so sorting mixed str/CowString sequences
        # does not crash.
        if isinstance(other, CowString):
            return self.inner.__lt__(other.inner)
        return self.inner.__lt__(other)
class Denominator(Counter):
    typename = "denominator"
    # A denominator is a counter with an additional list of numerators.
    # Kept at class level for backward compatibility with any class-level
    # access; instances get their own list in __init__ (see below).
    numerators: List[Rate] = []

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Bug fix: previously the single class-level list above was shared
        # by every Denominator instance, so numerators appended to one
        # metric leaked into all others. Shadow it per instance.
        self.numerators = []
Only allowed: {allowed}" ) if "type" not in structure or structure["type"] not in Object.ALLOWED_TYPES: raise ValueError("invalid or missing `type` in object structure") if structure["type"] == "object": if "items" in structure: raise ValueError("`items` not allowed in object structure") if "properties" not in structure: raise ValueError("`properties` missing for type `object`") for key in structure["properties"]: value = structure["properties"][key] structure["properties"][key] = Object._validate_substructure(value) if structure["type"] == "array": if "properties" in structure: raise ValueError("`properties` not allowed in array structure") if "items" not in structure: raise ValueError("`items` missing for type `array`") value = structure["items"] structure["items"] = Object._validate_substructure(value) return structure @staticmethod def validate_structure(structure): if None: raise ValueError("`structure` needed for object metric.") structure = Object._validate_substructure(structure) return structure ObjectTree = Dict[str, Dict[str, Union[Metric, pings.Ping, tags.Tag]]] glean_parser-15.0.1/glean_parser/parser.py000066400000000000000000000377561466531427000206020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Code for parsing metrics.yaml files. """ import functools from pathlib import Path import textwrap from typing import Any, cast, Dict, Generator, Iterable, Optional, Set, Tuple, Union import jsonschema # type: ignore from jsonschema.exceptions import ValidationError # type: ignore from .metrics import Metric, ObjectTree from .pings import Ping, RESERVED_PING_NAMES from .tags import Tag from . 
def _update_validator(validator):
    """
    Adds some custom validators to the jsonschema validator that produce
    nicer error messages.
    """

    def required(validator, required, instance, schema):
        # Only mappings can be missing properties.
        if not validator.is_type(instance, "object"):
            return
        missing = sorted({prop for prop in required if prop not in instance})
        if missing:
            yield ValidationError(
                f"Missing required properties: {', '.join(missing)}"
            )

    validator.VALIDATORS["required"] = required
def _get_schema(
    schema_id: str, filepath: Union[str, Path] = ""
) -> Tuple[Any, Any]:
    """
    Get the schema for the given schema $id.

    :raises ValueError: if `schema_id` is not one of the known schemas.
    """
    known = _load_schemas()
    if schema_id in known:
        return known[schema_id]
    raise ValueError(
        util.format_error(
            filepath,
            "",
            f"$schema key must be one of {', '.join(known.keys())}",
        )
    )
def validate(
    content: Dict[str, util.JSONType], filepath: Union[str, Path] = ""
) -> Generator[str, None, None]:
    """
    Validate the given content against the appropriate schema.

    Yields formatted error strings; yields nothing if the content is valid.
    """
    try:
        schema, validator = _get_schema_for_content(content, filepath)
    except ValueError as e:
        # Unknown or unsupported $schema key: report it as a single error.
        yield str(e)
        return

    for error in validator.iter_errors(content):
        yield util.format_error(filepath, "", util.pprint_validation_error(error))
instance {category_key}.{metric_key}", 'Only internal metrics may specify "all-pings" ' 'in "send_in_pings"', metric_val.defined_in["line"], ) metric_obj = None if metric_obj is not None: metric_obj.no_lint = sorted(set(metric_obj.no_lint + global_no_lint)) if len(global_tags): metric_obj.metadata["tags"] = sorted( set(metric_obj.metadata.get("tags", []) + global_tags) ) if isinstance(filepath, Path): metric_obj.defined_in["filepath"] = str(filepath) already_seen = sources.get((category_key, metric_key)) if already_seen is not None: # We've seen this metric name already yield util.format_error( filepath, "", ( f"Duplicate metric name '{category_key}.{metric_key}' " f"already defined in '{already_seen}'" ), metric_obj.defined_in["line"], ) else: all_objects[category_key][metric_key] = metric_obj sources[(category_key, metric_key)] = filepath def _instantiate_pings( all_objects: ObjectTree, sources: Dict[Any, Path], content: Dict[str, util.JSONType], filepath: Path, config: Dict[str, Any], ) -> Generator[str, None, None]: """ Load a list of pings.yaml files, convert the JSON information into Ping objects. 
""" global_no_lint = content.get("no_lint", []) assert isinstance(global_no_lint, list) ping_schedule_reverse_map: Dict[str, Set[str]] = dict() for ping_key, ping_val in sorted(content.items()): if ping_key.startswith("$"): continue if ping_key == "no_lint": continue if not config.get("allow_reserved"): if ping_key in RESERVED_PING_NAMES: yield util.format_error( filepath, f"For ping '{ping_key}'", f"Ping uses a reserved name ({RESERVED_PING_NAMES})", ) continue if not isinstance(ping_val, dict): raise TypeError(f"Invalid content for ping {ping_key}") ping_val["name"] = ping_key if "metadata" in ping_val and "ping_schedule" in ping_val["metadata"]: if ping_key in ping_val["metadata"]["ping_schedule"]: yield util.format_error( filepath, f"For ping '{ping_key}'", "ping_schedule contains its own ping name", ) continue for ping_schedule in ping_val["metadata"]["ping_schedule"]: if ping_schedule not in ping_schedule_reverse_map: ping_schedule_reverse_map[ping_schedule] = set() ping_schedule_reverse_map[ping_schedule].add(ping_key) try: ping_obj = Ping( defined_in=getattr(ping_val, "defined_in", None), _validated=True, **ping_val, ) except Exception as e: yield util.format_error(filepath, f"On instance '{ping_key}'", str(e)) continue if ping_obj is not None: ping_obj.no_lint = sorted(set(ping_obj.no_lint + global_no_lint)) if isinstance(filepath, Path) and ping_obj.defined_in is not None: ping_obj.defined_in["filepath"] = str(filepath) already_seen = sources.get(ping_key) if already_seen is not None: # We've seen this ping name already yield util.format_error( filepath, "", f"Duplicate ping name '{ping_key}' " f"already defined in '{already_seen}'", ) else: all_objects.setdefault("pings", {})[ping_key] = ping_obj sources[ping_key] = filepath for scheduler, scheduled in ping_schedule_reverse_map.items(): if scheduler in all_objects["pings"] and isinstance( all_objects["pings"][scheduler], Ping ): scheduler_obj: Ping = cast(Ping, all_objects["pings"][scheduler]) 
def _preprocess_objects(objs: ObjectTree, config: Dict[str, Any]) -> ObjectTree:
    """
    Preprocess the object tree to better set defaults.
    """
    disable_expired = not config.get("do_not_disable_expired", False)

    for category in objs.values():
        for obj in category.values():
            if not isinstance(obj, Metric):
                continue

            # Expired metrics are marked disabled unless the caller opted out.
            if disable_expired and hasattr(obj, "is_disabled"):
                obj.disabled = obj.is_disabled()

            if hasattr(obj, "send_in_pings"):
                # Expand the "default" placeholder into the metric type's
                # default store names, then deduplicate deterministically.
                if "default" in obj.send_in_pings:
                    obj.send_in_pings = obj.default_store_names + [
                        ping for ping in obj.send_in_pings if ping != "default"
                    ]
                obj.send_in_pings = sorted(set(obj.send_in_pings))
    return objs
""" if config is None: config = {} all_objects: ObjectTree = DictWrapper() sources: Dict[Any, Path] = {} filepaths = util.ensure_list(filepaths) for filepath in filepaths: content, filetype = yield from _load_file(filepath, config) if filetype == "metrics": yield from _instantiate_metrics( all_objects, sources, content, filepath, config ) elif filetype == "pings": yield from _instantiate_pings( all_objects, sources, content, filepath, config ) elif filetype == "tags": yield from _instantiate_tags( all_objects, sources, content, filepath, config ) return _preprocess_objects(all_objects, config) glean_parser-15.0.1/glean_parser/pings.py000066400000000000000000000065531466531427000204150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Classes for managing the description of pings. """ from typing import Dict, List, Optional from . import util RESERVED_PING_NAMES = ["baseline", "metrics", "events", "deletion-request", "default"] class Ping: def __init__( self, name: str, description: str, bugs: List[str], notification_emails: List[str], metadata: Optional[Dict] = None, data_reviews: Optional[List[str]] = None, include_client_id: bool = False, send_if_empty: bool = False, reasons: Optional[Dict[str, str]] = None, defined_in: Optional[Dict] = None, no_lint: Optional[List[str]] = None, enabled: Optional[bool] = None, _validated: bool = False, ): # Avoid cyclical import from . 
import parser self.name = name self.description = description self.bugs = bugs self.notification_emails = notification_emails if metadata is None: metadata = {} self.metadata = metadata self.precise_timestamps = self.metadata.get("precise_timestamps", True) self.include_info_sections = self.metadata.get("include_info_sections", True) if enabled is None: enabled = True self.enabled = enabled self.schedules_pings: List[str] = [] if data_reviews is None: data_reviews = [] self.data_reviews = data_reviews self.include_client_id = include_client_id self.send_if_empty = send_if_empty if reasons is None: reasons = {} self.reasons = reasons self.defined_in = defined_in if no_lint is None: no_lint = [] self.no_lint = no_lint # _validated indicates whether this ping has already been jsonschema # validated (but not any of the Python-level validation). if not _validated: data: Dict[str, util.JSONType] = { "$schema": parser.PINGS_ID, self.name: self._serialize_input(), } for error in parser.validate(data): raise ValueError(error) _generate_enums = [("reason_codes", "ReasonCodes")] @property def type(self) -> str: return "ping" @property def reason_codes(self) -> List[str]: return sorted(list(self.reasons.keys())) def serialize(self) -> Dict[str, util.JSONType]: """ Serialize the metric back to JSON object model. """ d = self.__dict__.copy() del d["name"] return d def _serialize_input(self) -> Dict[str, util.JSONType]: d = self.serialize() modified_dict = util.remove_output_params(d, "defined_in") modified_dict = util.remove_output_params(modified_dict, "precise_timestamps") modified_dict = util.remove_output_params( modified_dict, "include_info_sections" ) modified_dict = util.remove_output_params(modified_dict, "schedules_pings") return modified_dict def identifier(self) -> str: """ Used for the "generated from ..." comment in the output. 
""" return self.name glean_parser-15.0.1/glean_parser/python_server.py000066400000000000000000000112671466531427000222020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate server Python code for collecting events. This outputter is different from the rest of the outputters in that the code it generates does not use the Glean SDK. It is meant to be used to collect events in server-side environments. In these environments SDK assumptions to measurement window and connectivity don't hold. Generated code takes care of assembling pings with metrics, and serializing to messages conforming to Glean schema. Warning: this outputter supports limited set of metrics, see `SUPPORTED_METRIC_TYPES` below. The generated code creates a `ServerEventLogger` class for each ping that has at least one event metric. The class has a `record` method for each event metric. """ from collections import defaultdict from pathlib import Path from typing import Any, Dict, Optional, List from . import __version__ from . import metrics from . import util # Adding a metric here will require updating the `generate_metric_type` function # and require adjustments to `metrics` variables the the template. 
SUPPORTED_METRIC_TYPES = ["string", "quantity", "event"]


def camelize(s: str) -> str:
    """Convert a snake_case identifier to CamelCase via the shared helper."""
    return util.Camelize(s)


def generate_metric_type(metric_type: str) -> str:
    """
    Map a Glean metric type to the Python type used in generated code.

    :param metric_type: the Glean metric type name (e.g. "string").
    :return: the Python type name, or the sentinel "NONE" for unsupported
        types (an error is printed in that case).
    """
    if metric_type == "quantity":
        return "int"
    elif metric_type == "string":
        return "str"
    elif metric_type == "boolean":
        return "bool"
    else:
        print("❌ Unable to generate Python type from metric type: " + metric_type)
        # Deliberately non-fatal: fall through to a sentinel so generation can
        # continue.  (A previous revision had a bare `exit` statement here,
        # which only referenced the builtin without calling it — a no-op.)
        return "NONE"


def clean_string(s: str) -> str:
    """Collapse newlines to spaces and strip trailing whitespace."""
    return s.replace("\n", " ").rstrip()


def generate_ping_factory_method(ping: str) -> str:
    """Return the factory-function name for a ping's ServerEventLogger."""
    return f"create_{util.snake_case(ping)}_server_event_logger"


def generate_event_record_function_name(event_metric: metrics.Metric) -> str:
    """Return the `record_<category>_<name>` method name for an event metric."""
    return (
        f"record_{util.snake_case(event_metric.category)}_"
        + f"{util.snake_case(event_metric.name)}"
    )


def output_python(
    objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]]
) -> None:
    """
    Given a tree of objects, output Python code to `output_dir`.

    The output is a file containing all the code for assembling pings with
    metrics, serializing, and submitting, and an empty `__init__.py` file to
    make the directory a package.

    :param objects: A tree of objects (metrics and pings) as returned from
        `parser.parse_objects`.
    :param output_dir: Path to an output directory to write to.
    """
    template = util.get_jinja2_template(
        "python_server.jinja2",
        filters=(
            ("camelize", camelize),
            ("py_metric_type", generate_metric_type),
            ("clean_string", clean_string),
            ("factory_method", generate_ping_factory_method),
            ("record_event_function_name", generate_event_record_function_name),
        ),
    )

    # Go through all metrics in objs and build a map of
    # ping->list of metric categories->list of metrics
    # for easier processing in the template.
    # Map of ping name -> {metric type -> [metrics]} consumed by the template.
    ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict)
    for _category_key, category_val in objs.items():
        for _metric_name, metric in category_val.items():
            if isinstance(metric, metrics.Metric):
                if metric.type not in SUPPORTED_METRIC_TYPES:
                    # Unsupported metrics are skipped, not fatal.
                    print(
                        "❌ Ignoring unsupported metric type: "
                        + f"{metric.type}:{metric.name}."
                        + " Reach out to Glean team to add support for this"
                        + " metric type."
                    )
                    continue
                for ping in metric.send_in_pings:
                    metrics_by_type = ping_to_metrics[ping]
                    metrics_list = metrics_by_type.setdefault(metric.type, [])
                    metrics_list.append(metric)

    # A ServerEventLogger only makes sense for pings that carry events; bail
    # out entirely if any ping lacks one.
    for ping, metrics_by_type in ping_to_metrics.items():
        if "event" not in metrics_by_type:
            print(
                f"❌ No event metrics found for ping: {ping}."
                + " At least one event metric is required."
            )
            return

    extension = ".py"
    filepath = output_dir / ("server_events" + extension)
    with filepath.open("w", encoding="utf-8") as fd:
        fd.write(template.render(parser_version=__version__, pings=ping_to_metrics))
    # create an empty `__init__.py` file to make the directory a package
    init_file = output_dir / "__init__.py"
    with init_file.open("w", encoding="utf-8") as fd:
        fd.write("")
glean_parser-15.0.1/glean_parser/ruby_server.py000066400000000000000000000121061466531427000216330ustar00rootroot00000000000000# -*- coding: utf-8 -*-

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Outputter to generate server ruby code for collecting events.

This outputter is different from the rest of the outputters in that the code it
generates does not use the Glean SDK. It is meant to be used to collect events
using "events as pings" pattern in server-side environments. In these environments
SDK assumptions to measurement window and connectivity don't hold.
Generated code takes care of assembling pings with metrics, serializing to messages
conforming to Glean schema, and logging using a standard Ruby logger.
Then it's the role of the ingestion pipeline to pick the messages up and process.

Warning: this outputter supports a limited set of metrics,
see `SUPPORTED_METRIC_TYPES` below.
"""

from collections import defaultdict
from pathlib import Path
from typing import Any, Dict, List, Optional

from . import __version__, metrics, util

SUPPORTED_METRIC_TYPES = ["string", "event"]


def ping_class_name(pingName: str) -> str:
    """Return the generated Ruby logger class name for a ping."""
    return f"Glean{util.Camelize(pingName)}Logger"


def generate_metric_name(metric: metrics.Metric) -> str:
    """Return the fully-qualified `category.name` identifier of a metric."""
    return f"{metric.category}.{metric.name}"


def generate_metric_argument_name(metric: metrics.Metric) -> str:
    """Return the `category_name` form used as a Ruby argument name."""
    return f"{metric.category}_{metric.name}"


def generate_metric_argument_description(metric: metrics.Metric) -> str:
    # Newlines would break the generated single-line Ruby doc comments.
    return metric.description.replace("\n", " ").rstrip()


def event_class_name(metric: metrics.Metric) -> str:
    """Return the generated Ruby event class name for an event metric."""
    return f"{util.Camelize(generate_metric_argument_name(metric))}Event"


def output_ruby(
    objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]]
) -> None:
    """
    Given a tree of objects, output ruby code to `output_dir`.

    The output is a single file containing all the code for assembling pings with
    metrics, serializing, and submitting.

    :param objects: A tree of objects (metrics and pings) as returned from
        `parser.parse_objects`.
    :param output_dir: Path to an output directory to write to.
    """
    template = util.get_jinja2_template(
        "ruby_server.jinja2",
        filters=(
            ("ping_class_name", ping_class_name),
            ("metric_name", generate_metric_name),
            ("metric_argument_name", generate_metric_argument_name),
            ("metric_argument_description", generate_metric_argument_description),
            ("event_class_name", event_class_name),
        ),
    )

    # In this environment we don't use a concept of measurement window for collecting
    # metrics. Only "events as pings" are supported.
    # For each ping we generate code which contains all the logic for assembling it
    # with metrics, serializing, and submitting. Therefore we don't generate classes for
    # each metric as in standard outputters.
    PING_METRIC_ERROR_MSG = (
        " Server-side environment is simplified and only supports the events ping type."
        + " You should not be including pings.yaml with your parser call"
        + " or referencing any other pings in your metric configuration."
    )
    if "pings" in objs:
        print("❌ Ping definition found." + PING_METRIC_ERROR_MSG)
        return

    # Go through all metrics in objs and build a map of
    # ping->list of metric categories->list of metrics
    # for easier processing in the template.
    ping_to_metrics: Dict[str, Dict[str, List[metrics.Metric]]] = defaultdict(dict)
    for _category_key, category_val in objs.items():
        for _metric_name, metric in category_val.items():
            if isinstance(metric, metrics.Metric):
                if metric.type not in SUPPORTED_METRIC_TYPES:
                    # Unsupported metric types are skipped, not fatal.
                    print(
                        "❌ Ignoring unsupported metric type: "
                        + f"{metric.type}:{metric.name}."
                        + " Reach out to Glean team to add support for this"
                        + " metric type."
                    )
                    continue
                for ping in metric.send_in_pings:
                    # Only the "events" ping is supported server-side; other
                    # ping references are reported and dropped.
                    if ping != "events":
                        (
                            print(
                                "❌ Non-events ping reference found."
                                + PING_METRIC_ERROR_MSG
                                + f"Ignoring the {ping} ping type."
                            )
                        )
                        continue
                    metrics_by_type = ping_to_metrics[ping]
                    metrics_list = metrics_by_type.setdefault(metric.type, [])
                    metrics_list.append(metric)

    if "event" not in ping_to_metrics["events"]:
        print("❌ No event metrics found...at least one event metric is required")
        return

    extension = ".rb"
    filepath = output_dir / ("server_events" + extension)
    with filepath.open("w", encoding="utf-8") as fd:
        fd.write(
            template.render(
                parser_version=__version__,
                pings=ping_to_metrics,
            )
        )
glean_parser-15.0.1/glean_parser/rust.py000066400000000000000000000162461466531427000202700ustar00rootroot00000000000000# -*- coding: utf-8 -*-

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Outputter to generate Rust code for metrics.
"""

import enum
import json
from pathlib import Path
from typing import Any, Dict, Optional, Union

from . import __version__
from . import metrics
from . import pings
from . import tags
from . import util


def rust_datatypes_filter(value):
    """
    A Jinja2 filter that renders Rust literals.

    Based on Python's JSONEncoder, but overrides:
      - dicts to raise an error (unsupported)
      - sets to vec![] (used in labels)
      - enums to become Class::Value
      - lists to vec![] (used in send_in_pings)
      - null to None
      - strings to "value".into()
      - Rate objects to a CommonMetricData initializer
        (for external Denominators' Numerators lists)
    """

    class RustEncoder(json.JSONEncoder):
        def iterencode(self, value):
            if isinstance(value, dict):
                raise ValueError("RustEncoder doesn't know dicts {}".format(str(value)))
            elif isinstance(value, enum.Enum):
                yield (value.__class__.__name__ + "::" + util.Camelize(value.name))
            elif isinstance(value, set):
                # Sets are rendered in sorted order for deterministic output.
                yield "vec!["
                first = True
                for subvalue in sorted(list(value)):
                    if not first:
                        yield ", "
                    yield from self.iterencode(subvalue)
                    first = False
                yield "]"
            elif isinstance(value, list):
                yield "vec!["
                first = True
                for subvalue in list(value):
                    if not first:
                        yield ", "
                    yield from self.iterencode(subvalue)
                    first = False
                yield "]"
            elif value is None:
                yield "None"
            # `CowStr` is a `str`, so needs to be before next case
            elif isinstance(value, metrics.CowString):
                yield f'::std::borrow::Cow::from("{value.inner}")'
            elif isinstance(value, str):
                yield f"{json.dumps(value)}.into()"
            elif isinstance(value, metrics.Rate):
                yield "CommonMetricData("
                first = True
                for arg_name in util.common_metric_args:
                    if hasattr(value, arg_name):
                        if not first:
                            yield ", "
                        yield f"{util.camelize(arg_name)} = "
                        yield from self.iterencode(getattr(value, arg_name))
                        first = False
                yield ")"
            else:
                yield from super().iterencode(value)

    return "".join(RustEncoder().iterencode(value))


def ctor(obj):
    """
    Returns the scope and name of the constructor to use for a metric object.
    Necessary because labeled metrics are constructed via `LabeledMetric::new`,
    not via the per-type constructor derived from `class_name` (e.g.
    `BooleanMetric::new`).
    """
    if getattr(obj, "labeled", False):
        return "LabeledMetric::new"
    return class_name(obj.type) + "::new"


def type_name(obj):
    """
    Returns the Rust type to use for a given metric or ping object.
    """
    if getattr(obj, "labeled", False):
        return "LabeledMetric<{}>".format(class_name(obj.type))
    generate_enums = getattr(obj, "_generate_enums", [])  # Extra Keys? Reasons?
    if len(generate_enums):
        generic = None
        for name, suffix in generate_enums:
            if len(getattr(obj, name)):
                # Object defines its own enum (e.g. CustomNameReasonCodes).
                generic = util.Camelize(obj.name) + suffix
            else:
                # Fall back to the shared "no values" placeholder type.
                if isinstance(obj, metrics.Event):
                    generic = "NoExtraKeys"
                else:
                    generic = "No" + suffix

        return "{}<{}>".format(class_name(obj.type), generic)

    generate_structure = getattr(obj, "_generate_structure", [])
    if len(generate_structure):
        generic = util.Camelize(obj.name) + "Object"
        return "{}<{}>".format(class_name(obj.type), generic)

    return class_name(obj.type)


def extra_type_name(typ: str) -> str:
    """
    Returns the corresponding Rust type for event's extra key types.
    """
    if typ == "boolean":
        return "bool"
    elif typ == "string":
        return "String"
    elif typ == "quantity":
        return "u32"
    else:
        return "UNSUPPORTED"


def structure_type_name(typ: str) -> str:
    """
    Returns the corresponding Rust type for structure items.
    """
    if typ == "boolean":
        return "bool"
    elif typ == "string":
        return "String"
    elif typ == "number":
        return "i64"
    else:
        return "UNSUPPORTED"


def class_name(obj_type):
    """
    Returns the Rust class name for a given metric or ping type.
    """
    if obj_type == "ping":
        return "Ping"
    if obj_type.startswith("labeled_"):
        # Strip the "labeled_" prefix; the label wrapper is added by
        # `type_name`/`ctor` instead.
        obj_type = obj_type[8:]
    return util.Camelize(obj_type) + "Metric"


def extra_keys(allowed_extra_keys):
    """
    Returns the &'static [&'static str]
    ALLOWED_EXTRA_KEYS for impl ExtraKeys
    """
    return "&[" + ", ".join([f'"{key}"' for key in allowed_extra_keys]) + "]"


class Category:
    """
    Data struct holding information about a metric to be used in the template.
    """

    def __init__(
        self,
        name: str,
        objs: Dict[str, Union[metrics.Metric, pings.Ping, tags.Tag]],
        contains_pings: bool,
    ):
        self.name = name
        self.objs = objs
        self.contains_pings = contains_pings


def output_rust(
    objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
) -> None:
    """
    Given a tree of objects, output Rust code to `output_dir`.

    :param objs: A tree of objects (metrics and pings) as returned from
        `parser.parse_objects`.
    :param output_dir: Path to an output directory to write to.
    :param options: options dictionary, not currently used for Rust
    """
    if options is None:
        options = {}

    template = util.get_jinja2_template(
        "rust.jinja2",
        filters=(
            ("rust", rust_datatypes_filter),
            ("snake_case", util.snake_case),
            ("camelize", util.camelize),
            ("type_name", type_name),
            ("extra_type_name", extra_type_name),
            ("structure_type_name", structure_type_name),
            ("ctor", ctor),
            ("extra_keys", extra_keys),
        ),
    )

    filename = "glean_metrics.rs"
    filepath = output_dir / filename
    categories = []

    for category_key, category_val in objs.items():
        contains_pings = any(
            isinstance(obj, pings.Ping) for obj in category_val.values()
        )

        cat = Category(category_key, category_val, contains_pings)
        categories.append(cat)

    with filepath.open("w", encoding="utf-8") as fd:
        fd.write(
            template.render(
                parser_version=__version__,
                categories=categories,
                extra_metric_args=util.extra_metric_args,
                common_metric_args=util.common_metric_args,
            )
        )
glean_parser-15.0.1/glean_parser/schemas/000077500000000000000000000000001466531427000203355ustar00rootroot00000000000000glean_parser-15.0.1/glean_parser/schemas/metrics.1-0-0.schema.yaml000066400000000000000000000461561466531427000245730ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. --- $schema: http://json-schema.org/draft-07/schema# title: Metrics description: | Schema for the metrics.yaml files for Mozilla's Glean telemetry SDK. The top-level of the `metrics.yaml` file has a key defining each category of metrics. Categories must be snake_case, and they may also have dots `.` to define subcategories. $id: moz://mozilla.org/schemas/glean/metrics/1-0-0 definitions: token: type: string pattern: "^[A-Za-z_][A-Za-z0-9_\\.]*$" snake_case: type: string pattern: "^[a-z_][a-z0-9_]*$" dotted_snake_case: type: string pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$" maxLength: 40 kebab_case: type: string # Bug 1601270; we allow 3 specific existing snake_cased ping names for now, # but these special cases can be removed once the number of legacy clients # sufficiently dwindles, likely in 2020H2. pattern: "^[a-z][a-z0-9-]{0,29}$\ |^deletion_request$|^bookmarks_sync$|^history_sync$|^session_end$|^all_pings$|^glean_.*$" long_id: allOf: - $ref: "#/definitions/snake_case" - maxLength: 40 short_id: allOf: - $ref: "#/definitions/snake_case" - maxLength: 30 labeled_metric_id: type: string pattern: "^[a-z_][a-z0-9_-]{0,29}(\\.[a-z_][a-z0-9_-]{0,29})*$" maxLength: 71 # Note: this should be category + metric + 1 metric: description: | Describes a single metric. See https://mozilla.github.io/glean_parser/metrics-yaml.html type: object additionalProperties: false properties: type: title: Metric type description: | **Required.** Specifies the type of a metric, like "counter" or "event". 
This defines which operations are valid for the metric, how it is stored and how data analysis tooling displays it. The supported types are: - `event`: Record a specific event (with optional metadata). Additional properties: `extra_keys`. - `boolean`: A metric storing values of true or false. - `string`: A metric storing Unicode string values. - `string_list`: a list of Unicode strings. - `counter`: A numeric value that can only be incremented. - `quantity`: A numeric value that is set directly. - `timespan`: Represents a time interval. Additional properties: `time_unit`. - `timing_distribution`: Record the distribution of multiple timings. Additional properties: `time_unit`. - `datetime`: A date/time value. Represented as an ISO datetime in UTC. Additional properties: `time_unit`. - `uuid`: Record a UUID v4. - `jwe`: Record a [JWE](https://tools.ietf.org/html/rfc7516) value. - `memory_distribution`: A histogram for recording memory usage values. Additional properties: `memory_unit`. - `custom_distribution`: A histogram with a custom range and number of buckets. This metric type is for legacy support only and is only allowed for metrics coming from GeckoView. Additional properties: `range_min`, `range_max`, `bucket_count`, `histogram_type`. - `rate`: Used to record the rate something happens relative to some other thing. For example, the number of HTTP connections that experience an error relative to the number of total HTTP connections made. - Additionally, labeled versions of many metric types are supported. These support the `labels`_ parameter, allowing multiple instances of the metric to be stored at a given set of labels. The labeled metric types include: `labeled_boolean`, `labeled_string`, `labeled_counter`. 
type: string enum: - event - boolean - string - string_list - counter - quantity - timespan - timing_distribution - custom_distribution - memory_distribution - datetime - uuid - jwe - labeled_boolean - labeled_string - labeled_counter description: title: Description description: | **Required.** A textual description of what this metric does, what it means, and its edge cases or any other helpful information. Descriptions may contain [markdown syntax](https://www.markdownguide.org/basic-syntax/). type: string lifetime: title: Lifetime description: | Defines the lifetime of the metric. It must be one of the following values: - `ping` (default): The metric is reset each time it is sent in a ping. - `user`: The metric contains a property that is part of the user's profile and is never reset. - `application`: The metric contains a property that is related to the application, and is reset only at application restarts. enum: - ping - user - application default: ping send_in_pings: title: Send in pings description: | Which pings the metric should be sent on. If not specified, the metric is sent on the "default ping", which is the `events` ping for events, and the `metrics` ping for everything else. Most metrics don't need to specify this. (There is an additional special value of `all-pings` for internal Glean metrics only that is used to indicate that a metric may appear in any ping.) type: array items: $ref: "#/definitions/kebab_case" default: - default notification_emails: title: Notification emails description: | **Required.** A list of email addresses to notify for important events with the metric or when people with context or ownership for the metric need to be contacted. type: array minItems: 1 items: type: string format: email bugs: title: Related bugs description: | **Required.** A list of bug URLs (e.g. Bugzilla and Github) that are relevant to this metric, e.g., tracking its original implementation or later changes to it. 
Using bug numbers alone is deprecated and will be an error in the future. Each entry should be a full URL to the bug in its tracker. type: array minItems: 1 items: anyOf: - type: integer # Keep supporting integer for backward-compat - type: string format: uri data_reviews: title: Review references description: | **Required.** A list of URIs to any data collection reviews relevant to the metric. type: array items: type: string format: uri disabled: title: Disabled description: | If `true`, the metric is disabled, and any metric collection on it will be silently ignored at runtime. type: boolean default: false expires: title: Expires description: | **Required.** By default it may be one of the following values: - ``: An ISO date `yyyy-mm-dd` in UTC on which the metric expires. For example, `2019-03-13`. This date is checked at build time. Except in special cases, this form should be used so that the metric automatically "sunsets" after a period of time. - `never`: This metric never expires. - `expired`: This metric is manually expired. The default may be overriden in certain applications by the `custom_validate_expires` and `custom_is_expired` configs. type: string version: title: Metric version description: | The version of the metric. A monotonically increasing value. If not provided, defaults to 0. time_unit: title: Time unit description: | For timespans and datetimes, specifies the unit that the metric will be stored and displayed in. If not provided, it defaults to "millisecond". Time values are sent to the backend as integers, so `time_unit`_ determines the maximum resolution at which timespans are recorded. Times are always truncated, not rounded, to the nearest time unit. For example, a measurement of 25 ns will be returned as 0 ms if `time_unit` is `"millisecond"`. For timing distributions, times are always recorded and sent in nanoseconds, but `time_unit` controls the minimum and maximum values. If not provided, it defaults to "nanosecond". 
- nanosecond: 1ns <= x <= 10 minutes - microsecond: 1μs <= x <= ~6.94 days - millisecond: 1ms <= x <= ~19 years Valid when `type`_ is `timespan`, `timing_distribution` or `datetime`. enum: - nanosecond - microsecond - millisecond - second - minute - hour - day memory_unit: title: Memory unit description: | The unit that the incoming memory size values are recorded in. The units are the power-of-2 units, so "kilobyte" is correctly a "kibibyte". - kilobyte == 2^10 == 1,024 bytes - megabyte == 2^20 == 1,048,576 bytes - gigabyte == 2^30 == 1,073,741,824 bytes Values are automatically converted to and transmitted as bytes. Valid when `type`_ is `memory_distribution`. enum: - byte - kilobyte - megabyte - gigabyte labels: title: Labels description: | A list of labels for a labeled metric. If provided, the labels are enforced at run time, and recording to an unknown label is recorded to the special label `__other__`. If not provided, the labels may be anything, but using too many unique labels will put some labels in the special label `__other__`. Valid with any of the labeled metric types. anyOf: - type: array uniqueItems: true items: $ref: "#/definitions/labeled_metric_id" maxItems: 16 - type: "null" extra_keys: title: Extra keys description: | The acceptable keys on the "extra" object sent with events. This is an object mapping the key to an object containing metadata about the key. A maximum of 10 extra keys is allowed. This metadata object has the following keys: - `description`: **Required.** A description of the key. Valid when `type`_ is `event`. type: object propertyNames: $ref: "#/definitions/dotted_snake_case" additionalProperties: type: object properties: description: type: string required: - description maxProperties: 10 default: {} gecko_datapoint: title: Gecko Datapoint description: | This is a Gecko-specific property. It is the name of the Gecko metric to accumulate the data from, when using the Glean SDK in a product using GeckoView. 
See bug 1566356 for more context. type: string range_min: title: Range minimum description: | The minimum value of a custom distribution. Valid when `type`_ is `custom_distribution`. type: number default: 1 range_max: title: Range maximum description: | The maximum value of a custom distribution. Required when `type`_ is `custom_distribution`. type: number bucket_count: title: Bucket count description: | The number of buckets to include in a custom distribution. Required when `type`_ is `custom_distribution`. type: number minimum: 1 maximum: 100 histogram_type: title: Histogram type description: | The type of histogram bucketing to use: - `linear`: The buckets are linearly spaced within the range. - `exponential`: The buckets use the natural logarithmic so the smaller-valued buckets are smaller in size than the higher-valued buckets. Required when `type`_ is `custom_distribution`. enum: - linear - exponential unit: title: Unit description: | The unit of the metric, for metrics that don't already require a meaningful unit, such as `time_unit`. This is provided for informational purposes only and doesn't have any effect on data collection. type: string no_lint: title: Lint checks to skip description: | This parameter lists any lint checks to skip for this metric only. type: array items: type: string decrypted_name: title: Decrypted name description: | Name of the column where to persist the decrypted value stored in the JWE after processing. Required when `type`_ is `jwe`. type: string pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$" data_sensitivity: title: The level of data sensitivity description: | There are four data collection categories related to data sensitivity [defined here](https://wiki.mozilla.org/Firefox/Data_Collection): - **Category 1: Technical Data:** (`technical`) Information about the machine or Firefox itself. 
Examples include OS, available memory, crashes and errors, outcome of automated processes like updates, safebrowsing, activation, version \#s, and buildid. This also includes compatibility information about features and APIs used by websites, addons, and other 3rd-party software that interact with Firefox during usage. - **Category 2: Interaction Data:** (`interaction`) Information about the user’s direct engagement with Firefox. Examples include how many tabs, addons, or windows a user has open; uses of specific Firefox features; session length, scrolls and clicks; and the status of discrete user preferences. - **Category 3: Web activity data:** (`web_activity`) Information about user web browsing that could be considered sensitive. Examples include users’ specific web browsing history; general information about their web browsing history (such as TLDs or categories of webpages visited over time); and potentially certain types of interaction data about specific webpages visited. - **Category 4: Highly sensitive data:** (`highly_sensitive`) Information that directly identifies a person, or if combined with other data could identify a person. Examples include e-mail, usernames, identifiers such as google ad id, apple id, fxaccount, city or country (unless small ones are explicitly filtered out), or certain cookies. It may be embedded within specific website content, such as memory contents, dumps, captures of screen data, or DOM data. type: array items: enum: - technical - interaction - web_activity - highly_sensitive type: string minLength: 1 uniqueItems: true required: - type - bugs - description - notification_emails - data_reviews - expires type: object propertyNames: anyOf: - allOf: - $ref: "#/definitions/dotted_snake_case" - not: description: "'pings' is reserved as a category name." 
const: pings - enum: ['$schema'] properties: $schema: type: string format: url no_lint: title: Lint checks to skip globally description: | This parameter lists any lint checks to skip for this whole file. type: array items: type: string additionalProperties: type: object propertyNames: anyOf: - $ref: "#/definitions/short_id" additionalProperties: allOf: - $ref: "#/definitions/metric" - if: properties: type: const: event then: properties: lifetime: description: | Event metrics must have ping lifetime. const: ping - if: not: properties: type: enum: - timing_distribution - custom_distribution - memory_distribution - quantity - boolean - string - labeled_counter then: properties: gecko_datapoint: description: | `gecko_datapoint` is only allowed for `timing_distribution`, `custom_distribution`, `memory_distribution`, `quantity`, `boolean`, `string` and `labeled_counter`. maxLength: 0 - if: properties: type: enum: - custom_distribution then: required: - gecko_datapoint description: | `custom_distribution` is only allowed for Gecko metrics. - if: properties: type: const: custom_distribution then: required: - range_max - bucket_count - histogram_type description: | `custom_distribution` is missing required parameters `range_max`, `bucket_count` and `histogram_type`. - if: properties: type: const: memory_distribution then: required: - memory_unit description: | `memory_distribution` is missing required parameter `memory_unit`. - if: properties: type: const: quantity then: required: - unit description: | `quantity` is missing required parameter `unit`. - if: properties: type: const: jwe then: required: - decrypted_name description: | `jwe` is missing required parameter `decrypted_name`. glean_parser-15.0.1/glean_parser/schemas/metrics.2-0-0.schema.yaml000066400000000000000000000640771466531427000245760ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. --- $schema: http://json-schema.org/draft-07/schema# title: Metrics description: | Schema for the metrics.yaml files for Mozilla's Glean telemetry SDK. The top-level of the `metrics.yaml` file has a key defining each category of metrics. Categories must be snake_case, and they may also have dots `.` to define subcategories. $id: moz://mozilla.org/schemas/glean/metrics/2-0-0 definitions: token: type: string pattern: "^[A-Za-z_][A-Za-z0-9_\\.]*$" snake_case: type: string pattern: "^[a-z_][a-z0-9_]*$" dotted_snake_case: type: string pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$" maxLength: 40 event_extra_key: type: string pattern: "^[ -~]+$" maxLength: 40 # Prior to version 2.0.0 of the schema, special ping names with underscores # were also supported. kebab_case: type: string pattern: "^[a-z][a-z0-9-]{0,29}$" long_id: allOf: - $ref: "#/definitions/snake_case" - maxLength: 40 short_id: allOf: - $ref: "#/definitions/snake_case" - maxLength: 70 labeled_metric_id: type: string pattern: "^[ -~]+$" maxLength: 71 # Note: this should be category + metric + 1 metric: description: | Describes a single metric. See https://mozilla.github.io/glean_parser/metrics-yaml.html type: object additionalProperties: false properties: type: title: Metric type description: | **Required.** Specifies the type of a metric, like "counter" or "event". This defines which operations are valid for the metric, how it is stored and how data analysis tooling displays it. The supported types are: - `event`: Record a specific event (with optional metadata). Additional properties: `extra_keys`. - `boolean`: A metric storing values of true or false. - `string`: A metric storing Unicode string values. - `string_list`: a list of Unicode strings. - `counter`: A numeric value that can only be incremented. - `quantity`: A numeric value that is set directly. 
- `timespan`: Represents a time interval. Additional properties: `time_unit`. - `timing_distribution`: Record the distribution of multiple timings. Additional properties: `time_unit`. - `datetime`: A date/time value. Represented as an ISO datetime in UTC. Additional properties: `time_unit`. - `uuid`: Record a UUID v4. - `url`: Record a valid URL string. - `memory_distribution`: A histogram for recording memory usage values. Additional properties: `memory_unit`. - `custom_distribution`: A histogram with a custom range and number of buckets. This metric type is for legacy support only and is only allowed for metrics coming from GeckoView. Additional properties: `range_min`, `range_max`, `bucket_count`, `histogram_type`. - `rate`: Used to record the rate something happens relative to some other thing. For example, the number of HTTP connections that experience an error relative to the number of total HTTP connections made. - Additionally, labeled versions of many metric types are supported. These support the `labels`_ parameter, allowing multiple instances of the metric to be stored at a given set of labels. The labeled metric types include: `labeled_boolean`, `labeled_string`, `labeled_counter`, `labeled_custom_distribution`, `labeled_memory_distribution`, `labeled_timing_distribution`. - `text`: Record long text data. - `object`: Record structured data based on a pre-defined schema Additional properties: `structure`. type: string enum: - event - boolean - string - string_list - counter - quantity - timespan - timing_distribution - custom_distribution - memory_distribution - datetime - uuid - url - jwe - labeled_boolean - labeled_string - labeled_counter - labeled_custom_distribution - labeled_memory_distribution - labeled_timing_distribution - rate - text - object description: title: Description description: | **Required.** A textual description of what this metric does, what it means, and its edge cases or any other helpful information. 
Descriptions may contain [markdown syntax](https://www.markdownguide.org/basic-syntax/). type: string metadata: title: Metadata description: | Additional metadata about this metric. Currently limited to a list of tags. type: object properties: tags: title: Tags description: Which tags are specified for this metric. type: array items: type: string maxLength: 80 default: {} lifetime: title: Lifetime description: | Defines the lifetime of the metric. It must be one of the following values: - `ping` (default): The metric is reset each time it is sent in a ping. - `user`: The metric contains a property that is part of the user's profile and is never reset. - `application`: The metric contains a property that is related to the application, and is reset only at application restarts. enum: - ping - user - application default: ping send_in_pings: title: Send in pings description: | Which pings the metric should be sent on. If not specified, the metric is sent on the "default ping", which is the `events` ping for events, and the `metrics` ping for everything else. Most metrics don't need to specify this. (There is an additional special value of `all-pings` for internal Glean metrics only that is used to indicate that a metric may appear in any ping.) type: array items: anyOf: - $ref: "#/definitions/kebab_case" # Allow "special" ping names that start with "glean_" used # internally by the Glean SDK - type: string pattern: "^glean_.*$" default: - default notification_emails: title: Notification emails description: | **Required.** A list of email addresses to notify for important events with the metric or when people with context or ownership for the metric need to be contacted. type: array minItems: 1 items: type: string format: email bugs: title: Related bugs description: | **Required.** A list of bug URLs (e.g. Bugzilla and Github) that are relevant to this metric, e.g., tracking its original implementation or later changes to it. 
Prior to version 2.0.0 of the schema, bugs could also be integers. type: array minItems: 1 items: type: string format: uri data_reviews: title: Review references description: | **Required.** A list of URIs to any data collection reviews relevant to the metric. type: array items: type: string format: uri disabled: title: Disabled description: | If `true`, the metric is disabled, and any metric collection on it will be silently ignored at runtime. type: boolean default: false expires: title: Expires description: | **Required.** By default it may be one of the following values: - ``: An ISO date `yyyy-mm-dd` in UTC on which the metric expires. For example, `2019-03-13`. This date is checked at build time. Except in special cases, this form should be used so that the metric automatically "sunsets" after a period of time. - ``: An integer greater than 0 representing the major version the metric expires in. For example, `11`. The version is checked at build time against the major provided to the glean_parser and is only valid if a major version is provided at built time. If no major version is provided at build time and expiration by major version is used for a metric, an error is raised. Note that mixing expiration by date and version is not allowed within a product. - `never`: This metric never expires. - `expired`: This metric is manually expired. The default may be overriden in certain applications by the `custom_validate_expires` and `custom_is_expired` configs. oneOf: - type: string - type: integer minimum: 1 version: title: Metric version description: | The version of the metric. A monotonically increasing value. If not provided, defaults to 0. time_unit: title: Time unit description: | For timespans and datetimes, specifies the unit that the metric will be stored and displayed in. If not provided, it defaults to "millisecond". Time values are sent to the backend as integers, so `time_unit`_ determines the maximum resolution at which timespans are recorded. 
Times are always truncated, not rounded, to the nearest time unit. For example, a measurement of 25 ns will be returned as 0 ms if `time_unit` is `"millisecond"`. For timing distributions, times are always recorded and sent in nanoseconds, but `time_unit` controls the minimum and maximum values. If not provided, it defaults to "nanosecond". - nanosecond: 1ns <= x <= 10 minutes - microsecond: 1μs <= x <= ~6.94 days - millisecond: 1ms <= x <= ~19 years Valid when `type`_ is `timespan`, `timing_distribution` or `datetime`. enum: - nanosecond - microsecond - millisecond - second - minute - hour - day memory_unit: title: Memory unit description: | The unit that the incoming memory size values are recorded in. The units are the power-of-2 units, so "kilobyte" is correctly a "kibibyte". - kilobyte == 2^10 == 1,024 bytes - megabyte == 2^20 == 1,048,576 bytes - gigabyte == 2^30 == 1,073,741,824 bytes Values are automatically converted to and transmitted as bytes. Valid when `type`_ is `memory_distribution`. enum: - byte - kilobyte - megabyte - gigabyte labels: title: Labels description: | A list of labels for a labeled metric. If provided, the labels are enforced at run time, and recording to an unknown label is recorded to the special label `__other__`. If not provided, the labels may be anything, but using too many unique labels will put some labels in the special label `__other__`. Valid with any of the labeled metric types. anyOf: - type: array uniqueItems: true items: $ref: "#/definitions/labeled_metric_id" maxItems: 4096 - type: "null" extra_keys: title: Extra keys description: | The acceptable keys on the "extra" object sent with events. This is an object mapping the key to an object containing metadata about the key. A maximum of 50 extra keys is allowed. This metadata object has the following keys: - `description`: **Required.** A description of the key. Valid when `type`_ is `event`. 
type: object propertyNames: $ref: "#/definitions/event_extra_key" additionalProperties: type: object properties: description: type: string type: type: string enum: - string - boolean - quantity required: - description maxProperties: 50 default: {} gecko_datapoint: title: Gecko Datapoint description: | This is a Gecko-specific property. It is the name of the Gecko metric to accumulate the data from, when using the Glean SDK in a product using GeckoView. See bug 1566356 for more context. type: string range_min: title: Range minimum description: | The minimum value of a custom distribution. Valid when `type`_ is `custom_distribution`. type: number default: 1 range_max: title: Range maximum description: | The maximum value of a custom distribution. Required when `type`_ is `custom_distribution`. type: number bucket_count: title: Bucket count description: | The number of buckets to include in a custom distribution. Required when `type`_ is `custom_distribution`. type: number minimum: 1 maximum: 100 histogram_type: title: Histogram type description: | The type of histogram bucketing to use: - `linear`: The buckets are linearly spaced within the range. - `exponential`: The buckets use the natural logarithmic so the smaller-valued buckets are smaller in size than the higher-valued buckets. Required when `type`_ is `custom_distribution`. enum: - linear - exponential unit: title: Unit description: | The unit of the metric. This is only required for metrics that don't already require a meaningful unit, e.g. `quantity` This is provided for informational purposes only and doesn't have any effect on data collection. Metric types like `timespan`, `datetime` and `timing_distribution` take a `time_unit` instead. type: string no_lint: title: Lint checks to skip description: | This parameter lists any lint checks to skip for this metric only. 
type: array items: type: string data_sensitivity: title: The level of data sensitivity description: | There are four data collection categories related to data sensitivity [defined here](https://wiki.mozilla.org/Firefox/Data_Collection): - **Category 1: Technical Data:** (`technical`) Information about the machine or Firefox itself. Examples include OS, available memory, crashes and errors, outcome of automated processes like updates, safebrowsing, activation, version \#s, and buildid. This also includes compatibility information about features and APIs used by websites, addons, and other 3rd-party software that interact with Firefox during usage. - **Category 2: Interaction Data:** (`interaction`) Information about the user’s direct engagement with Firefox. Examples include how many tabs, addons, or windows a user has open; uses of specific Firefox features; session length, scrolls and clicks; and the status of discrete user preferences. It also includes information about the user's in-product journeys and product choices helpful to understand engagement (attitudes). For example, selections of add-ons or tiles to determine potential interest categories etc. - **Category 3: Stored Content & Communications:** (`stored_content`, formerly Web activity data, `web_activity`) Information about what people store, sync, communicate or connect to where the information is generally considered to be more sensitive and personal in nature. Examples include users' saved URLs or URL history, specific web browsing history, general information about their web browsing history (such as TLDs or categories of webpages visited over time) and potentially certain types of interaction data about specific web pages or stories visited (such as highlighted portions of a story). 
It also includes information such as content saved by users to an individual account like saved URLs, tags, notes, passwords and files as well as communications that users have with one another through a Mozilla service. - **Category 4: Highly sensitive data or clearly identifiable personal data:** (`highly_sensitive`) Information that directly identifies a person, or if combined with other data could identify a person. This data may be embedded within specific website content, such as memory contents, dumps, captures of screen data, or DOM data. Examples include account registration data like name, password, and email address associated with an account, payment data in connection with subscriptions or donations, contact information such as phone numbers or mailing addresses, email addresses associated with surveys, promotions and customer support contacts. It also includes any data from different categories that, when combined, can identify a person, device, household or account. For example Category 1 log data combined with Category 3 saved URLs. Additional examples are: voice audio commands (including a voice audio file), speech-to-text or text-to-speech (including transcripts), biometric data, demographic information, and precise location data associated with a persistent identifier, individual or small population cohorts. This is location inferred or determined from mechanisms other than IP such as wi-fi access points, Bluetooth beacons, cell phone towers or provided directly to us, such as in a survey or a profile. type: array items: enum: - technical - interaction - stored_content - web_activity - highly_sensitive type: string minLength: 1 uniqueItems: true telemetry_mirror: title: Which probe in Telemetry to mirror this metric's value to. description: | The C++ enum form of the Scalar, Event, or Histogram to which we should mirror values. Use is limited to Firefox Desktop only. Has no effect when used with non-FOG outputters. 
See FOG's documentation on mirroring for details - https://firefox-source-docs.mozilla.org/toolkit/components/glean/mirroring.html type: string minLength: 6 denominator_metric: title: The name of the denominator for this `rate` metric. description: | Denominators for `rate` metrics may be private and internal or shared and external. External denominators are `counter` metrics. This field names the `counter` metric that serves as this `rate` metric's external denominator. The named denominator must be defined in this component so glean_parser can find it. type: string structure: title: A subset of a JSON schema definition description: | The expected structure of data, defined in a strict subset of YAML-dialect JSON Schema (Draft 7) supporting keys "type" (only values "object", "array", "number", "string", and "boolean"), "properties", and "items". type: object required: - type - bugs - description - notification_emails - data_reviews - expires type: object propertyNames: anyOf: - allOf: - $ref: "#/definitions/dotted_snake_case" - not: description: "'pings' is reserved as a category name." const: pings - not: description: "'tags' is reserved as a category name." const: tags - enum: ['$schema', '$tags'] properties: $schema: type: string format: url no_lint: title: Lint checks to skip globally description: | This parameter lists any lint checks to skip for this whole file. type: array items: type: string $tags: title: Tags that apply to the whole file description: | This denotes the list of tags that apply to all metrics in this file. type: array items: type: string additionalProperties: type: object propertyNames: anyOf: - $ref: "#/definitions/short_id" additionalProperties: allOf: - $ref: "#/definitions/metric" - if: properties: type: const: event then: properties: lifetime: description: | Event metrics must have ping lifetime. 
const: ping - if: not: properties: type: enum: - timing_distribution - custom_distribution - memory_distribution - quantity - boolean - string - labeled_counter then: properties: gecko_datapoint: description: | `gecko_datapoint` is only allowed for `timing_distribution`, `custom_distribution`, `memory_distribution`, `quantity`, `boolean`, `string` and `labeled_counter`. maxLength: 0 - if: properties: type: const: custom_distribution then: required: - range_max - bucket_count - histogram_type description: | `custom_distribution` is missing required parameters `range_max`, `bucket_count` and `histogram_type`. - if: properties: type: const: memory_distribution then: required: - memory_unit description: | `memory_distribution` is missing required parameter `memory_unit`. - if: properties: type: const: quantity then: required: - unit description: | `quantity` is missing required parameter `unit`. - if: properties: type: const: jwe then: required: - jwe_support_was_removed description: | JWE support was removed. If you require this send an email to glean-team@mozilla.com. - if: not: properties: type: const: rate then: properties: denominator_metric: description: | `denominator_metric` is only allowed for `rate`. maxLength: 0 - if: properties: type: const: text then: properties: lifetime: description: > Text metrics must have ping or application lifetime. enum: - ping - application data_sensitivity: description: > Text metrics require Category 3 (`stored_content` / `web_activity`) or Category 4 (`highly_sensitive`). type: array items: enum: - stored_content - web_activity - highly_sensitive send_in_pings: description: | Text metrics can only be sent in custom pings. Built-in pings are not allowed. type: array items: allOf: - $ref: "#/definitions/kebab_case" - not: description: > Text metrics can only be sent in custom pings. Built-in pings are not allowed." 
pattern: "^(metrics|baseline|events|deletion-request|default|glean_.*)$" - if: # This is a schema check: # This is true when the checked YAML passes the schema validation. # # If it has a datetime/timing_distribution/timespan type # AND has a `unit` property, then... properties: type: enum: - datetime - timing_distribution - timespan required: - unit # ... then `time_unit` is required, # because that's the only way we can force this to fail. then: required: - time_unit description: | This metric type uses the (optional) `time_unit` parameter, not `unit`. glean_parser-15.0.1/glean_parser/schemas/pings.1-0-0.schema.yaml000066400000000000000000000103331466531427000242310ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. --- $schema: http://json-schema.org/draft-07/schema# title: Pings description: | Schema for the pings.yaml files for Mozilla's Glean telemetry SDK. The top-level of the `pings.yaml` file has a key defining the name of each ping. The values contain metadata about that ping. Ping names must be kebab-case per https://docs.telemetry.mozilla.org/cookbooks/new_ping.html $id: moz://mozilla.org/schemas/glean/pings/1-0-0 definitions: dotted_snake_case: type: string pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$" maxLength: 40 kebab_case: type: string # Bug 1601270; we allow 3 specific existing snake_cased ping names for now, # but these special cases can be removed once the number of legacy clients # sufficiently dwindles, likely in 2020H2. 
pattern: "^[a-z][a-z0-9-]{0,29}$\ |^deletion_request$|^bookmarks_sync$|^history_sync$|^session_end$|^all_pings$|^glean_.*$" type: object propertyNames: allOf: - anyOf: - $ref: "#/definitions/kebab_case" - enum: ['$schema', 'no_lint'] - not: enum: ['all-pings'] properties: $schema: type: string format: url no_lint: title: Lint checks to skip globally description: | This parameter lists any lint checks to skip for this whole file. type: array items: type: string additionalProperties: type: object properties: description: title: Description description: | **Required.** A textual description of the purpose of this ping and what it contains. Descriptions may contain [markdown syntax](https://www.markdownguide.org/basic-syntax/). type: string include_client_id: title: Include client id description: | **Required.** When `true`, include the `client_id` value in the ping. type: boolean send_if_empty: title: Send if empty description: | When `false` a ping is sent only if it contains data (the default). When `true` a ping is sent even if it contains no data. type: boolean notification_emails: title: Notification emails description: | **Required.** A list of email addresses to notify for important events with the ping or when people with context or ownership for the ping need to be contacted. type: array minItems: 1 items: type: string format: email bugs: title: Related bugs description: | **Required.** A list of bugs (e.g. Bugzilla and Github) that are relevant to this ping, e.g., tracking its original implementation or later changes to it. If a number, it is an ID to an issue in the default tracker (e.g. Mozilla's Bugzilla instance). If a string, it must be a URI to a bug page in a tracker. type: array minItems: 1 items: anyOf: - type: integer # Keep supporting integer for backward-compat - type: string format: uri data_reviews: title: Review references description: | **Required.** A list of URIs to any data collection reviews relevant to the ping. 
type: array items: type: string format: uri reasons: title: The reasons this ping can be sent. description: | A list of reasons that the ping might be triggered. Sent in the ping's `ping_info.reason` field. Specified as a mapping from reason codes (which are short strings), to a textual description of the reason. type: object propertyNames: type: string maxLength: 30 additionalProperties: type: string no_lint: title: Lint checks to skip description: | This parameter lists any lint checks to skip for this metric only. type: array items: type: string required: - description - include_client_id - bugs - notification_emails - data_reviews additionalProperties: false glean_parser-15.0.1/glean_parser/schemas/pings.2-0-0.schema.yaml000066400000000000000000000140051466531427000242320ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. --- $schema: http://json-schema.org/draft-07/schema# title: Pings description: | Schema for the pings.yaml files for Mozilla's Glean telemetry SDK. The top-level of the `pings.yaml` file has a key defining the name of each ping. The values contain metadata about that ping. Ping names must be kebab-case per https://docs.telemetry.mozilla.org/cookbooks/new_ping.html $id: moz://mozilla.org/schemas/glean/pings/2-0-0 definitions: dotted_snake_case: type: string pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$" maxLength: 40 # Prior to version 2.0.0 of the schema, special ping names with underscores # were also supported. 
kebab_case: type: string pattern: "^[a-z][a-z0-9-]{0,29}$" type: object propertyNames: allOf: - anyOf: - $ref: "#/definitions/kebab_case" - enum: ['$schema', 'no_lint'] - not: enum: ['all-pings'] properties: $schema: type: string format: url no_lint: title: Lint checks to skip globally description: | This parameter lists any lint checks to skip for this whole file. type: array items: type: string additionalProperties: type: object properties: description: title: Description description: | **Required.** A textual description of the purpose of this ping and what it contains. Descriptions may contain [markdown syntax](https://www.markdownguide.org/basic-syntax/). type: string metadata: title: Metadata description: | Additional metadata about this ping. Currently limited to a list of tags. type: object properties: tags: title: Tags description: Which tags are specified for this ping. type: array items: type: string maxLength: 80 precise_timestamps: title: Precise Timestamps description: | When `true` Glean uses millisecond-precise timestamps for the ping's start/end time (the default). When `false` Glean uses minute-precise timestamps for the ping's start/end time. type: boolean include_info_sections: title: Include Info Sections description: | When `true`, assemble and include the `client_info` and `ping_info` sections in the ping on submission. When `false`, omit the `client_info` and `ping_info` sections when assembling the ping on submission. Defaults to `true`. Interaction with `include_client_id`: `include_client_id` only takes effect when `metadata.include_info_sections` is `true`. type: boolean ping_schedule: title: Ping Schedule description: | An optional array of ping names. When one of the listed pings is sent, then this ping will also be sent. A ping cannot list its own name in `ping_schedule`. 
type: array items: type: string maxLength: 30 default: {} include_client_id: title: Include client id description: | **Required.** When `true`, include the `client_id` value in the ping. Interaction with `metadata.include_info_sections`: `include_client_id` only takes effect when `metadata.include_info_sections` is `true`. type: boolean send_if_empty: title: Send if empty description: | When `false` a ping is sent only if it contains data (the default). When `true` a ping is sent even if it contains no data. type: boolean notification_emails: title: Notification emails description: | **Required.** A list of email addresses to notify for important events with the ping or when people with context or ownership for the ping need to be contacted. type: array minItems: 1 items: type: string format: email bugs: title: Related bugs description: | **Required.** A list of bugs (e.g. Bugzilla and Github) that are relevant to this ping, e.g., tracking its original implementation or later changes to it. It must be a URI to a bug page in a tracker. Prior to version 2.0.0 of the schema, bugs could also be integers. type: array minItems: 1 items: type: string format: uri data_reviews: title: Review references description: | **Required.** A list of URIs to any data collection reviews relevant to the ping. type: array items: type: string format: uri reasons: title: The reasons this ping can be sent. description: | A list of reasons that the ping might be triggered. Sent in the ping's `ping_info.reason` field. Specified as a mapping from reason codes (which are short strings), to a textual description of the reason. type: object propertyNames: type: string maxLength: 30 additionalProperties: type: string enabled: title: Whether or not this ping is enabled description: | **Optional.** When `true`, the ping will be sent as usual. When `false`, the ping will not be sent, but the data will continue to be collected but will not be cleared when the ping is submitted. 
Defaults to `true` if omitted. type: boolean no_lint: title: Lint checks to skip description: | This parameter lists any lint checks to skip for this metric only. type: array items: type: string required: - description - include_client_id - bugs - notification_emails - data_reviews additionalProperties: false glean_parser-15.0.1/glean_parser/schemas/tags.1-0-0.schema.yaml000066400000000000000000000023171466531427000240520ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. --- $schema: http://json-schema.org/draft-07/schema# title: Tags description: | Schema for the tags.yaml files for Mozilla's Glean telemetry SDK. The top-level of the `tags.yaml` file has a key defining the name of each tag. The values contain metadata about that tag (currently just a description). $id: moz://mozilla.org/schemas/glean/tags/1-0-0 type: object propertyNames: type: string maxLength: 80 properties: $schema: type: string format: url no_lint: title: Lint checks to skip globally description: | This parameter lists any lint checks to skip for this whole file. type: array items: type: string additionalProperties: type: object properties: description: title: Description description: | **Required.** A textual description of this tag. Descriptions may contain [markdown syntax](https://www.markdownguide.org/basic-syntax/). type: string required: - description additionalProperties: false glean_parser-15.0.1/glean_parser/swift.py000066400000000000000000000213751466531427000204300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Outputter to generate Swift code for metrics. 
""" import enum import json from pathlib import Path from typing import Any, Dict, Optional, Union from . import __version__ from . import metrics from . import pings from . import tags from . import util # An (imcomplete) list of reserved keywords in Swift. # These will be replaced in generated code by their escaped form. SWIFT_RESERVED_NAMES = ["internal", "typealias"] def swift_datatypes_filter(value: util.JSONType) -> str: """ A Jinja2 filter that renders Swift literals. Based on Python's JSONEncoder, but overrides: - dicts to use `[key: value]` - sets to use `[...]` - enums to use the like-named Swift enum - Rate objects to a CommonMetricData initializer (for external Denominators' Numerators lists) """ class SwiftEncoder(json.JSONEncoder): def iterencode(self, value): if isinstance(value, dict): yield "[" first = True for key, subvalue in value.items(): if not first: yield ", " yield from self.iterencode(key) yield ": " yield from self.iterencode(subvalue) first = False yield "]" elif isinstance(value, enum.Enum): yield ("." + util.camelize(value.name)) elif isinstance(value, list): yield "[" first = True for subvalue in value: if not first: yield ", " yield from self.iterencode(subvalue) first = False yield "]" elif isinstance(value, set): yield "[" first = True for subvalue in sorted(list(value)): if not first: yield ", " yield from self.iterencode(subvalue) first = False yield "]" elif value is None: yield "nil" elif isinstance(value, metrics.Rate): yield "CommonMetricData(" first = True for arg_name in util.common_metric_args: if hasattr(value, arg_name): if not first: yield ", " yield f"{util.camelize(arg_name)}: " yield from self.iterencode(getattr(value, arg_name)) first = False yield ")" else: yield from super().iterencode(value) return "".join(SwiftEncoder().iterencode(value)) def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str: """ Returns the Swift type to use for a given metric or ping object. 
""" generate_enums = getattr(obj, "_generate_enums", []) if len(generate_enums): generic = None for member, suffix in generate_enums: if len(getattr(obj, member)): generic = util.Camelize(obj.name) + suffix else: if isinstance(obj, metrics.Event): generic = "NoExtras" else: generic = "No" + suffix return "{}<{}>".format(class_name(obj.type), generic) generate_structure = getattr(obj, "_generate_structure", []) if len(generate_structure): generic = util.Camelize(obj.name) + "Object" return "{}<{}>".format(class_name(obj.type), generic) return class_name(obj.type) def extra_type_name(typ: str) -> str: """ Returns the corresponding Swift type for event's extra key types. """ if typ == "boolean": return "Bool" elif typ == "string": return "String" elif typ == "quantity": return "Int32" else: return "UNSUPPORTED" def structure_type_name(typ: str) -> str: """ Returns the corresponding Swift type for structure items. """ if typ == "boolean": return "Bool" elif typ == "string": return "String" elif typ == "number": return "Int64" else: return "UNSUPPORTED" def class_name(obj_type: str) -> str: """ Returns the Swift class name for a given metric or ping type. """ if obj_type == "ping": return "Ping" if obj_type.startswith("labeled_"): obj_type = obj_type[8:] return util.Camelize(obj_type) + "MetricType" def variable_name(var: str) -> str: """ Returns a valid Swift variable name, escaping keywords if necessary. """ if var in SWIFT_RESERVED_NAMES: return "`" + var + "`" else: return var class BuildInfo: def __init__(self, build_date): self.build_date = build_date def generate_build_date(date: Optional[str]) -> str: """ Generate the build timestamp. """ ts = util.build_date(date) data = [ ("year", ts.year), ("month", ts.month), ("day", ts.day), ("hour", ts.hour), ("minute", ts.minute), ("second", ts.second), ] # The internal DatetimeMetricType API can take a `DateComponents` object, # which lets us easily specify the timezone. 
components = ", ".join([f"{name}: {val}" for (name, val) in data]) return f'DateComponents(calendar: Calendar.current, timeZone: TimeZone(abbreviation: "UTC"), {components})' # noqa class Category: """ Data struct holding information about a metric to be used in the template. """ name: str objs: Dict[str, Union[metrics.Metric, pings.Ping, tags.Tag]] contains_pings: bool def output_swift( objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None ) -> None: """ Given a tree of objects, output Swift code to `output_dir`. :param objects: A tree of objects (metrics and pings) as returned from `parser.parse_objects`. :param output_dir: Path to an output directory to write to. :param options: options dictionary, with the following optional keys: - namespace: The namespace to generate metrics in - glean_namespace: The namespace to import Glean from - allow_reserved: When True, this is a Glean-internal build - with_buildinfo: If "true" the `GleanBuildInfo` is generated. Otherwise generation of that file is skipped. Defaults to "true". - build_date: If set to `0` a static unix epoch time will be used. If set to a ISO8601 datetime string (e.g. `2022-01-03T17:30:00`) it will use that date. Other values will throw an error. If not set it will use the current date & time. 
""" if options is None: options = {} template = util.get_jinja2_template( "swift.jinja2", filters=( ("swift", swift_datatypes_filter), ("type_name", type_name), ("class_name", class_name), ("variable_name", variable_name), ("extra_type_name", extra_type_name), ("structure_type_name", structure_type_name), ), ) namespace = options.get("namespace", "GleanMetrics") glean_namespace = options.get("glean_namespace", "Glean") with_buildinfo = options.get("with_buildinfo", "true").lower() == "true" build_date = options.get("build_date", None) build_info = None if with_buildinfo: build_date = generate_build_date(build_date) build_info = BuildInfo(build_date=build_date) filename = "Metrics.swift" filepath = output_dir / filename categories = [] for category_key, category_val in objs.items(): contains_pings = any( isinstance(obj, pings.Ping) for obj in category_val.values() ) cat = Category() cat.name = category_key cat.objs = category_val cat.contains_pings = contains_pings categories.append(cat) with filepath.open("w", encoding="utf-8") as fd: fd.write( template.render( parser_version=__version__, categories=categories, common_metric_args=util.common_metric_args, extra_metric_args=util.extra_metric_args, namespace=namespace, glean_namespace=glean_namespace, allow_reserved=options.get("allow_reserved", False), build_info=build_info, ) ) # Jinja2 squashes the final newline, so we explicitly add it fd.write("\n") glean_parser-15.0.1/glean_parser/tags.py000066400000000000000000000025571466531427000202330ustar00rootroot00000000000000from typing import Dict, List, Optional from . import util class Tag: def __init__( self, name: str, description: str, defined_in: Optional[Dict] = None, no_lint: Optional[List[str]] = None, _validated: bool = False, ): # Avoid cyclical import from . 
import parser self.name = name self.description = description self.defined_in = defined_in if no_lint is None: no_lint = [] self.no_lint = no_lint # _validated indicates whether this tag has already been jsonschema # validated (but not any of the Python-level validation). if not _validated: data: Dict[str, util.JSONType] = { "$schema": parser.TAGS_ID, self.name: self._serialize_input(), } for error in parser.validate(data): raise ValueError(error) @property def type(self) -> str: return "tag" def _serialize_input(self) -> Dict[str, util.JSONType]: d = self.serialize() modified_dict = util.remove_output_params(d, "defined_in") return modified_dict def serialize(self) -> Dict[str, util.JSONType]: """ Serialize the tag back to JSON object model. """ d = self.__dict__.copy() del d["name"] return d glean_parser-15.0.1/glean_parser/templates/000077500000000000000000000000001466531427000207105ustar00rootroot00000000000000glean_parser-15.0.1/glean_parser/templates/data_review.jinja2000066400000000000000000000063271466531427000243110ustar00rootroot00000000000000!! Reminder: it is your responsibility to complete and check the correctness of !! this automatically-generated request skeleton before requesting Data !! Collection Review. See https://wiki.mozilla.org/Data_Collection for details. {# Data Review Request Template pulled from https://github.com/mozilla/data-review/blob/main/request.md #} DATA REVIEW REQUEST 1. What questions will you answer with this data? {{ "TODO: Fill this in." if not questions }} 2. Why does Mozilla need to answer these questions? Are there benefits for users? Do we need this information to address product or business requirements? {{ "TODO: Fill this in." if not why }} 3. What alternative methods did you consider to answer these questions? Why were they not sufficient? {{ "TODO: Fill this in." if not methods }} 4. Can current instrumentation answer these questions? {{ "TODO: Fill this in." if not current_instrumentation_answers }} 5. 
List all proposed measurements and indicate the category of data collection for each measurement, using the Firefox data collection categories found on the Mozilla wiki. Measurement Name | Measurement Description | Data Collection Category | Tracking Bug ---------------- | ----------------------- | ------------------------ | ------------ {% for metric in metrics %} {% if metric.type == "event" and metric.allowed_extra_keys %} {% for extra_name, extra_detail in metric.extra_keys.items() %} `{{ metric.category|snake_case }}.{{ metric.name|snake_case }}#{{ extra_name }} | {{ extra_detail["description"]|replace("\n", " ") }} | {{ metric.data_sensitivity|join(", ", attribute="name") }} | {{ metric.bugs|last }} {% endfor %} {% else %} `{{ metric.category|snake_case }}.{{ metric.name|snake_case }}` | {{ metric.description|replace("\n", " ") }} | {{ metric.data_sensitivity|join(", ", attribute="name") }} | {{ metric.bugs|last }} {% endif %} {% endfor %} 6. Please provide a link to the documentation for this data collection which describes the ultimate data set in a public, complete, and accurate way. This collection is Glean so is documented [in the Glean Dictionary](https://dictionary.telemetry.mozilla.org). 7. How long will this data be collected? {% if durations|length == 1 %} {% for duration in durations %} {% if duration == "never" %} This collection will be collected permanently. {% else %} This collection has expiry '{{duration}}'. {% endif %} {% endfor %} {% else %} Parts of this collection expire at different times: {{ durations|join(", ") }}. {% endif %} {% if "never" in durations %} {{ responsible_emails|join(", ") }} will be responsible for the permanent collections. {% endif %} 8. What populations will you measure? All channels, countries, and locales. No filters. 9. If this data collection is default on, what is the opt-out mechanism for users? These collections are Glean. The opt-out can be found in the product's preferences. 10. 
Please provide a general description of how you will analyze this data. {{ "TODO: Fill this in." if not analysis_how }} 11. Where do you intend to share the results of your analysis? {{ "TODO: Fill this in." if not analysis_where }} 12. Is there a third-party tool (i.e. not Glean or Telemetry) that you are proposing to use for this data collection? No. glean_parser-15.0.1/glean_parser/templates/go_server.jinja2000066400000000000000000000155771466531427000240210ustar00rootroot00000000000000{# The final Go code is autogenerated, but this template is not. Please file bugs! #} package glean // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. // required imports import ( "encoding/json" "fmt" "strconv" "time" "github.com/google/uuid" ) // log type string used to identify logs to process in the Moz Data Pipeline var gleanEventMozlogType string = "glean-server-event" type GleanEventsLogger struct { AppID string // Application Id to identify application per Glean standards AppDisplayVersion string // Version of application emitting the event AppChannel string // Channel to differentiate logs from prod/beta/staging/devel } // exported type for public method parameters type RequestInfo struct { UserAgent string IpAddress string } // default empty values will be omitted in json from ping struct definition var defaultRequestInfo = RequestInfo{ UserAgent: "", IpAddress: "", } // structs to construct the glean ping type clientInfo struct { TelemetrySDKBuild string `json:"telemetry_sdk_build"` FirstRunDate string `json:"first_run_date"` OS string `json:"os"` OSVersion string `json:"os_version"` Architecture string `json:"architecture"` AppBuild string `json:"app_build"` AppDisplayVersion string `json:"app_display_version"` AppChannel string `json:"app_channel"` } 
type pingInfo struct { Seq int `json:"seq"` StartTime string `json:"start_time"` EndTime string `json:"end_time"` } type ping struct { DocumentNamespace string `json:"document_namespace"` DocumentType string `json:"document_type"` DocumentVersion string `json:"document_version"` DocumentID string `json:"document_id"` UserAgent string `json:"user_agent,omitempty"` IpAddress string `json:"ip_address,omitempty"` Payload string `json:"payload"` } type metrics map[string]map[string]interface{} type pingPayload struct { ClientInfo clientInfo `json:"client_info"` PingInfo pingInfo `json:"ping_info"` Metrics metrics `json:"metrics"` Events []gleanEvent `json:"events"` } type gleanEvent struct { Category string `json:"category"` Name string `json:"name"` Timestamp int64 `json:"timestamp"` Extra map[string]string `json:"extra"` } type logEnvelope struct { Timestamp string Logger string Type string Fields ping } func (g GleanEventsLogger) createClientInfo() clientInfo { // Fields with default values are required in the Glean schema, but not used in server context return clientInfo{ TelemetrySDKBuild: "glean_parser v{{ parser_version }}", FirstRunDate: "Unknown", OS: "Unknown", OSVersion: "Unknown", Architecture: "Unknown", AppBuild: "Unknown", AppDisplayVersion: g.AppDisplayVersion, AppChannel: g.AppChannel, } } func createPingInfo() pingInfo { {# times are ISO-8601 strings, e.g. 
"2023-12-19T22:09:17.440Z" #} var now = time.Now().UTC().Format("2006-01-02T15:04:05.000Z") return pingInfo{ Seq: 0, StartTime: now, EndTime: now, } } func (g GleanEventsLogger) createPing(documentType string, config RequestInfo, payload pingPayload) ping { var payloadJson, payloadErr = json.Marshal(payload) if payloadErr != nil { panic("Unable to marshal payload to json") } var documentId = uuid.New() return ping{ DocumentNamespace: g.AppID, DocumentType: documentType, DocumentVersion: "1", DocumentID: documentId.String(), UserAgent: config.UserAgent, IpAddress: config.IpAddress, Payload: string(payloadJson), } } // method called by each event method. // construct the ping, wrap it in the envelope, and print to stdout func (g GleanEventsLogger) record( documentType string, requestInfo RequestInfo, metrics metrics, events []gleanEvent, ) { var telemetryPayload = pingPayload{ ClientInfo: g.createClientInfo(), PingInfo: createPingInfo(), Metrics: metrics, Events: events, } var ping = g.createPing(documentType, requestInfo, telemetryPayload) var envelope = logEnvelope{ Timestamp: strconv.FormatInt(time.Now().UnixNano(), 10), Logger: "glean", Type: gleanEventMozlogType, Fields: ping, } var envelopeJson, envelopeErr = json.Marshal(envelope) if envelopeErr != nil { panic("Unable to marshal log envelope to json") } fmt.Println(string(envelopeJson)) } {% for event in events_ping["event"] %} type {{ event|event_type_name }} struct { {% for metric_type, metrics in events_ping.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }} {{ metric.type|go_metric_type }} // {{ metric.description|clean_string }} {% endfor %} {% endif %} {% endfor %} {% for extra, metadata in event.extra_keys.items() %} {{ extra|event_extra_name }} {{ metadata.type|go_metric_type }} // {{ metadata.description|clean_string }} {% endfor %} } // Record and submit an {{ event|event_type_name }} event. 
// {{ event.description|clean_string }} func (g GleanEventsLogger) Record{{ event|event_type_name }}( requestInfo RequestInfo, params {{ event|event_type_name }}, ) { var metrics = metrics{ {% for metric_type, metrics in events_ping.items() %} {% if metric_type != 'event' %} "{{ metric_type }}": { {% for metric in metrics %} {% if metric_type =='datetime' %} "{{ metric|metric_name }}": params.{{ metric|metric_argument_name }}.Format("2006-01-02T15:04:05.000Z"), {% else %} "{{ metric|metric_name }}": params.{{ metric|metric_argument_name }}, {% endif %} {% endfor %} }, {% endif %} {% endfor %} } var extraKeys = map[string]string{ {% for extra, metadata in event.extra_keys.items() %} {# convert all extra fields to string for submission #} {% if metadata.type == 'boolean' %} "{{ extra }}": fmt.Sprintf("%t", params.{{ extra|event_extra_name }}), {% elif metadata.type == 'quantity' %} "{{ extra }}": fmt.Sprintf("%d", params.{{ extra|event_extra_name }}), {% else %} "{{ extra }}": params.{{ extra|event_extra_name }}, {% endif %} {% endfor %} } var events = []gleanEvent{ gleanEvent{ Category: "{{ event.category }}", Name: "{{ event.name }}", Timestamp: time.Now().UnixMilli(), Extra: extraKeys, }, } g.record("events", requestInfo, metrics, events) } // Record and submit an {{ event|event_type_name }} event omitting user request info // {{ event.description|clean_string }} func (g GleanEventsLogger) Record{{ event|event_type_name }}WithoutUserInfo( params {{ event|event_type_name }}, ) { g.Record{{ event|event_type_name }}(defaultRequestInfo, params) } {% endfor %} glean_parser-15.0.1/glean_parser/templates/javascript.buildinfo.jinja2000066400000000000000000000007321466531427000261310ustar00rootroot00000000000000/* * AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. */ {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! 
#} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ {% if platform != "qt" %}export {% endif %}const buildDate = {{ build_date }}; glean_parser-15.0.1/glean_parser/templates/javascript.jinja2000066400000000000000000000051551466531427000241630ustar00rootroot00000000000000{# The final Javascript/Typescript code is autogenerated, but this Jinja2 template is not. Please file bugs! #} {% macro obj_declaration(obj) %} new {{ obj.type|class_name }}{% if obj.extra_keys and lang == "typescript" %}<{ {% for name, type in obj.allowed_extra_keys_with_types %} {{ name }}?: {{ type|extra_type_name }}, {% endfor %} }>{% endif %}({ {% for arg_name in (obj.type|args).common if obj[arg_name] is defined %} {{ arg_name|camelize }}: {{ obj[arg_name]|js }}, {% endfor %} }{% for arg_name in (obj.type|args).extra if obj[arg_name] is defined %}, {{ obj[arg_name]|js }}{% endfor %}){% endmacro %} {% macro labeled_obj_declaration(obj) %} new {{ "labeled"|class_name }}({ {% for arg_name in (obj.type|args).common if obj[arg_name] is defined %} {{ arg_name|camelize }}: {{ obj[arg_name]|js }}, {% endfor %} }, {{ obj.type|class_name }}{% if obj.labels is not none %}, {{ obj.labels|js }}{% endif %}){% endmacro %} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. 
{% if platform != "qt" %} {% if has_labeled_metrics %} import LabeledMetricType from "@mozilla/glean/private/metrics/labeled"; {% endif %} {% for type in types %} import {{ type|class_name }} from "@mozilla/glean/private/{{ type|import_path }}"; {% endfor %} {% else %} .import org.mozilla.Glean {{ version }} as Glean {% endif %} {% for obj in objs.values() %} /** * {{ obj.description|wordwrap() | replace("\n", "\n * ") }} * * Generated from `{{ obj.identifier() }}`. */ {% if obj.labeled %} {% if platform != "qt" %}export {% endif %}const {{ obj.name|camelize }} = {{ labeled_obj_declaration(obj) }}; {% else %} {% if platform != "qt" %}export {% endif %}const {{ obj.name|camelize }} = {{ obj_declaration(obj) }}; {% endif %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length and name == "reason_codes" %} /** * Reason codes for `{{ obj.identifier() }}`. * * @readonly * @enum {string} */ {% if lang == "typescript" %} export enum {{ obj.name|Camelize }}{{ name|Camelize }} { {% for key in obj|attr(name) %} {{ key|Camelize }} = "{{ key }}", {% endfor %} } {% else %} {% if platform != "qt" %}export {% endif %}const {{ obj.name|Camelize }}{{ name|Camelize }} = { {% for key in obj|attr(name) %} "{{ key|Camelize }}": "{{ key }}", {% endfor %} } {% endif %} {% endif %} {% endfor %} {% endif %} {% endfor %} glean_parser-15.0.1/glean_parser/templates/javascript_server.jinja2000066400000000000000000000230111466531427000255400ustar00rootroot00000000000000{# The final Javascript/Typescript code is autogenerated, but this Jinja2 template is not. Please file bugs! #} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. 
// This requires `uuid` and `mozlog` libraries to be in the environment {% if lang == "typescript" %} // @types/uuid and mozlog types definitions are required in devDependencies // for the latter see https://github.com/mozilla/fxa/blob/85bda71cda376c417b8c850ba82aa14252208c3c/types/mozlog/index.d.ts {% endif %} {% if module_spec == "commonjs" %} const uuidv4 = require('uuid').v4; const mozlog = require('mozlog'); {% else %} import { v4 as uuidv4 } from 'uuid'; import mozlog{% if lang == "typescript" %}, { Logger }{% endif %} from 'mozlog'; {% endif %} const GLEAN_EVENT_MOZLOG_TYPE = 'glean-server-event'; {% if lang == "typescript" %} type LoggerOptions = { app: string; fmt?: 'heka' }; {% if event_metric_exists %} type Event = { category: string; name: string; extra?: Record; timestamp?: number; }; {% endif %} {% endif %} let _logger{% if lang == "typescript" %}: Logger{% endif %}; {% for ping, metrics_by_type in pings.items() %} class {{ ping|event_class_name(metrics_by_type) }} { {% if lang == "typescript" %} _applicationId: string; _appDisplayVersion: string; _channel: string; {% endif %} /** * Create {{ ping|event_class_name(metrics_by_type) }} instance. * * @param {string} applicationId - The application ID. * @param {string} appDisplayVersion - The application display version. * @param {string} channel - The channel. * @param {LoggerOptions} logger_options - The logger options. 
*/ {% if lang == "typescript" %} constructor( applicationId: string, appDisplayVersion: string, channel: string, logger_options: LoggerOptions ) { {% else %} constructor(applicationId, appDisplayVersion, channel, logger_options) { {% endif %} this._applicationId = applicationId; this._appDisplayVersion = appDisplayVersion; this._channel = channel; if (!_logger) { // append '-glean' to `logger_options.app` to avoid collision with other loggers and double logging logger_options.app = logger_options.app + '-glean'; // set the format to `heka` so messages are properly ingested and decoded logger_options.fmt = 'heka'; {% if lang == "typescript" %} // mozlog types declaration requires a typePrefix to be passed when creating a logger // we don't want a typePrefix, so we pass `undefined` _logger = mozlog(logger_options)(undefined); {% else %} _logger = mozlog(logger_options)(); {% endif %} } } {% if 'event' in metrics_by_type %} #record({ {% else %} /** * Record and submit a server event object. * Event is logged using internal mozlog logger. * * @param {string} user_agent - The user agent. * @param {string} ip_address - The IP address. Will be used to decode Geo * information and scrubbed at ingestion. {% for metric_type, metrics in metrics_by_type.items() %} {% for metric in metrics %} * @param { {{-metric|js_metric_type-}} } {{ metric|metric_argument_name }} - {{ metric.description|clean_string }}. 
{% endfor %} {% endfor %} */ record({ {% endif %} user_agent, ip_address, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}, {% endfor %} {% endif %} {% endfor %} {% if 'event' in metrics_by_type %} event, {% endif %} {% if lang == "typescript" %} }: { user_agent: string; ip_address: string; {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}; {% endfor %} {% endif %} {% endfor %} {% if 'event' in metrics_by_type %} event: Event; {% endif %} {% endif %} }) { const now = new Date(); const timestamp = now.toISOString(); {% if 'event' in metrics_by_type %} event.timestamp = now.getTime(); {% endif %} const eventPayload = { metrics: { {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {{ metric_type }}: { {% for metric in metrics %} '{{ metric|metric_name }}': {{ metric|metric_argument_name }}, {% endfor %} }, {% endif %} {% endfor %} }, {% if 'event' in metrics_by_type %} events: [event], {% endif %} ping_info: { seq: 0, // this is required, however doesn't seem to be useful in server context start_time: timestamp, end_time: timestamp, }, // `Unknown` fields below are required in the Glean schema, however they are not useful in server context client_info: { telemetry_sdk_build: 'glean_parser v{{ parser_version }}', first_run_date: 'Unknown', os: 'Unknown', os_version: 'Unknown', architecture: 'Unknown', app_build: 'Unknown', app_display_version: this._appDisplayVersion, app_channel: this._channel, }, }; const eventPayloadSerialized = JSON.stringify(eventPayload); // This is the message structure that Decoder expects: https://github.com/mozilla/gcp-ingestion/pull/2400 const ping = { document_namespace: this._applicationId, document_type: '{{ ping }}', document_version: '1', document_id: uuidv4(), 
user_agent: user_agent, ip_address: ip_address, payload: eventPayloadSerialized, }; // this is similar to how FxA currently logs with mozlog: https://github.com/mozilla/fxa/blob/4c5c702a7fcbf6f8c6b1f175e9172cdd21471eac/packages/fxa-auth-server/lib/log.js#L289 _logger.info(GLEAN_EVENT_MOZLOG_TYPE, ping); } {% if 'event' in metrics_by_type %} {% for event in metrics_by_type["event"] %} /** * Record and submit a {{ event.category }}_{{ event.name }} event: * {{ event.description|clean_string }} * Event is logged using internal mozlog logger. * * @param {string} user_agent - The user agent. * @param {string} ip_address - The IP address. Will be used to decode Geo * information and scrubbed at ingestion. {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} * @param { {{-metric|js_metric_type-}} } {{ metric|metric_argument_name }} - {{ metric.description|clean_string }}. {% endfor %} {% endif %} {% endfor %} {% if event.extra_keys %} {% for extra, metadata in event.extra_keys.items() %} * @param { {{-metadata.type-}} } {{ extra }} - {{ metadata.description|clean_string }}. 
{% endfor %} {% endif %} */ {{ event|event_metric_record_function_name }}({ user_agent, ip_address, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}, {% endfor %} {% endif %} {% endfor %} {% for extra, metadata in event.extra_keys.items() %} {{ extra }}, {% endfor %} {% if lang == "typescript" %} }: { user_agent: string; ip_address: string; {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}; {% endfor %} {% endif %} {% endfor %} {% for extra, metadata in event.extra_keys.items() %} {{ extra }}: {{metadata.type}}; {% endfor %} {% endif %} }) { const event = { category: '{{ event.category }}', name: '{{ event.name }}', {% if event.extra_keys %} extra: { {% for extra, metadata in event.extra_keys.items() %} {{ extra }}: String({{ extra }}), {% endfor %} }, {% endif %} }; this.#record({ user_agent, ip_address, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}, {% endfor %} {% endif %} {% endfor %} event, }); } {% endfor %} {% endif %} } {% endfor %} {% for ping, metrics_by_type in pings.items() %} /** * Factory function that creates an instance of Glean Server Event Logger to * record `{{ ping }}` ping events. * @param {string} applicationId - The application ID. * @param {string} appDisplayVersion - The application display version. * @param {string} channel - The channel. * @param {Object} logger_options - The logger options. * @returns {EventsServerEventLogger} An instance of EventsServerEventLogger. 
*/ {% if module_spec == "commonjs" %} module.exports.{{ ping|factory_method(metrics_by_type) }} = function ({ {% else %} export const {{ ping|factory_method(metrics_by_type) }} = function ({ {% endif %} applicationId, appDisplayVersion, channel, logger_options, {% if lang == "typescript" %} }: { applicationId: string; appDisplayVersion: string; channel: string; logger_options: LoggerOptions; {% endif %} }) { return new {{ ping|event_class_name(metrics_by_type) }}( applicationId, appDisplayVersion, channel, logger_options ); }; {% endfor %} glean_parser-15.0.1/glean_parser/templates/kotlin.buildinfo.jinja2000066400000000000000000000016301466531427000252610ustar00rootroot00000000000000// -*- mode: kotlin -*- /* * AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. */ {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! #} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ @file:Suppress("PackageNaming", "MaxLineLength") package {{ namespace }} import java.util.Calendar import java.util.TimeZone import {{ glean_namespace }}.BuildInfo import {{ namespace_package }}.BuildConfig @Suppress("MagicNumber") internal object GleanBuildInfo { val buildInfo: BuildInfo by lazy { BuildInfo( versionCode = BuildConfig.VERSION_CODE.toString(), versionName = BuildConfig.VERSION_NAME, buildDate = {{ build_date }} ) } } glean_parser-15.0.1/glean_parser/templates/kotlin.jinja2000066400000000000000000000226421466531427000233150ustar00rootroot00000000000000// -*- mode: kotlin -*- /* * AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. */ {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! #} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ {%- macro obj_declaration(obj, suffix='', access='', lazy=False) -%} {% if (access != "private ") -%} @get:JvmName("{{ obj.name|camelize }}{{ suffix }}") {% endif -%} {{ access }}val {{ obj.name|camelize }}{{ suffix }}: {{ obj|type_name }}{% if lazy %} by lazy { {%- else %} ={% endif %} // generated from {{ obj.identifier() }} {% if obj.type == 'ping' %} {{ obj|type_name }}( {% for arg_name in ping_args if obj[arg_name] is defined %} {{ arg_name|camelize }} = {{ obj[arg_name]|kotlin }}{{ "," if not loop.last }} {% endfor %} ) {% else %} {{ obj|type_name }}( CommonMetricData( {% for arg_name in common_metric_args if obj[arg_name] is defined %} {{ arg_name|camelize }} = {{ obj[arg_name]|kotlin }}{{ "," if not loop.last }} {% endfor %} ){%- for arg_name in extra_metric_args if obj[arg_name] is defined -%} , {{ arg_name|camelize }} = {{ obj[arg_name]|kotlin }} {%- endfor -%} ) {% endif %} {% if lazy %}}{% endif %} {%- endmacro -%} {%- macro reason_enum_decl(obj, name, suffix) -%} @Suppress("ClassNaming", "EnumNaming") enum class {{ obj.name|camelize }}{{ suffix }} : ReasonCode { {% for key in obj|attr(name) %} {{ key|camelize }} { override fun code(): Int = {{ loop.index-1 }} }{{ "," if not loop.last }}{{ ";" if loop.last }} {% endfor %} } {%- endmacro %} {%- macro struct_decl(obj, name, suffix) -%} @Suppress("ClassNaming", "EnumNaming") data class {{ obj.name|Camelize }}{{ suffix }}( {% for item, typ in obj|attr(name) %} val {{ item|camelize }}: {{typ|extra_type_name}}? 
= null{{ "," if not loop.last }} {% endfor %} ) : EventExtras { override fun toExtraRecord(): Map { val map = mutableMapOf() {% for item in obj|attr(name) %} this.{{ item[0]|camelize }}?.let { map.put("{{item[0]}}", it.toString()) } {% endfor %} return map } } {%- endmacro -%} {%- macro generate_structure(name, struct) %} {%- if struct.type == "array" -%} @Serializable(with = {{name}}.Serializer::class) data class {{ name }}(var items: MutableList<{{ name }}Item> = mutableListOf()) : ObjectSerialize { fun add(elem: {{ name }}Item) = items.add(elem) fun addAll(elements: Collection<{{ name }}Item>) = items.addAll(elements) fun clear() = items.clear() fun remove(element: {{ name }}Item) = items.remove(element) fun removeAll(elements: Collection<{{ name }}Item>) = items.removeAll(elements) fun removeAt(index: Int) = items.removeAt(index) fun set(index: Int, element: {{ name }}Item) = items.set(index, element) override fun intoSerializedObject(): String { return Json.encodeToString(items) } internal object Serializer : KSerializer<{{name}}> { @OptIn(kotlinx.serialization.ExperimentalSerializationApi::class) override val descriptor = listSerialDescriptor() override fun deserialize(decoder: Decoder): {{ name }} { val list = decoder.decodeSerializableValue(ListSerializer(serializer<{{name}}Item>())) return {{name}}(items = list.toMutableList()) } override fun serialize(encoder: Encoder, value: {{name}}) { encoder.encodeSerializableValue(ListSerializer(serializer<{{name}}Item>()), value.items) } } } {{ generate_structure(name ~ "Item", struct["items"]) }} {%- elif struct.type == "object" -%} @Serializable data class {{ name }}( {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} var {{itemname|camelize}}: {{ name ~ itemname|Camelize }} = {{ name ~ itemname|Camelize }}(), {% elif val.type == "object" %} var {{itemname|camelize}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}? 
= null, {% else %} var {{itemname|camelize}}: {{val.type|structure_type_name}}? = null, {% endif %} {% endfor %} ): ObjectSerialize { override fun intoSerializedObject(): String { return Json.encodeToString(this) } } {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} {% set nested_name = name ~ itemname|Camelize %} {{ generate_structure(nested_name, val) }} {% elif val.type == "object" %} {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} {{ generate_structure(nested_name, val) }} {% endif %} {% endfor %} {% endif %} {% endmacro %} {%- macro generate_structure_typealias(name, struct) %} {%- if struct.type == "array" -%} {{ generate_structure_typealias(name ~ "Item", struct["items"]) }} {%- elif struct.type == "object" -%} {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} {% set nested_name = name ~ itemname|Camelize %} {{ generate_structure_typealias(nested_name, val) }} {% elif val.type == "object" %} {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} {{ generate_structure_typealias(nested_name, val) }} {% endif %} {% endfor %} {%- else -%} typealias {{ name }} = {{ struct.type|structure_type_name }} {% endif %} {% endmacro %} /* ktlint-disable no-blank-line-before-rbrace */ @file:Suppress("PackageNaming", "MaxLineLength") package {{ namespace }} import {{ glean_namespace }}.private.CommonMetricData // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.EventExtras // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.HistogramType // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.Lifetime // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.MemoryUnit // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.NoExtras // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace 
}}.private.NoReasonCodes // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.ObjectSerialize // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.ReasonCode // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.TimeUnit // ktlint-disable import-ordering no-unused-imports {% for obj_type in obj_types %} import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ordering {% endfor %} {% if has_labeled_metrics %} import {{ glean_namespace }}.private.LabeledMetricType // ktlint-disable import-ordering {% endif %} {% if has_object_metrics %} import kotlinx.serialization.KSerializer import kotlinx.serialization.Serializable import kotlinx.serialization.builtins.ListSerializer import kotlinx.serialization.descriptors.listSerialDescriptor import kotlinx.serialization.encodeToString import kotlinx.serialization.encoding.Decoder import kotlinx.serialization.encoding.Encoder import kotlinx.serialization.json.Json import kotlinx.serialization.serializer {% endif %} {# HACK HACK HACK -- typealiases MUST BE top-level #} {% for obj in objs.values() %} {% if obj|attr("_generate_structure") %} {{ generate_structure_typealias(obj.name|Camelize ~ "Object", obj._generate_structure) }} {%- endif %} {% endfor %} {# HACK end #} internal object {{ category_name|Camelize }} { {% for obj in objs.values() %} {% if obj.type == "ping" %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} {{ reason_enum_decl(obj, name, suffix)|indent }} {% endif %} {% endfor %} {% endif %} {% else %} {% if obj|attr("_generate_structure") %} {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }} {%- endif %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} {{ struct_decl(obj, name, suffix)|indent }} {% endif %} {% endfor %} {% endif 
%} {% endif %} {% endfor %} {% for obj in objs.values() %} {% if obj.labeled %} {{ obj_declaration(obj, 'Label', 'private ') | indent }} /** * {{ obj.description|wordwrap() | replace('\n', '\n * ') }} */ val {{ obj.name|camelize }}: LabeledMetricType<{{ obj|type_name }}> by lazy { // generated from {{ obj.identifier() }} LabeledMetricType( category = {{ obj.category|kotlin }}, name = {{ obj.name|kotlin }}, subMetric = {{ obj.name|camelize }}Label, disabled = {{ obj.is_disabled()|kotlin }}, lifetime = {{ obj.lifetime|kotlin }}, sendInPings = {{ obj.send_in_pings|kotlin }}, labels = {{ obj.labels|kotlin }} ) } {% else %} /** * {{ obj.description|wordwrap() | replace('\n', '\n * ') }} */ {{ obj_declaration(obj, lazy=obj.type != 'ping') | indent }} {% endif %} {%- endfor %} } glean_parser-15.0.1/glean_parser/templates/markdown.jinja2000066400000000000000000000065411466531427000236370ustar00rootroot00000000000000 {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! #} # Metrics This document enumerates the metrics collected by {{ project_title }} using the [Glean SDK](https://mozilla.github.io/glean/book/index.html). This project may depend on other projects which also collect metrics. This means you might have to go searching through the dependency tree to get a full picture of everything collected by this project. {% if introduction_extra %} {{ introduction_extra }} {% endif %} # Pings {% for ping_name in metrics_by_pings.keys()|sort %} - [{{ ping_name }}]({{ '#' }}{{ ping_name|replace(" ","-") }}) {% endfor %} {% for ping_name in metrics_by_pings.keys()|sort %} {% raw %}##{% endraw %} {{ ping_name }} {% if ping_name|ping_desc and ping_name|ping_desc|length > 0 %} {{ ping_name|ping_desc }} {% if ping_name|ping_docs|length > 0 %} See the Glean SDK documentation for the [`{{ ping_name }}` ping]({{ ping_name|ping_docs }}). {% endif %} {% endif %} {% if ping_name|ping_send_if_empty %} This ping is sent if empty. 
{% endif %} {% if ping_name|ping_include_client_id %} This ping includes the [client id](https://mozilla.github.io/glean/book/user/pings/index.html#the-client_info-section). {% endif %} {% if ping_name|ping_data_reviews %} **Data reviews for this ping:** {% for review in ping_name|ping_data_reviews %} - <{{review}}> {% endfor %} {% endif %} {% if ping_name|ping_bugs %} **Bugs related to this ping:** {% for bug in ping_name|ping_bugs %} - {% if bug|int != 0 %}{{bug}}{% else %}<{{bug}}>{% endif %} {% endfor %} {% endif %} {% if ping_name|ping_reasons %} **Reasons this ping may be sent:** {% for (reason, desc) in ping_name|ping_reasons|dictsort %} - `{{ reason }}`: {{ desc|indent(6, first=False) }} {% endfor %} {% endif %} All Glean pings contain built-in metrics in the [`ping_info`](https://mozilla.github.io/glean/book/user/pings/index.html#the-ping_info-section) and [`client_info`](https://mozilla.github.io/glean/book/user/pings/index.html#the-client_info-section) sections. {% if metrics_by_pings[ping_name] %} In addition to those built-in metrics, the following metrics are added to the ping: | Name | Type | Description | Data reviews | Extras | Expiration | [Data Sensitivity](https://wiki.mozilla.org/Firefox/Data_Collection) | | --- | --- | --- | --- | --- | --- | --- | {% for metric in metrics_by_pings[ping_name] %} | {{ metric.identifier() }} | {{- '['}}{{ metric.type }}]({{ metric.type|metrics_docs }}) | {{- metric.description|replace("\n", " ") }} | {%- for data_review in metric.data_reviews %} [{{ data_review|ping_review_title(loop.index) }}]({{ data_review }}){{ ", " if not loop.last }} {%- endfor -%} | {%- if metric|extra_info -%}
    {%- for property, desc in metric|extra_info %}
  • {{ property }}{%- if desc is not none -%}: {{ desc|replace("\n", " ") }}{%- endif -%}
  • {%- endfor -%}
{%- endif -%} | {{- metric.expires }} | {{- metric.data_sensitivity|data_sensitivity_numbers }} | {% endfor %} {% else %} This ping contains no metrics. {% endif %} {% endfor %} Data categories are [defined here](https://wiki.mozilla.org/Firefox/Data_Collection). {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! #} glean_parser-15.0.1/glean_parser/templates/python_server.jinja2000066400000000000000000000161341466531427000247230ustar00rootroot00000000000000{# The final Go code is autogenerated, but this template is not. Please file bugs! #} """ This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. """ from __future__ import annotations from datetime import datetime, timezone from typing import Any from uuid import uuid4 import json GLEAN_EVENT_MOZLOG_TYPE = "glean-server-event" {% for ping, metrics_by_type in pings.items() %} class {{ ping|camelize }}ServerEventLogger: def __init__( self, application_id: str, app_display_version: str, channel: str ) -> None: """ Create {{ ping|camelize }}ServerEventLogger instance. :param str application_id: The application ID. :param str app_display_version: The application display version. :param str channel: The channel. 
""" self._application_id = application_id self._app_display_version = app_display_version self._channel = channel def _record( self, user_agent: str, ip_address: str, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric.category }}_{{ metric.name }}: {{ metric.type|py_metric_type }}, {% endfor %} {% endif %} {% endfor %} event: dict[str, Any] ) -> None: now = datetime.now(timezone.utc) timestamp = now.isoformat() event["timestamp"] = int(1000.0 * now.timestamp()) # Milliseconds since epoch event_payload = { "metrics": { {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} "{{ metric_type }}": { {% for metric in metrics %} "{{ metric.category }}.{{ metric.name }}": {{ metric.category }}_{{ metric.name }}, {% endfor %} }, {% endif %} {% endfor %} }, "events": [event], "ping_info": { # seq is required in the Glean schema, however is not useful in server context "seq": 0, "start_time": timestamp, "end_time": timestamp, }, # `Unknown` fields below are required in the Glean schema, however they are # not useful in server context "client_info": { "telemetry_sdk_build": "glean_parser v{{ parser_version }}", "first_run_date": "Unknown", "os": "Unknown", "os_version": "Unknown", "architecture": "Unknown", "app_build": "Unknown", "app_display_version": self._app_display_version, "app_channel": self._channel, }, } event_payload_serialized = json.dumps(event_payload) # This is the message structure that Decoder expects: # https://github.com/mozilla/gcp-ingestion/pull/2400 ping = { "document_namespace": self._application_id, "document_type": "{{ ping }}", "document_version": "1", "document_id": str(uuid4()), "user_agent": user_agent, "ip_address": ip_address, "payload": event_payload_serialized, } self.emit_record(now, ping) def emit_record(self, now: datetime, ping:dict[str, Any]) -> None: """Log the ping to STDOUT. 
Applications might want to override this method to use their own logging. If doing so, make sure to log the ping as JSON, and to include the `Type: GLEAN_EVENT_MOZLOG_TYPE`.""" ping_envelope = { "Timestamp": now.isoformat(), "Logger": "glean", "Type": GLEAN_EVENT_MOZLOG_TYPE, "Fields": ping, } ping_envelope_serialized = json.dumps(ping_envelope) print(ping_envelope_serialized) {% for event in metrics_by_type["event"] %} def {{ event|record_event_function_name }}( self, user_agent: str, ip_address: str, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric.category }}_{{ metric.name }}: {{ metric.type|py_metric_type }}, {% endfor %} {% endif %} {% endfor %} {% for extra, metadata in event.extra_keys.items() %} {{ extra }}: {{ metadata.type|py_metric_type }}, {% endfor %} ) -> None: """ Record and submit a {{ event.category }}_{{ event.name }} event: {{ event.description|clean_string }} Event is logged to STDOUT via `print`. :param str user_agent: The user agent. :param str ip_address: The IP address. Will be used to decode Geo information and scrubbed at ingestion. 
{% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} :param {{ metric.type|py_metric_type }} {{ metric.category }}_{{ metric.name }}: {{ metric.description|clean_string }} {% endfor %} {% endif %} {% endfor %} {% if event.extra_keys %} {% for extra, metadata in event.extra_keys.items() %} :param {{ metadata.type|py_metric_type }} {{ extra }}: {{ metadata.description|clean_string }} {% endfor %} {% endif %} """ event = { "category": "{{ event.category }}", "name": "{{ event.name }}", {% if event.extra_keys %} "extra": { {% for extra, metadata in event.extra_keys.items() %} "{{ extra }}": str({{ extra }}){% if 'bool' == metadata.type|py_metric_type %}.lower(){% endif %}, {% endfor %} }, {% endif %} } self._record( user_agent, ip_address, {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric.category }}_{{ metric.name }}, {% endfor %} {% endif %} {% endfor %} event ) {% endfor %} {% endfor %} {% for ping in pings %} def {{ ping|factory_method }}( application_id: str, app_display_version: str, channel: str, ) -> {{ ping|camelize }}ServerEventLogger: """ Factory function that creates an instance of Glean Server Event Logger to record `{{ ping }}` ping events. :param str application_id: The application ID. :param str app_display_version: The application display version. :param str channel: The channel. :return: An instance of {{ ping|camelize }}ServerEventLogger. 
:rtype: {{ ping|camelize }}ServerEventLogger """ return {{ ping|camelize }}ServerEventLogger(application_id, app_display_version, channel) {% endfor %} glean_parser-15.0.1/glean_parser/templates/qmldir.jinja2000066400000000000000000000002341466531427000232760ustar00rootroot00000000000000{% for category in categories|sort %} {{ category|Camelize }} {{ version }} {{ category|camelize }}.js {% endfor %} depends org.mozilla.Glean {{ version }} glean_parser-15.0.1/glean_parser/templates/ruby_server.jinja2000066400000000000000000000141541466531427000243630ustar00rootroot00000000000000{# The final Ruby code is autogenerated, but this Jinja2 template is not. Please file bugs! #} # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. # frozen_string_literal: true # requires json, securerandom, and logger libs require 'json' require 'securerandom' require 'logger' # this will be used for identifying logs that need to forward to Moz Data Pipeline GLEAN_EVENT_MOZLOG_TYPE = 'glean-server-event' module Glean {% for ping, metrics_by_type in pings.items() %} class {{ ping|ping_class_name }} def initialize(app_id:, app_display_version:, app_channel:, logger_options:) @app_id = app_id # string - Application Id to identify application per Glean standards @app_display_version = app_display_version # string - Version of application emitting the event @app_channel = app_channel # string - Application Id to identify application per Glean standards @logger = Logger.new(logger_options) # Logger configuration @logger.formatter = proc do |severity, datetime, _progname, msg| date_format = datetime.to_i logger_name = 'glean' "#{JSON.dump(Timestamp: date_format.to_s, Logger: logger_name.to_s, Type: GLEAN_EVENT_MOZLOG_TYPE.to_s, Severity: severity.ljust(5).to_s, Pid: Process.pid.to_s, 
Fields: msg)}\n" end # Generated events {% for metric_type, metrics in metrics_by_type.items() %} {% for metric in metrics %} {% if metric.type == 'event' %} # {{ metric|metric_argument_description }} @{{ metric|metric_argument_name }} = {{ metric|event_class_name }}.new(self) {% endif %} {% endfor %} {% endfor %} end def _record( {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} # {{ metric|metric_argument_description }} {{ metric|metric_argument_name }}:, {% endfor %} {% endif %} {% endfor %} # full user_agent value from controller context user_agent:, # ip address value from controller context ip_address:, # event being sent in the ping event: ) t_utc = Time.now.utc # create raw metrics hash that can have nil values metrics_raw = { {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} '{{ metric_type }}' => { {% for metric in metrics %} '{{ metric|metric_name }}' => {{ metric|metric_argument_name }}, {% endfor %} }, {% endif %} {% endfor %} } # filter out key value pairs where value is nil metrics_raw.each do |key, value| metrics_raw[key] = value.compact.transform_values(&:to_s) end # filter out metrics with empty hashes metrics = metrics_raw.reject { |_k, v| v.empty? } event_payload = { # `Unknown` fields below are required in the Glean schema, however they are not useful in server context. 'client_info' => { 'telemetry_sdk_build' => 'glean_parser v{{ parser_version }}', 'first_run_date' => 'Unknown', 'os' => 'Unknown', 'os_version' => 'Unknown', 'architecture' => 'Unknown', 'app_build' => 'Unknown', 'app_display_version' => @app_display_version, 'app_channel' => @app_channel, }, 'ping_info' => { 'seq' => 0, 'start_time' => t_utc, 'end_time' => t_utc, }, 'metrics' => metrics, 'events' => event, } serialized_event_payload = event_payload.to_json # This is the message structure that Decoder expects: https://github.com/mozilla/gcp-ingestion/pull/2400. 
ping = { 'document_namespace' => @app_id, 'document_type' => '{{ ping }}', 'document_version' => '1', 'document_id' => SecureRandom.uuid, 'user_agent' => user_agent, 'ip_address' => ip_address, 'payload' => serialized_event_payload, } @logger.info(ping) end {% for metric_type, metrics in metrics_by_type.items() %} {% for metric in metrics %} {% if metric.type == 'event' %} attr_accessor :{{ metric|metric_argument_name }} {% endif %} {% endfor %} {% endfor %} end {% endfor %} {% for event in pings["events"]["event"] %} class {{ event|event_class_name }} # {{ event|metric_argument_description }} def initialize(glean) @glean = glean end def record( # extras to pass into event detail {% for extra, metadata in event.extra_keys.items() %} {{ extra }}:, {% endfor %} {% for ping, metric_types in pings.items() %} {% for metric_type, metrics in metric_types.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} # {{ metric|metric_argument_description }} {{ metric|metric_argument_name }}:, {% endfor %} {% endif %} {% endfor %} {% endfor %} # full user_agent value from controller context user_agent:, # ip address value from controller context ip_address: ) event = [ { 'category' => '{{ event.category }}', 'name' => '{{ event.name }}', 'timestamp' => (Time.now.utc.to_f * 1000).to_i, 'extra' => [ {% for extra, metadata in event.extra_keys.items() %} ['{{ extra }}', {{ extra }}.to_s], {% endfor %} ].to_h, }, ] @glean._record( {% for ping, metric_types in pings.items() %} {% for metric_type, metrics in metric_types.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} {{ metric|metric_argument_name }}: {{ metric|metric_argument_name }}, {% endfor %} {% endif %} {% endfor %} {% endfor %} user_agent: user_agent, ip_address: ip_address, event: event ) end end end {% endfor %} glean_parser-15.0.1/glean_parser/templates/rust.jinja2000066400000000000000000000160351466531427000230110ustar00rootroot00000000000000// -*- mode: Rust -*- // AUTOGENERATED BY 
glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. {# The rendered source is autogenerated, but this Jinja2 template is not. Please file bugs! #} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ {%- macro generate_structure(name, struct) %} {% if struct.type == "array" %} pub type {{ name }} = Vec<{{ name }}Item>; {{ generate_structure(name ~ "Item", struct["items"]) }} {% elif struct.type == "object" %} #[derive(Debug, Hash, Eq, PartialEq, ::glean::traits::__serde::Serialize, ::glean::traits::__serde::Deserialize)] #[serde(crate = "::glean::traits::__serde")] #[serde(deny_unknown_fields)] pub struct {{ name }} { {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} #[serde(skip_serializing_if = "Vec::is_empty", default = "Vec::new", deserialize_with = "::glean::traits::__serde_helper::vec_null")] pub {{itemname|snake_case}}: {{ name ~ itemname|Camelize }}, {% elif val.type == "object" %} #[serde(skip_serializing_if = "Option::is_none")] pub {{itemname|snake_case}}: Option<{{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}>, {% else %} #[serde(skip_serializing_if = "Option::is_none")] pub {{itemname|snake_case}}: Option<{{val.type|structure_type_name}}>, {% endif %} {% endfor %} } {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} {% set nested_name = name ~ itemname|Camelize %} {{ generate_structure(nested_name, val) }} {% elif val.type == "object" %} {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} {{ generate_structure(nested_name, val) }} {% endif %} {% endfor %} {% else %} pub type {{ name }} = {{ struct.type|structure_type_name }}; {% endif %} {% endmacro %} {% macro generate_extra_keys(obj) %} {% for name, _ in obj["_generate_enums"] %} {# we always use the `extra` suffix, because we only expose the new event API #} 
{% set suffix = "Extra" %} {% if obj|attr(name)|length %} {{ extra_keys_with_types(obj, name, suffix)|indent }} {% endif %} {% endfor %} {% endmacro %} {% macro extra_keys_with_types(obj, name, suffix) %} #[derive(Default, Debug, Clone, Hash, Eq, PartialEq)] pub struct {{ obj.name|Camelize }}{{ suffix }} { {% for item, type in obj|attr(name) %} pub {{ item|snake_case }}: Option<{{type|extra_type_name}}>, {% endfor %} } impl ExtraKeys for {{ obj.name|Camelize }}{{ suffix }} { const ALLOWED_KEYS: &'static [&'static str] = {{ obj.allowed_extra_keys|extra_keys }}; fn into_ffi_extra(self) -> ::std::collections::HashMap<::std::string::String, ::std::string::String> { let mut map = ::std::collections::HashMap::new(); {% for key, _ in obj|attr(name) %} self.{{key|snake_case}}.and_then(|val| map.insert("{{key}}".to_string(), val.to_string())); {% endfor %} map } } {% endmacro %} {% macro common_metric_data(obj) %} CommonMetricData { category: {{ obj.category|rust }}, name: {{ obj.name|rust }}, send_in_pings: {{ obj.send_in_pings|rust }}, lifetime: {{ obj.lifetime|rust }}, disabled: {{ obj.is_disabled()|rust }}, ..Default::default() } {% endmacro %} {% for category in categories %} {% if category.contains_pings %} {% for obj in category.objs.values() %} #[allow(non_upper_case_globals, dead_code)] /// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }} #[rustfmt::skip] pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> = ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }})); {% endfor %} {% else %} pub mod {{ category.name|snake_case }} { #[allow(unused_imports)] // HistogramType might be unusued, let's avoid warnings use glean::{private::*, traits::ExtraKeys, 
traits::NoExtraKeys, CommonMetricData, HistogramType, LabeledMetricData, Lifetime, TimeUnit, MemoryUnit}; {% for obj in category.objs.values() %} {% if obj|attr("_generate_structure") %} {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }} {%- endif %} {% if obj|attr("_generate_enums") %} {{ generate_extra_keys(obj) }} {%- endif %} #[allow(non_upper_case_globals, dead_code)] /// generated from {{ category.name }}.{{ obj.name }} /// /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }} pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<{{ obj|type_name }}> = ::glean::private::__export::Lazy::new(|| { let meta = {% if obj.type == "labeled_custom_distribution" %} LabeledMetricData::CustomDistribution { cmd: {{ common_metric_data(obj)|indent(16) }} {%- for arg_name in extra_metric_args if obj[arg_name] is defined and arg_name != 'allowed_extra_keys' -%} , {{ arg_name }}: {{ obj[arg_name]|rust }} {%- endfor -%} }; {% elif obj.type == "labeled_memory_distribution" %} LabeledMetricData::MemoryDistribution { cmd: {{ common_metric_data(obj)|indent(16) }} {%- for arg_name in extra_metric_args if obj[arg_name] is defined and arg_name != 'allowed_extra_keys' -%} , {{ "unit" if arg_name == "memory_unit" else arg_name }}: {{ obj[arg_name]|rust }} {%- endfor -%} }; {% elif obj.type == "labeled_timing_distribution" %} LabeledMetricData::TimingDistribution { cmd: {{ common_metric_data(obj)|indent(16) }} {%- for arg_name in extra_metric_args if obj[arg_name] is defined and arg_name != 'allowed_extra_keys' -%} , {{ "unit" if arg_name == "time_unit" else arg_name }}: {{ obj[arg_name]|rust }} {%- endfor -%} }; {% elif obj.labeled %} LabeledMetricData::Common { cmd: {{common_metric_data(obj)|indent(16) }}, }; {% else %} {{ common_metric_data(obj)|indent(12) }}; {% endif %} {{ obj|ctor }}(meta {%- for arg_name in extra_metric_args if not obj.labeled and obj[arg_name] is defined and arg_name != 'allowed_extra_keys' -%} , {{ 
obj[arg_name]|rust }} {%- endfor -%} {{ ", " if obj.labeled else ")\n" }} {%- if obj.labeled -%} {%- if obj.labels -%} Some({{ obj.labels|rust }}) {%- else -%} None {%- endif -%}) {% endif %} }); {% endfor %} } {% endif %} {% endfor %} {% if metric_by_type|length > 0 %} {% endif %} glean_parser-15.0.1/glean_parser/templates/swift.jinja2000066400000000000000000000151511466531427000231460ustar00rootroot00000000000000// -*- mode: Swift -*- // AUTOGENERATED BY glean_parser v{{ parser_version }}. DO NOT EDIT. DO NOT COMMIT. {# The rendered markdown is autogenerated, but this Jinja2 template is not. Please file bugs! #} #if canImport(Foundation) import Foundation #endif /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ {%- macro obj_declaration(obj, suffix='', access='') %} {{ access }}static let {{ obj.name|camelize|variable_name }}{{ suffix }} = {{ obj|type_name }}( // generated from {{ obj.identifier() }} CommonMetricData( {% for arg_name in common_metric_args if obj[arg_name] is defined %} {{ arg_name|camelize }}: {{ obj[arg_name]|swift }}{{ "," if not loop.last }} {% endfor %} ) {% for arg_name in extra_metric_args if obj[arg_name] is defined %} , {{ obj[arg_name]|swift }} {% endfor %} ) {% endmacro %} {%- macro struct_decl(obj, name, suffix) %} struct {{ obj.name|Camelize }}{{ suffix }}: EventExtras { {% for item, typ in obj|attr(name) %} var {{ item|camelize|variable_name }}: {{typ|extra_type_name}}? 
{% endfor %} func toExtraRecord() -> [String: String] { var record = [String: String]() {% for item in obj|attr(name) %} if let {{ item[0]|camelize }} = self.{{item[0]|camelize}} { record["{{item[0]}}"] = String({{ item[0]|camelize }}) } {% endfor %} return record } } {% endmacro %} {%- macro generate_structure(name, struct) %} {%- if struct.type == "array" -%} typealias {{ name }} = [{{ name }}Item] {{ generate_structure(name ~ "Item", struct["items"]) }} {%- elif struct.type == "object" -%} struct {{ name }}: Codable, Equatable, ObjectSerialize { {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} var {{itemname|camelize|variable_name}}: {{ name ~ itemname|Camelize }} = [] {% elif val.type == "object" %} var {{itemname|camelize|variable_name}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}? {% else %} var {{itemname|camelize|variable_name}}: {{val.type|structure_type_name}}? {% endif %} {% endfor %} func intoSerializedObject() -> String { let jsonEncoder = JSONEncoder() let jsonData = try! jsonEncoder.encode(self) let json = String(data: jsonData, encoding: String.Encoding.utf8)! 
return json } } {% for itemname, val in struct.properties.items() %} {% if val.type == "array" %} {% set nested_name = name ~ itemname|Camelize %} {{ generate_structure(nested_name, val) }} {% elif val.type == "object" %} {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} {{ generate_structure(nested_name, val) }} {% endif %} {% endfor %} {%- else -%} typealias {{ name }} = {{ struct.type|structure_type_name }} {% endif -%} {% endmacro %} {% if not allow_reserved %} import {{ glean_namespace }} {% endif %} // swiftlint:disable superfluous_disable_command // swiftlint:disable nesting // swiftlint:disable line_length // swiftlint:disable identifier_name // swiftlint:disable force_try extension {{ namespace }} { {% if build_info %} class GleanBuild { private init() { // Intentionally left private, no external user can instantiate a new global object. } public static let info = BuildInfo(buildDate: {{ build_info.build_date }}) } {% endif %} {% for category in categories %} {% if category.contains_pings %} class {{ category.name|Camelize }} { public static let shared = {{ category.name|Camelize }}() private init() { // Intentionally left private, no external user can instantiate a new global object. 
} {% for obj in category.objs.values() %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} enum {{ obj.name|Camelize }}{{ suffix }}: Int, ReasonCodes { {% for key in obj|attr(name) %} case {{ key|camelize|variable_name }} = {{ loop.index-1 }} {% endfor %} public func index() -> Int { return self.rawValue } } {% endif %} {% endfor %} {% endif %} /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }} let {{ obj.name|camelize|variable_name }} = {{obj|type_name}}( name: {{ obj.name|swift }}, includeClientId: {{obj.include_client_id|swift}}, sendIfEmpty: {{obj.send_if_empty|swift}}, preciseTimestamps: {{obj.precise_timestamps|swift}}, includeInfoSections: {{obj.include_info_sections|swift}}, enabled: {{obj.enabled|swift}}, schedulesPings: {{obj.schedules_pings|swift}}, reasonCodes: {{obj.reason_codes|swift}} ) {% endfor %} } {% else %} enum {{ category.name|Camelize }} { {% for obj in category.objs.values() %} {% if obj|attr("_generate_structure") %} {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }} {%- endif %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} {{ struct_decl(obj, name, suffix)|indent }} {% endif %} {% endfor %} {% endif %} {% endfor %} {% for obj in category.objs.values() %} {% if obj.labeled %} {{ obj_declaration(obj, 'Label', 'private ') | indent }} /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }} static let {{ obj.name|camelize|variable_name }} = try! 
LabeledMetricType<{{ obj|type_name }}>( // generated from {{ obj.identifier() }} category: {{ obj.category|swift }}, name: {{ obj.name|swift }}, sendInPings: {{ obj.send_in_pings|swift }}, lifetime: {{ obj.lifetime|swift }}, disabled: {{ obj.is_disabled()|swift }}, subMetric: {{ obj.name|camelize }}Label, labels: {{ obj.labels|swift }} ) {% else %} /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }} {{ obj_declaration(obj) | indent }} {% endif %} {% endfor %} } {% endif %} {% endfor %} } glean_parser-15.0.1/glean_parser/translate.py000066400000000000000000000203231466531427000212610ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ High-level interface for translating `metrics.yaml` into other formats. """ from pathlib import Path import os import shutil import sys import tempfile from typing import Any, Callable, Dict, Iterable, List, Optional from . import lint from . import parser from . import go_server from . import javascript from . import javascript_server from . import python_server from . import kotlin from . import markdown from . import metrics from . import ruby_server from . import rust from . import swift from . import util class Outputter: """ Class to define an output format. Each outputter in the table has the following member values: - output_func: the main function of the outputter, the one which does the actual translation. - clear_patterns: A list of glob patterns to clear in the directory before writing new results to it. 
""" def __init__( self, output_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None], clear_patterns: Optional[List[str]] = None, ): if clear_patterns is None: clear_patterns = [] self.output_func = output_func self.clear_patterns = clear_patterns OUTPUTTERS = { "go_server": Outputter(go_server.output_go, []), "javascript": Outputter(javascript.output_javascript, []), "typescript": Outputter(javascript.output_typescript, []), "javascript_server": Outputter(javascript_server.output_javascript, []), "typescript_server": Outputter(javascript_server.output_typescript, []), "python_server": Outputter(python_server.output_python, []), "ruby_server": Outputter(ruby_server.output_ruby, []), "kotlin": Outputter(kotlin.output_kotlin, ["*.kt"]), "markdown": Outputter(markdown.output_markdown, []), "swift": Outputter(swift.output_swift, ["*.swift"]), "rust": Outputter(rust.output_rust, []), } def transform_metrics(objects): """ Transform the object model from one that represents the YAML definitions to one that reflects the type specifics needed by code generators. e.g. This will transform a `rate` to be a `numerator` if its denominator is external. 
""" counters = {} numerators_by_denominator: Dict[str, Any] = {} for category_name, category_val in objects.items(): if category_name == "tags": continue for metric in category_val.values(): fqmn = metric.identifier() if getattr(metric, "type", None) == "counter": counters[fqmn] = metric denominator_name = getattr(metric, "denominator_metric", None) if denominator_name: metric.type = "numerator" numerators_by_denominator.setdefault(denominator_name, []) numerators_by_denominator[denominator_name].append(metric) for denominator_name, numerators in numerators_by_denominator.items(): if denominator_name not in counters: raise ValueError( f"No `counter` named {denominator_name} found to be used as" "denominator for {numerators}", file=sys.stderr, ) counters[denominator_name].__class__ = metrics.Denominator counters[denominator_name].type = "denominator" counters[denominator_name].numerators = numerators def translate_metrics( input_filepaths: Iterable[Path], output_dir: Path, translation_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None], clear_patterns: Optional[List[str]] = None, options: Optional[Dict[str, Any]] = None, parser_config: Optional[Dict[str, Any]] = None, ): """ Translate the files in `input_filepaths` by running the metrics through a translation function and writing the results in `output_dir`. :param input_filepaths: list of paths to input metrics.yaml files :param output_dir: the path to the output directory :param translation_func: the function that actually performs the translation. It is passed the following arguments: - metrics_objects: The tree of metrics as pings as returned by `parser.parse_objects`. - output_dir: The path to the output directory. - options: A dictionary of output format-specific options. Examples of translation functions are in `kotlin.py` and `swift.py`. :param clear_patterns: a list of glob patterns of files to clear before generating the output files. By default, no files will be cleared (i.e. 
the directory should be left alone). :param options: dictionary of options. The available options are backend format specific. These are passed unchanged to `translation_func`. :param parser_config: A dictionary of options that change parsing behavior. See `parser.parse_metrics` for more info. """ if clear_patterns is None: clear_patterns = [] if options is None: options = {} if parser_config is None: parser_config = {} input_filepaths = util.ensure_list(input_filepaths) allow_missing_files = parser_config.get("allow_missing_files", False) if not input_filepaths and not allow_missing_files: print("❌ No metric files specified. ", end="") print("Use `--allow-missing-files` to not treat this as an error.") return 1 if lint.glinter(input_filepaths, parser_config): return 1 all_objects = parser.parse_objects(input_filepaths, parser_config) if util.report_validation_errors(all_objects): return 1 # allow_reserved is also relevant to the translators, so copy it there if parser_config.get("allow_reserved"): options["allow_reserved"] = True # We don't render tags anywhere yet. all_objects.value.pop("tags", None) # Apply additional general transformations to all metrics transform_metrics(all_objects.value) # Write everything out to a temporary directory, and then move it to the # real directory, for transactional integrity. with tempfile.TemporaryDirectory() as tempdir: tempdir_path = Path(tempdir) translation_func(all_objects.value, tempdir_path, options) if output_dir.is_file(): output_dir.unlink() elif output_dir.is_dir() and len(clear_patterns): for clear_pattern in clear_patterns: for filepath in output_dir.glob(clear_pattern): filepath.unlink() if len(list(output_dir.iterdir())): print(f"Extra contents found in '{output_dir}'.") # We can't use shutil.copytree alone if the directory already exists. # However, if it doesn't exist, make sure to create one otherwise # shutil.copy will fail. 
os.makedirs(str(output_dir), exist_ok=True) for filename in tempdir_path.glob("*"): shutil.copy(str(filename), str(output_dir)) return 0 def translate( input_filepaths: Iterable[Path], output_format: str, output_dir: Path, options: Optional[Dict[str, Any]] = None, parser_config: Optional[Dict[str, Any]] = None, ): """ Translate the files in `input_filepaths` to the given `output_format` and put the results in `output_dir`. :param input_filepaths: list of paths to input metrics.yaml files :param output_format: the name of the output format :param output_dir: the path to the output directory :param options: dictionary of options. The available options are backend format specific. :param parser_config: A dictionary of options that change parsing behavior. See `parser.parse_metrics` for more info. """ if options is None: options = {} if parser_config is None: parser_config = {} format_desc = OUTPUTTERS.get(output_format, None) if format_desc is None: raise ValueError(f"Unknown output format '{output_format}'") return translate_metrics( input_filepaths, output_dir, format_desc.output_func, format_desc.clear_patterns, options, parser_config, ) glean_parser-15.0.1/glean_parser/translation_options.py000077500000000000000000000037651466531427000234130ustar00rootroot00000000000000import pydoc def translate_options(ctx, param, value): text = """Target language options for Translate function These are backend specific and optional, provide as key:value Rust: no options. Swift: - `namespace`: The namespace to generate metrics in - `glean_namespace`: The namespace to import Glean from - `allow_reserved`: When True, this is a Glean-internal build - `with_buildinfo`: If "true" the `GleanBuildInfo` is generated. Otherwise generation of that file is skipped. Defaults to "true". - `build_date`: If set to `0` a static unix epoch time will be used. If set to a ISO8601 datetime string (e.g. `2022-01-03T17:30:00`) it will use that date. Other values will throw an error. 
If not set it will use the current date & time. Kotlin: - `namespace`: The package namespace to declare at the top of the generated files. Defaults to `GleanMetrics`. - `glean_namespace`: The package namespace of the glean library itself. This is where glean objects will be imported from in the generated code. JavaScript: - `platform`: Which platform are we building for. Options are `webext` and `qt`. Default is `webext`. - `version`: The version of the Glean.js Qt library being used. This option is mandatory when targeting Qt. Note that the version string must only contain the major and minor version i.e. 0.14. - `with_buildinfo`: If "true" a `gleanBuildInfo.(js|ts)` file is generated. Otherwise generation of that file is skipped. Defaults to "false". - `build_date`: If set to `0` a static unix epoch time will be used. If set to a ISO8601 datetime string (e.g. `2022-01-03T17:30:00`) it will use that date. Other values will throw an error. If not set it will use the current date & time. Markdown: - `project_title`: The project's title. (press q to exit)""" if value: if value[0].lower() == "help": pydoc.pager(text) ctx.exit() return value glean_parser-15.0.1/glean_parser/util.py000066400000000000000000000375371466531427000202600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import datetime import functools import json from pathlib import Path import sys import textwrap from typing import Any, Callable, Iterable, Sequence, Tuple, Union, Optional import urllib.request import appdirs # type: ignore import diskcache # type: ignore import jinja2 import jsonschema # type: ignore from jsonschema import _utils # type: ignore import yaml try: from yaml import CSafeLoader as SafeLoader except ImportError: from yaml import SafeLoader # type: ignore def date_fromisoformat(datestr: str) -> datetime.date: return datetime.date.fromisoformat(datestr) def datetime_fromisoformat(datestr: str) -> datetime.datetime: return datetime.datetime.fromisoformat(datestr) TESTING_MODE = "pytest" in sys.modules JSONType = Union[list, dict, str, int, float, None] """ The types supported by JSON. This is only an approximation -- this should really be a recursive type. """ class DictWrapper(dict): pass class _NoDatesSafeLoader(SafeLoader): @classmethod def remove_implicit_resolver(cls, tag_to_remove): """ Remove implicit resolvers for a particular tag Takes care not to modify resolvers in super classes. We want to load datetimes as strings, not dates, because we go on to serialise as json which doesn't have the advanced types of yaml, and leads to incompatibilities down the track. """ if "yaml_implicit_resolvers" not in cls.__dict__: cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() for first_letter, mappings in cls.yaml_implicit_resolvers.items(): cls.yaml_implicit_resolvers[first_letter] = [ (tag, regexp) for tag, regexp in mappings if tag != tag_to_remove ] # Since we use JSON schema to validate, and JSON schema doesn't support # datetimes, we don't want the YAML loader to give us datetimes -- just # strings. _NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp") def yaml_load(stream): """ Map line number to yaml nodes, and preserve the order of metrics as they appear in the metrics.yaml file. 
""" class SafeLineLoader(_NoDatesSafeLoader): pass def _construct_mapping_adding_line(loader, node): loader.flatten_mapping(node) mapping = DictWrapper(loader.construct_pairs(node)) mapping.defined_in = {"line": node.start_mark.line} return mapping SafeLineLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct_mapping_adding_line ) return yaml.load(stream, SafeLineLoader) def ordered_yaml_dump(data, **kwargs): class OrderedDumper(yaml.Dumper): pass def _dict_representer(dumper, data): return dumper.represent_mapping( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items() ) OrderedDumper.add_representer(DictWrapper, _dict_representer) return yaml.dump(data, Dumper=OrderedDumper, **kwargs) def load_yaml_or_json(path: Path): """ Load the content from either a .json or .yaml file, based on the filename extension. :param path: `pathlib.Path` object :rtype object: The tree of objects as a result of parsing the file. :raises ValueError: The file is neither a .json, .yml or .yaml file. :raises FileNotFoundError: The file does not exist. """ # If in py.test, support bits of literal JSON/YAML content if TESTING_MODE and isinstance(path, dict): return yaml_load(yaml.dump(path)) if path.suffix == ".json": with path.open("r", encoding="utf-8") as fd: return json.load(fd) elif path.suffix in (".yml", ".yaml", ".yamlx"): with path.open("r", encoding="utf-8") as fd: return yaml_load(fd) else: raise ValueError(f"Unknown file extension {path.suffix}") def ensure_list(value: Any) -> Sequence[Any]: """ Ensures that the value is a list. If it is anything but a list or tuple, a list with a single element containing only value is returned. """ if not isinstance(value, (list, tuple)): return [value] return value def to_camel_case(input: str, capitalize_first_letter: bool) -> str: """ Convert the value to camelCase. This additionally replaces any '.' with '_'. The first letter is capitalized depending on `capitalize_first_letter`. 
""" sanitized_input = input.replace(".", "_").replace("-", "_") # Filter out any empty token. This could happen due to leading '_' or # consecutive '__'. tokens = [s.capitalize() for s in sanitized_input.split("_") if len(s) != 0] # If we're not meant to capitalize the first letter, then lowercase it. if not capitalize_first_letter: tokens[0] = tokens[0].lower() # Finally join the tokens and capitalize. return "".join(tokens) def camelize(value: str) -> str: """ Convert the value to camelCase (with a lower case first letter). This is a thin wrapper around inflection.camelize that handles dots in addition to underscores. """ return to_camel_case(value, False) def Camelize(value: str) -> str: """ Convert the value to CamelCase (with an upper case first letter). This is a thin wrapper around inflection.camelize that handles dots in addition to underscores. """ return to_camel_case(value, True) def snake_case(value: str) -> str: """ Convert the value to snake_case. """ return value.lower().replace(".", "_").replace("-", "_") def screaming_case(value: str) -> str: """ Convert the value to SCREAMING_SNAKE_CASE. """ return value.upper().replace(".", "_").replace("-", "_") @functools.lru_cache() def get_jinja2_template( template_name: str, filters: Iterable[Tuple[str, Callable]] = () ): """ Get a Jinja2 template that ships with glean_parser. The template has extra filters for camel-casing identifiers. :param template_name: Name of a file in ``glean_parser/templates`` :param filters: tuple of 2-tuple. A tuple of (name, func) pairs defining additional filters. 
""" env = jinja2.Environment( loader=jinja2.PackageLoader("glean_parser", "templates"), trim_blocks=True, lstrip_blocks=True, ) env.filters["camelize"] = camelize env.filters["Camelize"] = Camelize env.filters["scream"] = screaming_case for filter_name, filter_func in filters: env.filters[filter_name] = filter_func return env.get_template(template_name) def keep_value(f): """ Wrap a generator so the value it returns (rather than yields), will be accessible on the .value attribute when the generator is exhausted. """ class ValueKeepingGenerator(object): def __init__(self, g): self.g = g self.value = None def __iter__(self): self.value = yield from self.g @functools.wraps(f) def g(*args, **kwargs): return ValueKeepingGenerator(f(*args, **kwargs)) return g def get_null_resolver(schema): """ Returns a JSON Pointer resolver that does nothing. This lets us handle the moz: URLs in our schemas. """ class NullResolver(jsonschema.RefResolver): def resolve_remote(self, uri): if uri in self.store: return self.store[uri] if uri == "": return self.referrer return NullResolver.from_schema(schema) def fetch_remote_url(url: str, cache: bool = True): """ Fetches the contents from an HTTP url or local file path, and optionally caches it to disk. """ # Include the Python version in the cache key, since caches aren't # sharable across Python versions. key = (url, str(sys.version_info)) is_http = url.startswith("http") if not is_http: with open(url, "r", encoding="utf-8") as fd: return fd.read() if cache: cache_dir = appdirs.user_cache_dir("glean_parser", "mozilla") with diskcache.Cache(cache_dir) as dc: if key in dc: return dc[key] contents: str = urllib.request.urlopen(url).read() if cache: with diskcache.Cache(cache_dir) as dc: dc[key] = contents return contents _unset = _utils.Unset() def pprint_validation_error(error) -> str: """ A version of jsonschema's ValidationError __str__ method that doesn't include the schema fragment that failed. 
This makes the error messages much more succinct. It also shows any subschemas of anyOf/allOf that failed, if any (what jsonschema calls "context"). """ essential_for_verbose = ( error.validator, error.validator_value, error.instance, error.schema, ) if any(m is _unset for m in essential_for_verbose): return textwrap.fill(error.message) instance = error.instance for path in list(error.relative_path)[::-1]: if isinstance(path, str): instance = {path: instance} else: instance = [instance] yaml_instance = ordered_yaml_dump(instance, width=72, default_flow_style=False) parts = ["```", yaml_instance.rstrip(), "```", "", textwrap.fill(error.message)] if error.context: parts.extend( textwrap.fill(x.message, initial_indent=" ", subsequent_indent=" ") for x in error.context ) description = error.schema.get("description") if description: parts.extend( ["", "Documentation for this node:", textwrap.indent(description, " ")] ) return "\n".join(parts) def format_error( filepath: Union[str, Path], header: str, content: str, lineno: Optional[int] = None, ) -> str: """ Format a jsonshema validation error. """ if isinstance(filepath, Path): filepath = filepath.resolve() else: filepath = "" if lineno: filepath = f"{filepath}:{lineno}" if header: return f"{filepath}: {header}\n{textwrap.indent(content, ' ')}" else: return f"{filepath}:\n{textwrap.indent(content, ' ')}" def parse_expiration_date(expires: str) -> datetime.date: """ Parses the expired field date (yyyy-mm-dd) as a date. Raises a ValueError in case the string is not properly formatted. """ try: return date_fromisoformat(expires) except (TypeError, ValueError): raise ValueError( f"Invalid expiration date '{expires}'. " "Must be of the form yyyy-mm-dd in UTC." ) def parse_expiration_version(expires: str) -> int: """ Parses the expired field version string as an integer. Raises a ValueError in case the string does not contain a valid positive integer. 
""" try: if isinstance(expires, int): version_number = int(expires) if version_number > 0: return version_number # Fall-through: if it's not an integer or is not greater than zero, # raise an error. raise ValueError() except ValueError: raise ValueError( f"Invalid expiration version '{expires}'. Must be a positive integer." ) def is_expired(expires: str, major_version: Optional[int] = None) -> bool: """ Parses the `expires` field in a metric or ping and returns whether the object should be considered expired. """ if expires == "never": return False elif expires == "expired": return True elif major_version is not None: return parse_expiration_version(expires) <= major_version else: date = parse_expiration_date(expires) return date <= datetime.datetime.now(datetime.timezone.utc).date() def validate_expires(expires: str, major_version: Optional[int] = None) -> None: """ If expiration by major version is enabled, raises a ValueError in case `expires` is not a positive integer. Otherwise raises a ValueError in case the `expires` is not ISO8601 parseable, or in case the date is more than 730 days (~2 years) in the future. """ if expires in ("never", "expired"): return if major_version is not None: parse_expiration_version(expires) # Don't need to keep parsing dates if expiration by version # is enabled. We don't allow mixing dates and versions for a # single product. return date = parse_expiration_date(expires) max_date = datetime.datetime.now() + datetime.timedelta(days=730) if date > max_date.date(): raise ValueError( f"'{expires}' is more than 730 days (~2 years) in the future.", "Please make sure this is intentional.", "You can supress this warning by adding EXPIRATION_DATE_TOO_FAR to no_lint", "See: https://mozilla.github.io/glean_parser/metrics-yaml.html#no_lint", ) def build_date(date: Optional[str]) -> datetime.datetime: """ Generate the build timestamp. If `date` is set to `0` a static unix epoch time will be used. 
If `date` it is set to a ISO8601 datetime string (e.g. `2022-01-03T17:30:00`) it will use that date. Note that any timezone offset will be ignored and UTC will be used. Otherwise it will throw an error. If `date` is `None` it will use the current date & time. """ if date is not None: date = str(date) if date == "0": ts = datetime.datetime(1970, 1, 1, 0, 0, 0) else: ts = datetime_fromisoformat(date).replace(tzinfo=datetime.timezone.utc) else: ts = datetime.datetime.now(datetime.timezone.utc) return ts def report_validation_errors(all_objects): """ Report any validation errors found to the console. Returns the number of errors reported. """ found_errors = 0 for error in all_objects: found_errors += 1 print("=" * 78, file=sys.stderr) print(error, file=sys.stderr) return found_errors def remove_output_params(d, output_params): """ Remove output-only params, such as "defined_in", in order to validate the output against the input schema. """ modified_dict = {} for key, value in d.items(): if key is not output_params: modified_dict[key] = value return modified_dict # Names of parameters to pass to all metrics constructors constructors. common_metric_args = [ "category", "name", "send_in_pings", "lifetime", "disabled", ] # Names of parameters that only apply to some of the metrics types. # **CAUTION**: This list needs to be in the order the Swift & Rust type constructors # expects them. (The other language bindings don't care about the order). extra_metric_args = [ "time_unit", "memory_unit", "allowed_extra_keys", "reason_codes", "range_min", "range_max", "bucket_count", "histogram_type", "numerators", ] # This includes only things that the language bindings care about, not things # that are metadata-only or are resolved into other parameters at parse time. # **CAUTION**: This list needs to be in the order the Swift & Rust type constructors # expects them. (The other language bindings don't care about the order). 
The # `test_order_of_fields` test checks that the generated code is valid. # **DO NOT CHANGE THE ORDER OR ADD NEW FIELDS IN THE MIDDLE** metric_args = common_metric_args + extra_metric_args # Names of ping parameters to pass to constructors. ping_args = [ "name", "include_client_id", "send_if_empty", "precise_timestamps", "include_info_sections", "enabled", "schedules_pings", "reason_codes", ] # Names of parameters to pass to both metric and ping constructors (no duplicates). extra_args = metric_args + [v for v in ping_args if v not in metric_args] glean_parser-15.0.1/glean_parser/validate_ping.py000066400000000000000000000041061466531427000220730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Validates the contents of a Glean ping against the schema. """ import functools import io import json from pathlib import Path import sys import jsonschema # type: ignore from . import util ROOT_DIR = Path(__file__).parent SCHEMAS_DIR = ROOT_DIR / "schemas" @functools.lru_cache(maxsize=1) def _get_ping_schema(schema_url): contents = util.fetch_remote_url(schema_url) return json.loads(contents) def _validate_ping(ins, outs, schema_url): schema = _get_ping_schema(schema_url) resolver = util.get_null_resolver(schema) document = json.load(ins) validator_class = jsonschema.validators.validator_for(schema) validator = validator_class(schema, resolver=resolver) has_error = 0 for error in validator.iter_errors(document): outs.write("=" * 76) outs.write("\n") outs.write(util.format_error("", "", util.pprint_validation_error(error))) outs.write("\n") has_error = 1 return has_error def validate_ping(ins, outs=None, schema_url=None): """ Validates the contents of a Glean ping. 
:param ins: Input stream or file path to the ping contents to validate :param outs: Output stream to write errors to. (Defaults to stdout) :param schema_url: HTTP URL or local filesystem path to Glean ping schema. Defaults to the current version of the schema in mozilla-pipeline-schemas. :rtype: int 1 if any errors occurred, otherwise 0. """ if schema_url is None: raise TypeError("Missing required argument 'schema_url'") if outs is None: outs = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") if isinstance(ins, (str, bytes, Path)): with open(ins, "r", encoding="utf-8") as fd: return _validate_ping(fd, outs, schema_url=schema_url) else: return _validate_ping(ins, outs, schema_url=schema_url) glean_parser-15.0.1/pytest.ini000066400000000000000000000003561466531427000163050ustar00rootroot00000000000000[pytest] markers = web_dependency: mark a test that requires a web connection. node_dependency: mark a test that requires node. ruby_dependency: mark a test that requires ruby. go_dependency: mark a test that requires go. glean_parser-15.0.1/requirements_dev.txt000066400000000000000000000003401466531427000203670ustar00rootroot00000000000000coverage==7.5.3 mypy==1.4.1 pip pytest-runner==5.3.2 pytest==8.2.2 recommonmark==0.7.1 ruff==0.6.1 Sphinx==7.3.7; python_version > '3.8' twine==5.0.0 types-Jinja2==2.11.9 types-PyYAML==6.0.12.20240311 wheel yamllint==1.28.0 glean_parser-15.0.1/server_telemetry/000077500000000000000000000000001466531427000176505ustar00rootroot00000000000000glean_parser-15.0.1/server_telemetry/sdk-metrics-compat.yaml000066400000000000000000000062021466531427000242420ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # This file defines metrics that are recorded by the Glean SDK # but are not sent with the server-side Glean pings. 
They were # deployed erroneously to some BQ tables in the past, and are now # included for compatibility reasons. They are not used in new # server-side code. --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 glean.client.annotation: experimentation_id: type: string lifetime: application send_in_pings: - events description: | An experimentation identifier derived and provided by the application for the purpose of experimentation enrollment. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1848201 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1848201#c5 data_sensitivity: - technical notification_emails: - glean-team@mozilla.com expires: never glean.error: invalid_value: type: labeled_counter description: | Counts the number of times a metric was set to an invalid value. The labels are the `category.name` identifier of the metric. bugs: - https://bugzilla.mozilla.org/1499761 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1499761#c5 data_sensitivity: - technical notification_emails: - glean-team@mozilla.com expires: never send_in_pings: - events no_lint: - COMMON_PREFIX invalid_label: type: labeled_counter description: | Counts the number of times a metric was set with an invalid label. The labels are the `category.name` identifier of the metric. bugs: - https://bugzilla.mozilla.org/1499761 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1499761#c5 data_sensitivity: - technical notification_emails: - glean-team@mozilla.com expires: never send_in_pings: - events no_lint: - COMMON_PREFIX invalid_state: type: labeled_counter description: | Counts the number of times a timing metric was used incorrectly. The labels are the `category.name` identifier of the metric. 
bugs: - https://bugzilla.mozilla.org/1566380 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1499761#c5 data_sensitivity: - technical notification_emails: - glean-team@mozilla.com expires: never send_in_pings: - events no_lint: - COMMON_PREFIX invalid_overflow: type: labeled_counter description: | Counts the number of times a metric was set a value that overflowed. The labels are the `category.name` identifier of the metric. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1591912 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1591912#c3 data_sensitivity: - technical notification_emails: - glean-team@mozilla.com expires: never send_in_pings: - events no_lint: - COMMON_PREFIX glean_parser-15.0.1/server_telemetry/server-side-pings.yaml000066400000000000000000000015011466531427000240770ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # This file defines the pings that are recorded by server-side Glean. # Their code APIs is automatically generated, at build time using, # the `glean_parser` PyPI package. --- $schema: moz://mozilla.org/schemas/glean/pings/2-0-0 events: description: | The events ping's purpose is to transport all of the event metric information. The `events` ping is automatically sent when the application becomes inactive. 
include_client_id: true bugs: - https://bugzilla.mozilla.org/1512938 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1512938#c3 notification_emails: - glean-team@mozilla.com glean_parser-15.0.1/setup.cfg000066400000000000000000000000701466531427000160660ustar00rootroot00000000000000[bdist_wheel] python_tag = py3 [aliases] test = pytest glean_parser-15.0.1/setup.py000077500000000000000000000040521466531427000157660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """The setup script.""" import sys from setuptools import setup, find_packages if sys.version_info < (3, 8): print("glean_parser requires at least Python 3.8", file=sys.stderr) sys.exit(1) with open("README.md", encoding="utf-8") as readme_file: readme = readme_file.read() with open("CHANGELOG.md", encoding="utf-8") as history_file: history = history_file.read() requirements = [ "appdirs>=1.4", "Click>=7", "diskcache>=4", "Jinja2>=2.10.1", "jsonschema>=3.0.2", "PyYAML>=5.3.1", ] setup_requirements = [ "pytest-runner", "setuptools-scm>=7", ] test_requirements = [ "pytest", ] setup( author="The Glean Team", author_email="glean-team@mozilla.com", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ], description="Parser tools for Mozilla's Glean telemetry", entry_points={ "console_scripts": [ "glean_parser=glean_parser.__main__:main_wrapper", ], }, install_requires=requirements, long_description=readme + "\n\n" + history, 
long_description_content_type="text/markdown", include_package_data=True, keywords="glean_parser", name="glean_parser", packages=find_packages(include=["glean_parser"]), setup_requires=setup_requirements, test_suite="tests", tests_require=test_requirements, url="https://github.com/mozilla/glean_parser", zip_safe=False, use_scm_version=True, ) glean_parser-15.0.1/tests/000077500000000000000000000000001466531427000154125ustar00rootroot00000000000000glean_parser-15.0.1/tests/conftest.py000066400000000000000000000032641466531427000176160ustar00rootroot00000000000000import pytest def pytest_addoption(parser): parser.addoption( "--run-web-tests", action="store_true", default=False, help="Run tests that require a web connection", ) parser.addoption( "--run-node-tests", action="store_true", default=False, help="Run tests that require node.js", ) parser.addoption( "--run-ruby-tests", action="store_true", default=False, help="Run tests that require Ruby", ) parser.addoption( "--run-go-tests", action="store_true", default=False, help="Run tests that require Go", ) def pytest_collection_modifyitems(config, items): if not config.getoption("--run-web-tests"): skip_web = pytest.mark.skip(reason="Need --run-web-tests option to run") for item in items: if "web_dependency" in item.keywords: item.add_marker(skip_web) if not config.getoption("--run-node-tests"): skip_node = pytest.mark.skip(reason="Need --run-node-tests option to run") for item in items: if "node_dependency" in item.keywords: item.add_marker(skip_node) if not config.getoption("--run-ruby-tests"): skip_ruby = pytest.mark.skip(reason="Need --run-ruby-tests option to run") for item in items: if "ruby_dependency" in item.keywords: item.add_marker(skip_ruby) if not config.getoption("--run-go-tests"): skip_go = pytest.mark.skip(reason="Need --run-go-tests option to run") for item in items: if "go_dependency" in item.keywords: item.add_marker(skip_go) 
glean_parser-15.0.1/tests/data/000077500000000000000000000000001466531427000163235ustar00rootroot00000000000000glean_parser-15.0.1/tests/data/all_metrics.yaml000066400000000000000000000051201466531427000215030ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ # Note: we're using YAML anchors to re-use the values # defined in the first metric. # Saves us some typing. --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 all_metrics: counter: &defaults type: counter lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never labeled_counter: <<: *defaults type: labeled_counter labels: - label_a - label_b bool: <<: *defaults type: boolean labeled_bool: <<: *defaults type: labeled_boolean labels: - label_a - label_b string: <<: *defaults type: string labeled_string: <<: *defaults type: labeled_string labels: - label_a - label_b strlist: <<: *defaults type: string_list timespan: <<: *defaults type: timespan time_unit: second timing_dist: <<: *defaults type: timing_distribution time_unit: minute memory_dist: <<: *defaults type: memory_distribution memory_unit: megabyte custom_dist: <<: *defaults type: custom_distribution range_min: 0 range_max: 100 bucket_count: 10 histogram_type: linear uuid: <<: *defaults type: uuid url: <<: *defaults type: url datetime: <<: *defaults type: datetime time_unit: day event: <<: *defaults type: event extra_keys: source: description: Source of this event type: string extraKeyNOTJUSTdotted.snake: description: An extra key with an expressive name. 
type: boolean quantity: <<: *defaults type: quantity unit: tabs rate: <<: *defaults type: rate rate_counter: <<: *defaults type: counter rate_external: <<: *defaults type: rate denominator_metric: all_metrics.rate_counter text: <<: *defaults type: text labeled_custom_distribution: <<: *defaults type: labeled_custom_distribution range_min: 0 range_max: 100 bucket_count: 10 histogram_type: linear labels: - aLabel - 2label labeled_memory_distribution: <<: *defaults type: labeled_memory_distribution memory_unit: kilobyte labels: - aLabel - 2label labeled_timing_distribution: <<: *defaults type: labeled_timing_distribution time_unit: millisecond labels: - aLabel - 2label glean_parser-15.0.1/tests/data/all_pings.yaml000066400000000000000000000010151466531427000211540ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: some_metric: type: uuid lifetime: ping description: > Lorem ipsum dolor sit amet, consectetur adipiscing elit. bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: [all-pings] expires: 2021-10-01 glean_parser-15.0.1/tests/data/bad_ping.yamlx000066400000000000000000000015651466531427000211510ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # NOTE: This file DOES NOT have a trailing newline. # This will trigger the linter. # # If you accidentally add a newline, # you can use this oneline to remove it again: # # head -c -1 tests/data/bad_ping.yamlx > tests/data/bad_ping.yamlx2 && \ # mv tests/data/bad_ping.yamlx2 tests/data/bad_ping.yamlx --- $schema: moz://mozilla.org/schemas/glean/pings/2-0-0 noname: description: | Just for testing. 
Note that the notification_emails list below is NOT an array (it's missing the leading dash) include_client_id: false bugs: - https://github.com/mozilla-rally/rally-study-01/issues/114 data_reviews: - TODO notification_emails: CHANGE-ME@example.comglean_parser-15.0.1/tests/data/core.yaml000066400000000000000000000223551466531427000201460ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: client_id: type: uuid lifetime: user description: > A UUID identifying a profile and allowing user-oriented Correlation of data. Some Unicode: جمع 搜集 bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews - http://example.com/other_reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core - glean_client_info expires: 2100-01-01 event_example: type: event description: > Just testing events bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com extra_keys: key1: description: "This is key one" type: string key2: description: "This is key two" type: string expires: never preference_toggled: type: event description: | Just testing events bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com extra_keys: preference: type: string description: "This is key one" enabled: type: boolean description: "This is key two" swapped: type: quantity description: "This is key three" expires: never core_ping: seq: type: counter lifetime: application description: > Running ping counter for this ping. This works as a sequence number to allow detecting for missing send_in_pings. 
bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 created: type: datetime time_unit: day description: > The date the ping was created. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 sessions: type: counter description: > The number of sessions since the last upload. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 durations: type: timespan description: > The combined duration, in seconds, of all the sessions since the last upload. time_unit: second bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 searches: type: labeled_counter description: > The search use counts for each search source and engine combination, e.g. “engine.source” = 3. labels: - this.is.fine - this_is_fine_too - this.is_still_fine - thisisfine - _.is_fine - this.is-fine - this-is-fine bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 flash_usage: type: counter description: > The number of times the flash plugin was used since the last upload. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 default_browser: type: boolean description: > True if the user has set Firefox as the default browser. 
bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 environment: locale: type: string lifetime: application description: The application locale, e.g. “en-US”. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 os: type: string lifetime: application description: The name of the operating system. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 os_version: type: string lifetime: application description: The version of the operating system. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 device: type: string lifetime: application description: > Build.MANUFACTURER + " - " + Build.MODEL, where manufacturer is truncated to 12 characters & model is truncated to 19 characters. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 arch: type: string lifetime: application description: The architecture of the device. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 profile_date: type: datetime lifetime: user description: Profile creation date. time_unit: day bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 default_search: type: string lifetime: application description: > Identifier of the default search engine, e.g. “yahoo”. 
bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 display_version: type: string lifetime: application description: > Version displayed to the user, e.g. “57.0b2”. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 distribution_id: type: string lifetime: application description: > Distribution identifier, e.g. “lollipop”. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 campaign_id: type: string lifetime: application description: > Adjust’s campaign identifier. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 event_example: type: event description: > Just testing events bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com extra_keys: key1: description: "This is key one" type: string key2: description: "This is key two" type: string expires: 2100-01-01 event_no_keys: type: event description: > Just testing events bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 dotted.category: metric: type: string lifetime: application description: > dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 glean.internal.metrics: internal: type: string lifetime: application description: > internal metric Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt 
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 glean_parser-15.0.1/tests/data/duplicate_labeled.yaml000066400000000000000000000014221466531427000226300ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 category: counter: type: counter lifetime: user description: > Foo bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 labeled_counter: type: labeled_counter lifetime: user description: > Foo bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 glean_parser-15.0.1/tests/data/duplicate_send_in_ping.yaml000066400000000000000000000010061466531427000236720ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: test: type: counter lifetime: user description: duplicate send in ping test bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core - metrics - default expires: 2100-01-01 glean_parser-15.0.1/tests/data/empty.yaml000066400000000000000000000001501466531427000203410ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ glean_parser-15.0.1/tests/data/event_key_ordering.yaml000066400000000000000000000015001466531427000230650ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 event: example: type: event description: > Testing event extra key ordering bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com extra_keys: # Keys are deliberately non-alphabetical here to ensure that they are # sorted by glean_parser charlie: description: "one" type: string alice: description: "two" type: string bob: description: "three" type: string And1WithExtraCasing: description: "four" type: boolean expires: 2100-01-01 glean_parser-15.0.1/tests/data/events_with_types.yaml000066400000000000000000000012601466531427000227710ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 core: preference_toggled: type: event description: | Just testing events bugs: - https://bugzilla.mozilla.org/1123456789 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com extra_keys: preference: type: string description: "This is key one" enabled: type: boolean description: "This is key two" swapped: type: quantity description: "This is key three" expires: never glean_parser-15.0.1/tests/data/fxa-server-metrics.yaml000066400000000000000000000007511466531427000227400ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 event: name: type: string description: | The name of the event lifetime: ping send_in_pings: - accounts-events notification_emails: - CHANGE-ME@example.com bugs: - https://bugzilla.mozilla.org/11566356 data_reviews: - http://example.com/reviews expires: never glean_parser-15.0.1/tests/data/fxa-server-pings.yaml000066400000000000000000000006441466531427000224130ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/pings/2-0-0 accounts-events: description: | An account event include_client_id: false send_if_empty: false bugs: - https://bugzilla.mozilla.org/11566356 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com glean_parser-15.0.1/tests/data/go_server_metrics.yaml000066400000000000000000000031651466531427000227350ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 metric: name: type: string description: | Test string metric lifetime: application send_in_pings: - events notification_emails: - CHANGE-ME@example.com bugs: - https://bugzilla.mozilla.org/1 data_reviews: - http://example.com/reviews expires: never request_count: type: quantity unit: ad description: | Test quantity metric lifetime: application send_in_pings: - events notification_emails: - CHANGE-ME@example.com bugs: - https://bugzilla.mozilla.org/1 data_reviews: - http://example.com/reviews expires: never request_datetime: type: datetime description: | Test datetime metric lifetime: application send_in_pings: - events notification_emails: - CHANGE-ME@example.com bugs: - https://bugzilla.mozilla.org/1 data_reviews: - http://example.com/reviews expires: never backend: test_event: type: event description: > test event bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never extra_keys: event_field_string: description: > A string extra field type: string event_field_quantity: description: > A quantity extra field type: quantity event_field_bool: description: > A boolean extra field type: boolean glean_parser-15.0.1/tests/data/go_server_metrics_unsupported.yaml000066400000000000000000000046121466531427000254030ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 metric: bool: type: boolean description: > boolean bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events labeled_bool: type: labeled_boolean description: > labeled boolean bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events labeled_string: type: labeled_string description: > labeled string bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events string_list: type: string_list description: > string list bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events timespan: type: timespan description: > timespan bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events uuid: type: uuid description: > uuid bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events url: type: url description: > url bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events datetime: type: datetime description: > datetime bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events backend: test_event: type: event description: > test event bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never extra_keys: event_field_1: description: > A first extra field type: string glean_parser-15.0.1/tests/data/invalid-ping-names.yaml000066400000000000000000000011251466531427000226700ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: client_id: type: uuid lifetime: user description: > A UUID identifying a profile and allowing user-oriented Correlation of data. Some Unicode: جمع 搜集 bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - invalid_ping_name expires: 2100-01-01 glean_parser-15.0.1/tests/data/invalid.yamlx000066400000000000000000000002011466531427000210160ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ !!! THIS ISN'T YAML !!! glean_parser-15.0.1/tests/data/jwe.yaml000066400000000000000000000007411466531427000177760ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 crypto: secret: type: jwe lifetime: ping description: | A secret token. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1700991 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1700991 notification_emails: - CHANGE-ME@example.com expires: never glean_parser-15.0.1/tests/data/metric-with-tags.yaml000066400000000000000000000013121466531427000223740ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 $tags: - global_tag telemetry: client_id: type: uuid lifetime: ping description: > A UUID identifying a profile and allowing user-oriented Correlation of data. 
Some Unicode: جمع 搜集 bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - metrics metadata: tags: - banana - apple expires: 2100-01-01 no_lint: - EXPIRATION_DATE_TOO_FAR glean_parser-15.0.1/tests/data/mixed-expirations.yaml000066400000000000000000000014001466531427000226530ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 category: counter: type: counter lifetime: user description: > Foo bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 other_counter: type: counter lifetime: user description: > Foo bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 99 glean_parser-15.0.1/tests/data/object.yaml000066400000000000000000000046051466531427000204620ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 complex.types: number_array: type: object description: Just a list of nums. bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never structure: type: array items: type: number array_in_array: type: object description: An array of arrays of bools. 
bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never structure: type: array items: type: array items: type: boolean # yamllint disable rule:line-length activity.stream: tiles: type: object description: | The id, position, and shim for each newtab tile and any extra nested data, e.g. `[{"id": 1, "pos": 2, "shim": "long-id", "nested": { "count": 17, "info": "more info"}}]` bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never structure: type: array items: type: object properties: id: type: number pos: type: number shim: type: string nested: type: object properties: count: type: number info: type: string crash.stack: threads: type: object description: | All threads' frame information. e.g. `[{frames: [{module_index: 0, ip: "0xdecafc0ffe", trust: "scan"}]}]` bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never structure: type: array items: type: object properties: frames: type: array items: type: object properties: module_index: type: number ip: type: string trust: type: string glean_parser-15.0.1/tests/data/old_event_api.yamlx000066400000000000000000000012401466531427000222040ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ # Note: we're using YAML anchors to re-use the values # defined in the first metric. # Saves us some typing. 
--- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 old_event: name: type: event # note: this _should_ have been time_unit lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never extra_keys: key_a: description: none key_b: description: none glean_parser-15.0.1/tests/data/ordering.yaml000066400000000000000000000023201466531427000210150ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 testing.ordering: first_test_metric: type: counter lifetime: application description: > Test metric to check ordering. Second alphabetically. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1820648 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never a_second_test_metric: type: counter lifetime: application description: > An additional test metric to check ordering. First alphabetically. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1820648 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never third_test_metric: type: counter lifetime: application description: > Final test metric to check ordering. Third alphabetically. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1820648 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: never glean_parser-15.0.1/tests/data/pings.yaml000066400000000000000000000055251466531427000203360ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/pings/2-0-0 custom-ping: description: This is a custom ping include_client_id: false bugs: - http://bugzilla.mozilla.com/1137353 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com no_lint: [REDUNDANT_PING] really-custom-ping: description: > Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. include_client_id: true bugs: - http://bugzilla.mozilla.com/1137353 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com no_lint: [REDUNDANT_PING] custom-ping-might-be-empty: description: This is another custom ping include_client_id: true send_if_empty: true bugs: - http://bugzilla.mozilla.com/1137353 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com reasons: silly: A silly reason for sending a ping. serious: A serious reason for sending a ping. 
no_lint: [REDUNDANT_PING] custom-ping-no-info: description: A custom ping with no info sections include_client_id: false send_if_empty: true bugs: - http://bugzilla.mozilla.com/1866559 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com metadata: include_info_sections: false no_lint: [REDUNDANT_PING] custom-ping-enabled: description: A custom ping with an explicit enabled property set to true include_client_id: false send_if_empty: true bugs: - http://bugzilla.mozilla.com/1866559 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com enabled: true no_lint: [REDUNDANT_PING] custom-ping-disabled: description: A custom ping with an explicit enabled property set to false include_client_id: false send_if_empty: true bugs: - http://bugzilla.mozilla.com/1866559 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com enabled: false no_lint: [REDUNDANT_PING] custom-with-ping-schedule: description: A custom ping with a ping schedule include_client_id: true send_if_empty: true bugs: - http://bugzilla.mozilla.com/1866559 data_reviews: - http://nowhere.com/reviews notification_emails: - CHANGE-ME@test-only.com metadata: ping_schedule: - custom-ping-no-info - custom-ping-might-be-empty no_lint: [REDUNDANT_PING] glean_parser-15.0.1/tests/data/rate.yaml000066400000000000000000000034071466531427000201460ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 testing.rates: has_internal_denominator: type: rate lifetime: application description: > Test metric to ensure rates with internal denominators work. 
bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166#c1 notification_emails: - CHANGE-ME@example.com expires: never has_external_denominator: type: rate lifetime: application description: > Test metric to ensure rates with external denominators work. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166#c1 notification_emails: - CHANGE-ME@example.com expires: never denominator_metric: testing.rates.the_denominator also_has_external_denominator: type: rate lifetime: application description: > Test metric to ensure rates with shared external denominators work. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166#c1 notification_emails: - CHANGE-ME@example.com expires: never denominator_metric: testing.rates.the_denominator the_denominator: type: counter lifetime: application description: > Test denominator for rate metrics. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1645166#c1 notification_emails: - CHANGE-ME@example.com expires: never glean_parser-15.0.1/tests/data/ruby_server_metrics_unsupported.yaml000066400000000000000000000023451466531427000257600ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 identifiers: fxa_account_id: type: boolean description: > The user's FxA account ID, if available. bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com lifetime: application expires: never send_in_pings: - events backend: object_update: type: event description: > Event triggered by the backend to record the change in state of an object (e.g. API requests to the mozilla.social Mastodon server). 
In the future, we could potentially use this event to track changes in state to core Mastodon objects (e.g. accounts and posts). bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never extra_keys: object_type: description: > A simple name to describe the object whose state changed. For example, `api_request`. type: string object_state: description: > A JSON representation of the latest state of the object. type: string glean_parser-15.0.1/tests/data/ruby_server_pings_unsupported.yaml000066400000000000000000000024421466531427000254300ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 identifiers: fxa_account_id: type: string description: > The user's FxA account ID, if available. bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never lifetime: application send_in_pings: - events - tests backend: object_update: type: event description: > Event triggered by the backend to record the change in state of an object (e.g. API requests to the mozilla.social Mastodon server). In the future, we could potentially use this event to track changes in state to core Mastodon objects (e.g. accounts and posts). bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never send_in_pings: - events - tests extra_keys: object_type: description: > A simple name to describe the object whose state changed. For example, `api_request`. type: string object_state: description: > A JSON representation of the latest state of the object. type: string glean_parser-15.0.1/tests/data/schema-violation.yaml000066400000000000000000000120141466531427000224470ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 gleantest.foo: a: b gleantest: test_event: type: event gleantest.lifetime: test_counter_inv_lt: description: A counter with an invalid lifetime type: counter lifetime: user2 bugs: - https://bugzilla.mozilla.org/1123456789 notification_emails: ['nobody@example.com'] expires: never data_reviews: ['http://example.com'] gleantest.with.way.too.long.category.name: test_event_inv_lt: description: A test metric type: boolean bugs: - https://bugzilla.mozilla.org/1580707 notification_emails: ['nobody@example.com'] expires: never data_reviews: ['http://example.com'] gleantest.short.category: very_long_metric_name_this_is_too_long_as_well_since_it_has_sooooo_many_characters: description: A test metric type: boolean bugs: - https://bugzilla.mozilla.org/1580707 notification_emails: ['nobody@example.com'] expires: never data_reviews: ['http://example.com'] gleantest.event: event_too_many_extras: description: A test event with too many extra keys type: event bugs: - https://bugzilla.mozilla.org/1580707 notification_emails: ['nobody@example.com'] expires: never data_reviews: ['http://example.com'] extra_keys: key_1: description: Sample extra key type: string key_2: description: Sample extra key type: string key_3: description: Sample extra key type: string key_4: description: Sample extra key type: string key_5: description: Sample extra key type: string key_6: description: Sample extra key type: string key_7: description: Sample extra key type: string key_8: description: Sample extra key type: string key_9: description: Sample extra key type: string key_10: description: Sample extra key type: string key_11: description: Sample extra key type: string key_12: description: Sample extra key type: string key_13: description: Sample extra key type: string key_14: description: Sample extra key type: string key_15: description: Sample extra key type: string key_16: description: 
Sample extra key type: string key_17: description: Sample extra key type: string key_18: description: Sample extra key type: string key_19: description: Sample extra key type: string key_20: description: Sample extra key type: string key_21: description: Sample extra key type: string key_22: description: Sample extra key type: string key_23: description: Sample extra key type: string key_24: description: Sample extra key type: string key_25: description: Sample extra key type: string key_26: description: Sample extra key type: string key_27: description: Sample extra key type: string key_28: description: Sample extra key type: string key_29: description: Sample extra key type: string key_30: description: Sample extra key type: string key_31: description: Sample extra key type: string key_32: description: Sample extra key type: string key_33: description: Sample extra key type: string key_34: description: Sample extra key type: string key_35: description: Sample extra key type: string key_36: description: Sample extra key type: string key_37: description: Sample extra key type: string key_38: description: Sample extra key type: string key_39: description: Sample extra key type: string key_40: description: Sample extra key type: string key_41: description: Sample extra key type: string key_42: description: Sample extra key type: string key_43: description: Sample extra key type: string key_44: description: Sample extra key type: string key_45: description: Sample extra key type: string key_46: description: Sample extra key type: string key_47: description: Sample extra key type: string key_48: description: Sample extra key type: string key_49: description: Sample extra key type: string key_50: description: Sample extra key type: string key_51: description: Sample extra key type: string glean_parser-15.0.1/tests/data/send_if_empty_with_metrics.yaml000066400000000000000000000010441466531427000246140ustar00rootroot00000000000000# Any copyright is dedicated to the Public 
Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: some_metric: type: uuid lifetime: ping description: > Lorem ipsum dolor sit amet, consectetur adipiscing elit. bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - custom-ping-might-be-empty expires: 2100-01-01 glean_parser-15.0.1/tests/data/server_events_compare.go000066400000000000000000000136501466531427000232570ustar00rootroot00000000000000package glean // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // AUTOGENERATED BY {current_version}. DO NOT EDIT. // required imports import ( "encoding/json" "fmt" "strconv" "time" "github.com/google/uuid" ) // log type string used to identify logs to process in the Moz Data Pipeline var gleanEventMozlogType string = "glean-server-event" type GleanEventsLogger struct { AppID string // Application Id to identify application per Glean standards AppDisplayVersion string // Version of application emitting the event AppChannel string // Channel to differentiate logs from prod/beta/staging/devel } // exported type for public method parameters type RequestInfo struct { UserAgent string IpAddress string } // default empty values will be omitted in json from ping struct definition var defaultRequestInfo = RequestInfo{ UserAgent: "", IpAddress: "", } // structs to construct the glean ping type clientInfo struct { TelemetrySDKBuild string `json:"telemetry_sdk_build"` FirstRunDate string `json:"first_run_date"` OS string `json:"os"` OSVersion string `json:"os_version"` Architecture string `json:"architecture"` AppBuild string `json:"app_build"` AppDisplayVersion string `json:"app_display_version"` AppChannel string `json:"app_channel"` } type pingInfo struct { 
Seq int `json:"seq"` StartTime string `json:"start_time"` EndTime string `json:"end_time"` } type ping struct { DocumentNamespace string `json:"document_namespace"` DocumentType string `json:"document_type"` DocumentVersion string `json:"document_version"` DocumentID string `json:"document_id"` UserAgent string `json:"user_agent,omitempty"` IpAddress string `json:"ip_address,omitempty"` Payload string `json:"payload"` } type metrics map[string]map[string]interface{} type pingPayload struct { ClientInfo clientInfo `json:"client_info"` PingInfo pingInfo `json:"ping_info"` Metrics metrics `json:"metrics"` Events []gleanEvent `json:"events"` } type gleanEvent struct { Category string `json:"category"` Name string `json:"name"` Timestamp int64 `json:"timestamp"` Extra map[string]string `json:"extra"` } type logEnvelope struct { Timestamp string Logger string Type string Fields ping } func (g GleanEventsLogger) createClientInfo() clientInfo { // Fields with default values are required in the Glean schema, but not used in server context return clientInfo{ TelemetrySDKBuild: "{current_version}", FirstRunDate: "Unknown", OS: "Unknown", OSVersion: "Unknown", Architecture: "Unknown", AppBuild: "Unknown", AppDisplayVersion: g.AppDisplayVersion, AppChannel: g.AppChannel, } } func createPingInfo() pingInfo { var now = time.Now().UTC().Format("2006-01-02T15:04:05.000Z") return pingInfo{ Seq: 0, StartTime: now, EndTime: now, } } func (g GleanEventsLogger) createPing(documentType string, config RequestInfo, payload pingPayload) ping { var payloadJson, payloadErr = json.Marshal(payload) if payloadErr != nil { panic("Unable to marshal payload to json") } var documentId = uuid.New() return ping{ DocumentNamespace: g.AppID, DocumentType: documentType, DocumentVersion: "1", DocumentID: documentId.String(), UserAgent: config.UserAgent, IpAddress: config.IpAddress, Payload: string(payloadJson), } } // method called by each event method. 
// construct the ping, wrap it in the envelope, and print to stdout func (g GleanEventsLogger) record( documentType string, requestInfo RequestInfo, metrics metrics, events []gleanEvent, ) { var telemetryPayload = pingPayload{ ClientInfo: g.createClientInfo(), PingInfo: createPingInfo(), Metrics: metrics, Events: events, } var ping = g.createPing(documentType, requestInfo, telemetryPayload) var envelope = logEnvelope{ Timestamp: strconv.FormatInt(time.Now().UnixNano(), 10), Logger: "glean", Type: gleanEventMozlogType, Fields: ping, } var envelopeJson, envelopeErr = json.Marshal(envelope) if envelopeErr != nil { panic("Unable to marshal log envelope to json") } fmt.Println(string(envelopeJson)) } type EventBackendTestEvent struct { MetricName string // Test string metric MetricRequestCount int64 // Test quantity metric MetricRequestDatetime time.Time // Test datetime metric EventFieldString string // A string extra field EventFieldQuantity int64 // A quantity extra field EventFieldBool bool // A boolean extra field } // Record and submit an EventBackendTestEvent event. 
// test event func (g GleanEventsLogger) RecordEventBackendTestEvent( requestInfo RequestInfo, params EventBackendTestEvent, ) { var metrics = metrics{ "string": { "metric.name": params.MetricName, }, "quantity": { "metric.request_count": params.MetricRequestCount, }, "datetime": { "metric.request_datetime": params.MetricRequestDatetime.Format("2006-01-02T15:04:05.000Z"), }, } var extraKeys = map[string]string{ "event_field_string": params.EventFieldString, "event_field_quantity": fmt.Sprintf("%d", params.EventFieldQuantity), "event_field_bool": fmt.Sprintf("%t", params.EventFieldBool), } var events = []gleanEvent{ gleanEvent{ Category: "backend", Name: "test_event", Timestamp: time.Now().UnixMilli(), Extra: extraKeys, }, } g.record("events", requestInfo, metrics, events) } // Record and submit an EventBackendTestEvent event omitting user request info // test event func (g GleanEventsLogger) RecordEventBackendTestEventWithoutUserInfo( params EventBackendTestEvent, ) { g.RecordEventBackendTestEvent(defaultRequestInfo, params) } glean_parser-15.0.1/tests/data/server_events_compare.rb000066400000000000000000000116221466531427000232520ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # AUTOGENERATED BY {current_version}. DO NOT EDIT. 
# frozen_string_literal: true # requires json, securerandom, and logger libs require 'json' require 'securerandom' require 'logger' # this will be used for identifying logs that need to forward to Moz Data Pipeline GLEAN_EVENT_MOZLOG_TYPE = 'glean-server-event' module Glean class GleanEventsLogger def initialize(app_id:, app_display_version:, app_channel:, logger_options:) @app_id = app_id # string - Application Id to identify application per Glean standards @app_display_version = app_display_version # string - Version of application emitting the event @app_channel = app_channel # string - Application Id to identify application per Glean standards @logger = Logger.new(logger_options) # Logger configuration @logger.formatter = proc do |severity, datetime, _progname, msg| date_format = datetime.to_i logger_name = 'glean' "#{{JSON.dump(Timestamp: date_format.to_s, Logger: logger_name.to_s, Type: GLEAN_EVENT_MOZLOG_TYPE.to_s, Severity: severity.ljust(5).to_s, Pid: Process.pid.to_s, Fields: msg)}}\n" end # Generated events # Event triggered by the backend to record the change in state of an object (e.g. API requests to the mozilla.social Mastodon server). In the future, we could potentially use this event to track changes in state to core Mastodon objects (e.g. accounts and posts). @backend_object_update = BackendObjectUpdateEvent.new(self) end def _record( # The user's FxA account ID, if available. 
identifiers_fxa_account_id:, # full user_agent value from controller context user_agent:, # ip address value from controller context ip_address:, # event being sent in the ping event: ) t_utc = Time.now.utc # create raw metrics hash that can have nil values metrics_raw = {{ 'string' => {{ 'identifiers.fxa_account_id' => identifiers_fxa_account_id, }}, }} # filter out key value pairs where value is nil metrics_raw.each do |key, value| metrics_raw[key] = value.compact.transform_values(&:to_s) end # filter out metrics with empty hashes metrics = metrics_raw.reject {{ |_k, v| v.empty? }} event_payload = {{ # `Unknown` fields below are required in the Glean schema, however they are not useful in server context. 'client_info' => {{ 'telemetry_sdk_build' => '{current_version}', 'first_run_date' => 'Unknown', 'os' => 'Unknown', 'os_version' => 'Unknown', 'architecture' => 'Unknown', 'app_build' => 'Unknown', 'app_display_version' => @app_display_version, 'app_channel' => @app_channel, }}, 'ping_info' => {{ 'seq' => 0, 'start_time' => t_utc, 'end_time' => t_utc, }}, 'metrics' => metrics, 'events' => event, }} serialized_event_payload = event_payload.to_json # This is the message structure that Decoder expects: https://github.com/mozilla/gcp-ingestion/pull/2400. ping = {{ 'document_namespace' => @app_id, 'document_type' => 'events', 'document_version' => '1', 'document_id' => SecureRandom.uuid, 'user_agent' => user_agent, 'ip_address' => ip_address, 'payload' => serialized_event_payload, }} @logger.info(ping) end attr_accessor :backend_object_update end class BackendObjectUpdateEvent # Event triggered by the backend to record the change in state of an object (e.g. API requests to the mozilla.social Mastodon server). In the future, we could potentially use this event to track changes in state to core Mastodon objects (e.g. accounts and posts). 
def initialize(glean) @glean = glean end def record( # extras to pass into event detail object_type:, object_state:, linking:, # The user's FxA account ID, if available. identifiers_fxa_account_id:, # full user_agent value from controller context user_agent:, # ip address value from controller context ip_address: ) event = [ {{ 'category' => 'backend', 'name' => 'object_update', 'timestamp' => (Time.now.utc.to_f * 1000).to_i, 'extra' => [ ['object_type', object_type.to_s], ['object_state', object_state.to_s], ['linking', linking.to_s], ].to_h, }}, ] @glean._record( identifiers_fxa_account_id: identifiers_fxa_account_id, user_agent: user_agent, ip_address: ip_address, event: event ) end end end glean_parser-15.0.1/tests/data/server_metrics_no_events_no_pings.yaml000066400000000000000000000006161466531427000262220ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 identifiers: fxa_account_id: type: string description: > The user's FxA account ID, if available. bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never glean_parser-15.0.1/tests/data/server_metrics_with_event.yaml000066400000000000000000000026061466531427000245030ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 identifiers: fxa_account_id: type: string description: > The user's FxA account ID, if available. bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never lifetime: application send_in_pings: - events backend: object_update: type: event description: > Event triggered by the backend to record the change in state of an object (e.g. API requests to the mozilla.social Mastodon server). 
In the future, we could potentially use this event to track changes in state to core Mastodon objects (e.g. accounts and posts). bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com expires: never extra_keys: object_type: description: > A simple name to describe the object whose state changed. For example, `api_request`. type: string object_state: description: > A JSON representation of the latest state of the object. type: string linking: description: > Indicates the initial linking of the Mozilla account and the third-party account. type: boolean glean_parser-15.0.1/tests/data/server_pings.yaml000066400000000000000000000005601466531427000217160ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/pings/2-0-0 mastodon-action: description: | A server-side Mastodon action. include_client_id: true send_if_empty: false bugs: - TBD data_reviews: - TBD notification_emails: - fake@fake.com glean_parser-15.0.1/tests/data/single_labeled.yaml000066400000000000000000000007501466531427000221420ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 category: labeled_counter: type: labeled_counter lifetime: user description: > Foo bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 glean_parser-15.0.1/tests/data/smaller.yaml000066400000000000000000000011101466531427000206370ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry: client_id: type: uuid lifetime: user description: > A UUID identifying a profile and allowing user-oriented Correlation of data. Some Unicode: جمع 搜集 bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com send_in_pings: - core expires: 2100-01-01 glean_parser-15.0.1/tests/data/tags.yaml000066400000000000000000000006751466531427000201550ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/tags/1-0-0 banana: description: | A yellow fruit. Sometimes green, but then you should not eat it. apple: description: | A red fruit. Or green. Or a mix of it. Very good in pie. global_tag: description: | A very uninteresting global tag. Alas, it has no colour. glean_parser-15.0.1/tests/data/telemetry_mirror.yaml000066400000000000000000000011311466531427000226070ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 telemetry.mirrored: parses_fine: type: string lifetime: application description: > Test metric to ensure telemetry_mirror properties parse. bugs: - https://bugzilla.mozilla.org/show_bug.cgi?id=1685406 data_reviews: - https://bugzilla.mozilla.org/show_bug.cgi?id=1685406#c1 notification_emails: - CHANGE-ME@example.com expires: never telemetry_mirror: telemetry.test.string_kind glean_parser-15.0.1/tests/data/text.yaml000066400000000000000000000017171466531427000202010ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. 
# https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 valid.text: lifetime: type: text lifetime: ping send_in_pings: - custom description: | dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 data_sensitivity: - highly_sensitive no_lint: - EXPIRATION_DATE_TOO_FAR sensitivity: type: text lifetime: ping send_in_pings: - custom description: | dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 data_sensitivity: - stored_content no_lint: - EXPIRATION_DATE_TOO_FAR glean_parser-15.0.1/tests/data/text_invalid.yaml000066400000000000000000000025411466531427000217030ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 invalid.text: lifetime: type: text lifetime: user send_in_pings: - custom description: | dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 data_sensitivity: - highly_sensitive no_lint: - EXPIRATION_DATE_TOO_FAR sensitivity: type: text lifetime: ping send_in_pings: - custom description: | dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 data_sensitivity: - technical no_lint: - EXPIRATION_DATE_TOO_FAR builtin_pings: type: text lifetime: ping send_in_pings: - metrics description: | dummy metric bugs: - https://bugzilla.mozilla.org/11137353 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 data_sensitivity: - highly_sensitive no_lint: - 
EXPIRATION_DATE_TOO_FAR glean_parser-15.0.1/tests/data/unknown_ping_used.yaml000066400000000000000000000014371466531427000227500ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 all_metrics: valid_metric: &defaults type: counter lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/123 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never send_in_pings: - metrics - custom-ping only_builtins: <<: *defaults send_in_pings: - metrics - events non_existent_ping: <<: *defaults send_in_pings: - does-not-exist non_existent_ping_no_lint: <<: *defaults send_in_pings: - does-not-exist no_lint: - UNKNOWN_PING_REFERENCED glean_parser-15.0.1/tests/data/wrong_key.yamlx000066400000000000000000000027771466531427000214200ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ # Note: we're using YAML anchors to re-use the values # defined in the first metric. # Saves us some typing. --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 wrong_key: datetime: &unit_defaults type: datetime # note: this _should_ have been time_unit unit: day lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never timing_distribution: <<: *unit_defaults type: timing_distribution timespan: <<: *unit_defaults type: timespan both_keys: datetime: type: datetime # note: it has both keys unit: day time_unit: day lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never missing_key: datetime: &defaults type: datetime # note: no unit specified. 
the `time_unit` is _optional_ lifetime: ping description: for testing bugs: - https://bugzilla.mozilla.org/1137353 data_reviews: - http://example.com/ notification_emails: - CHANGE-ME@example.com expires: never timing_distribution: <<: *defaults type: timing_distribution timespan: <<: *defaults type: timespan glean_parser-15.0.1/tests/data/yaml_nits.yamlx000066400000000000000000000010471466531427000214000ustar00rootroot00000000000000# Any copyright is dedicated to the Public Domain. # https://creativecommons.org/publicdomain/zero/1.0/ --- $schema: moz://mozilla.org/schemas/glean/metrics/2-0-0 page.perf: load_time: type: timing_distribution gecko_datapoint: GV_PAGE_LOAD_MS time_unit: millisecond lifetime: application description: > A sample timing distribution metric exported from Gecko. bugs: - 1566356 data_reviews: - http://example.com/reviews notification_emails: - CHANGE-ME@example.com expires: 2100-01-01 glean_parser-15.0.1/tests/detekt.yml000066400000000000000000000001211466531427000174070ustar00rootroot00000000000000--- build: maxIssues: 0 naming: InvalidPackageDeclaration: active: false glean_parser-15.0.1/tests/test-go/000077500000000000000000000000001466531427000167745ustar00rootroot00000000000000glean_parser-15.0.1/tests/test-go/test.go.tmpl000066400000000000000000000003301466531427000212510ustar00rootroot00000000000000package main import ( "glean/glean" ) func main() { logger := glean.GleanEventsLogger{ AppID: "glean.test", AppDisplayVersion: "0.0.1", AppChannel: "nightly", } /* CODE */ } glean_parser-15.0.1/tests/test-js/000077500000000000000000000000001466531427000170035ustar00rootroot00000000000000glean_parser-15.0.1/tests/test-js/package.json000066400000000000000000000002431466531427000212700ustar00rootroot00000000000000{ "name": "glean_test", "type": "module", "version": "1.0.0", "license": "MPL-2.0", "dependencies": { "mozlog": "^3.0.2", "uuid": "^9.0.1" } } 
glean_parser-15.0.1/tests/test-js/test.js.tmpl000066400000000000000000000005701466531427000212750ustar00rootroot00000000000000import { /* FACTORY */ } from ".//* IMPORT */"; class PrintLogger { write(msg) { console.log(msg); } } let logger = new PrintLogger; let logger_options = { app: "glean-test", fmt: 'pretty', stream: logger, }; let eventLogger = /* FACTORY */({ applicationId: "glean.test", appDisplayVersion: "0.0.1", channel: "testing", logger_options }); /* CODE */ glean_parser-15.0.1/tests/test-py/000077500000000000000000000000001466531427000170175ustar00rootroot00000000000000glean_parser-15.0.1/tests/test-py/test.py000066400000000000000000000007431466531427000203540ustar00rootroot00000000000000from glean.server_events import create_events_server_event_logger logger = create_events_server_event_logger( application_id="accounts_backend", app_display_version="0.0.1", channel="nightly", ) logger.record_backend_object_update( user_agent="Mozilla/5.0 ...", ip_address="2a02:a311:803c:6300:4074:5cf2:91ac:d546", identifiers_fxa_account_id="test-py-project", object_type="some_object_type", object_state="some_object_state", linking=True, ) glean_parser-15.0.1/tests/test-rb/000077500000000000000000000000001466531427000167725ustar00rootroot00000000000000glean_parser-15.0.1/tests/test-rb/test.rb.tmpl000066400000000000000000000003031466531427000212450ustar00rootroot00000000000000require_relative ".//* IMPORT */" events = Glean::GleanEventsLogger.new( app_id: "glean.test", app_display_version: "0.0.1", app_channel: "testing", logger_options: $stdout ) /* CODE */ glean_parser-15.0.1/tests/test_cli.py000077500000000000000000000160221466531427000175760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ """Tests for the command line interface.""" import os from pathlib import Path import re from click.testing import CliRunner from glean_parser import __main__ ROOT = Path(__file__).parent def test_basic_help(): """Test the CLI.""" runner = CliRunner() help_result = runner.invoke(__main__.main, ["--help"]) assert help_result.exit_code == 0 assert "Show this message and exit." in help_result.output def test_translate(tmp_path): """Test the 'translate' command.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "kotlin", "-s", "namespace=Foo", "--allow-reserved", ], ) assert result.exit_code == 0 assert set(os.listdir(str(tmp_path))) == set( [ "CorePing.kt", "Telemetry.kt", "Environment.kt", "DottedCategory.kt", "GleanInternalMetrics.kt", "GleanBuildInfo.kt", ] ) for filename in os.listdir(str(tmp_path)): path = tmp_path / filename with path.open(encoding="utf-8") as fd: content = fd.read() assert "package Foo" in content def test_translate_no_buildinfo(tmp_path): """Test the 'translate' command.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "kotlin", "-s", "namespace=Foo", "-s", "with_buildinfo=false", "--allow-reserved", ], ) assert result.exit_code == 0 assert set(os.listdir(str(tmp_path))) == set( [ "CorePing.kt", "Telemetry.kt", "Environment.kt", "DottedCategory.kt", "GleanInternalMetrics.kt", ] ) for filename in os.listdir(str(tmp_path)): path = tmp_path / filename with path.open(encoding="utf-8") as fd: content = fd.read() assert "package Foo" in content def test_translate_build_date(tmp_path): """Test with a custom build date.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "kotlin", "-s", "namespace=Foo", "-s", 
"build_date=2020-01-01T17:30:00", "--allow-reserved", ], ) assert result.exit_code == 0 path = tmp_path / "GleanBuildInfo.kt" with path.open(encoding="utf-8") as fd: content = fd.read() assert "buildDate = Calendar.getInstance" in content assert "cal.set(2020, 0, 1, 17, 30" in content def test_translate_fixed_build_date(tmp_path): """Test with a custom build date.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "kotlin", "-s", "namespace=Foo", "-s", "build_date=0", "--allow-reserved", ], ) assert result.exit_code == 0 path = tmp_path / "GleanBuildInfo.kt" with path.open(encoding="utf-8") as fd: content = fd.read() assert "buildDate = Calendar.getInstance" in content assert "cal.set(1970" in content def test_translate_borked_build_date(tmp_path): """Test with a custom build date.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "kotlin", "-s", "namespace=Foo", "-s", "build_date=1", "--allow-reserved", ], ) assert result.exit_code == 1 def test_translate_errors(tmp_path): """Test the 'translate' command.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "invalid.yamlx"), "-o", str(tmp_path), "-f", "kotlin", ], ) assert result.exit_code == 1 assert len(os.listdir(str(tmp_path))) == 0 def test_glinter_errors(tmp_path): """Test that the 'glinter' command reports all errors.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "glinter", str(ROOT / "data" / "bad_ping.yamlx"), ], ) assert result.exit_code == 1 assert "Found 1 errors" in result.output def test_translate_invalid_format(tmp_path): """Test passing an invalid format to the 'translate' command.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "core.yaml"), "-o", str(tmp_path), "-f", "foo", ], ) assert result.exit_code == 2 
assert re.search("Invalid value for ['\"]--format['\"]", result.output) def test_reject_jwe(tmp_path): """Test that the JWE type is rejected""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", str(ROOT / "data" / "jwe.yaml"), "-o", str(tmp_path), "-f", "kotlin", ], ) assert result.exit_code == 1 assert len(os.listdir(str(tmp_path))) == 0 def test_wrong_key_lint(tmp_path): """Test that the 'glinter' reports a wrong key used.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "glinter", str(ROOT / "data" / "wrong_key.yamlx"), ], ) assert result.exit_code == 1 # wrong `unit` key for datetime, timing_distribution, timespan. # a missing key is NOT an error. assert "Found 3 errors" in result.output def test_no_file_is_an_error(tmp_path): """Test that 'translate' fails when no files are passed.""" runner = CliRunner() result = runner.invoke( __main__.main, [ "translate", "-o", str(tmp_path), "-f", "kotlin", ], ) assert result.exit_code == 1 def test_no_file_can_be_skipped(tmp_path): """Test that 'translate' works when no files are passed but flag is set.""" runner = CliRunner() result = runner.invoke( __main__.main, ["translate", "-o", str(tmp_path), "-f", "kotlin", "--allow-missing-files"], ) assert result.exit_code == 0 glean_parser-15.0.1/tests/test_go_server.py000066400000000000000000000116251466531427000210230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import io import json import pytest import subprocess import glean_parser from glean_parser import translate from glean_parser import validate_ping ROOT = Path(__file__).parent def test_parser_go_server_ping_no_metrics(tmp_path, capsys): """Test that no files are generated if only ping definitions are provided without any metrics.""" translate.translate( ROOT / "data" / "server_pings.yaml", "go_server", tmp_path, ) assert all(False for _ in tmp_path.iterdir()) def test_parser_go_server_ping_file(tmp_path, capsys): """Test that no files are generated if ping definitions are provided.""" translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ROOT / "data" / "server_pings.yaml", ], "go_server", tmp_path, ) assert all(False for _ in tmp_path.iterdir()) def test_parser_go_server_metrics_no_ping(tmp_path, capsys): """Test that no files are generated if only metric definitions are provided without any events metrics.""" translate.translate( ROOT / "data" / "server_metrics_no_events_no_pings.yaml", "go_server", tmp_path, ) captured = capsys.readouterr() assert all(False for _ in tmp_path.iterdir()) assert ( "No event metrics found...at least one event metric is required" in captured.out ) def test_parser_go_server_metrics_unsupported_type(tmp_path, capsys): """Test that no files are generated with unsupported metric types.""" translate.translate( [ ROOT / "data" / "go_server_metrics_unsupported.yaml", ], "go_server", tmp_path, ) captured = capsys.readouterr() assert "Ignoring unsupported metric type" in captured.out unsupported_types = [ "boolean", "labeled_boolean", "labeled_string", "string_list", "timespan", "uuid", "url", ] for t in unsupported_types: assert t in captured.out def test_parser_go_server(tmp_path): """Test that parser works""" translate.translate( ROOT / "data" / "go_server_metrics.yaml", "go_server", tmp_path, ) assert set(x.name for x in tmp_path.iterdir()) == 
set(["server_events.go"]) # Make sure generated file matches expected with (tmp_path / "server_events.go").open("r", encoding="utf-8") as fd: content = fd.read() with (ROOT / "data" / "server_events_compare.go").open( "r", encoding="utf-8" ) as cd: compare_raw = cd.read() glean_version = f"glean_parser v{glean_parser.__version__}" # use replace instead of format since Go uses { } compare = compare_raw.replace("{current_version}", glean_version) assert content == compare def run_logger(code_dir, code): """ Run the Go logger and capture the output sent to STDOUT. """ tmpl_code = "" with open(ROOT / "test-go" / "test.go.tmpl", "r") as fp: tmpl_code = fp.read() tmpl_code = tmpl_code.replace("/* CODE */", code) with open(code_dir / "test.go", "w") as fp: fp.write(tmpl_code) subprocess.call(["go", "mod", "init", "glean"], cwd=code_dir) subprocess.call(["go", "mod", "tidy"], cwd=code_dir) return subprocess.check_output(["go", "run", "test.go"], cwd=code_dir).decode( "utf-8" ) @pytest.mark.go_dependency def test_run_logging(tmp_path): glean_module_path = tmp_path / "glean" translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ], "go_server", glean_module_path, ) code = """ logger.RecordEventBackendObjectUpdate( glean.RequestInfo{ UserAgent: "glean-test/1.0", IpAddress: "127.0.0.1", }, glean.EventBackendObjectUpdate{ IdentifiersFxaAccountId: "accountId", ObjectType: "type", ObjectState: "state", }, ) """ logged_output = run_logger(tmp_path, code) logged_output = json.loads(logged_output) fields = logged_output["Fields"] payload = fields["payload"] assert "glean-server-event" == logged_output["Type"] assert "glean.test" == fields["document_namespace"] assert "events" == fields["document_type"] assert "1" == fields["document_version"] assert "glean-test/1.0" == fields["user_agent"] schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/main/" "schemas/glean/glean/glean.1.schema.json" ) input = io.StringIO(payload) 
output = io.StringIO() assert ( validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 ), output.getvalue() glean_parser-15.0.1/tests/test_javascript.py000066400000000000000000000325111466531427000211730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path from glean_parser import javascript from glean_parser import metrics from glean_parser import pings from glean_parser import translate ROOT = Path(__file__).parent def test_parser_js(tmp_path): """Test translating metrics to Javascript files.""" translate.translate( ROOT / "data" / "core.yaml", "javascript", tmp_path, None, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set( [ "corePing.js", "telemetry.js", "environment.js", "dottedCategory.js", "gleanInternalMetrics.js", ] ) # Make sure descriptions made it in with (tmp_path / "corePing.js").open("r", encoding="utf-8") as fd: content = fd.read() assert "True if the user has set Firefox as the default browser." 
in content with (tmp_path / "telemetry.js").open("r", encoding="utf-8") as fd: content = fd.read() assert "جمع 搜集" in content with (tmp_path / "gleanInternalMetrics.js").open("r", encoding="utf-8") as fd: content = fd.read() assert 'category: ""' in content def test_parser_js_all_metrics(tmp_path): """Test translating metrics to Javascript files.""" translate.translate( ROOT / "data" / "all_metrics.yaml", "javascript", tmp_path, None, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["allMetrics.js"]) def test_parser_ts(tmp_path): """Test translating metrics to Typescript files.""" translate.translate( ROOT / "data" / "core.yaml", "typescript", tmp_path, None, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set( [ "corePing.ts", "telemetry.ts", "environment.ts", "dottedCategory.ts", "gleanInternalMetrics.ts", ] ) # Make sure descriptions made it in with (tmp_path / "corePing.ts").open("r", encoding="utf-8") as fd: content = fd.read() assert "use strict" not in content assert "True if the user has set Firefox as the default browser." 
in content with (tmp_path / "telemetry.ts").open("r", encoding="utf-8") as fd: content = fd.read() assert "use strict" not in content assert "جمع 搜集" in content with (tmp_path / "gleanInternalMetrics.ts").open("r", encoding="utf-8") as fd: content = fd.read() assert "use strict" not in content assert 'category: ""' in content def test_ping_parser(tmp_path): """Test translating pings to Javascript files.""" translate.translate( ROOT / "data" / "pings.yaml", "javascript", tmp_path, None, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["pings.js"]) # Make sure descriptions made it in with (tmp_path / "pings.js").open("r", encoding="utf-8") as fd: content = fd.read() assert "This is a custom ping" in content def test_javascript_generator(): jdf = javascript.javascript_datatypes_filter assert jdf(metrics.Lifetime.ping) == '"ping"' def test_metric_class_name(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) webext_class_name = javascript.class_name_factory("webext") qt_class_name = javascript.class_name_factory("qt") assert webext_class_name(event.type) == "EventMetricType" assert qt_class_name(event.type) == "Glean.Glean._private.EventMetricType" boolean = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert webext_class_name(boolean.type) == "BooleanMetricType" assert qt_class_name(boolean.type) == "Glean.Glean._private.BooleanMetricType" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], ) assert 
webext_class_name(ping.type) == "PingType" assert qt_class_name(ping.type) == "Glean.Glean._private.PingType" def test_import_path(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) assert javascript.import_path(event.type) == "metrics/event" boolean = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert javascript.import_path(boolean.type) == "metrics/boolean" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], ) assert javascript.import_path(ping.type) == "ping" def test_labeled_subtype_is_imported(tmp_path): """ Test that both the LabeledMetricType and its subtype are imported """ translate.translate( ROOT / "data" / "single_labeled.yaml", "javascript", tmp_path, None ) assert set(x.name for x in tmp_path.iterdir()) == set(["category.js"]) with (tmp_path / "category.js").open("r", encoding="utf-8") as fd: content = fd.read() assert ( content.count( 'import CounterMetricType from "@mozilla/glean/private/metrics/counter";' # noqa ) == 1 ) assert ( content.count( 'import LabeledMetricType from "@mozilla/glean/private/metrics/labeled";' # noqa ) == 1 ) def test_duplicate(tmp_path): """ Test that there aren't duplicate imports when using a labeled and non-labeled version of the same metric. 
https://github.com/mozilla-mobile/android-components/issues/2793 """ translate.translate( ROOT / "data" / "duplicate_labeled.yaml", "javascript", tmp_path, None ) assert set(x.name for x in tmp_path.iterdir()) == set(["category.js"]) with (tmp_path / "category.js").open("r", encoding="utf-8") as fd: content = fd.read() assert ( content.count( 'import CounterMetricType from "@mozilla/glean/private/metrics/counter";' # noqa ) == 1 ) def test_reasons(tmp_path): translate.translate(ROOT / "data" / "pings.yaml", "javascript", tmp_path, None) translate.translate(ROOT / "data" / "pings.yaml", "typescript", tmp_path, None) assert set(x.name for x in tmp_path.iterdir()) == set(["pings.js", "pings.ts"]) with (tmp_path / "pings.js").open("r", encoding="utf-8") as fd: content = fd.read() assert "export const CustomPingMightBeEmptyReasonCodes" in content assert "export const RealPingMightBeEmptyReasonCodes" not in content with (tmp_path / "pings.ts").open("r", encoding="utf-8") as fd: content = fd.read() assert "export enum CustomPingMightBeEmptyReasonCodes" in content assert "export enum RealPingMightBeEmptyReasonCodes" not in content def test_event_extra_keys_in_correct_order(tmp_path): """ Assert that the extra keys appear in the parameter and the enumeration in the same order. https://bugzilla.mozilla.org/show_bug.cgi?id=1648768 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "javascript", tmp_path, None, ) assert set(x.name for x in tmp_path.iterdir()) == set(["event.js"]) with (tmp_path / "event.js").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert '["And1WithExtraCasing", "alice", "bob", "charlie"]' in content def test_arguments_are_generated_in_deterministic_order(tmp_path): """ Assert that arguments on generated code are always in the same order. 
https://bugzilla.mozilla.org/show_bug.cgi?id=1666192 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "javascript", tmp_path, None, ) assert set(x.name for x in tmp_path.iterdir()) == set(["event.js"]) with (tmp_path / "event.js").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) expected = 'export const example = new EventMetricType({ category: "event", name: "example", sendInPings: ["events"], lifetime: "ping", disabled: false, }, ["And1WithExtraCasing", "alice", "bob", "charlie"]);' # noqa assert expected in content def test_qt_platform_template_includes_expected_imports(tmp_path): """ Assert that when the platform is Qt, the template does not contain import/export statements. """ translate.translate( ROOT / "data" / "single_labeled.yaml", "javascript", tmp_path, {"platform": "qt", "version": "0.14"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["category.js", "qmldir"]) with (tmp_path / "category.js").open("r", encoding="utf-8") as fd: content = fd.read() assert content.count(".import org.mozilla.Glean 0.14") == 1 assert content.count("export") == 0 def test_qt_platform_generated_correct_qmldir_file(tmp_path): """ Assert that when the platform is Qt, a qmldir is also generated with the expected files listed in it. 
""" translate.translate( ROOT / "data" / "core.yaml", "javascript", tmp_path, {"platform": "qt", "version": "0.14"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set( [ "corePing.js", "telemetry.js", "environment.js", "dottedCategory.js", "gleanInternalMetrics.js", "qmldir", ] ) with (tmp_path / "qmldir").open("r", encoding="utf-8") as fd: content = fd.read() assert content.count("CorePing 0.14 corePing.js") == 1 assert content.count("Telemetry 0.14 telemetry.js") == 1 assert content.count("Environment 0.14 environment.js") == 1 assert content.count("DottedCategory 0.14 dottedCategory.js") == 1 assert content.count("GleanInternalMetrics 0.14 gleanInternalMetrics.js") == 1 assert content.count("depends org.mozilla.Glean 0.14") == 1 def test_event_extra_keys_with_types(tmp_path): """ Assert that the extra keys with types appear with their corresponding types. """ translate.translate( ROOT / "data" / "events_with_types.yaml", "typescript", tmp_path, ) assert set(x.name for x in tmp_path.iterdir()) == set(["core.ts"]) with (tmp_path / "core.ts").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "new EventMetricType<{ " "enabled?: boolean, " "preference?: string, " "swapped?: number, " "}>({" in content ) assert '"enabled", "preference", "swapped"' in content # Make sure this only happens for the TypeScript template. 
translate.translate( ROOT / "data" / "events_with_types.yaml", "javascript", tmp_path, ) assert set(x.name for x in tmp_path.iterdir()) == set(["core.js", "core.ts"]) with (tmp_path / "core.js").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "new EventMetricType({" in content assert '"enabled", "preference", "swapped"' in content def test_build_info_is_generated_when_option_is_present(tmp_path): """ Assert that build info is generated """ translate.translate( ROOT / "data" / "single_labeled.yaml", "typescript", tmp_path, {"with_buildinfo": "true"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["gleanBuildInfo.ts", "category.ts"] ) translate.translate( ROOT / "data" / "single_labeled.yaml", "typescript", tmp_path, {"with_buildinfo": "true", "build_date": "2022-03-01T14:10+01:00"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["gleanBuildInfo.ts", "category.ts"] ) with (tmp_path / "gleanBuildInfo.ts").open("r", encoding="utf-8") as fd: content = fd.read() assert "new Date(2022, 2, 1, 14, 10, 0)" in content glean_parser-15.0.1/tests/test_javascript_server.py000066400000000000000000000127261466531427000225670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import io import json import pytest import shutil import subprocess from glean_parser import javascript_server from glean_parser import translate from glean_parser import validate_ping from glean_parser.metrics import Metric from unittest.mock import Mock ROOT = Path(__file__).parent def test_parser_js_server_ping_no_metrics(tmp_path): """Test that no files are generated if only ping definitions are provided without any metrics.""" translate.translate( ROOT / "data" / "fxa-server-pings.yaml", "javascript_server", tmp_path, ) assert all(False for _ in tmp_path.iterdir()) def test_parser_js_server_metrics_no_ping(tmp_path): """Test that no files are generated if only metric definitions are provided without pings.""" translate.translate( ROOT / "data" / "fxa-server-metrics.yaml", "javascript_server", tmp_path, ) assert all(False for _ in tmp_path.iterdir()) def test_parser_js_server(tmp_path): """Test that no files are generated if only metric definitions are provided without pings.""" translate.translate( [ ROOT / "data" / "fxa-server-pings.yaml", ROOT / "data" / "fxa-server-metrics.yaml", ], "javascript_server", tmp_path, ) assert set(x.name for x in tmp_path.iterdir()) == set(["server_events.js"]) # Make sure string metric made it in with (tmp_path / "server_events.js").open("r", encoding="utf-8") as fd: content = fd.read() assert "'event.name': event_name" in content def test_generate_ping_factory_method(): ping = "accounts_events" expected_result = "createAccountsEventsEvent" result = javascript_server.generate_ping_factory_method(ping, metrics_by_type={}) assert result == expected_result ping = "accounts_events" expected_result = "createAccountsEventsServerEventLogger" result = javascript_server.generate_ping_factory_method( ping, metrics_by_type={"event": [Mock(spec=Metric)]} ) assert result == expected_result def run_logger(code_dir, import_file, factory, code): """ Run the JavaScript 
logger with a mocked logger that just prints the ping payload to STDOUT. """ shutil.copy(ROOT / "test-js" / "package.json", code_dir) subprocess.check_call(["npm", "install"], cwd=code_dir) tmpl_code = "" with open(ROOT / "test-js" / "test.js.tmpl", "r") as fp: tmpl_code = fp.read() tmpl_code = ( tmpl_code.replace("/* IMPORT */", import_file) .replace("/* FACTORY */", factory) .replace("/* CODE */", code) ) with open(code_dir / "test.js", "w") as fp: fp.write(tmpl_code) return subprocess.check_output(["node", "test.js"], cwd=code_dir).decode("utf-8") @pytest.mark.node_dependency def test_logging_custom_ping_as_events(tmp_path): translate.translate( [ ROOT / "data" / "fxa-server-pings.yaml", ROOT / "data" / "fxa-server-metrics.yaml", ], "javascript_server", tmp_path, ) factory = "createAccountsEventsEvent" code = """ eventLogger.record({ user_agent: "glean-test/1.0", event_name: "testing" }); """ logged_output = run_logger(tmp_path, "server_events.js", factory, code) logged_output = json.loads(logged_output) fields = logged_output["Fields"] payload = fields["payload"] assert "glean-server-event" == logged_output["Type"] assert "glean.test" == fields["document_namespace"] assert "accounts-events" == fields["document_type"] assert "1" == fields["document_version"] assert "glean-test/1.0" == fields["user_agent"] schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/main/" "schemas/glean/glean/glean.1.schema.json" ) input = io.StringIO(payload) output = io.StringIO() assert ( validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 ), output.getvalue() @pytest.mark.node_dependency def test_logging_events_ping_with_event_metrics(tmp_path): translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ], "javascript_server", tmp_path, ) factory = "createEventsServerEventLogger" code = """ eventLogger.recordBackendObjectUpdate({ user_agent: 'glean-test/1.0', ip_address: 
'2a02:a311:803c:6300:4074:5cf2:91ac:d546', identifiers_fxa_account_id: 'abc', object_type: 'unknown', object_state: 'great', linking: true, }); """ logged_output = run_logger(tmp_path, "server_events.js", factory, code) logged_output = json.loads(logged_output) fields = logged_output["Fields"] payload = fields["payload"] assert "glean-server-event" == logged_output["Type"] assert "glean.test" == fields["document_namespace"] assert "events" == fields["document_type"] assert "1" == fields["document_version"] assert "glean-test/1.0" == fields["user_agent"] schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/main/" "schemas/glean/glean/glean.1.schema.json" ) input = io.StringIO(payload) output = io.StringIO() assert ( validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 ), output.getvalue() glean_parser-15.0.1/tests/test_kotlin.py000066400000000000000000000303571466531427000203330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ import os from pathlib import Path import subprocess from glean_parser import kotlin from glean_parser import metrics from glean_parser import pings from glean_parser import translate from glean_parser.util import DictWrapper ROOT = Path(__file__).parent def run_detekt(files): detekt_exec = ROOT.parent / "detekt-cli.jar" # We want to make sure this runs on CI, but it's not required # for local development if detekt_exec.is_file() or "CI" in os.environ: subprocess.check_call( [ "java", "-jar", str(detekt_exec), "--build-upon-default-config", "--config", str(ROOT / "detekt.yml"), "-i", ",".join(files), ] ) def run_ktlint(files): ktlint_exec = ROOT.parent / "ktlint" # We want to make sure this runs on CI, but it's not required # for local development if ktlint_exec.is_file() or "CI" in os.environ: subprocess.check_call([str(ktlint_exec)] + files) def run_linters(files): files = [str(x) for x in files] run_ktlint(files) run_detekt(files) def test_parser(tmp_path): """Test translating metrics to Kotlin files.""" translate.translate( ROOT / "data" / "core.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set( [ "CorePing.kt", "Telemetry.kt", "Environment.kt", "DottedCategory.kt", "GleanInternalMetrics.kt", "GleanBuildInfo.kt", ] ) # Make sure descriptions made it in with (tmp_path / "CorePing.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert "True if the user has set Firefox as the default browser." 
in content # Make sure the namespace option is in effect assert "package Foo" in content with (tmp_path / "Telemetry.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert "جمع 搜集" in content with (tmp_path / "GleanInternalMetrics.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert 'category = ""' in content with (tmp_path / "GleanBuildInfo.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert "buildDate = Calendar.getInstance" in content run_linters(tmp_path.glob("*.kt")) def test_parser_all_metrics(tmp_path): """Test translating ALL metric types to Kotlin files.""" translate.translate( ROOT / "data" / "all_metrics.yaml", "kotlin", tmp_path, {"namespace": "Foo", "with_buildinfo": "false"}, {"allow_reserved": False}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["AllMetrics.kt"]) run_linters(tmp_path.glob("*.kt")) def test_ping_parser(tmp_path): """Test translating pings to Kotlin files.""" translate.translate( ROOT / "data" / "pings.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Pings.kt", "GleanBuildInfo.kt"] ) # Make sure descriptions made it in with (tmp_path / "Pings.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert "This is a custom ping" in content # Make sure the namespace option is in effect assert "package Foo" in content run_linters(tmp_path.glob("*.kt")) def test_kotlin_generator(): kdf = kotlin.kotlin_datatypes_filter assert kdf("\n") == r'"\n"' assert kdf([42, "\n"]) == r'listOf(42, "\n")' assert ( kdf(DictWrapper([("key", "value"), ("key2", "value2")])) == r'mapOf("key" to "value", "key2" to "value2")' ) assert kdf(metrics.Lifetime.ping) == "Lifetime.PING" def test_metric_type_name(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", 
extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) assert kotlin.type_name(event) == "EventMetricType" event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert kotlin.type_name(event) == "EventMetricType" boolean = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert kotlin.type_name(boolean) == "BooleanMetricType" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], ) assert kotlin.type_name(ping) == "PingType" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], reasons={"foo": "foolicious", "bar": "barlicious"}, ) assert kotlin.type_name(ping) == "PingType" def test_duplicate(tmp_path): """ Test that there aren't duplicate imports when using a labeled and non-labeled version of the same metric. https://github.com/mozilla-mobile/android-components/issues/2793 """ translate.translate( ROOT / "data" / "duplicate_labeled.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Category.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Category.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert ( content.count( "import mozilla.components.service.glean.private.CounterMetricType" ) == 1 ) def test_glean_namespace(tmp_path): """ Test that setting the glean namespace works. 
""" translate.translate( ROOT / "data" / "duplicate_labeled.yaml", "kotlin", tmp_path, {"namespace": "Foo", "glean_namespace": "Bar"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Category.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Category.kt").open("r", encoding="utf-8") as fd: content = fd.read() assert content.count("import Bar.private.CounterMetricType") == 1 def test_event_extra_keys_in_correct_order(tmp_path): """ Assert that the extra keys appear in the parameter and the enumeration in the same order. https://bugzilla.mozilla.org/show_bug.cgi?id=1648768 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Event.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Event.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "ExampleExtra(" in content assert "and1withextracasing:" in content assert "alice:" in content assert "bob:" in content assert "charlie:" in content assert ": EventExtras" in content assert 'allowedExtraKeys = listOf("And1WithExtraCasing", "alice", "bob", "charlie")' in content def test_arguments_are_generated_in_deterministic_order(tmp_path): """ Assert that arguments on generated code are always in the same order. 
https://bugzilla.mozilla.org/show_bug.cgi?id=1666192 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Event.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Event.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) expected = 'EventMetricType by lazy { // generated from event.example EventMetricType( CommonMetricData( category = "event", name = "example", sendInPings = listOf("events"), lifetime = Lifetime.PING, disabled = false ), allowedExtraKeys = listOf("And1WithExtraCasing", "alice", "bob", "charlie")) } }' # noqa assert expected in content def test_event_extra_keys_with_types(tmp_path): """ Assert that the extra keys with types appear with their corresponding types. """ translate.translate( ROOT / "data" / "events_with_types.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Core.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Core.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "data class PreferenceToggledExtra( " "val enabled: Boolean? = null, " "val preference: String? = null, " "val swapped: Int? = null " ") : EventExtras {" in content ) assert ( 'allowedExtraKeys = listOf("enabled", "preference", "swapped")' in content ) def test_reasons(tmp_path): """ Assert that we generate the reason codes as a plain enum. 
https://bugzilla.mozilla.org/show_bug.cgi?id=1811888 """ translate.translate( ROOT / "data" / "pings.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["Pings.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "Pings.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) expected = '@Suppress("ClassNaming", "EnumNaming") enum class customPingMightBeEmptyReasonCodes : ReasonCode { serious { override fun code(): Int = 0 }, silly { override fun code(): Int = 1 }; }' # noqa assert expected in content expected = "val customPing: PingType = // generated from custom-ping" assert expected in content expected = "val customPingMightBeEmpty: PingType = // generated from custom-ping-might-be-empty" # noqa assert expected in content def test_object_metric(tmp_path): """ Assert that an object metric is created. """ translate.translate( ROOT / "data" / "object.yaml", "kotlin", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set( ["ActivityStream.kt", "ComplexTypes.kt", "CrashStack.kt", "GleanBuildInfo.kt"] ) with (tmp_path / "ComplexTypes.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "typealias ArrayInArrayObjectItemItem = Boolean" in content assert "typealias NumberArrayObjectItem = Int" in content with (tmp_path / "CrashStack.kt").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "ObjectMetricType" in content assert "data class ThreadsObject(" in content assert "data class ThreadsObjectItem(" in content assert ( "var frames: ThreadsObjectItemFrames = ThreadsObjectItemFrames" in content ) assert "data class ThreadsObjectItemFramesItem(" in content assert "var moduleIndex: Int? = null," in content assert "var ip: String? = null," in content assert "var trust: String? 
= null," in content glean_parser-15.0.1/tests/test_lint.py000066400000000000000000000404551466531427000200010ustar00rootroot00000000000000# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from pathlib import Path from glean_parser import lint from glean_parser import parser import util import pytest ROOT = Path(__file__).parent def test_common_prefix(): contents = [ { "telemetry": { "network_latency": { "type": "quantity", "gecko_datapoint": "GC_NETWORK_LATENCY", "unit": "ms", }, "network_bandwidth": { "type": "quantity", "gecko_datapoint": "GC_NETWORK_BANDWIDTH", "unit": "kbps", }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 1 assert nits[0].check_name == "COMMON_PREFIX" # Now make sure the override works contents[0]["no_lint"] = ["COMMON_PREFIX"] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 0 def test_unit_in_name(): contents = [ { "telemetry": { "network_latency_ms": {"type": "timespan", "time_unit": "millisecond"}, "memory_usage_mb": { "type": "memory_distribution", "memory_unit": "megabyte", }, "width_pixels": { "type": "quantity", "gecko_datapoint": "WIDTH_PIXELS", "unit": "pixels", }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 3 assert all(nit.check_name == "UNIT_IN_NAME" for nit in nits) # Now make sure the override works contents[0]["telemetry"]["network_latency_ms"]["no_lint"] = ["UNIT_IN_NAME"] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) 
assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 2 def test_category_generic(): contents = [{"metrics": {"measurement": {"type": "boolean"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 1 assert nits[0].check_name == "CATEGORY_GENERIC" contents[0]["no_lint"] = ["CATEGORY_GENERIC"] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 0 def test_combined(): contents = [ { "metrics": { "m_network_latency_ms": { "type": "timespan", "time_unit": "millisecond", }, "m_memory_usage_mb": { "type": "memory_distribution", "memory_unit": "megabyte", }, "m_width_pixels": { "type": "quantity", "gecko_datapoint": "WIDTH_PIXELS", "unit": "pixels", }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 5 assert set(["COMMON_PREFIX", "CATEGORY_GENERIC", "UNIT_IN_NAME"]) == set( v.check_name for v in nits ) def test_baseline_restriction(): contents = [ { "user_data": { "counter": {"type": "counter", "send_in_pings": ["baseline"]}, "string": {"type": "string", "send_in_pings": ["metrics", "baseline"]}, "string2": {"type": "string", "send_in_pings": ["metrics"]}, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 2 assert set(["BASELINE_PING"]) == set(v.check_name for v in nits) def test_misspelling_pings(): contents = [ { "user_data": { "counter": {"type": "counter", "send_in_pings": ["metric"]}, "string": { "type": "string", "lifetime": "application", 
"send_in_pings": ["event"], }, "string2": { "type": "string", "lifetime": "application", "send_in_pings": ["metrics", "events"], }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 2 assert set(["MISSPELLED_PING"]) == set(v.check_name for v in nits) def test_user_lifetime_expiration(): """Test that expiring 'user' lifetime metrics generate a warning.""" contents = [ { "user_data": { "counter": { "type": "counter", "lifetime": "user", "expires": "2100-01-01", "no_lint": ["EXPIRATION_DATE_TOO_FAR"], }, "string": {"type": "string", "lifetime": "user", "expires": "never"}, "other": {"type": "string", "lifetime": "application"}, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 1 assert set(["USER_LIFETIME_EXPIRATION"]) == set(v.check_name for v in nits) def test_expired_metric(): """Test that expiring 'ping' lifetime metrics generate a warning.""" contents = [ { "user_data": { "counter": { "type": "counter", "lifetime": "ping", "expires": "1999-01-01", }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == 1 assert set(["EXPIRED"]) == set(v.check_name for v in nits) def test_expires_too_far_in_the_future(): """Test that a `expires` dates too far in the future generates warnings""" contents = [ { "user_data": { "too_far": { "type": "counter", "lifetime": "ping", "expires": "2100-01-01", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert 
len(nits) == 1 assert set(["EXPIRATION_DATE_TOO_FAR"]) == set(v.check_name for v in nits) def test_invalid_lifetime_for_metric_on_events_ping(): """Test that a `ping` lifetime, non-event metric, fails when sent on the Metrics ping""" contents = [ { "user_data": { "invalid_lifetime": { "type": "counter", "lifetime": "ping", "send_in_pings": "events", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 1 def test_translate_missing_input_files(tmp_path): with pytest.raises(FileNotFoundError): lint.glinter( [ROOT / "data" / "missing.yaml"], parser_config={"allow_reserved": True}, ) assert 0 == lint.glinter( [ROOT / "data" / "missing.yaml"], parser_config={"allow_reserved": True, "allow_missing_files": True}, ) @pytest.mark.parametrize( "content,num_nits", [ ({"search": {"bugs": [12345]}}, 1), ({"search": {"bugs": [12345], "no_lint": ["BUG_NUMBER"]}}, 0), ({"search": {"bugs": [12345]}, "no_lint": ["BUG_NUMBER"]}, 0), ], ) def test_bug_number_pings(content, num_nits): """ Test that using bug numbers (rather than URLs) in pings produce linting errors. """ content["$schema"] = "moz://mozilla.org/schemas/glean/pings/1-0-0" content = util.add_required_ping(content) all_pings = parser.parse_objects([content]) errs = list(all_pings) assert len(errs) == 0 nits = lint.lint_metrics(all_pings.value) assert len(nits) == num_nits if num_nits > 0: assert set(["BUG_NUMBER"]) == set(v.check_name for v in nits) def test_redundant_pings(): """ Test that name contains '-ping' or 'ping-' or 'ping' or 'custom' yields lint errors. 
""" content = {"ping": {}} content = util.add_required_ping(content) all_pings = parser.parse_objects([content]) errs = list(all_pings) assert len(errs) == 0 nits = lint.lint_metrics(all_pings.value) assert len(nits) == 1 assert set(["REDUNDANT_PING"]) == set(v.check_name for v in nits) @pytest.mark.parametrize( "require_tags,expected_nits", [ (False, 0), (True, 1), ], ) def test_metric_no_tags(require_tags, expected_nits): """Test what happens when a metric has no tags (depends on parser configuration)""" metric = { "foo": { "bar": { "type": "boolean", }, }, } objs = parser.parse_objects([util.add_required(metric)]) errs = list(objs) assert len(errs) == 0 nits = lint.lint_metrics(objs.value, {"require_tags": require_tags}) assert len(nits) == expected_nits if expected_nits: assert nits[0].check_name == "TAGS_REQUIRED" assert nits[0].name == "foo.bar" assert nits[0].msg == "Tags are required but no tags specified" @pytest.mark.parametrize( "require_tags,expected_nits", [ (False, 0), (True, 1), ], ) def test_ping_no_tags(require_tags, expected_nits): """Test what happens when a metric has no tags (depends on parser configuration)""" objs = parser.parse_objects([util.add_required_ping({"search": {}})]) errs = list(objs) assert len(errs) == 0 nits = lint.lint_metrics(objs.value, {"require_tags": require_tags}) assert len(nits) == expected_nits if expected_nits: assert nits[0].check_name == "TAGS_REQUIRED" assert nits[0].name == "search" assert nits[0].msg == "Tags are required but no tags specified" @pytest.mark.parametrize( "tags,expected_nits", [ (["apple"], 0), (["grapefruit"], 1), ], ) def test_check_metric_tag_names(tags, expected_nits): """ Test that specifying an invalid tag name inside a metric produces an error """ metric = { "foo": { "bar": { "type": "boolean", "metadata": {"tags": tags}, }, }, } defined_tags = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "apple": {"description": "apple is a banana"}, } objs = 
parser.parse_objects([util.add_required(metric), defined_tags]) errs = list(objs) assert len(errs) == 0 nits = lint.lint_metrics(objs.value) assert len(nits) == expected_nits if expected_nits: assert nits[0].check_name == "INVALID_TAGS" assert nits[0].name == "foo.bar" assert nits[0].msg == "Invalid tags specified in metric: grapefruit" @pytest.mark.parametrize( "tags,expected_nits", [ (["apple"], 0), (["grapefruit"], 1), ], ) def test_check_ping_tag_names(tags, expected_nits): """ Test that specifying an invalid tag name inside a metric produces an error """ defined_tags = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "apple": {"description": "apple is a banana"}, } objs = parser.parse_objects( [util.add_required_ping({"search": {"metadata": {"tags": tags}}}), defined_tags] ) errs = list(objs) assert len(errs) == 0 nits = lint.lint_metrics(objs.value) assert len(nits) == expected_nits if expected_nits: assert nits[0].check_name == "INVALID_TAGS" assert nits[0].name == "search" assert nits[0].msg == "Invalid tags specified in ping: grapefruit" def test_old_event_api(): """Test that the 'glinter' reports issues with the old event API.""" all_metrics = parser.parse_objects([ROOT / "data" / "old_event_api.yamlx"]) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value, parser_config={}) assert len(nits) == 1 assert nits[0].check_name == "OLD_EVENT_API" assert nits[0].name == "old_event.name" assert "Extra keys require a type" in nits[0].msg def test_unknown_pings_lint(): """Test that the 'glinter' reports issues with unknown pings in send_in_pings.""" input = [ROOT / "data" / "unknown_ping_used.yaml", ROOT / "data" / "pings.yaml"] all_objects = parser.parse_objects(input) errs = list(all_objects) assert len(errs) == 0 nits = lint.lint_metrics(all_objects.value, parser_config={}) assert len(nits) == 2 assert nits[0].check_name == "UNKNOWN_PING_REFERENCED" assert nits[0].name == "all_metrics.non_existent_ping" assert 
"does-not-exist" in nits[0].msg @pytest.mark.parametrize( "metric, num_nits", [ ({"metric": {"data_reviews": ["12345"]}}, 0), ({"metric": {"data_reviews": ["12345", "", "TODO"]}}, 1), ({"metric": {"data_reviews": [""]}}, 1), ({"metric": {"data_reviews": [""], "no_lint": ["EMPTY_DATAREVIEW"]}}, 0), ({"metric": {"data_reviews": ["TODO"]}}, 1), ({"metric": {"data_reviews": ["TODO"], "no_lint": ["EMPTY_DATAREVIEW"]}}, 0), ], ) def test_empty_datareviews(metric, num_nits): """ Test that the list of data reviews does not contain empty strings or TODO markers """ content = {"category": metric} content = util.add_required(content) all_metrics = parser.parse_objects(content) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == num_nits if num_nits > 0: assert set(["EMPTY_DATAREVIEW"]) == set(v.check_name for v in nits) @pytest.mark.parametrize( "metric, num_nits", [ ({"metric": {"type": "quantity", "unit": "sheep"}}, 0), ( { "metric": { "type": "custom_distribution", "unit": "quantillions", "range_max": 100, "bucket_count": 100, "histogram_type": "linear", } }, 0, ), ({"metric": {"type": "string", "unit": "quantillions"}}, 1), ({"metric": {"type": "counter", "unit": "quantillions"}}, 1), ( { "metric": { "type": "string", "unit": "quantillions", "no_lint": ["UNEXPECTED_UNIT"], } }, 0, ), ], ) def test_unit_on_metrics(metric, num_nits): content = {"category": metric} content = util.add_required(content) all_metrics = parser.parse_objects(content) errs = list(all_metrics) assert len(errs) == 0 nits = lint.lint_metrics(all_metrics.value) assert len(nits) == num_nits if num_nits > 0: assert set(["UNEXPECTED_UNIT"]) == set(v.check_name for v in nits) glean_parser-15.0.1/tests/test_markdown.py000066400000000000000000000161331466531427000206510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path from glean_parser import markdown from glean_parser import metrics from glean_parser import pings from glean_parser import translate ROOT = Path(__file__).parent def test_parser(tmp_path): """Test translating metrics to Markdown files.""" translate.translate( ROOT / "data" / "core.yaml", "markdown", tmp_path, {"namespace": "Foo", "introduction_extra": "Extra Intro Text Bar"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["metrics.md"]) # Make sure descriptions made it in with (tmp_path / "metrics.md").open("r", encoding="utf-8") as fd: content = fd.read() assert "is assembled out of the box by the Glean SDK." in content # Make sure the table structure is in place assert ( "| Name | Type | Description | Data reviews | Extras | " + "Expiration | [Data Sensitivity]" in content ) # Make sure non ASCII characters are there assert "جمع 搜集" in content # test that extra text made it assert "Extra Intro Text" in content def test_extra_info_generator(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) assert markdown.extra_info(event) == [("my_extra", "an extra")] labeled = metrics.LabeledCounter( type="labeled_counter", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", labels=["label"], ) assert markdown.extra_info(labeled) == [("label", None)] # We currently support extra info only for events and labeled types. 
other = metrics.Timespan( type="timespan", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], time_unit="day", notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert len(markdown.extra_info(other)) == 0 def test_ping_desc(): # Make sure to return something for built-in pings. for ping_name in pings.RESERVED_PING_NAMES: assert len(markdown.ping_desc(ping_name)) > 0 # We don't expect nothing for unknown pings. assert len(markdown.ping_desc("unknown-ping")) == 0 # If we have a custom ping cache, try look up the # description there. cache = {} cache["cached-ping"] = pings.Ping( name="cached-ping", description="the description for the custom ping\n with a surprise", bugs=["1234"], notification_emails=["email@example.com"], data_reviews=["https://www.example.com/review"], include_client_id=False, ) assert ( markdown.ping_desc("cached-ping", cache) == "the description for the custom ping\n with a surprise" ) # We don't expect nothing for unknown pings, even with caches. assert len(markdown.ping_desc("unknown-ping", cache)) == 0 def test_ping_docs(): # Make sure to return something for built-in pings. for ping_name in pings.RESERVED_PING_NAMES: docs = markdown.ping_docs(ping_name) assert docs.startswith("https://") assert len(docs) > 0 # We don't expect nothing for unknown pings. 
assert len(markdown.ping_docs("unknown-ping")) == 0 def test_metrics_docs(): assert ( markdown.metrics_docs("boolean") == "https://mozilla.github.io/glean/book/user/metrics/boolean.html" ) assert ( markdown.metrics_docs("labeled_counter") == "https://mozilla.github.io/glean/book/user/metrics/labeled_counters.html" ) assert ( markdown.metrics_docs("labeled_string") == "https://mozilla.github.io/glean/book/user/metrics/labeled_strings.html" ) def test_review_title(): index = 1 assert ( markdown.ping_review_title( "https://bugzilla.mozilla.org/show_bug.cgi?id=1581647", index ) == "Bug 1581647" ) assert ( markdown.ping_review_title( "https://github.com/mozilla-mobile/fenix/pull/1707", index ) == "mozilla-mobile/fenix#1707" ) assert markdown.ping_review_title("http://example.com/reviews", index) == "Review 1" def test_reasons(tmp_path): translate.translate( ROOT / "data" / "pings.yaml", "markdown", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["metrics.md"]) # Make sure descriptions made it in with (tmp_path / "metrics.md").open("r", encoding="utf-8") as fd: content = fd.read() assert "- `serious`: A serious reason for sending a ping." in content def test_event_extra_keys_in_correct_order(tmp_path): """ Assert that the extra keys appear in the parameter and the enumeration in the same order. https://bugzilla.mozilla.org/show_bug.cgi?id=1648768 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "markdown", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["metrics.md"]) with (tmp_path / "metrics.md").open("r", encoding="utf-8") as fd: content = fd.read() print(content) content = " ".join(content.split()) assert ( r"
  • And1WithExtraCasing: four
  • " r"
  • alice: two
  • " r"
  • bob: three
  • " r"
  • charlie: one
" in content ) def test_send_if_empty_metrics(tmp_path): translate.translate( [ ROOT / "data" / "send_if_empty_with_metrics.yaml", ROOT / "data" / "pings.yaml", ], "markdown", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["metrics.md"]) # Make sure descriptions made it in with (tmp_path / "metrics.md").open("r", encoding="utf-8") as fd: content = fd.read() assert "Lorem ipsum dolor sit amet, consectetur adipiscing elit." in content def test_data_sensitivity(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, data_sensitivity=["technical", "interaction"], ) assert markdown.data_sensitivity_numbers(event.data_sensitivity) == "1, 2" assert markdown.data_sensitivity_numbers(None) == "unknown" glean_parser-15.0.1/tests/test_metrics.py000066400000000000000000000134451466531427000205000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ import datetime import pytest from glean_parser import parser from glean_parser import metrics def test_metrics_match_schema(): """ Make sure the supported set of metric types in the schema matches the set in `metrics.py` """ schema, validator = parser._get_schema(parser.METRICS_ID) # "ERROR": The unset typename of `Metric` # "denominator": The special wrapper type around counter for external denominators assert set(metrics.Metric.metric_types.keys()) == set( schema["definitions"]["metric"]["properties"]["type"]["enum"] ) | set(["ERROR", "denominator"]) def test_enforcement(): """ Test dataclasses enforcement. 
""" with pytest.raises(TypeError): metrics.Boolean() # Python dataclasses don't actually validate any types, so we # delegate to jsonschema with pytest.raises(ValueError): metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], description=42, notification_emails=["nobody@example.com"], expires="never", ) def test_expires(): """ Test that expires is parsed correctly """ for date, expired in [ ("2018-06-10", True), (datetime.datetime.now(datetime.timezone.utc).date().isoformat(), True), ("3000-01-01", False), ]: m = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], expires=date, notification_emails=["nobody@example.com"], description="description...", ) assert m.is_expired() == expired with pytest.raises(ValueError): m = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], expires="foo", notification_emails=["nobody@example.com"], description="description...", ) m.validate_expires() def test_timespan_time_unit(): """ Test that the timespan's time_unit is coerced to an enum. """ m = metrics.Timespan( type="timespan", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], time_unit="day", notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert isinstance(m.time_unit, metrics.TimeUnit) assert m.time_unit == metrics.TimeUnit.day with pytest.raises(AttributeError): m = metrics.Timespan( type="timespan", category="category", name="metric", time_unit="foo", notification_emails=["nobody@example.com"], description="description...", expires="never", ) def test_identifier(): """ Test that the identifier is created correctly. 
""" m = metrics.Timespan( type="timespan", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], time_unit="day", notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert m.identifier() == "category.metric" def test_identifier_glean_category(): """ Test that the glean-internal identifier is created correctly. """ m = metrics.Timespan( type="timespan", category=metrics.Metric.glean_internal_metric_cat, name="metric", bugs=["http://bugzilla.mozilla.com/12345"], time_unit="day", notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert m.identifier() == "metric" def test_reserved_extra_keys(): """ Test that extra keys starting with 'glean.' are rejected for non-internal metrics. """ with pytest.raises(ValueError): metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"glean.internal": {"description": "foo", "type": "string"}}, ) metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"glean.internal": {"description": "foo", "type": "string"}}, _config={"allow_reserved": True}, ) def test_no_unit(): event = metrics.Event( type="event", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"glean.internal": {"description": "foo", "type": "string"}}, _config={"allow_reserved": True}, ) assert not event.unit def test_jwe_is_rejected(): with pytest.raises(ValueError): metrics.Jwe( type="jwe", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], 
description="description...", expires="never", extra_keys={"glean.internal": {"description": "foo"}}, _config={"allow_reserved": True}, ) glean_parser-15.0.1/tests/test_parser.py000066400000000000000000001165061466531427000203300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import json import re import textwrap import pytest from glean_parser import metrics from glean_parser import parser import util ROOT = Path(__file__).parent def test_parser(): """Test the basics of parsing a single file.""" all_metrics = parser.parse_objects( [ROOT / "data" / "core.yaml", ROOT / "data" / "pings.yaml"], config={"allow_reserved": True}, ) errs = list(all_metrics) assert len(errs) == 0 for category_key, category_val in all_metrics.value.items(): if category_key == "pings": continue for _metric_key, metric_val in category_val.items(): assert isinstance(metric_val, metrics.Metric) assert isinstance(metric_val.lifetime, metrics.Lifetime) if getattr(metric_val, "labels", None) is not None: assert isinstance(metric_val.labels, set) pings = all_metrics.value["pings"] assert pings["custom-ping"].name == "custom-ping" assert pings["custom-ping"].include_client_id is False assert pings["really-custom-ping"].name == "really-custom-ping" assert pings["really-custom-ping"].include_client_id is True def test_parser_invalid(): """Test the basics of parsing a single file.""" all_metrics = parser.parse_objects(ROOT / "data" / "invalid.yamlx") errors = list(all_metrics) assert len(errors) == 1 assert "could not determine a constructor for the tag" in errors[0] def test_parser_schema_violation(): """1507792""" all_metrics = parser.parse_objects(ROOT / "data" / "schema-violation.yaml") errors = list(all_metrics) found_errors = set( re.sub(r"\s", "", str(error).split("\n", 1)[1].strip()) for error in errors ) expected_errors = [ """ ``` gleantest.lifetime: 
test_counter_inv_lt: lifetime: user2 ``` 'user2' is not one of ['ping', 'user', 'application'] Documentation for this node: Definesthelifetimeofthe metric. It must be one of the following values: - `ping` (default): The metric is reset each time it is sent in a ping. - `user`: The metric contains a property that is part of the user's profile and is never reset. - `application`: The metric contains a property that is related to the application, and is reset only at application restarts. """, """ ``` gleantest.foo: a: b ``` 'b' is not of type 'object' Documentation for this node: Describes a single metric. See https://mozilla.github.io/glean_parser/metrics-yaml.html """, """ ``` gleantest.with.way.too.long.category.name ... ``` 'gleantest.with.way.too.long.category.name' is not valid under any of the given schemas 'gleantest.with.way.too.long.category.name' is too long 'gleantest.with.way.too.long.category.name' is not one of ['$schema', '$tags'] """, """ ``` gleantest.short.category:very_long_metric_name_this_is_too_long_as_well_since_it_has_sooooo_many_characters ``` 'very_long_metric_name_this_is_too_long_as_well_since_it_has_sooooo_many_characters' is not valid under any of the given schemas 'very_long_metric_name_this_is_too_long_as_well_since_it_has_sooooo_many_characters' is too long """, # noqa: E501 # """ ``` gleantest: test_event: type: event ``` Missing required properties: bugs, data_reviews, description, expires, notification_emails Documentation for this node: Describes a single metric. 
See https://mozilla.github.io/glean_parser/metrics-yaml.html """, """ ``` gleantest.event: event_too_many_extras: extra_keys: key_1: description: Sample extra key type: string key_2: description: Sample extra key type: string key_3: description: Sample extra key type: string key_4: description: Sample extra key type: string key_5: description: Sample extra key type: string key_6: description: Sample extra key type: string key_7: description: Sample extra key type: string key_8: description: Sample extra key type: string key_9: description: Sample extra key type: string key_10: description: Sample extra key type: string key_11: description: Sample extra key type: string key_12: description: Sample extra key type: string key_13: description: Sample extra key type: string key_14: description: Sample extra key type: string key_15: description: Sample extra key type: string key_16: description: Sample extra key type: string key_17: description: Sample extra key type: string key_18: description: Sample extra key type: string key_19: description: Sample extra key type: string key_20: description: Sample extra key type: string key_21: description: Sample extra key type: string key_22: description: Sample extra key type: string key_23: description: Sample extra key type: string key_24: description: Sample extra key type: string key_25: description: Sample extra key type: string key_26: description: Sample extra key type: string key_27: description: Sample extra key type: string key_28: description: Sample extra key type: string key_29: description: Sample extra key type: string key_30: description: Sample extra key type: string key_31: description: Sample extra key type: string key_32: description: Sample extra key type: string key_33: description: Sample extra key type: string key_34: description: Sample extra key type: string key_35: description: Sample extra key type: string key_36: description: Sample extra key type: string key_37: description: Sample extra key type: 
string key_38: description: Sample extra key type: string key_39: description: Sample extra key type: string key_40: description: Sample extra key type: string key_41: description: Sample extra key type: string key_42: description: Sample extra key type: string key_43: description: Sample extra key type: string key_44: description: Sample extra key type: string key_45: description: Sample extra key type: string key_46: description: Sample extra key type: string key_47: description: Sample extra key type: string key_48: description: Sample extra key type: string key_49: description: Sample extra key type: string key_50: description: Sample extra key type: string key_51: description: Sample extra key type: string ``` {'key_1': {'description': 'Sample extra key','type': 'string'}, 'key_2': {'description': 'Sample extra key','type': 'string'}, 'key_3': {'description': 'Sample extra key','type': 'string'}, 'key_4': {'description': 'Sample extra key','type': 'string'}, 'key_5': {'description': 'Sample extra key','type': 'string'}, 'key_6': {'description': 'Sample extra key','type': 'string'}, 'key_7': {'description': 'Sample extra key','type': 'string'}, 'key_8': {'description': 'Sample extra key','type': 'string'}, 'key_9': {'description': 'Sample extra key','type': 'string'}, 'key_10': {'description': 'Sample extra key','type': 'string'}, 'key_11': {'description': 'Sample extra key','type': 'string'}, 'key_12': {'description': 'Sample extra key','type': 'string'}, 'key_13': {'description': 'Sample extra key','type': 'string'}, 'key_14': {'description': 'Sample extra key','type': 'string'}, 'key_15': {'description': 'Sample extra key','type': 'string'}, 'key_16': {'description': 'Sample extra key','type': 'string'}, 'key_17': {'description': 'Sample extra key','type': 'string'}, 'key_18': {'description': 'Sample extra key','type': 'string'}, 'key_19': {'description': 'Sample extra key','type': 'string'}, 'key_20': {'description': 'Sample extra key','type': 'string'}, 
'key_21': {'description': 'Sample extra key','type': 'string'}, 'key_22': {'description': 'Sample extra key','type': 'string'}, 'key_23': {'description': 'Sample extra key','type': 'string'}, 'key_24': {'description': 'Sample extra key','type': 'string'}, 'key_25': {'description': 'Sample extra key','type': 'string'}, 'key_26': {'description': 'Sample extra key','type': 'string'}, 'key_27': {'description': 'Sample extra key','type': 'string'}, 'key_28': {'description': 'Sample extra key','type': 'string'}, 'key_29': {'description': 'Sample extra key','type': 'string'}, 'key_30': {'description': 'Sample extra key','type': 'string'}, 'key_31': {'description': 'Sample extra key','type': 'string'}, 'key_32': {'description': 'Sample extra key','type': 'string'}, 'key_33': {'description': 'Sample extra key','type': 'string'}, 'key_34': {'description': 'Sample extra key','type': 'string'}, 'key_35': {'description': 'Sample extra key','type': 'string'}, 'key_36': {'description': 'Sample extra key','type': 'string'}, 'key_37': {'description': 'Sample extra key','type': 'string'}, 'key_38': {'description': 'Sample extra key','type': 'string'}, 'key_39': {'description': 'Sample extra key','type': 'string'}, 'key_40': {'description': 'Sample extra key','type': 'string'}, 'key_41': {'description': 'Sample extra key','type': 'string'}, 'key_42': {'description': 'Sample extra key','type': 'string'}, 'key_43': {'description': 'Sample extra key','type': 'string'}, 'key_44': {'description': 'Sample extra key','type': 'string'}, 'key_45': {'description': 'Sample extra key','type': 'string'}, 'key_46': {'description': 'Sample extra key','type': 'string'}, 'key_47': {'description': 'Sample extra key','type': 'string'}, 'key_48': {'description': 'Sample extra key','type': 'string'}, 'key_49': {'description': 'Sample extra key','type': 'string'}, 'key_50': {'description': 'Sample extra key','type': 'string'}, 'key_51': {'description': 'Sample extra key','type': 'string'} } has too many 
properties Documentation for this node: The acceptable keys on the "extra" object sent with events. This is an object mapping the key to an object containing metadata about the key. A maximum of 50 extra keys is allowed. This metadata object has the following keys: - `description`: **Required.** A description of the key. Valid when `type`_ is `event`. """, ] expected_errors = set( re.sub(r"\s", "", textwrap.indent(textwrap.dedent(x), " ").strip()) for x in expected_errors ) # Compare errors 1-by-1 for better assertion message when it fails. found = sorted(list(found_errors)) expected = sorted(list(expected_errors)) for found_error, expected_error in zip(found, expected): assert found_error == expected_error def test_parser_empty(): """1507792: Get a good error message if the metrics.yaml file is empty.""" all_metrics = parser.parse_objects(ROOT / "data" / "empty.yaml") errors = list(all_metrics) assert len(errors) == 1 assert "file can not be empty" in errors[0] def test_invalid_schema(): all_metrics = parser.parse_objects([{"$schema": "This is wrong"}]) errors = list(all_metrics) assert any("key must be one of" in str(e) for e in errors) def test_merge_metrics(): """Merge multiple metrics.yaml files""" contents = [ {"category1": {"metric1": {}, "metric2": {}}, "category2": {"metric3": {}}}, {"category1": {"metric4": {}}, "category3": {"metric5": {}}}, ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) list(all_metrics) all_metrics = all_metrics.value assert set(all_metrics["category1"].keys()) == set( ["metric1", "metric2", "metric4"] ) assert set(all_metrics["category2"].keys()) == set(["metric3"]) assert set(all_metrics["category3"].keys()) == set(["metric5"]) def test_merge_metrics_clash(): """Merge multiple metrics.yaml files with conflicting metric names.""" contents = [{"category1": {"metric1": {}}}, {"category1": {"metric1": {}}}] contents = [util.add_required(x) for x in contents] all_metrics = 
parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "Duplicate metric name" in errors[0] def test_snake_case_enforcement(): """Expect exception if names aren't in snake case.""" contents = [ {"categoryWithCamelCase": {"metric": {}}}, {"category": {"metricWithCamelCase": {}}}, ] for content in contents: util.add_required(content) errors = list(parser._load_file(content, {})) assert len(errors) == 1 def test_multiple_errors(): """Make sure that if there are multiple errors, we get all of them.""" contents = [{"camelCaseName": {"metric": {"type": "unknown"}}}] contents = [util.add_required(x) for x in contents] metrics = parser.parse_objects(contents) errors = list(metrics) assert len(errors) == 2 def test_event_must_be_ping_lifetime(): contents = [{"category": {"metric": {"type": "event", "lifetime": "user"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "Event metrics must have ping lifetime" in errors[0] def test_parser_reserved(): contents = [{"glean.baseline": {"metric": {"type": "string"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "For category 'glean.baseline'" in errors[0] all_metrics = parser.parse_objects(contents, {"allow_reserved": True}) errors = list(all_metrics) assert len(errors) == 0 def invalid_in_category(name): return [{name: {"metric": {"type": "string"}}}] def invalid_in_name(name): return [{"baseline": {name: {"type": "string"}}}] def invalid_in_label(name): return [{"baseline": {"metric": {"type": "string", "labels": [name]}}}] @pytest.mark.parametrize( "location", [invalid_in_category, invalid_in_name, invalid_in_label] ) @pytest.mark.parametrize( "name", [ "1" * 72, "Møøse", ], ) def test_invalid_names(location, name): contents = location(name) contents = [util.add_required(x) for x 
in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert name in errors[0] def test_duplicate_send_in_pings(): """Test the basics of parsing a single file.""" all_metrics = parser.parse_objects( [ROOT / "data" / "duplicate_send_in_ping.yaml"], config={"allow_reserved": True} ) errs = list(all_metrics) assert len(errs) == 0 metric = all_metrics.value["telemetry"]["test"] assert metric.send_in_pings == ["core", "metrics"] def test_geckoview_only_on_valid_metrics(): for metric in [ "timing_distribution", "custom_distributiuon", "memory_distribution", ]: contents = [ {"category1": {"metric1": {"type": metric, "gecko_datapoint": "FOO"}}} ] contents = [util.add_required(x) for x in contents] contents = [{"category1": {"metric1": {"type": "event", "gecko_datapoint": "FOO"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 1 assert "is only allowed for" in str(errs[0]) def test_timing_distribution_unit_default(): contents = [{"category1": {"metric1": {"type": "timing_distribution"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errs = list(all_metrics) assert len(errs) == 0 assert ( all_metrics.value["category1"]["metric1"].time_unit == metrics.TimeUnit.nanosecond ) def test_all_pings_reserved(): # send_in_pings: [all-pings] is only allowed for internal metrics contents = [ {"category": {"metric": {"type": "string", "send_in_pings": ["all-pings"]}}} ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "On instance category.metric" in errors[0] assert "Only internal metrics" in errors[0] all_metrics = parser.parse_objects(contents, {"allow_reserved": True}) errors = list(all_metrics) assert len(errors) == 0 # A custom ping called "all-pings" is not allowed contents = 
[{"all-pings": {"include_client_id": True}}] contents = [util.add_required_ping(x) for x in contents] all_pings = parser.parse_objects(contents) errors = list(all_pings) assert len(errors) == 1 assert "is not allowed for 'all-pings'" def test_custom_distribution(): # Test plain custom_distribution, now also allowed generally contents = [ { "category": { "metric": { "type": "custom_distribution", "range_min": 0, "range_max": 60000, "bucket_count": 100, "histogram_type": "exponential", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0 # Test that custom_distribution has required parameters contents = [ { "category": { "metric": { "type": "custom_distribution", "gecko_datapoint": "FROM_GECKO", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "`custom_distribution` is missing required parameters" in errors[0] # Test maximum bucket_count is enforced contents = [ { "category": { "metric": { "type": "custom_distribution", "gecko_datapoint": "FROM_GECKO", "range_max": 60000, "bucket_count": 101, "histogram_type": "exponential", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "101 is greater than" in errors[0] # Test that correct usage contents = [ { "category": { "metric": { "type": "custom_distribution", "range_max": 60000, "bucket_count": 100, "histogram_type": "exponential", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0 distribution = all_metrics.value["category"]["metric"] assert distribution.range_min == 1 assert distribution.range_max == 60000 assert distribution.bucket_count == 100 assert distribution.histogram_type == 
metrics.HistogramType.exponential def test_memory_distribution(): # Test that we get an error for a missing unit contents = [{"category": {"metric": {"type": "memory_distribution"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert ( "`memory_distribution` is missing required parameter `memory_unit`" in errors[0] ) # Test that memory_distribution works contents = [ { "category": { "metric": {"type": "memory_distribution", "memory_unit": "megabyte"} } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0 assert len(all_metrics.value) == 1 assert ( all_metrics.value["category"]["metric"].memory_unit == metrics.MemoryUnit.megabyte ) def test_quantity(): # Test that we get an error for a missing unit contents = [{"category": {"metric": {"type": "quantity"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert any( "`quantity` is missing required parameter `unit`" in err for err in errors ) # Test that quantity works contents = [ { "category": { "metric": { "type": "quantity", "unit": "pixel", "gecko_datapoint": "FOO", } } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0 assert len(all_metrics.value) == 1 assert all_metrics.value["category"]["metric"].unit == "pixel" def test_do_not_disable_expired(): # Test that we get an error for a missing unit and gecko_datapoint contents = [{"category": {"metric": {"type": "boolean", "expires": "1900-01-01"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents, {"do_not_disable_expired": True}) errors = list(all_metrics) assert len(errors) == 0 metrics = all_metrics.value assert 
metrics["category"]["metric"].disabled is False def test_send_in_pings_restrictions(): """Test that invalid ping names are disallowed in `send_in_pings`.""" all_metrics = parser.parse_objects(ROOT / "data" / "invalid-ping-names.yaml") errors = list(all_metrics) assert len(errors) == 1 assert "'invalid_ping_name' does not match" in errors[0] def test_tags(): """Tests that tags can be specified.""" all_metrics = parser.parse_objects(ROOT / "data" / "metric-with-tags.yaml") errors = list(all_metrics) assert errors == [] assert len(all_metrics.value) == 1 assert set(all_metrics.value["telemetry"]["client_id"].metadata.keys()) == set( ["tags"] ) assert set(all_metrics.value["telemetry"]["client_id"].metadata["tags"]) == set( ["banana", "apple", "global_tag"] ) def test_custom_expires(): contents = [ { "category": { "metric": { "type": "boolean", "expires": "foo", }, "metric2": { "type": "boolean", "expires": "bar", }, } } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects( contents, { "custom_is_expired": lambda x: x == "foo", "custom_validate_expires": lambda x: x in ("foo", "bar"), }, ) errors = list(all_metrics) assert len(errors) == 0 assert all_metrics.value["category"]["metric"].disabled is True assert all_metrics.value["category"]["metric2"].disabled is False with pytest.raises(ValueError): # Double-check that parsing without custom functions breaks all_metrics = parser.parse_objects(contents) errors = list(all_metrics) def test_expire_by_major_version(): failing_metrics = [ { "category": { "metric_fail_date": { "type": "boolean", "expires": "1986-07-03", }, } } ] failing_metrics = [util.add_required(x) for x in failing_metrics] with pytest.raises(ValueError): # Dates are not allowed if expiration by major version is enabled. 
all_metrics = parser.parse_objects( failing_metrics, { "expire_by_version": 15, }, ) errors = list(all_metrics) contents = [ { "category": { "metric_expired_version": { "type": "boolean", "expires": 7, }, "metric_expired_edge": { "type": "boolean", "expires": 15, }, "metric_expired": { "type": "boolean", "expires": "expired", }, "metric": { "type": "boolean", "expires": 18, }, } } ] contents = [util.add_required(x) for x in contents] # Double-check that parsing without custom functions breaks all_metrics = parser.parse_objects( contents, { "expire_by_version": 15, }, ) errors = list(all_metrics) assert len(errors) == 0 assert all_metrics.value["category"]["metric_expired_version"].disabled is True assert all_metrics.value["category"]["metric_expired_edge"].disabled is True assert all_metrics.value["category"]["metric_expired"].disabled is True assert all_metrics.value["category"]["metric"].disabled is False def test_parser_mixed_expirations(): """Validate that mixing expiration types fail""" with pytest.raises(ValueError): # Mixing expiration types must fail when expiring by version. all_metrics = parser.parse_objects( ROOT / "data" / "mixed-expirations.yaml", { "expire_by_version": 15, }, ) list(all_metrics) with pytest.raises(ValueError): # Mixing expiration types must fail when expiring by date. all_metrics = parser.parse_objects(ROOT / "data" / "mixed-expirations.yaml") list(all_metrics) def test_expire_by_version_must_fail_if_disabled(): failing_metrics = [ { "category": { "metric_fail_date": { "type": "boolean", "expires": 99, }, } } ] failing_metrics = [util.add_required(x) for x in failing_metrics] with pytest.raises(ValueError): # Versions are not allowed if expiration by major version is enabled. all_metrics = parser.parse_objects(failing_metrics) list(all_metrics) def test_historical_versions(): """ Make sure we can load the correct version of the schema and it will correctly reject or not reject entries based on that. 
""" # No issues: # * Bugs as numbers # * event extra keys don't have a type contents = [ { "$schema": "moz://mozilla.org/schemas/glean/metrics/1-0-0", "category": { "metric": { "type": "event", "extra_keys": {"key_a": {"description": "foo"}}, "bugs": [42], } }, } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0 # 1 issue: # * Bugs as numbers are disallowed # # events: not having a `type` is fine in version 2. contents = [ { "$schema": "moz://mozilla.org/schemas/glean/metrics/2-0-0", "category": { "metric": { "type": "event", "extra_keys": {"key_a": {"description": "foo"}}, "bugs": [42], } }, } ] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 def test_telemetry_mirror(): """ Ensure that telemetry_mirror makes it into the parsed metric definition. """ all_metrics = parser.parse_objects( [ROOT / "data" / "telemetry_mirror.yaml"], config={"allow_reserved": False}, ) errs = list(all_metrics) assert len(errs) == 0 assert ( all_metrics.value["telemetry.mirrored"]["parses_fine"].telemetry_mirror == "telemetry.test.string_kind" ) def test_rates(): """ Ensure that `rate` metrics parse properly. 
""" all_metrics = parser.parse_objects( [ROOT / "data" / "rate.yaml"], config={"allow_reserved": False}, ) errs = list(all_metrics) assert len(errs) == 0 category = all_metrics.value["testing.rates"] assert category["has_internal_denominator"].type == "rate" assert ( category["has_external_denominator"].type == "rate" ) # Hasn't been transformed to "numerator" yet assert ( category["also_has_external_denominator"].type == "rate" ) # Hasn't been transformed to "numerator" yet assert ( category["the_denominator"].type == "counter" ) # Hasn't been transformed to "denominator" yet def test_ping_ordering(): contents = parser.parse_objects( [ROOT / "data" / "pings.yaml"], config={"allow_reserved": False}, ) errs = list(contents) assert len(errs) == 0 pings = list(contents.value["pings"].keys()) assert pings == sorted(pings) def test_metric_ordering(): all_metrics = parser.parse_objects( [ROOT / "data" / "ordering.yaml"], config={"allow_reserved": False} ) errs = list(all_metrics) assert len(errs) == 0 category = all_metrics.value["testing.ordering"] assert len(category.values()) > 0 test_metrics = [f"{m.category}.{m.name}" for m in category.values()] # Alphabetically ordered assert test_metrics == [ "testing.ordering.a_second_test_metric", "testing.ordering.first_test_metric", "testing.ordering.third_test_metric", ] def test_tag_ordering(): all_metrics = parser.parse_objects(ROOT / "data" / "metric-with-tags.yaml") errs = list(all_metrics) assert len(errs) == 0 tags = all_metrics.value["telemetry"]["client_id"].metadata["tags"] assert tags == sorted(tags) def test_text_valid(): """ Ensure that `text` metrics parse properly. 
""" all_metrics = parser.parse_objects( [ROOT / "data" / "text.yaml"], config={"allow_reserved": False}, ) errors = list(all_metrics) assert len(errors) == 0 assert all_metrics.value["valid.text"]["lifetime"].lifetime == metrics.Lifetime.ping assert all_metrics.value["valid.text"]["sensitivity"].data_sensitivity == [ metrics.DataSensitivity.stored_content ] def test_text_invalid(): """ Ensure that `text` metrics parse properly. """ all_metrics = parser.parse_objects( [ROOT / "data" / "text_invalid.yaml"], config={"allow_reserved": False}, ) errors = list(all_metrics) assert len(errors) == 3 def compare(expected, found): return "".join(expected.split()) in "".join(found.split()) for error in errors: if "sensitivity" in error: assert compare("'technical' is not one of", error) if "lifetime" in error: assert compare("'user' is not one of", error) if "builtin_pings" in error: assert compare("Built-in pings are not allowed", error) def test_metadata_tags_sorted(): all_metrics = parser.parse_objects( [ util.add_required( { "$tags": ["tag1"], "category": {"metric": {"metadata": {"tags": ["tag2"]}}}, } ) ] ) errors = list(all_metrics) assert len(errors) == 0 assert all_metrics.value["category"]["metric"].disabled is False assert all_metrics.value["category"]["metric"].metadata["tags"] == ["tag1", "tag2"] def test_no_lint_sorted(): all_objects = parser.parse_objects( [ util.add_required( { "no_lint": ["lint1"], "category": {"metric": {"no_lint": ["lint2"]}}, } ), util.add_required_ping( { "no_lint": ["lint1"], "ping": {"no_lint": ["lint2"]}, } ), { "$schema": parser.TAGS_ID, # no_lint is only valid at the top level for tags "no_lint": ["lint2", "lint1"], "tag": {"description": ""}, }, ] ) errors = list(all_objects) assert len(errors) == 0 assert all_objects.value["category"]["metric"].no_lint == ["lint1", "lint2"] assert all_objects.value["pings"]["ping"].no_lint == ["lint1", "lint2"] assert all_objects.value["tags"]["tag"].no_lint == ["lint1", "lint2"] def 
test_no_internal_fields_exposed(): """ We accidentally exposed fields like `_config` and `_generate_enums` before. These ended up in probe-scraper output. We replicate the code probe-scraper uses and ensure we get the JSON we expect from it. """ results = parser.parse_objects( [ util.add_required( { "category": { "metric": { "type": "event", "extra_keys": { "key_a": {"description": "desc-a", "type": "boolean"} }, } }, } ), ] ) errs = list(results) assert len(errs) == 0 metrics = { metric.identifier(): metric.serialize() for category, probes in results.value.items() for probe_name, metric in probes.items() } expected = { "category.metric": { "bugs": ["http://bugzilla.mozilla.org/12345678"], "data_reviews": ["https://example.com/review/"], "defined_in": {"line": 3}, "description": "DESCRIPTION...", "disabled": False, "expires": "never", "extra_keys": {"key_a": {"description": "desc-a", "type": "boolean"}}, "gecko_datapoint": "", "lifetime": "ping", "metadata": {}, "no_lint": [], "notification_emails": ["nobody@example.com"], "send_in_pings": ["events"], "type": "event", "version": 0, } } expected_json = json.dumps(expected, sort_keys=True, indent=2) out_json = json.dumps( metrics, sort_keys=True, indent=2, ) assert expected_json == out_json def test_object(): structure = {"type": "array", "items": {"type": "number"}} contents = [{"category": {"metric": {"type": "object", "structure": structure}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 0, errors assert len(all_metrics.value) == 1 assert all_metrics.value["category"]["metric"]._generate_structure == structure def test_object_invalid(): contents = [{"category": {"metric": {"type": "object"}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "`object` is missing required parameter `structure`" in errors[0] 
structure = {"type": "array", "items": {}} contents = [{"category": {"metric": {"type": "object", "structure": structure}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "invalid or missing `type`" in errors[0] structure = {"type": "array", "items": {"type": "unknown"}} contents = [{"category": {"metric": {"type": "object", "structure": structure}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "invalid or missing `type`" in errors[0] structure = {"type": "array", "properties": {}} contents = [{"category": {"metric": {"type": "object", "structure": structure}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "`properties` not allowed in array structure" in errors[0] structure = {"type": "object", "items": {}} contents = [{"category": {"metric": {"type": "object", "structure": structure}}}] contents = [util.add_required(x) for x in contents] all_metrics = parser.parse_objects(contents) errors = list(all_metrics) assert len(errors) == 1 assert "`items` not allowed in object structure" in errors[0] glean_parser-15.0.1/tests/test_pings.py000066400000000000000000000061451466531427000201510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from glean_parser import parser, pings import util def test_reserved_ping_name(): """ Make sure external users can't use a reserved ping name. 
""" for ping in pings.RESERVED_PING_NAMES: content = {ping: {"include_client_id": True}} util.add_required_ping(content) errors = list(parser._instantiate_pings({}, {}, content, "", {})) assert len(errors) == 1, f"Ping '{ping}' should not be allowed" assert "Ping uses a reserved name" in errors[0] errors = list( parser._instantiate_pings({}, {}, content, "", {"allow_reserved": True}) ) assert len(errors) == 0 def test_reserved_metrics_category(): """ The category "pings" can't be used by metrics -- it's reserved for pings. """ content = {"pings": {"metric": {"type": "string"}}} util.add_required(content) errors = list(parser.parse_objects(content)) assert len(errors) == 1 assert "reserved" in errors[0] def test_camel_case_ping_name(): content = {"camelCasePingName": {"include_client_id": True}} util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert len(errors) == 1 assert "camelCasePingName" in errors[0] def test_snake_case_ping_name(): content = {"snake_case_ping_name": {"include_client_id": True}} util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert len(errors) == 1 assert "snake_case_ping_name" in errors[0] def test_legacy_snake_case_ping_name(): content = { "bookmarks_sync": {"include_client_id": True}, "$schema": "moz://mozilla.org/schemas/glean/pings/1-0-0", } util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert len(errors) == 0 def test_send_if_empty(): content = {"valid-ping": {"include_client_id": True, "send_if_empty": True}} util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert len(errors) == 0 def test_send_if_disabled(): content = {"disabled-ping": {"include_client_id": True, "enabled": False}} util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert len(errors) == 0 def test_ping_schedule(): content = { "piggyback-ping": { "include_client_id": True, "metadata": {"ping_schedule": 
["trigger-ping"]}, }, "trigger-ping": {"include_client_id": True}, } util.add_required_ping(content) all_pings = parser.parse_objects([content]) errors = list(all_pings) assert len(errors) == 0 assert "piggyback-ping" in all_pings.value["pings"]["trigger-ping"].schedules_pings def test_no_self_ping_schedule(): content = { "my_ping": { "include_client_id": True, "metadata": {"ping_schedule": ["my_ping"]}, } } util.add_required_ping(content) errors = list(parser.parse_objects([content])) assert "my_ping" in errors[0] glean_parser-15.0.1/tests/test_python_server.py000066400000000000000000000040031466531427000217270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import io import json import shutil import subprocess from glean_parser import translate from glean_parser import validate_ping ROOT = Path(__file__).parent def test_no_event_metrics(tmp_path): """If no event metrics are provided parser should fail and no files should be generated""" translate.translate( [], "python_server", tmp_path, ) assert all(False for _ in tmp_path.iterdir()) def test_logging(tmp_path): """Test that generated code can be used to log events in the right format.""" glean_module_path = tmp_path / "glean" # generate logging code translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ], "python_server", glean_module_path, ) # copy ROOT / "test-py" / "test.py" to tmpdir shutil.copy(ROOT / "test-py" / "test.py", tmp_path) # run test script logged_output = subprocess.check_output(["python", "test.py"], cwd=tmp_path).decode( "utf-8" ) logged_output = json.loads(logged_output) fields = logged_output["Fields"] payload = fields["payload"] # validate that ping envelope contains all the required fields assert "glean-server-event" == logged_output["Type"] assert "accounts_backend" == fields["document_namespace"] assert "events" == fields["document_type"] 
assert "1" == fields["document_version"] assert "Mozilla/5.0 ..." == fields["user_agent"] schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/main/" "schemas/glean/glean/glean.1.schema.json" ) # validate that ping payload is valid against glean schema input = io.StringIO(payload) output = io.StringIO() assert ( validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 ), output.getvalue() glean_parser-15.0.1/tests/test_ruby_server.py000066400000000000000000000112021466531427000213660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import io import json import pytest import subprocess import glean_parser from glean_parser import translate from glean_parser import validate_ping ROOT = Path(__file__).parent def test_parser_rb_server_ping_file(tmp_path, capsys): """Test that no files are generated if ping definition is provided.""" translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ROOT / "data" / "server_pings.yaml", ], "ruby_server", tmp_path, ) captured = capsys.readouterr() assert all(False for _ in tmp_path.iterdir()) assert ( "Ping definition found. 
Server-side environment is simplified" in captured.out ) def test_parser_rb_server_no_event_metrics(tmp_path, capsys): """Test that no files are generated if no event metrics.""" translate.translate( [ROOT / "data" / "server_metrics_no_events_no_pings.yaml"], "ruby_server", tmp_path, ) captured = capsys.readouterr() assert all(False for _ in tmp_path.iterdir()) assert ( "No event metrics found...at least one event metric is required" in captured.out ) def test_parser_rb_server_metrics_unsupported_type(tmp_path, capsys): """Test that no files are generated with unsupported metric types.""" translate.translate( [ ROOT / "data" / "ruby_server_metrics_unsupported.yaml", ], "ruby_server", tmp_path, ) captured = capsys.readouterr() assert "Ignoring unsupported metric type" in captured.out assert "boolean" in captured.out def test_parser_rb_server_pings_unsupported_type(tmp_path, capsys): """Test that no files are generated with ping types that are not `events`.""" translate.translate( [ ROOT / "data" / "ruby_server_pings_unsupported.yaml", ], "ruby_server", tmp_path, ) captured = capsys.readouterr() assert "Non-events ping reference found" in captured.out assert "Ignoring the tests ping type" in captured.out def test_parser_rb_server(tmp_path): """Test that parser works""" translate.translate( [ROOT / "data" / "server_metrics_with_event.yaml"], "ruby_server", tmp_path, ) assert set(x.name for x in tmp_path.iterdir()) == set(["server_events.rb"]) # Make sure string metric made it in with (tmp_path / "server_events.rb").open("r", encoding="utf-8") as fd: content = fd.read() with (ROOT / "data" / "server_events_compare.rb").open( "r", encoding="utf-8" ) as cd: compare_raw = cd.read() compare = compare_raw.format( current_version=f"glean_parser v{glean_parser.__version__}" ) assert content == compare def run_logger(code_dir, import_file, code): """ Run the Ruby logger with a mocked logger that just prints the ping payload to STDOUT. 
""" tmpl_code = "" with open(ROOT / "test-rb" / "test.rb.tmpl", "r") as fp: tmpl_code = fp.read() tmpl_code = tmpl_code.replace("/* IMPORT */", import_file).replace( "/* CODE */", code ) with open(code_dir / "test.rb", "w") as fp: fp.write(tmpl_code) return subprocess.check_output(["ruby", "test.rb"], cwd=code_dir).decode("utf-8") @pytest.mark.ruby_dependency def test_run_logging(tmp_path): translate.translate( [ ROOT / "data" / "server_metrics_with_event.yaml", ], "ruby_server", tmp_path, ) code = """ events.backend_object_update.record( object_type: "type", object_state: "state", linking: true, identifiers_fxa_account_id: nil, user_agent: "glean-test/1.0", ip_address: "127.0.0.1" ) """ logged_output = run_logger(tmp_path, "server_events.rb", code) logged_output = json.loads(logged_output) fields = logged_output["Fields"] payload = fields["payload"] assert "glean-server-event" == logged_output["Type"] assert "glean.test" == fields["document_namespace"] assert "events" == fields["document_type"] assert "1" == fields["document_version"] assert "glean-test/1.0" == fields["user_agent"] schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/main/" "schemas/glean/glean/glean.1.schema.json" ) input = io.StringIO(payload) output = io.StringIO() assert ( validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 ), output.getvalue() glean_parser-15.0.1/tests/test_rust.py000066400000000000000000000215021466531427000200200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import shutil import subprocess import re from glean_parser import rust from glean_parser import metrics from glean_parser import pings from glean_parser import translate ROOT = Path(__file__).parent def run_linters(files): # Syntax check on the generated files. # Only run this test if cargo is on the path. 
if shutil.which("rustfmt"): for filepath in files: subprocess.check_call( [ "rustfmt", "--check", filepath, ] ) if shutil.which("cargo"): for filepath in files: subprocess.check_call( [ "cargo", "clippy", "--all", "--", "-D", "warnings", filepath, ] ) def test_parser(tmp_path): """Test translating metrics to Rust files.""" translate.translate( ROOT / "data" / "core.yaml", "rust", tmp_path, {}, {"allow_reserved": True} ) assert set(x.name for x in tmp_path.iterdir()) == set(["glean_metrics.rs"]) # Make sure descriptions made it in with (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") as fd: content = fd.read() assert "True if the user has set Firefox as the default browser." in content assert "جمع 搜集" in content assert 'category: "telemetry"' in content # We don't have a cargo.toml, not sure what to do here aside from creating a fake # one for the purpose of running cargo fmt and cargo clippy # run_linters(tmp_path.glob("*.rs")) def test_ping_parser(tmp_path): """Test translating pings to Rust files.""" translate.translate( ROOT / "data" / "pings.yaml", "rust", tmp_path, {"namespace": "Foo"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["glean_metrics.rs"]) # Make sure descriptions made it in with (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") as fd: content = fd.read() assert "This is a custom ping" in content assert ( "custom_ping: ::glean::private::__export::Lazy<::glean::private::" + "PingType> =\n ::glean::private::__export::Lazy::new" in content ) assert ( "custom_ping_might_be_empty: ::glean::private::__export::Lazy<" + "::glean::private::PingType> =\n ::glean::private::__export::Lazy::new" in content ) # TODO we need a cargo.toml to run `cargo fmt` and `cargo clippy` # and I'm not quite sure how to do that in a non-Rust project for # the purpose of testing run_linters(tmp_path.glob("*.rs")) def test_rust_generator(): kdf = rust.rust_datatypes_filter # The Rust datatypes filter encodes strings 
using JSON-escaping assert kdf("\n") == '"\\n".into()' assert kdf([42, "\n"]) == 'vec![42, "\\n".into()]' assert kdf(metrics.Lifetime.ping) == "Lifetime::Ping" def test_metric_type_name(): event = metrics.Event( type="event", category="category", name="metric", bugs=["42"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) assert rust.type_name(event) == "EventMetric" event = metrics.Event( type="event", category="category", name="metric", bugs=["42"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert rust.type_name(event) == "EventMetric" boolean = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert rust.type_name(boolean) == "BooleanMetric" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], ) assert rust.type_name(ping) == "Ping" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], reasons={"foo": "foolicious", "bar": "barlicious"}, ) assert rust.type_name(ping) == "Ping" def test_order_of_fields(tmp_path): """Test that translating metrics to Rust files keeps a stable order of fields.""" translate.translate( ROOT / "data" / "core.yaml", "rust", tmp_path, {}, {"allow_reserved": True} ) # Make sure descriptions made it in fd = (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") content = fd.read() fd.close() lines = content.splitlines() first_metric_fields = [] found_metric = False # Get the fields of the first metric # Checking only one metric should be good enough for now for line in lines: 
if found_metric: if re.search("..Default::default()$", line): break # Collect only the fields field = line.strip().split(":")[0] first_metric_fields.append(field) elif re.search("CommonMetricData {", line): found_metric = True expected_fields = ["category", "name", "send_in_pings", "lifetime", "disabled"] # We only check the limited list of always available fields. size = len(expected_fields) assert expected_fields == first_metric_fields[:size] def test_event_extra_keys_in_correct_order(tmp_path): """ Assert that the extra keys appear in the parameter and the enumeration in the same order. https://bugzilla.mozilla.org/show_bug.cgi?id=1648768 """ translate.translate( ROOT / "data" / "events_with_types.yaml", "rust", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["glean_metrics.rs"]) with (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "pub struct PreferenceToggledExtra { " "pub enabled: Option, pub preference: " "Option, pub swapped: Option, }" in content ) assert ( "const ALLOWED_KEYS: &'static [&'static str] = " '&["enabled", "preference", "swapped"];' in content ) def test_event_extra_keys_with_types(tmp_path): """ Assert that the extra keys with types appear with their corresponding types. 
""" translate.translate( ROOT / "data" / "events_with_types.yaml", "rust", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["glean_metrics.rs"]) with (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "impl ExtraKeys for PreferenceToggledExtra { " "const ALLOWED_KEYS: &'static [&'static str] = " '&["enabled", "preference", "swapped"];' in content ) assert ( "const ALLOWED_KEYS: &'static [&'static str] = " '&["enabled", "preference", "swapped"];' in content ) def test_object_metric(tmp_path): """ Assert that an object metric is created. """ translate.translate( ROOT / "data" / "object.yaml", "rust", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["glean_metrics.rs"]) with (tmp_path / "glean_metrics.rs").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "ObjectMetric" in content assert "pub struct ThreadsObjectItem { " in content assert "frames: ThreadsObjectItemFrames, }" in content assert ( "pub type ThreadsObjectItemFrames = " "Vec;" in content ) assert "pub struct ThreadsObjectItemFramesItem { " in content assert "module_index: Option, " in content assert "ip: Option, " in content assert "trust: Option, " in content assert "}" in content glean_parser-15.0.1/tests/test_swift.py000066400000000000000000000267161466531427000201730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import shutil import subprocess import re from glean_parser import swift from glean_parser import metrics from glean_parser import pings from glean_parser import translate from glean_parser.util import DictWrapper ROOT = Path(__file__).parent def run_linters(files): # Syntax check on the generated files. # Only run this test if swiftc is on the path. 
if shutil.which("swiftc"): for filepath in files: subprocess.check_call(["swiftc", "-parse", filepath]) # Lint check on the generated files. # Only run this test if swiftlint is on the path. if shutil.which("swiftlint"): for filepath in files: subprocess.check_call(["swiftlint", "lint", filepath]) def test_parser(tmp_path): """Test translating metrics to Swift files.""" translate.translate( ROOT / "data" / "core.yaml", "swift", tmp_path, {}, {"allow_reserved": True} ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) # Make sure descriptions made it in with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() assert "True if the user has set Firefox as the default browser." in content assert "جمع 搜集" in content assert 'category: ""' in content assert "class GleanBuild" in content assert "BuildInfo(buildDate:" in content run_linters(tmp_path.glob("*.swift")) def test_parser_no_build_info(tmp_path): """Test translating metrics to Swift files without build info.""" translate.translate( ROOT / "data" / "core.yaml", "swift", tmp_path, {"with_buildinfo": "false"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) # Make sure descriptions made it in with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() assert "class GleanBuild" not in content run_linters(tmp_path.glob("*.swift")) def test_parser_custom_build_date(tmp_path): """Test translating metrics to Swift files without build info.""" translate.translate( ROOT / "data" / "core.yaml", "swift", tmp_path, {"build_date": "2020-01-01T17:30:00"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) # Make sure descriptions made it in with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() assert "class GleanBuild" in content assert "BuildInfo(buildDate:" in content assert "year: 2020, month: 1, day: 1" in 
content run_linters(tmp_path.glob("*.swift")) def test_parser_all_metrics(tmp_path): """Test translating ALL metric types to Swift files.""" translate.translate( ROOT / "data" / "all_metrics.yaml", "swift", tmp_path, {"namespace": "Foo"}, {"allow_reserved": False}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) run_linters(tmp_path.glob("*.swift")) def test_ping_parser(tmp_path): """Test translating pings to Swift files.""" translate.translate( ROOT / "data" / "pings.yaml", "swift", tmp_path, {"namespace": "Foo"}, {"allow_reserved": True}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) # Make sure descriptions made it in with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() assert "This is a custom ping" in content # Make sure the namespace option is in effect assert "extension Foo" in content assert "customPing = Ping" in content assert ( "customPingMightBeEmpty = Ping" in content ) run_linters(tmp_path.glob("*.swift")) def test_swift_generator(): kdf = swift.swift_datatypes_filter assert kdf("\n") == r'"\n"' assert kdf([42, "\n"]) == r'[42, "\n"]' assert ( kdf(DictWrapper([("key", "value"), ("key2", "value2")])) == r'["key": "value", "key2": "value2"]' ) assert kdf(metrics.Lifetime.ping) == ".ping" def test_metric_type_name(): event = metrics.Event( type="event", category="category", name="metric", bugs=["42"], notification_emails=["nobody@example.com"], description="description...", expires="never", extra_keys={"my_extra": {"description": "an extra", "type": "string"}}, ) assert swift.type_name(event) == "EventMetricType" event = metrics.Event( type="event", category="category", name="metric", bugs=["42"], notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert swift.type_name(event) == "EventMetricType" boolean = metrics.Boolean( type="boolean", category="category", name="metric", bugs=["http://bugzilla.mozilla.com/12345"], 
notification_emails=["nobody@example.com"], description="description...", expires="never", ) assert swift.type_name(boolean) == "BooleanMetricType" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], ) assert swift.type_name(ping) == "Ping" ping = pings.Ping( name="custom", description="description...", include_client_id=True, bugs=["http://bugzilla.mozilla.com/12345"], notification_emails=["nobody@nowhere.com"], reasons={"foo": "foolicious", "bar": "barlicious"}, ) assert swift.type_name(ping) == "Ping" def test_order_of_fields(tmp_path): """Test that translating metrics to Swift files keeps a stable order of fields.""" translate.translate( ROOT / "data" / "core.yaml", "swift", tmp_path, {}, {"allow_reserved": True} ) # Make sure descriptions made it in fd = (tmp_path / "Metrics.swift").open("r", encoding="utf-8") content = fd.read() fd.close() lines = content.splitlines() first_metric_fields = [] found_metric = False # Get the fields of the first metric # Checking only one metric should be good enough for now for line in lines: if found_metric: if re.search("\\)$", line): break # Collect only the fields field = line.strip().split(":")[0] first_metric_fields.append(field) elif re.search("CommonMetricData", line): found_metric = True expected_fields = ["category", "name", "sendInPings", "lifetime", "disabled"] # We only check the limited list of always available fields. 
size = len(expected_fields) assert expected_fields == first_metric_fields[:size] def test_no_import_glean(tmp_path): translate.translate( ROOT / "data" / "core.yaml", "swift", tmp_path, {}, {"allow_reserved": True} ) # Make sure descriptions made it in fd = (tmp_path / "Metrics.swift").open("r", encoding="utf-8") content = fd.read() fd.close() assert "import Glean" not in content def test_import_glean(tmp_path): translate.translate(ROOT / "data" / "smaller.yaml", "swift", tmp_path, {}, {}) # Make sure descriptions made it in fd = (tmp_path / "Metrics.swift").open("r", encoding="utf-8") content = fd.read() fd.close() assert "import Glean" in content def test_event_extra_keys_in_correct_order(tmp_path): """ Assert that the extra keys appear in the parameter and the enumeration in the same order. https://bugzilla.mozilla.org/show_bug.cgi?id=1648768 """ translate.translate( ROOT / "data" / "event_key_ordering.yaml", "swift", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "struct ExampleExtra: EventExtras " "{ var and1withextracasing: Bool? var alice: String? var bob: String? var charlie: String?" in content ) assert ', ["And1WithExtraCasing", "alice", "bob", "charlie"]' in content def test_event_extra_keys_with_types(tmp_path): """ Assert that the extra keys with types appear with their corresponding types. """ translate.translate( ROOT / "data" / "events_with_types.yaml", "swift", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert ( "struct PreferenceToggledExtra: EventExtras " "{ var enabled: Bool? var preference: String? " "var swapped: Int32?" 
in content ) assert ', ["enabled", "preference", "swapped"]' in content def test_reasons(tmp_path): """ Assert that we generate the reason codes as a plain enum. https://bugzilla.mozilla.org/show_bug.cgi?id=1811888 """ translate.translate( ROOT / "data" / "pings.yaml", "swift", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) expected = "enum CustomPingMightBeEmptyReasonCodes: Int, ReasonCodes { case serious = 0 case silly = 1 public func index() -> Int { return self.rawValue } }" # noqa assert expected in content expected = "let customPing = Ping(" assert expected in content expected = "let customPingMightBeEmpty = Ping(" assert expected in content def test_object_metric(tmp_path): """ Assert that an object metric is created. """ translate.translate( ROOT / "data" / "object.yaml", "swift", tmp_path, {"namespace": "Foo"}, ) assert set(x.name for x in tmp_path.iterdir()) == set(["Metrics.swift"]) with (tmp_path / "Metrics.swift").open("r", encoding="utf-8") as fd: content = fd.read() content = " ".join(content.split()) assert "typealias ArrayInArrayObjectItemItem = Bool" in content assert "typealias NumberArrayObjectItem = Int64" in content assert "ObjectMetricType" in content assert "typealias ThreadsObject = [ThreadsObjectItem]" in content assert "struct ThreadsObjectItem: Codable, Equatable, ObjectSerialize {" in content assert ( "var frames: ThreadsObjectItemFrames" in content ) assert "struct ThreadsObjectItemFramesItem: Codable, Equatable, ObjectSerialize {" in content assert "var moduleIndex: Int64?" in content assert "var ip: String?" in content assert "var trust: String?" 
in content glean_parser-15.0.1/tests/test_tags.py000066400000000000000000000026731466531427000177710ustar00rootroot00000000000000from glean_parser import parser def test_basic_tags(): content = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "Tag": {"description": "Normal tag"}, "Testing :: General": {"description": "Testing / General"}, } objs = parser.parse_objects([content]) errors = list(objs) assert len(errors) == 0 tags = objs.value["tags"] assert set(tags.keys()) == set(["Tag", "Testing :: General"]) assert tags["Tag"].description == content["Tag"]["description"] def test_tags_description_required(): content = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "Tag": {}, } errors = list(parser.parse_objects([content])) assert len(errors) == 1 assert "Missing required properties: description" in errors[0] def test_tags_extra_keys_not_allowed(): content = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "Tag": {"description": "Normal tag", "extra": "Extra stuff"}, } errors = list(parser.parse_objects([content])) assert len(errors) == 1 assert "Additional properties are not allowed" in errors[0] def test_tags_name_too_long(): content = { "$schema": "moz://mozilla.org/schemas/glean/tags/1-0-0", "Tag" * 80: {"description": "This name is way too long"}, } errors = list(parser.parse_objects([content])) assert len(errors) == 1 assert "TagTagTag' is too long" in errors[0] glean_parser-15.0.1/tests/test_translate.py000066400000000000000000000160751466531427000210310ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ from pathlib import Path import pytest import shutil from glean_parser import parser from glean_parser import translate from glean_parser.util import load_yaml_or_json import util ROOT = Path(__file__).parent def test_translate_unknown_format(): with pytest.raises(ValueError) as e: translate.translate([], "foo", ".") assert "Unknown output format" in str(e) def test_all_metrics(tmp_path): output = tmp_path / "tags_test" translate.translate( [ROOT / "data" / "all_metrics.yaml"], "kotlin", output, options={"with_buildinfo": "false"}, parser_config={"allow_reserved": True}, ) # A single metric category in a single file assert len(list(output.iterdir())) == 1 def test_translate_missing_directory(tmp_path): output = tmp_path / "foo" translate.translate( ROOT / "data" / "core.yaml", "kotlin", output, parser_config={"allow_reserved": True}, ) assert len(list(output.iterdir())) == 6 def test_translate_missing_input_files(tmp_path): output = tmp_path with pytest.raises(FileNotFoundError): translate.translate( ROOT / "data" / "missing.yaml", "kotlin", output, parser_config={"allow_reserved": True}, ) assert 0 == translate.translate( ROOT / "data" / "missing.yaml", "kotlin", output, parser_config={"allow_reserved": True, "allow_missing_files": True}, ) def test_translate_remove_obsolete_kotlin_files(tmp_path): output = tmp_path / "foo" translate.translate( ROOT / "data" / "core.yaml", "kotlin", output, parser_config={"allow_reserved": True}, ) assert len(list(output.iterdir())) == 6 translate.translate(ROOT / "data" / "smaller.yaml", "kotlin", output) assert len(list(output.iterdir())) == 2 def test_translate_retains_existing_markdown_files(tmp_path): output = tmp_path / "foo" translate.translate( ROOT / "data" / "core.yaml", "markdown", output, parser_config={"allow_reserved": True}, ) # Move the file to a different location, translate always writes to # metrics.md. 
shutil.move(str(output / "metrics.md"), str(output / "old.md")) assert len(list(output.iterdir())) == 1 translate.translate(ROOT / "data" / "smaller.yaml", "markdown", output) assert len(list(output.iterdir())) == 2 def test_translate_expires(): contents = [ { "metrics": { "a": {"type": "counter", "expires": "never"}, "b": {"type": "counter", "expires": "expired"}, "c": {"type": "counter", "expires": "2000-01-01"}, "d": {"type": "counter", "expires": "2100-01-01"}, } } ] contents = [util.add_required(x) for x in contents] objs = parser.parse_objects(contents) assert len(list(objs)) == 0 objs = objs.value assert objs["metrics"]["a"].disabled is False assert objs["metrics"]["b"].disabled is True assert objs["metrics"]["c"].disabled is True assert objs["metrics"]["d"].disabled is False def test_translate_send_in_pings(tmp_path): contents = [ { "baseline": { "counter": {"type": "counter"}, "event": {"type": "event"}, "c": {"type": "counter", "send_in_pings": ["default", "custom"]}, } } ] contents = [util.add_required(x) for x in contents] objs = parser.parse_objects(contents) assert len(list(objs)) == 0 objs = objs.value assert objs["baseline"]["counter"].send_in_pings == ["metrics"] assert objs["baseline"]["event"].send_in_pings == ["events"] assert objs["baseline"]["c"].send_in_pings == ["custom", "metrics"] def test_translate_dont_remove_extra_files(tmp_path): output = tmp_path / "foo" output.mkdir() with (output / "extra.txt").open("w") as fd: fd.write("\n") translate.translate( ROOT / "data" / "core.yaml", "kotlin", output, parser_config={"allow_reserved": True}, ) assert len(list(output.iterdir())) == 7 assert "extra.txt" in [str(x.name) for x in output.iterdir()] def test_external_translator(tmp_path): def external_translator(all_objects, output_dir, options): assert {"foo": "bar", "allow_reserved": True} == options for category in all_objects: with (tmp_path / category).open("w") as fd: for metric in category: fd.write(f"{metric}\n") 
translate.translate_metrics( ROOT / "data" / "core.yaml", tmp_path, external_translator, [], options={"foo": "bar"}, parser_config={"allow_reserved": True}, ) content = load_yaml_or_json(ROOT / "data" / "core.yaml") expected_keys = set(content.keys()) - set(["$schema"]) assert set(x.name for x in tmp_path.iterdir()) == expected_keys def test_getting_line_number(): pings = load_yaml_or_json(ROOT / "data" / "pings.yaml") metrics = load_yaml_or_json(ROOT / "data" / "core.yaml") assert pings["custom-ping"].defined_in["line"] == 7 assert metrics["core_ping"]["seq"].defined_in["line"] == 69 def test_rates(tmp_path): def external_translator(all_objects, output_dir, options): category = all_objects["testing.rates"] assert category["has_internal_denominator"].type == "rate" assert category["has_external_denominator"].type == "numerator" assert ( category["has_external_denominator"].denominator_metric == "testing.rates.the_denominator" ) assert category["also_has_external_denominator"].type == "numerator" assert ( category["also_has_external_denominator"].denominator_metric == "testing.rates.the_denominator" ) assert category["the_denominator"].type == "denominator" numerators = [ f"{m.category}.{m.name}" for m in category["the_denominator"].numerators ] # Because generation gaurantees order, this must be sorted. 
assert numerators == sorted( [ "testing.rates.has_external_denominator", "testing.rates.also_has_external_denominator", ] ) translate.translate_metrics( ROOT / "data" / "rate.yaml", tmp_path, external_translator, [], options={"foo": "bar"}, parser_config={"allow_reserved": True}, ) def test_with_tags(tmp_path): output = tmp_path / "tags_test" translate.translate( [ ROOT / "data" / "metric-with-tags.yaml", ROOT / "data" / "tags.yaml", ], "kotlin", output, options={"with_buildinfo": "false"}, parser_config={"allow_reserved": True}, ) # A single metric category in a single file assert len(list(output.iterdir())) == 1 glean_parser-15.0.1/tests/test_utils.py000066400000000000000000000021241466531427000201620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. # http://creativecommons.org/publicdomain/zero/1.0/ from glean_parser.util import to_camel_case, remove_output_params def test_camel_case_first_lowercase(): assert "testMe" == to_camel_case("test_me", False) def test_camel_case_first_uppercase(): assert "TestMe" == to_camel_case("test_me", True) def test_camel_case_empty_tokens(): assert "testMe" == to_camel_case("__test____me", False) def test_camel_case_dots_sanitized(): assert "testMeYeah" == to_camel_case("__test..me.yeah", False) def test_camel_case_numbers(): assert "g33kS4n1t1z3d" == to_camel_case("g33k_s4n1t1z3d", False) def test_camel_case_expected(): assert "easyOne" == to_camel_case("easy_one", False) assert "moreInvolved1" == to_camel_case("more_involved_1", False) def test_removing_output_params(): d = {"name": "test dict", "defined_in": {"line": "42"}, "abc": "xyz"} output_removed = {"name": "test dict", "abc": "xyz"} test = remove_output_params(d, "defined_in") assert test == output_removed glean_parser-15.0.1/tests/test_validate_ping.py000066400000000000000000000024171466531427000216350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ import io import json import pytest from pytest import raises from glean_parser import validate_ping @pytest.mark.web_dependency def test_validate_ping(): content = { "experiments": { "experiment2": {"branch": "branch_b", "extra": {"key": "value"}} }, "metrics": {"string": {"telemetry.string_metric": "foo"}}, "ping_info": { "ping_type": "metrics", "telemetry_sdk_build": "0.32.0", "seq": 0, "app_build": "test-placeholder", "client_id": "900b6d8c-34d2-44d4-926d-83bde790474f", "start_time": "2018-11-19T16:19-05:00", "end_time": "2018-11-19T16:19-05:00", }, } input = io.StringIO(json.dumps(content)) output = io.StringIO() schema_url = ( "https://raw.githubusercontent.com/mozilla-services/" "mozilla-pipeline-schemas/3a15121c582ef0cffe430da024a5bf11b7c48740/" "schemas/glean/baseline/baseline.1.schema.json" ) assert validate_ping.validate_ping(input, output, schema_url=schema_url) == 0 with raises(TypeError): validate_ping.validate_ping(input, output) glean_parser-15.0.1/tests/util.py000066400000000000000000000030431466531427000167410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Any copyright is dedicated to the Public Domain. 
# http://creativecommons.org/publicdomain/zero/1.0/ from glean_parser import parser def add_required(chunk): DEFAULTS = { "type": "string", "bugs": ["http://bugzilla.mozilla.org/12345678"], "description": "DESCRIPTION...", "notification_emails": ["nobody@example.com"], "data_reviews": ["https://example.com/review/"], "expires": "never", } for category_key, category_val in chunk.items(): if category_key in ("$schema", "$tags", "no_lint"): continue for metric in category_val.values(): for default_name, default_val in DEFAULTS.items(): if default_name not in metric: metric[default_name] = default_val if "$schema" not in chunk: chunk["$schema"] = parser.METRICS_ID return chunk def add_required_ping(chunk): DEFAULTS = { "bugs": ["http://bugzilla.mozilla.org/12345678"], "description": "DESCRIPTION...", "notification_emails": ["nobody@nowhere.com"], "data_reviews": ["https://nowhere.com/review/"], "include_client_id": True, } for ping_name, ping in chunk.items(): if ping_name in ("no_lint", "$schema"): continue for default_name, default_val in DEFAULTS.items(): if default_name not in ping: ping[default_name] = default_val if "$schema" not in chunk: chunk["$schema"] = parser.PINGS_ID return chunk glean_parser-15.0.1/tools/000077500000000000000000000000001466531427000154105ustar00rootroot00000000000000glean_parser-15.0.1/tools/extract_data_categories.py000077500000000000000000000111521466531427000226350ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ Usage: python extract_data_categories.py metrics.yaml Automatically extract the data collection categories for all the metrics in a metrics.yaml file by consulting the linked data reviews. 
This script reads a metrics.yaml file, visits all of the associated data reviews, trying to determine the associated data categories, and inserts them (in place) to the original metrics.yaml file. A very simple heuristic is used: to look for the question about data categories used in all data reviews, and then find any numbers between it and the next question. When this simple heuristic fails, comments with "!!!" are inserted in the output as a recommendation to manually investigate and enter the data categories. Requirements from PyPI: BeautifulSoup4, PyYAML """ import dbm import functools import re import sys import time from typing import List, Set from urllib.request import urlopen from bs4 import BeautifulSoup import yaml cache = dbm.open("bugzilla-cache.db", "c") QUESTION = "what collection type of data do the requested measurements fall under?" CATEGORY_MAP = { 1: "technical", 2: "interaction", 3: "stored_content", 4: "highly_sensitive", } def fetch_url(url: str) -> str: """ Fetch a web page containing a data review, caching it to avoid over-fetching. """ content = cache.get(url) if content is not None: return content print(f"Fetching {url}") content = urlopen(url).read() cache[url] = content time.sleep(0.5) return content @functools.lru_cache(1000) def parse_data_review(html: str) -> Set[int]: """ Parse a single data review. """ soup = BeautifulSoup(html, features="html.parser") text = soup.get_text() lines = iter(text.splitlines()) for line in lines: if QUESTION in line.strip(): break categories: Set[int] = set() for line in lines: if "?" in line: break categories.update(int(x) for x in re.findall("[0-9]+", line)) return categories def categories_as_strings(categories: Set[int]) -> List[str]: """ From a set of numeric categories, return the strings used in a metrics.yaml file. This may contain strings representing errors. """ if len(categories): return [ CATEGORY_MAP.get(x, f"!!!UNKNOWN CATEGORY {x}") for x in sorted(list(categories)) ] else: return ["!!! 
NO DATA CATEGORIES FOUND"] def update_lines( lines: List[str], category_name: str, metric_name: str, data_sensitivity_values: List[str], ) -> List[str]: """ Update the lines of a YAML file in place to include the data_sensitivity for the given metric, returning the lines of the result. """ output = [] lines_iter = iter(lines) for line in lines_iter: output.append(line) if line.startswith(f"{category_name}:"): break for line in lines_iter: output.append(line) if line.startswith(f" {metric_name}:"): break for line in lines_iter: output.append(line) if line.startswith(" data_reviews:"): break for line in lines_iter: if not line.strip().startswith("- "): output.append(" data_sensitivity:\n") for data_sensitivity in data_sensitivity_values: output.append(f" - {data_sensitivity}\n") output.append(line) break else: output.append(line) for line in lines_iter: output.append(line) return output def parse_yaml(yamlpath: str): with open(yamlpath) as fd: content = yaml.safe_load(fd) with open(yamlpath) as fd: lines = list(fd.readlines()) for category_name, category in content.items(): if category_name.startswith("$") or category_name == "no_lint": continue for metric_name, metric in category.items(): categories = set() for data_review_url in metric["data_reviews"]: html = fetch_url(data_review_url) categories.update(parse_data_review(html)) lines = update_lines( lines, category_name, metric_name, categories_as_strings(categories) ) with open(yamlpath, "w") as fd: for line in lines: fd.write(line) if __name__ == "__main__": parse_yaml(sys.argv[-1])