pax_global_header 0000666 0000000 0000000 00000000064 15063547320 0014517 g ustar 00root root 0000000 0000000 52 comment=bb513222d215dd418e80fe17cf0a19ddc4f943e0
awkward-2.8.9/ 0000775 0000000 0000000 00000000000 15063547320 0013177 5 ustar 00root root 0000000 0000000 awkward-2.8.9/.all-contributorsrc 0000664 0000000 0000000 00000035655 15063547320 0017046 0 ustar 00root root 0000000 0000000 {
"files": [
"README.md"
],
"imageSize": 100,
"commit": false,
"contributors": [
{
"login": "jpivarski",
"name": "Jim Pivarski",
"avatar_url": "https://avatars0.githubusercontent.com/u/1852447?v=4",
"profile": "https://github.com/jpivarski",
"contributions": [
"code",
"doc",
"infra",
"maintenance"
]
},
{
"login": "ianna",
"name": "Ianna Osborne",
"avatar_url": "https://avatars0.githubusercontent.com/u/1390682?v=4",
"profile": "https://github.com/ianna",
"contributions": [
"code"
]
},
{
"login": "reikdas",
"name": "Pratyush Das",
"avatar_url": "https://avatars0.githubusercontent.com/u/11775615?v=4",
"profile": "https://github.com/reikdas",
"contributions": [
"code"
]
},
{
"login": "trickarcher",
"name": "Anish Biswas",
"avatar_url": "https://avatars3.githubusercontent.com/u/39878675?v=4",
"profile": "https://github.com/trickarcher",
"contributions": [
"code"
]
},
{
"login": "glass-ships",
"name": "glass-ships",
"avatar_url": "https://avatars2.githubusercontent.com/u/26975530?v=4",
"profile": "https://github.com/glass-ships",
"contributions": [
"code",
"test"
]
},
{
"login": "henryiii",
"name": "Henry Schreiner",
"avatar_url": "https://avatars1.githubusercontent.com/u/4616906?v=4",
"profile": "http://iscinumpy.gitlab.io",
"contributions": [
"code",
"infra"
]
},
{
"login": "nsmith-",
"name": "Nicholas Smith",
"avatar_url": "https://avatars2.githubusercontent.com/u/6587412?v=4",
"profile": "https://github.com/nsmith-",
"contributions": [
"code",
"test"
]
},
{
"login": "lgray",
"name": "Lindsey Gray",
"avatar_url": "https://avatars0.githubusercontent.com/u/1068089?v=4",
"profile": "https://github.com/lgray",
"contributions": [
"code",
"test"
]
},
{
"login": "Ellipse0934",
"name": "Ellipse0934",
"avatar_url": "https://avatars3.githubusercontent.com/u/7466364?v=4",
"profile": "https://github.com/Ellipse0934",
"contributions": [
"test"
]
},
{
"login": "veprbl",
"name": "Dmitry Kalinkin",
"avatar_url": "https://avatars1.githubusercontent.com/u/245573?v=4",
"profile": "https://gitlab.com/veprbl",
"contributions": [
"infra"
]
},
{
"login": "EscottC",
"name": "Charles Escott",
"avatar_url": "https://avatars3.githubusercontent.com/u/48469669?v=4",
"profile": "https://www.linkedin.com/in/charles-c-escott/",
"contributions": [
"code"
]
},
{
"login": "masonproffitt",
"name": "Mason Proffitt",
"avatar_url": "https://avatars3.githubusercontent.com/u/32773304?v=4",
"profile": "https://github.com/masonproffitt",
"contributions": [
"code"
]
},
{
"login": "mhedges",
"name": "Michael Hedges",
"avatar_url": "https://avatars3.githubusercontent.com/u/18672512?v=4",
"profile": "https://github.com/mhedges",
"contributions": [
"code"
]
},
{
"login": "guitargeek",
"name": "Jonas Rembser",
"avatar_url": "https://avatars2.githubusercontent.com/u/6578603?v=4",
"profile": "https://github.com/guitargeek",
"contributions": [
"code"
]
},
{
"login": "Jayd-1234",
"name": "Jaydeep Nandi",
"avatar_url": "https://avatars0.githubusercontent.com/u/34567389?v=4",
"profile": "https://github.com/Jayd-1234",
"contributions": [
"code"
]
},
{
"login": "benkrikler",
"name": "benkrikler",
"avatar_url": "https://avatars0.githubusercontent.com/u/4083697?v=4",
"profile": "https://github.com/benkrikler",
"contributions": [
"code"
]
},
{
"login": "bfis",
"name": "bfis",
"avatar_url": "https://avatars0.githubusercontent.com/u/15651150?v=4",
"profile": "https://github.com/bfis",
"contributions": [
"code"
]
},
{
"login": "douglasdavis",
"name": "Doug Davis",
"avatar_url": "https://avatars2.githubusercontent.com/u/3202090?v=4",
"profile": "https://ddavis.io/",
"contributions": [
"code"
]
},
{
"login": "jpata",
"name": "Joosep Pata",
"avatar_url": "https://avatars0.githubusercontent.com/u/69717?v=4",
"profile": "http://twitter: @JoosepPata",
"contributions": [
"ideas"
]
},
{
"login": "martindurant",
"name": "Martin Durant",
"avatar_url": "https://avatars1.githubusercontent.com/u/6042212?v=4",
"profile": "http://martindurant.github.io/",
"contributions": [
"ideas"
]
},
{
"login": "gordonwatts",
"name": "Gordon Watts",
"avatar_url": "https://avatars2.githubusercontent.com/u/1778366?v=4",
"profile": "http://gordonwatts.wordpress.com",
"contributions": [
"ideas"
]
},
{
"login": "nikoladze",
"name": "Nikolai Hartmann",
"avatar_url": "https://avatars0.githubusercontent.com/u/3707225?v=4",
"profile": "https://gitlab.com/nikoladze",
"contributions": [
"code"
]
},
{
"login": "sjperkins",
"name": "Simon Perkins",
"avatar_url": "https://avatars3.githubusercontent.com/u/3530212?v=4",
"profile": "https://github.com/sjperkins",
"contributions": [
"code"
]
},
{
"login": "drahnreb",
"name": ".hard",
"avatar_url": "https://avatars.githubusercontent.com/u/25883607?v=4",
"profile": "https://github.com/drahnreb",
"contributions": [
"code",
"test"
]
},
{
"login": "HenryDayHall",
"name": "HenryDayHall",
"avatar_url": "https://avatars.githubusercontent.com/u/12996763?v=4",
"profile": "https://github.com/HenryDayHall",
"contributions": [
"code"
]
},
{
"login": "agoose77",
"name": "Angus Hollands",
"avatar_url": "https://avatars.githubusercontent.com/u/1248413?v=4",
"profile": "https://github.com/agoose77",
"contributions": [
"test",
"code"
]
},
{
"login": "ioanaif",
"name": "ioanaif",
"avatar_url": "https://avatars.githubusercontent.com/u/9751871?v=4",
"profile": "https://github.com/ioanaif",
"contributions": [
"code",
"test"
]
},
{
"login": "bmwiedemann",
"name": "Bernhard M. Wiedemann",
"avatar_url": "https://avatars.githubusercontent.com/u/637990?v=4",
"profile": "http://lizards.opensuse.org/author/bmwiedemann/",
"contributions": [
"maintenance"
]
},
{
"login": "matthewfeickert",
"name": "Matthew Feickert",
"avatar_url": "https://avatars.githubusercontent.com/u/5142394?v=4",
"profile": "http://www.matthewfeickert.com/",
"contributions": [
"maintenance"
]
},
{
"login": "SantamRC",
"name": "Santam Roy Choudhury",
"avatar_url": "https://avatars.githubusercontent.com/u/52635773?v=4",
"profile": "https://github.com/SantamRC",
"contributions": [
"test"
]
},
{
"login": "BioGeek",
"name": "Jeroen Van Goey",
"avatar_url": "https://avatars.githubusercontent.com/u/59344?v=4",
"profile": "http://jeroen.vangoey.be",
"contributions": [
"doc"
]
},
{
"login": "Ahmad-AlSubaie",
"name": "Ahmad-AlSubaie",
"avatar_url": "https://avatars.githubusercontent.com/u/32343365?v=4",
"profile": "https://github.com/Ahmad-AlSubaie",
"contributions": [
"code"
]
},
{
"login": "ManasviGoyal",
"name": "Manasvi Goyal",
"avatar_url": "https://avatars.githubusercontent.com/u/55101825?v=4",
"profile": "https://github.com/ManasviGoyal",
"contributions": [
"code"
]
},
{
"login": "aryan26roy",
"name": "Aryan Roy",
"avatar_url": "https://avatars.githubusercontent.com/u/50577809?v=4",
"profile": "https://github.com/aryan26roy",
"contributions": [
"code"
]
},
{
"login": "Saransh-cpp",
"name": "Saransh",
"avatar_url": "https://avatars.githubusercontent.com/u/74055102?v=4",
"profile": "https://saransh-cpp.github.io/",
"contributions": [
"code"
]
},
{
"login": "Laurits7",
"name": "Laurits Tani",
"avatar_url": "https://avatars.githubusercontent.com/u/30724920?v=4",
"profile": "https://github.com/Laurits7",
"contributions": [
"doc"
]
},
{
"login": "dsavoiu",
"name": "Daniel Savoiu",
"avatar_url": "https://avatars.githubusercontent.com/u/17005255?v=4",
"profile": "https://github.com/dsavoiu",
"contributions": [
"code"
]
},
{
"login": "raybellwaves",
"name": "Ray Bell",
"avatar_url": "https://avatars.githubusercontent.com/u/17162724?v=4",
"profile": "https://sites.google.com/view/raybellwaves/home",
"contributions": [
"doc"
]
},
{
"login": "zonca",
"name": "Andrea Zonca",
"avatar_url": "https://avatars.githubusercontent.com/u/383090?v=4",
"profile": "https://zonca.dev",
"contributions": [
"code"
]
},
{
"login": "chrisburr",
"name": "Chris Burr",
"avatar_url": "https://avatars.githubusercontent.com/u/5220533?v=4",
"profile": "https://github.com/chrisburr",
"contributions": [
"infra"
]
},
{
"login": "zbilodea",
"name": "Zoë Bilodeau",
"avatar_url": "https://avatars.githubusercontent.com/u/70441641?v=4",
"profile": "https://github.com/zbilodea",
"contributions": [
"code"
]
},
{
"login": "raymondEhlers",
"name": "Raymond Ehlers",
"avatar_url": "https://avatars.githubusercontent.com/u/1571927?v=4",
"profile": "https://github.com/raymondEhlers",
"contributions": [
"maintenance"
]
},
{
"login": "mloning",
"name": "Markus Löning",
"avatar_url": "https://avatars.githubusercontent.com/u/21020482?v=4",
"profile": "https://www.mloning.com/",
"contributions": [
"doc"
]
},
{
"login": "kkothari2001",
"name": "Kush Kothari",
"avatar_url": "https://avatars.githubusercontent.com/u/53650538?v=4",
"profile": "https://github.com/kkothari2001",
"contributions": [
"code",
"test"
]
},
{
"login": "jrueb",
"name": "Jonas Rübenach",
"avatar_url": "https://avatars.githubusercontent.com/u/30041073?v=4",
"profile": "https://github.com/jrueb",
"contributions": [
"code"
]
},
{
"login": "Moelf",
"name": "Jerry Ling",
"avatar_url": "https://avatars.githubusercontent.com/u/5306213?v=4",
"profile": "http://blog.jling.dev",
"contributions": [
"doc",
"code"
]
},
{
"login": "lobis",
"name": "Luis Antonio Obis Aparicio",
"avatar_url": "https://avatars.githubusercontent.com/u/35803280?v=4",
"profile": "https://github.com/lobis",
"contributions": [
"code"
]
},
{
"login": "tcawlfield",
"name": "Topher Cawlfield",
"avatar_url": "https://avatars.githubusercontent.com/u/4094385?v=4",
"profile": "https://github.com/tcawlfield",
"contributions": [
"code"
]
},
{
"login": "maxgalli",
"name": "Massimiliano Galli",
"avatar_url": "https://avatars.githubusercontent.com/u/26309531?v=4",
"profile": "https://github.com/maxgalli",
"contributions": [
"code"
]
},
{
"login": "pfackeldey",
"name": "Peter Fackeldey",
"avatar_url": "https://avatars.githubusercontent.com/u/18463582?v=4",
"profile": "https://github.com/pfackeldey",
"contributions": [
"code"
]
},
{
"login": "ariostas",
"name": "Andres Rios Tascon",
"avatar_url": "https://avatars.githubusercontent.com/u/7596837?v=4",
"profile": "http://www.ariostas.com",
"contributions": [
"code"
]
},
{
"login": "maxymnaumchyk",
"name": "maxymnaumchyk",
"avatar_url": "https://avatars.githubusercontent.com/u/70752300?v=4",
"profile": "https://github.com/maxymnaumchyk",
"contributions": [
"code"
]
},
{
"login": "tacaswell",
"name": "Thomas A Caswell",
"avatar_url": "https://avatars.githubusercontent.com/u/199813?v=4",
"profile": "https://tacaswell.github.io",
"contributions": [
"maintenance"
]
},
{
"login": "basnijholt",
"name": "Bas Nijholt",
"avatar_url": "https://avatars.githubusercontent.com/u/6897215?v=4",
"profile": "http://www.nijho.lt",
"contributions": [
"maintenance"
]
},
{
"login": "nj-vs-vh",
"name": "Igor Vaiman",
"avatar_url": "https://avatars.githubusercontent.com/u/30616208?v=4",
"profile": "https://nj-vs-vh.name/",
"contributions": [
"code"
]
},
{
"login": "HavryliukAY",
"name": "Havryliuk Artem ",
"avatar_url": "https://avatars.githubusercontent.com/u/58536463?v=4",
"profile": "https://github.com/HavryliukAY",
"contributions": [
"code"
]
},
{
"login": "ikrommyd",
"name": "Iason Krommydas",
"avatar_url": "https://avatars.githubusercontent.com/u/82155404?v=4",
"profile": "https://github.com/ikrommyd",
"contributions": [
"code",
"test"
]
},
{
"login": "NJManganelli",
"name": "Nick",
"avatar_url": "https://avatars.githubusercontent.com/u/38217274?v=4",
"profile": "https://github.com/NJManganelli",
"contributions": [
"code"
]
},
{
"login": "APN-Pucky",
"name": "Alexander Puck Neuwirth",
"avatar_url": "https://avatars.githubusercontent.com/u/4533248?v=4",
"profile": "https://apn-pucky.github.io/",
"contributions": [
"code"
]
},
{
"login": "nileshpatra",
"name": "Nilesh Patra",
"avatar_url": "https://avatars.githubusercontent.com/u/37436956?v=4",
"profile": "https://github.com/nileshpatra",
"contributions": [
"code"
]
}
],
"contributorsPerLine": 7,
"projectName": "awkward",
"projectOwner": "scikit-hep",
"repoType": "github",
"repoHost": "https://github.com",
"skipCi": true,
"commitConvention": "angular",
"commitType": "docs"
}
awkward-2.8.9/.github/ 0000775 0000000 0000000 00000000000 15063547320 0014537 5 ustar 00root root 0000000 0000000 awkward-2.8.9/.github/ISSUE_TEMPLATE/ 0000775 0000000 0000000 00000000000 15063547320 0016722 5 ustar 00root root 0000000 0000000 awkward-2.8.9/.github/ISSUE_TEMPLATE/bug-report.yml 0000664 0000000 0000000 00000003532 15063547320 0021536 0 ustar 00root root 0000000 0000000 name: "Bug report"
description: "Something seems to be broken…"
labels: ["bug (unverified)"]
assignees: []
body:
- type: input
id: version
attributes:
label: "Version of Awkward Array"
description: |
```python
>>> import awkward as ak
>>> ak.__version__
```
(Your issue might have already been fixed; see [latest version](https://pypi.org/project/awkward/).)
placeholder: "XX.YY.ZZ"
validations:
required: true
- type: textarea
id: description
attributes:
label: "Description and code to reproduce"
description: |
- What did you attempt to do? Include code so that we can reproduce it (in [backticks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks)).
- What did you expect it to do?
- What did it do instead? (Include full log output in [backticks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks). Copy-pasted text is better than screenshots.)
If your code example requires data, be sure to include it in a way that is publicly accessible, such as **clicking on the rectangle below to highlight it** and then dragging the file in. (You can [pickle](https://docs.python.org/3/library/pickle.html) Awkward Arrays, but verify that the issue applies to pickled arrays as well as the original arrays.)
placeholder: "Tell us what's not working."
validations:
required: true
- type: markdown
attributes:
value: |
Thank you for submitting an issue; we know that it takes time and effort to do so!
Note that we'll be closing the issue as soon as a solution is proposed. This is not meant to be unfriendly; it's for our own bookkeeping. If you think the first answer/solution is unsatisfactory, please do continue the thread and we'll reopen it or otherwise address it.
awkward-2.8.9/.github/ISSUE_TEMPLATE/config.yml 0000664 0000000 0000000 00000000730 15063547320 0020712 0 ustar 00root root 0000000 0000000 blank_issues_enabled: true
contact_links:
- name: "Start a discussion (GitHub)"
about: "How do I…?"
url: https://github.com/scikit-hep/awkward-1.0/discussions
- name: "StackOverflow: [awkward-array] tag"
about: "How do I…?"
url: https://stackoverflow.com/questions/tagged/awkward-array
- name: "Gitter: Scikit-HEP/awkward-array room"
about: "Getting help in real-time…"
url: https://gitter.im/Scikit-HEP/awkward-array
awkward-2.8.9/.github/ISSUE_TEMPLATE/documentation.yml 0000664 0000000 0000000 00000002104 15063547320 0022313 0 ustar 00root root 0000000 0000000 name: "Documentation"
description: "Something needs to be explained…"
labels: ["docs"]
assignees: []
body:
- type: dropdown
id: where
attributes:
label: "Which documentation?"
description: |
In the [documentation website](https://awkward-array.org/), the [GitHub README.md](https://github.com/scikit-hep/awkward/blob/main/README.md), [CONTRIBUTING.md](https://github.com/scikit-hep/awkward/blob/main/CONTRIBUTING.md), Python docstrings, or C++ doxygen comments?
options:
- Documentation website
- GitHub README.md
- CONTRIBUTING.md
- Python docstrings
- C++ doxygen comments
- Other (please explain)?
validations:
required: true
- type: textarea
id: what
attributes:
label: "What needs to be documented?"
description: |
If you need to include a screenshot, try **clicking on the rectangle below to highlight it** and then dragging the file in or pasting from the clipboard.
placeholder: "Tell us what you need to know."
validations:
required: true
awkward-2.8.9/.github/ISSUE_TEMPLATE/feature-request.yml 0000664 0000000 0000000 00000002401 15063547320 0022563 0 ustar 00root root 0000000 0000000 name: "Feature request"
description: "Some functionality needs to be added…"
labels: ["feature"]
assignees: []
body:
- type: markdown
attributes:
value: |
What you're looking for might already be possible as a combination of existing functions, but ask anyway. If what you want can be performed by a combination of existing features, we'll convert this issue into a [Discussion Q&A](https://github.com/scikit-hep/awkward-1.0/discussions/categories/q-a).
The following links might help:
* [awkward-array.org](https://awkward-array.org/)
* [StackOverflow: [awkward-array] tag](https://stackoverflow.com/questions/tagged/awkward-array)
* [Gitter: Scikit-HEP/awkward-array room](https://gitter.im/Scikit-HEP/awkward-array)
- type: textarea
id: feature
attributes:
label: "Description of new feature"
placeholder: "Tell us what you need."
description: |
Please be specific, with code examples and expected output (in [backticks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks)) where it would help.
If you need to include a file, try **clicking on the rectangle below to highlight it** and then dragging the file in.
validations:
required: true
awkward-2.8.9/.github/ISSUE_TEMPLATE/performance-bug-report.yml 0000664 0000000 0000000 00000003671 15063547320 0024041 0 ustar 00root root 0000000 0000000 name: "Performance bug report"
description: "It works, but it could/should be faster…"
labels: ["performance"]
assignees: []
body:
- type: markdown
attributes:
value: |
The goal of these issues is to fix performance "mistakes," instances where a fix would make the majority of applications several times faster or more, not fine-tuning an application or trading performance in one case for another (unless the former is a very rare or unusual case).
To prove that something is a performance mistake, it needs to have a reproducible metric and a demonstration that shows how fast it could be in bare metal, such as equivalent C or Numba code. If the comparison is truly equivalent (i.e. a general-purpose function is not compared with a highly specialized one), we'll try to optimize the metric within a factor of 2 or so of the baseline.
Alternatively, if you've found a mistake in the code that would always be faster if fixed, we can fix it without tests. Some bugs are obvious.
- type: input
id: version
attributes:
label: "Version of Awkward Array"
description: |
```python
>>> import awkward as ak
>>> ak.__version__
```
(Your issue might have already been fixed; see [latest version](https://pypi.org/project/awkward/).)
placeholder: "XX.YY.ZZ"
validations:
required: true
- type: textarea
id: description
attributes:
label: "Description and code to reproduce"
description: |
If your code example requires data, be sure to include it in a way that is publicly accessible, such as **clicking on the rectangle below to highlight it** and then dragging the file in. (You can [pickle](https://docs.python.org/3/library/pickle.html) Awkward Arrays, but verify that the issue applies to pickled arrays as well as the original arrays.)
placeholder: "Tell us what should be faster."
validations:
required: true
awkward-2.8.9/.github/dependabot.yml 0000664 0000000 0000000 00000000340 15063547320 0017364 0 ustar 00root root 0000000 0000000 version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
groups:
actions:
patterns:
- "*"
awkward-2.8.9/.github/matchers/ 0000775 0000000 0000000 00000000000 15063547320 0016345 5 ustar 00root root 0000000 0000000 awkward-2.8.9/.github/matchers/pylint.json 0000664 0000000 0000000 00000001234 15063547320 0020557 0 ustar 00root root 0000000 0000000 {
"problemMatcher": [
{
"severity": "warning",
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+): ([A-DF-Z]\\d+): \\033\\[[\\d;]+m([^\\033]+).*$",
"file": 1,
"line": 2,
"column": 3,
"code": 4,
"message": 5
}
],
"owner": "pylint-warning"
},
{
"severity": "error",
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+): (E\\d+): \\033\\[[\\d;]+m([^\\033]+).*$",
"file": 1,
"line": 2,
"column": 3,
"code": 4,
"message": 5
}
],
"owner": "pylint-error"
}
]
}
awkward-2.8.9/.github/workflows/ 0000775 0000000 0000000 00000000000 15063547320 0016574 5 ustar 00root root 0000000 0000000 awkward-2.8.9/.github/workflows/build-wheels.yml 0000664 0000000 0000000 00000007301 15063547320 0021704 0 ustar 00root root 0000000 0000000 name: Build wheels
on:
# Run daily at 1:23 UTC
schedule:
- cron: '23 1 * * *'
# Run on demand with workflow dispatch
workflow_dispatch:
# Use from other workflows
workflow_call:
pull_request:
paths:
- .github/workflows/build-wheels.yml
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
determine-source-date-epoch:
name: "Determine SOURCE_DATE_EPOCH"
runs-on: ubuntu-latest
outputs:
source-date-epoch: ${{ steps.log.outputs.source-date-epoch }}
if: github.repository_owner == 'scikit-hep'
steps:
- uses: actions/checkout@v5
with:
submodules: true
fetch-depth: 0
- id: log
name: Compute SOURCE_DATE_EPOCH
run: |
# Find latest unix timestamp in awkward-cpp, and the kernel generation files
epoch=$( git log -1 --format=%at -- awkward-cpp kernel-specification.yml kernel-test-data.json )
echo "source-date-epoch=$epoch" >> $GITHUB_OUTPUT
make_sdist:
name: "Build awkward-cpp sdist"
runs-on: ubuntu-latest
needs: [determine-source-date-epoch]
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build awkward-cpp sdist
run: pipx run build --sdist awkward-cpp
- name: Check metadata
run: pipx run twine check awkward-cpp/dist/*
- uses: actions/upload-artifact@v4
with:
name: awkward-cpp-sdist
path: awkward-cpp/dist/*.tar.gz
build_wheels:
needs: [determine-source-date-epoch]
name: "Wheel awkward-cpp: ${{ matrix.arch }} on ${{ matrix.os }} with ${{ matrix.build }}"
runs-on: ${{ matrix.os }}
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
strategy:
matrix:
os: [ubuntu-latest, macos-13, macos-14]
arch: [auto64]
build: ["cp", "pp"]
include:
- os: windows-latest
arch: auto64
build: "cp"
- os: windows-latest
arch: auto32
build: "cp"
- os: ubuntu-24.04-arm
arch: auto64
build: "cp"
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Python 3.12
uses: actions/setup-python@v6
with:
python-version: '3.12'
- name: Setup uv
uses: astral-sh/setup-uv@v6
- name: Prepare build files
run: pipx run nox -s prepare
- uses: pypa/cibuildwheel@v3.1
env:
CIBW_BUILD: "${{ matrix.build }}*"
CIBW_ARCHS: ${{ matrix.arch }}
with:
package-dir: awkward-cpp
- name: Check metadata
run: pipx run twine check wheelhouse/*.whl
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: awkward-cpp-wheels-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.build }}
path: wheelhouse/*.whl
build_awkward_wheel:
name: "Build awkward sdist and wheel"
runs-on: ubuntu-latest
needs: [determine-source-date-epoch]
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Setup uv
uses: astral-sh/setup-uv@v6
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build distributions
run: pipx run build --installer uv
- name: Check metadata
run: pipx run twine check dist/*
- uses: actions/upload-artifact@v4
with:
name: awkward-wheel
path: dist/*
awkward-2.8.9/.github/workflows/coverage.yml 0000664 0000000 0000000 00000004176 15063547320 0021122 0 ustar 00root root 0000000 0000000 name: Codecov
on:
push:
branches:
- main
paths-ignore:
- README.md
- CONTRIBUTING.md
- CITATION.cff
- LICENSE
- .readthedocs.yml
- docs-img/**
- docs/**
- awkward-cpp/docs/**
- studies/**
workflow_dispatch:
concurrency:
group: 'coverage-${{ github.head_ref || github.run_id }}'
cancel-in-progress: true
env:
# Leverage reproducible builds by setting a constant SOURCE_DATE_EPOCH
# This will ensure that the hash of the awkward-cpp directory remains
# constant for unchanged files, meaning that it can be used for caching
SOURCE_DATE_EPOCH: "1668811211"
jobs:
coverage:
runs-on: ubuntu-latest
name: Run Codecov
env:
PIP_ONLY_BINARY: cmake
PYTHON_VERSION: "3.9"
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: 'Python ${{ env.PYTHON_VERSION }}'
uses: actions/setup-python@v6
with:
python-version: '${{ env.PYTHON_VERSION }}'
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures --tests
- name: Cache awkward-cpp wheel
id: cache-awkward-cpp-wheel
uses: actions/cache@v4
with:
path: ./awkward-cpp/dist
key: ${{ github.job }}-${{ env.PYTHON_VERSION }}-${{ hashFiles('awkward-cpp/**') }}
- name: Build awkward-cpp wheel
if: steps.cache-awkward-cpp-wheel.outputs.cache-hit != 'true'
run: |
python -m pip install build
python -m build -w ./awkward-cpp
ls ./awkward-cpp/dist
- name: Install awkward-cpp
run: python -m pip install -v ./awkward-cpp/dist/*.whl
- name: Build & install awkward
run: python -m pip install -v .
- name: Print versions
run: python -m pip list
- name: Install test requirements
run: python -m pip install -v -r requirements-test-full.txt
- name: Test
run: >-
python -m pytest -vv -rs tests --cov=awkward --cov-report=term
--cov-report=xml
- name: Upload Codecov results
uses: codecov/codecov-action@v5
awkward-2.8.9/.github/workflows/deploy-cpp.yml 0000664 0000000 0000000 00000001710 15063547320 0021372 0 ustar 00root root 0000000 0000000 name: Deploy C++
on:
workflow_dispatch:
inputs:
publish-pypi:
type: boolean
description: Publish to PyPI
jobs:
build-wheels:
uses: ./.github/workflows/build-wheels.yml
upload-awkward-cpp:
needs: [build-wheels]
runs-on: ubuntu-latest
if: inputs.publish-pypi
permissions:
id-token: write
attestations: write
contents: read
environment:
name: "pypi"
url: "https://pypi.org/project/awkward-cpp/"
steps:
- uses: actions/download-artifact@v5
with:
pattern: "awkward-cpp*"
path: dist
merge-multiple: true
- name: List distributions to be deployed
run: ls -l dist/
- name: Generate artifact attestation for sdist and wheel
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with:
subject-path: "dist/awkward*cpp-*"
- uses: pypa/gh-action-pypi-publish@v1.13.0
awkward-2.8.9/.github/workflows/deploy.yml 0000664 0000000 0000000 00000010345 15063547320 0020616 0 ustar 00root root 0000000 0000000 name: Deploy
on:
workflow_dispatch:
inputs:
publish-pypi:
type: boolean
description: Publish to PyPI
release:
types:
- published
jobs:
determine-source-date-epoch:
name: "Determine SOURCE_DATE_EPOCH"
runs-on: ubuntu-latest
outputs:
source-date-epoch: ${{ steps.log.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
fetch-depth: 0
- id: log
name: Compute SOURCE_DATE_EPOCH
run: |
# Find latest unix timestamp in awkward-cpp, and the kernel generation files
epoch=$( git log -1 --format=%at -- awkward-cpp kernel-specification.yml kernel-test-data.json )
echo "source-date-epoch=$epoch" >> $GITHUB_OUTPUT
check-requirements:
name: "Check awkward requirements"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Check awkward-cpp version matches requirement
run: pipx run nox -s check_cpp_constraint
check-cpp-on-pypi:
name: "Check awkward-cpp dependency on PyPI"
runs-on: ubuntu-latest
needs: [determine-source-date-epoch]
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build awkward-cpp sdist
run: pipx run build --sdist awkward-cpp
- name: Check sdist matches PyPI
run: pipx run nox -s check_cpp_sdist_released -- awkward-cpp/dist/awkward_cpp*.tar.gz
build:
name: "Build wheel & sdist"
runs-on: ubuntu-latest
needs: [determine-source-date-epoch]
permissions:
id-token: write
attestations: write
contents: read
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build distributions
run: pipx run build
- name: Check metadata
run: pipx run twine check dist/*
- name: Generate artifact attestation for sdist and wheel
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with:
subject-path: "dist/awkward-*"
- uses: actions/upload-artifact@v4
with:
name: distributions
path: dist/*
bundle-headers:
name: "Bundle header-only libraries"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/upload-artifact@v4
with:
name: header-only
path: header-only
upload:
needs: [build, check-requirements, check-cpp-on-pypi]
runs-on: ubuntu-latest
if: (github.event_name == 'release' && github.event.action == 'published') || inputs.publish-pypi
permissions:
id-token: write
environment:
name: "pypi"
url: "https://pypi.org/project/awkward/"
steps:
- uses: actions/download-artifact@v5
with:
name: distributions
path: dist
- name: List distributions to be deployed
run: ls -l dist/
- name: Verify sdist artifact attestation
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh attestation verify dist/awkward-*.tar.gz --repo ${{ github.repository }}
- name: Verify wheel artifact attestation
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh attestation verify dist/awkward-*.whl --repo ${{ github.repository }}
- uses: pypa/gh-action-pypi-publish@v1.13.0
publish-headers:
name: "Publish header-only libraries alongside release"
runs-on: ubuntu-latest
needs: [bundle-headers]
if: github.event_name == 'release' && github.event.action == 'published'
steps:
- uses: actions/download-artifact@v5
with:
name: header-only
path: header-only
- name: Create archive
run: |
# Don't include `header-only` parent directory
env -C header-only/ zip -r header-only.zip .
- uses: softprops/action-gh-release@v2
with:
files: header-only/header-only.zip
awkward-2.8.9/.github/workflows/docs-preview.yml 0000664 0000000 0000000 00000007243 15063547320 0021734 0 ustar 00root root 0000000 0000000 name: Docs Preview
on:
workflow_run:
workflows: [Docs]
types:
- completed
jobs:
branch-preview:
runs-on: ubuntu-24.04
name: Deploy Branch Preview
if: ${{ github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request' }}
permissions:
id-token: write
contents: read
pull-requests: write
env:
S3_BUCKET: "preview.awkward-array.org"
DEPLOY_URL: "http://preview.awkward-array.org.s3-website.us-east-1.amazonaws.com"
environment:
name: docs
url: "${{ env.DEPLOY_URL }}/PR${{ steps.pr_number.outputs.pr_number }}"
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
with:
aws-region: eu-west-2
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
- name: Download rendered docs
uses: actions/github-script@v8
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let docsArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "docs"
})[0];
let PRNumberArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr_number"
})[0];
let downloadDocs = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: docsArtifact.id,
archive_format: 'zip',
});
let downloadPRNumber = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: PRNumberArtifact.id,
archive_format: 'zip',
});
const fs = require('fs');
const path = require('path');
const temp = '${{ runner.temp }}/artifacts';
if (!fs.existsSync(temp)){
fs.mkdirSync(temp);
}
fs.writeFileSync(path.join(temp, 'docs.zip'), Buffer.from(downloadDocs.data));
fs.writeFileSync(path.join(temp, 'pr_number.zip'), Buffer.from(downloadPRNumber.data));
- name: Unzip artifacts
run: |
unzip "${{ runner.temp }}/artifacts/docs.zip" -d "${{ runner.temp }}/artifacts"
unzip "${{ runner.temp }}/artifacts/pr_number.zip" -d "${{ runner.temp }}/artifacts"
- name: Read PR number
id: pr_number
run: |
echo "pr_number=$(cat ${{ runner.temp }}/artifacts/pr_number.txt)" >> $GITHUB_OUTPUT
rm "${{ runner.temp }}/artifacts/pr_number.txt"
rm "${{ runner.temp }}/artifacts/docs.zip"
rm "${{ runner.temp }}/artifacts/pr_number.zip"
- name: Sync artifacts
run: |
aws s3 sync ${{ runner.temp }}/artifacts/ "s3://${S3_BUCKET}/PR${{ steps.pr_number.outputs.pr_number }}"
- name: Try to find previous bot comment
uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ steps.pr_number.outputs.pr_number }}
comment-author: 'github-actions[bot]'
body-includes: The documentation preview is ready to be viewed
- name: Create comment with preview link
if: steps.fc.outputs.comment-id == ''
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ steps.pr_number.outputs.pr_number }}
body: |
The documentation preview is ready to be viewed at <${{ env.DEPLOY_URL }}/PR${{ steps.pr_number.outputs.pr_number }}>
awkward-2.8.9/.github/workflows/docs-version.yml 0000664 0000000 0000000 00000002051 15063547320 0021730 0 ustar 00root root 0000000 0000000 name: Sync Docs Selector
on:
push:
branches:
- main
paths:
- docs/switcher.json
workflow_dispatch:
concurrency:
group: 'docs-version-${{ github.head_ref || github.run_id }}'
cancel-in-progress: true
jobs:
coverage:
runs-on: ubuntu-22.04
name: Push version switcher
permissions:
id-token: write
contents: read
env:
S3_BUCKET: "awkward-array.org"
CLOUDFRONT_ID: "EFM4QVENUIXHS"
environment:
name: docs
steps:
- uses: actions/checkout@v5
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
with:
aws-region: eu-west-2
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
# Pushes to main trigger latest
- name: Push version selector
run: |
aws s3 cp docs/switcher.json "s3://${S3_BUCKET}/doc/switcher.json"
aws cloudfront create-invalidation --distribution-id "${CLOUDFRONT_ID}" \
--paths "/doc/switcher.json"
awkward-2.8.9/.github/workflows/docs.yml 0000664 0000000 0000000 00000023577 15063547320 0020265 0 ustar 00root root 0000000 0000000 name: Docs
on:
push:
branches:
- main
pull_request:
release:
types: [released]
workflow_dispatch:
concurrency:
group: 'docs-${{ github.head_ref || github.run_id }}'
cancel-in-progress: true
env:
X86_64_PYTHON_VERSION: "3.11.0"
SOURCE_DATE_EPOCH: "1668811211"
jobs:
awkward-cpp-wasm:
name: Build C++ WASM
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures --tests
- uses: pypa/cibuildwheel@v3.1
with:
package-dir: awkward-cpp
env:
CIBW_PLATFORM: pyodide
CIBW_BUILD: "cp313*"
CIBW_PYODIDE_VERSION: "0.28.0"
CIBW_ENABLE: "pyodide-prerelease"
- name: Upload wheel
uses: actions/upload-artifact@v4
with:
name: awkward-cpp-wasm
path: awkward-cpp/dist/awkward*wasm32.whl
awkward-cpp-x86-64:
runs-on: ubuntu-24.04
name: Build C++ x86
defaults:
run:
# Ensure conda env is activated
shell: "bash -l {0}"
steps:
- uses: actions/checkout@v5
with:
submodules: true
# TODO: remove this once mamba-org/mamba#1726 is fixed
# and replace with `-r requirements.txt` in a
# non-template `environment.yml`
- name: Template environment.yml
run: pipx run --spec cogapp cog -o environment.yml environment.yml.cog
working-directory: docs
# Technically this give us an environment that is incompatible with
# the wheel built in the awkward-x86-64 job if the environments
# solve with different external library versions. By default,
# ROOT uses cxx-compiler too, so hopefully this won't be an issue
- name: Setup Python via Conda
uses: mamba-org/setup-micromamba@v2
with:
# Cache invalidates daily by default
cache-environment: true
cache-downloads: true
environment-file: docs/environment.yml
create-args: >-
python=${{ env.X86_64_PYTHON_VERSION }}
doxygen
python-build
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures
- name: Cache wheel
id: cache-awkward-cpp-x86-64-wheel
uses: actions/cache@v4
with:
path: ./awkward-cpp/dist
key: ${{ runner.os }}-${{ env.X86_64_PYTHON_VERSION }}-awkward-x86-64-wasm-${{ hashFiles('awkward-cpp/**') }}
- name: Build wheel
if: steps.cache-awkward-cpp-x86-64-wheel.outputs.cache-hit != 'true'
run: python -m build -w ./awkward-cpp
- name: Upload wheel
uses: actions/upload-artifact@v4
with:
name: awkward-cpp-x86-64
path: awkward-cpp/dist/awkward*.whl
awkward:
runs-on: ubuntu-24.04
name: Build Python
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures
- name: Build wheel
run: pipx run build -w
- name: Upload wheel
uses: actions/upload-artifact@v4
with:
name: awkward
path: dist/awkward*.whl
execute-cppyy:
needs: [awkward-cpp-x86-64, awkward]
runs-on: ubuntu-24.04
name: Execute cppyy notebook
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "${{ env.X86_64_PYTHON_VERSION }}"
- name: Extract pre-built manylinux cppyy wheels
uses: shrink/actions-docker-extract@v3
with:
image: "docker.io/agoose77/cppyy-wheels:cp311"
path: "/wheels/."
destination: "/tmp/wheels/"
- name: Download awkward wheel
uses: actions/download-artifact@v5
with:
name: awkward
path: /tmp/wheels/
- name: Download awkward-cpp x86_64 wheel
uses: actions/download-artifact@v5
with:
name: awkward-cpp-x86-64
path: /tmp/wheels/
- name: Install dependencies
working-directory: docs
run: python -m pip install /tmp/wheels/*.whl -r requirements.txt
- name: Execute cppyy notebook
working-directory: docs
run: jupyter nbconvert --to notebook --execute --inplace user-guide/how-to-use-in-cpp-cppyy.ipynb
- name: Save executed notebook
uses: actions/upload-artifact@v4
with:
name: how-to-use-in-cpp-cppyy
path: docs/user-guide/how-to-use-in-cpp-cppyy.ipynb
build-docs:
runs-on: ubuntu-24.04
needs: [awkward-cpp-x86-64, awkward, execute-cppyy]
name: Build Docs
defaults:
run:
# Ensure conda env is activated
shell: "bash -l {0}"
env:
DOCS_CANONICAL_VERSION: main
steps:
- uses: actions/checkout@v5
# TODO: remove this once mamba-org/mamba#1726 is fixed
# and replace with `-r requirements.txt` in a
# non-template `environment.yml`
- name: Template environment.yml
run: pipx run --spec cogapp cog -o environment.yml environment.yml.cog
working-directory: docs
# Technically this give us an environment that is incompatible with
# the wheel built in the awkward-x86-64 job if the environments
# solve with different external library versions. By default,
# ROOT uses cxx-compiler too, so hopefully this won't be an issue
- name: Setup Python via Conda
uses: mamba-org/setup-micromamba@v2
with:
# Cache invalidates daily by default
cache-environment: true
cache-downloads: true
environment-file: docs/environment.yml
create-args: >-
python=${{ env.X86_64_PYTHON_VERSION }}
doxygen
- name: Download awkward wheel
uses: actions/download-artifact@v5
with:
name: awkward
path: dist
- name: Copy awkward wheel to JupyterLite
run: |
mkdir -p docs/lite/pypi/
cp dist/awkward*.whl docs/lite/pypi/
- name: Download awkward-cpp x86_64 wheel
uses: actions/download-artifact@v5
with:
name: awkward-cpp-x86-64
path: dist
- name: Install awkward and awkward-cpp wheels
run: python -m pip install dist/awkward*.whl --force-reinstall --no-deps
- name: Generate build files
run: pipx run nox -s prepare -- --docs --headers
- name: Generate C++ documentation
run: doxygen
working-directory: awkward-cpp/docs
- name: Copy C++ documentation
run: cp -r awkward-cpp/docs/html/ docs/_static/doxygen
- name: Enable analytics & version selector
if: github.event_name == 'push' || github.event_name == 'release'
run: |
echo "DOCS_REPORT_ANALYTICS=1" >> $GITHUB_ENV
echo "DOCS_SHOW_VERSION=1" >> $GITHUB_ENV
- name: Set version to main
if: github.event_name == 'push'
run: |
echo "DOCS_VERSION=main" >> $GITHUB_ENV
- name: Download cppyy notebook
uses: actions/download-artifact@v5
with:
name: how-to-use-in-cpp-cppyy
path: docs/user-guide
- name: Generate Python documentation
run: sphinx-build -M html . _build/ -T
working-directory: docs
- name: Upload docs artefact
uses: actions/upload-artifact@v4
with:
name: docs
path: docs/_build/html
- name: Upload Jupyter Book cache
uses: actions/upload-artifact@v4
with:
name: doctrees
path: docs/_build/doctrees
- name: Upload Jupyter Book cache
uses: actions/upload-artifact@v4
with:
name: jupyter-cache
path: docs/_build/.jupyter_cache
- name: Save PR number
env:
PR_NUMBER: ${{ github.event.number }}
run: |
mkdir -p ./pr
echo $PR_NUMBER > ./pr/pr_number.txt
- uses: actions/upload-artifact@v4
with:
name: pr_number
path: pr/
deploy:
runs-on: ubuntu-24.04
needs: [ build-docs ]
# We can only deploy for PRs on host repo
if: github.event_name == 'push' || github.event_name == 'release'
name: Deploy
permissions:
id-token: write
contents: read
env:
S3_BUCKET: "awkward-array.org"
PRODUCTION_URL: "http://awkward-array.org"
CLOUDFRONT_ID: "EFM4QVENUIXHS"
environment:
name: docs
url: ${{ env.PRODUCTION_URL }}${{ steps.sync-main.outputs.path || steps.sync-stable.outputs.path }}
steps:
- uses: actions/checkout@v5
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
with:
aws-region: eu-west-2
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
- name: Download rendered docs
uses: actions/download-artifact@v5
with:
name: docs
path: built-docs
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.11"
# Pushes to main trigger latest
- name: Sync `main`
if: github.event_name == 'push'
id: sync-main
run: |
aws s3 sync built-docs/ "s3://${S3_BUCKET}/doc/main/"
aws cloudfront create-invalidation --distribution-id "${CLOUDFRONT_ID}" \
--paths "/doc/main*"
echo "path=/doc/main" >> $GITHUB_OUTPUT
# Releases trigger versions
- name: Sync `stable`
if: github.event_name == 'release'
id: sync-stable
run: |
# Take only leading version
version=$(echo "${GITHUB_REF_NAME}" | sed -n -E "s/v?([0-9]+\.[0-9]+)\.[0-9]+/\1/p")
aws s3 cp docs/switcher.json "s3://${S3_BUCKET}/doc/"
aws s3 sync built-docs/ "s3://${S3_BUCKET}/doc/$version/"
aws s3 sync built-docs/ "s3://${S3_BUCKET}/doc/stable/"
aws cloudfront create-invalidation --distribution-id "${CLOUDFRONT_ID}" \
--paths "/doc/$version*" "/doc/stable*" "/doc/switcher.json"
echo "path=/doc/stable" >> $GITHUB_OUTPUT
awkward-2.8.9/.github/workflows/header-only-test.yml 0000664 0000000 0000000 00000001765 15063547320 0022514 0 ustar 00root root 0000000 0000000 name: Header-only Tests
on:
pull_request:
workflow_dispatch:
concurrency:
group: header-only-test-${{ github.head_ref }}
cancel-in-progress: true
jobs:
test:
name: "Run Tests"
strategy:
matrix:
os: [ubuntu-latest, macos-13, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Run CMake
run: |
cmake -B build -S header-only -DCMAKE_RUNTIME_OUTPUT_DIRECTORY=bin -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON
cmake --build build/
- name: Run tests
run: |
import os
import pathlib
import subprocess
for path in pathlib.Path("build/tests/bin").glob("test_*"):
if path.is_file():
print(f"Running {path.name}", flush=True)
print("::group::Test output", flush=True)
subprocess.run([path], check=True)
print("::endgroup::", flush=True)
shell: python3 {0}
awkward-2.8.9/.github/workflows/lint.yml 0000664 0000000 0000000 00000000651 15063547320 0020267 0 ustar 00root root 0000000 0000000 name: Lint
on:
pull_request:
workflow_dispatch:
concurrency:
group: lint-${{ github.head_ref }}
cancel-in-progress: true
jobs:
pylint:
name: "Run PyLint"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Run PyLint
run: |
echo "::add-matcher::$GITHUB_WORKSPACE/.github/matchers/pylint.json"
pipx run nox -s pylint
awkward-2.8.9/.github/workflows/needs-cpp-release.yml 0000664 0000000 0000000 00000002462 15063547320 0022617 0 ustar 00root root 0000000 0000000 name: Needs C++ Release
on:
workflow_dispatch:
push:
branches:
- main
jobs:
determine-source-date-epoch:
name: "Determine SOURCE_DATE_EPOCH"
runs-on: ubuntu-latest
outputs:
source-date-epoch: ${{ steps.log.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
fetch-depth: 0
- id: log
name: Compute SOURCE_DATE_EPOCH
run: |
# Find latest unix timestamp in awkward-cpp, and the kernel generation files
epoch=$( git log -1 --format=%at -- awkward-cpp kernel-specification.yml kernel-test-data.json )
echo "source-date-epoch=$epoch" >> $GITHUB_OUTPUT
check-cpp-on-pypi:
name: "Check awkward-cpp dependency on PyPI"
runs-on: ubuntu-latest
needs: [determine-source-date-epoch]
env:
SOURCE_DATE_EPOCH: ${{ needs.determine-source-date-epoch.outputs.source-date-epoch }}
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build awkward-cpp sdist
run: pipx run build --sdist awkward-cpp
- name: Check sdist matches PyPI
run: pipx run nox -s check_cpp_sdist_released -- awkward-cpp/dist/awkward_cpp*.tar.gz
awkward-2.8.9/.github/workflows/packaging-test.yml 0000664 0000000 0000000 00000003621 15063547320 0022222 0 ustar 00root root 0000000 0000000 name: Packaging Tests
on:
pull_request:
workflow_dispatch:
concurrency:
group: packaging-test-${{ github.head_ref }}
cancel-in-progress: true
env:
SOURCE_DATE_EPOCH: "1668811211"
jobs:
build_awkward_sdist_wheel:
name: "Build awkward"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Prepare build files
run: pipx run nox -s prepare
- name: Build awkward SDist & wheel
run: pipx run build
- name: Check metadata
run: pipx run twine check dist/*
- name: Build awkward-cpp SDist
run: pipx run build --sdist awkward-cpp
- uses: actions/upload-artifact@v4
with:
name: awkward-sdist
path: dist/*.tar.gz
- uses: actions/upload-artifact@v4
with:
name: awkward-wheel
path: dist/*.whl
- uses: actions/upload-artifact@v4
with:
name: awkward-cpp-sdist
path: awkward-cpp/dist/*.tar.gz
build_cpp_wheels:
name: "Build awkward-cpp: ${{ matrix.os }}"
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest, macos-13, ubuntu-latest]
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: Setup uv
uses: astral-sh/setup-uv@v6
- name: Prepare build files
run: pipx run nox -s prepare
- uses: pypa/cibuildwheel@v3.1
env:
CIBW_ARCHS_MACOS: universal2
CIBW_BUILD: cp39-win_amd64 cp310-manylinux_x86_64 cp39-macosx_universal2
with:
package-dir: awkward-cpp
- uses: pypa/cibuildwheel@v3.1
if: matrix.os == 'ubuntu-latest'
env:
CIBW_BUILD: cp312-manylinux_x86_64
with:
package-dir: awkward-cpp
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: awkward-cpp-wheels-${{ matrix.os }}
path: wheelhouse/*.whl
awkward-2.8.9/.github/workflows/semantic-pr-title.yml 0000664 0000000 0000000 00000000635 15063547320 0022664 0 ustar 00root root 0000000 0000000 name: "Lint PR"
on:
pull_request:
types:
- opened
- edited
- synchronize
workflow_dispatch:
concurrency:
group: semantic-pr-title-${{ github.head_ref }}
cancel-in-progress: true
jobs:
main:
name: Validate PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v6.1.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
awkward-2.8.9/.github/workflows/test.yml 0000664 0000000 0000000 00000024765 15063547320 0020314 0 ustar 00root root 0000000 0000000 name: Tests
on:
pull_request:
paths-ignore:
- README.md
- CONTRIBUTING.md
- CITATION.cff
- LICENSE
- .readthedocs.yml
- docs-img/**
- docs/**
- awkward-cpp/docs/**
- studies/**
schedule:
- cron: 0 12 1 * *
workflow_dispatch:
concurrency:
group: 'test-${{ github.head_ref || github.run_id }}'
cancel-in-progress: true
env:
# Leverage reproducible builds by setting a constant SOURCE_DATE_EPOCH
# This will ensure that the hash of the awkward-cpp directory remains
# constant for unchanged files, meaning that it can be used for caching
SOURCE_DATE_EPOCH: "1668811211"
jobs:
run-tests:
name: Run Tests
strategy:
fail-fast: false
matrix:
runs-on:
- windows-latest
- ubuntu-latest
- macos-13
python-version:
- '3.14'
- '3.13'
- '3.12'
- '3.11'
- '3.10'
- '3.9'
python-architecture:
- x64
dependencies-kind:
- full
include:
- python-version: '3.9'
python-architecture: x86
runs-on: windows-latest
dependencies-kind: full
- python-version: '3.9'
python-architecture: x86
runs-on: windows-latest
dependencies-kind: numpy1
- python-version: '3.9'
python-architecture: x64
runs-on: ubuntu-latest
dependencies-kind: minimal
- python-version: 'pypy3.9'
python-architecture: x64
runs-on: ubuntu-latest
dependencies-kind: pypy
- python-version: '3.11'
python-architecture: x64
runs-on: ubuntu-latest
dependencies-kind: ml
- python-version: '3.13t'
python-architecture: x64
runs-on: ubuntu-latest
dependencies-kind: nogil
- python-version: '3.13t'
python-architecture: x64
runs-on: windows-latest
dependencies-kind: nogil
runs-on: ${{ matrix.runs-on }}
env:
PIP_ONLY_BINARY: numpy,pandas,pyarrow,numexpr,numexpr
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: 'Python ${{ matrix.python-version }}'
uses: actions/setup-python@v6
with:
python-version: '${{ matrix.python-version }}'
architecture: '${{ matrix.python-architecture }}'
allow-prereleases: true
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures --tests
- name: Cache awkward-cpp wheel
id: cache-awkward-cpp-wheel
uses: actions/cache@v4
with:
path: awkward-cpp/dist
key: ${{ github.job }}-${{ matrix.runs-on }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}-${{ hashFiles('awkward-cpp/**') }}
- name: Build awkward-cpp wheel
if: steps.cache-awkward-cpp-wheel.outputs.cache-hit != 'true'
run: |
python -m pip install build
python -m build -w awkward-cpp
- name: Find built wheel
uses: tj-actions/glob@v22
id: find-wheel
with:
files: |
awkward-cpp/dist/*.whl
- name: Add workaround for 3.13 + cramjam
if: matrix.python-version == '3.13'
run: echo 'PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1' >> $GITHUB_ENV
shell: bash
- name: Install awkward, awkward-cpp, and dependencies
run: >-
python -m pip install -v . ${{ steps.find-wheel.outputs.paths }} pytest-github-actions-annotate-failures
-r "requirements-test-${{ matrix.dependencies-kind }}.txt"
- name: Print versions
run: python -m pip list
- name: Check if kernel specification is sorted
# We don't need to run this all the time
if: (matrix.python-version == '3.12') && (matrix.runs-on == 'ubuntu-latest')
run: pipx run nox -s diagnostics -- --check-spec-sorted
- name: Test specification
run: python -m pytest -vv -rs awkward-cpp/tests-spec
- name: Test specification with explicitly defined values
run: python -m pytest -vv -rs awkward-cpp/tests-spec-explicit
- name: Test CPU kernels
run: python -m pytest -vv -rs awkward-cpp/tests-cpu-kernels
- name: Test CPU kernels with explicitly defined values
run: python -m pytest -vv -rs awkward-cpp/tests-cpu-kernels-explicit
- name: Test non-kernels (Python)
run: >-
python -m pytest -vv -rs tests --cov=awkward --cov-report=term
--cov-report=xml
if: startsWith(matrix.python-version, '3.')
- name: Test non-kernels (PyPy)
run: >-
python -m pytest -vv -rs tests
if: startsWith(matrix.python-version, 'pypy')
- name: Upload Codecov results
if: (matrix.python-version == '3.9') && (matrix.runs-on == 'ubuntu-latest')
uses: codecov/codecov-action@v5
run-gpu-tests:
name: Run GPU Tests
runs-on: self-hosted
env:
PIP_ONLY_BINARY: numpy,pandas,pyarrow,numexpr,numexpr
# Required for miniconda to activate conda
defaults:
run:
shell: bash -l {0}
steps:
- name: Clean the workspace and mamba
run: |
rm -rf * .[!.]* || echo "Nothing to clean"
rm -rf ~/micromamba* || echo "Nothing to clean"
- uses: actions/checkout@v5
with:
submodules: true
- name: Get micromamba
uses: mamba-org/setup-micromamba@v2
with:
environment-name: test-env
init-shell: bash
create-args: >-
python=3.13
- name: Generate build files
run: |
pip install pipx
pipx run nox -s prepare -- --headers --signatures --tests
- name: Cache awkward-cpp wheel
id: cache-awkward-cpp-wheel
uses: actions/cache@v4
with:
path: awkward-cpp/dist
key: ${{ github.job }}-${{ hashFiles('awkward-cpp/**') }}
- name: Build awkward-cpp wheel
if: steps.cache-awkward-cpp-wheel.outputs.cache-hit != 'true'
run: |
python -m pip install build
python -m build -w awkward-cpp
- name: Find built wheel
uses: tj-actions/glob@v22
id: find-wheel
with:
files: |
awkward-cpp/dist/*.whl
- name: Add workaround for 3.13 + cramjam
run: echo 'PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1' >> $GITHUB_ENV
shell: bash
- name: Install awkward, awkward-cpp, and dependencies
run: >-
python -m pip install -v . ${{ steps.find-wheel.outputs.paths }} pytest-github-actions-annotate-failures
-r "requirements-test-gpu.txt"
- name: Print versions
run: python -m pip list
- name: Test CUDA kernels
run: python -m pytest -vv -rs tests-cuda-kernels
- name: Test CUDA kernels with explicitly defined values
run: python -m pytest -vv -rs tests-cuda-kernels-explicit
- name: Test CUDA non-kernel (Python)
run: >-
python -m pytest -vv -rs tests-cuda
Linux-ROOT:
runs-on: ubuntu-latest
env:
PIP_ONLY_BINARY: numpy,pandas,pyarrow,numexpr,numexpr
timeout-minutes: 30
# Required for miniconda to activate conda
defaults:
run:
shell: "bash -l {0}"
steps:
- uses: "actions/checkout@v5"
with:
submodules: true
- name: Setup Python via Conda
uses: mamba-org/setup-micromamba@v2
with:
# Cache invalidates daily by default
cache-environment: true
environment-name: awkward
# Need Python 3.9 for the cached wheels
create-args: >-
python=3.9
numpy
root
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures --tests
- name: Cache awkward-cpp wheel
id: cache-awkward-cpp-wheel
uses: actions/cache@v4
with:
path: ./awkward-cpp/dist
key: ${{ github.job }}-${{ hashFiles('awkward-cpp/**') }}
- name: Build awkward-cpp wheel
if: steps.cache-awkward-cpp-wheel.outputs.cache-hit != 'true'
run: |
python3 -m pip install build
python3 -m build -w ./awkward-cpp
- name: Install awkward, awkward-cpp, and dependencies
run: >-
python -m pip install --only-binary "numpy,pandas,pyarrow,numexpr"
-v . ./awkward-cpp/dist/*.whl
pytest-github-actions-annotate-failures
-r requirements-test-full.txt
- name: Print versions
run: python -m pip list
- name: Test
run: python -m pytest -vv -rs tests
Linux-cppyy:
# TODO: remove this part of the workflow
# cppyy is not yet released. Let's load some pre-built wheels via docker (as a binary store)
runs-on: ubuntu-22.04
env:
PIP_ONLY_BINARY: numpy,pandas,pyarrow,numexpr,numexpr
steps:
- uses: actions/checkout@v5
with:
submodules: true
- name: 'Python ${{ matrix.python-version }}'
uses: actions/setup-python@v6
with:
python-version: '3.11'
- name: Generate build files
run: pipx run nox -s prepare -- --headers --signatures --tests
- name: Cache awkward-cpp wheel
id: cache-awkward-cpp-wheel
uses: actions/cache@v4
with:
path: ./awkward-cpp/dist
key: ${{ github.job }}-${{ hashFiles('awkward-cpp/**') }}
- name: Build awkward-cpp wheel
if: steps.cache-awkward-cpp-wheel.outputs.cache-hit != 'true'
run: |
python -m pip install build
python -m build -w ./awkward-cpp
- name: Extract pre-built manylinux cppyy wheels
uses: shrink/actions-docker-extract@v3
with:
image: "docker.io/agoose77/cppyy-wheels:cp311"
path: "/wheels/."
destination: "/tmp/wheelhouse"
- name: Install awkward, awkward-cpp, and dependencies
run: >-
python -m pip install -v --only-binary "numpy,pandas,pyarrow,numexpr"
./ ./awkward-cpp/dist/*.whl /tmp/wheelhouse/* pytest-github-actions-annotate-failures
- name: Print versions
run: python -m pip list
- name: Test
run: python -m pytest -vv -rs tests -k cppyy
pass-tests:
if: always()
needs: [ run-tests, Linux-ROOT, Linux-cppyy ]
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
awkward-2.8.9/.github/workflows/upload-nightly-wheels.yml 0000664 0000000 0000000 00000004541 15063547320 0023550 0 ustar 00root root 0000000 0000000 name: Upload nightly wheels to Anaconda Cloud
on:
# Run daily at 2:34 UTC to upload nightly wheels to Anaconda Cloud
schedule:
- cron: '34 2 * * *'
# Run on demand with workflow dispatch
workflow_dispatch:
permissions:
actions: read
jobs:
upload_nightly_wheels:
name: Upload nightly wheels to Anaconda Cloud
runs-on: ubuntu-latest
defaults:
run:
# The login shell is necessary for the setup-micromamba setup
# to work in subsequent jobs.
# https://github.com/mamba-org/setup-micromamba#about-login-shells
shell: bash -e -l {0}
if: github.repository_owner == 'scikit-hep'
steps:
# https://github.com/actions/download-artifact/issues/3#issuecomment-1017141067
- name: Download wheel artifacts from last build on 'main'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PROJECT_REPO="scikit-hep/awkward"
BRANCH="main"
WORKFLOW_NAME="build-wheels.yml"
ARTIFACT_PATTERN="awkward*wheel*" # awkward-wheel and awkward-cpp-wheels-*
gh run --repo "${PROJECT_REPO}" \
list --branch "${BRANCH}" \
--workflow "${WORKFLOW_NAME}" \
--json event,status,conclusion,databaseId > runs.json
RUN_ID=$(
jq --compact-output \
'[
.[] |
# Filter on "schedule" and "workflow_dispatch" events to main (nightly build) ...
select(.event == "schedule" or .event == "workflow_dispatch") |
# that have completed successfully ...
select(.status == "completed" and .conclusion == "success")
] |
# and get ID of latest build of wheels.
sort_by(.databaseId) | reverse | .[0].databaseId' runs.json
)
gh run --repo "${PROJECT_REPO}" view "${RUN_ID}"
gh run --repo "${PROJECT_REPO}" \
download "${RUN_ID}" --pattern "${ARTIFACT_PATTERN}"
mkdir dist
mv ${ARTIFACT_PATTERN}/*.whl dist/
ls -l dist/
- name: Upload wheels to Anaconda Cloud as nightlies
uses: scientific-python/upload-nightly-action@b36e8c0c10dbcfd2e05bf95f17ef8c14fd708dbf # 0.6.2
with:
artifacts_path: dist
anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }}
awkward-2.8.9/.gitignore 0000664 0000000 0000000 00000004561 15063547320 0015175 0 ustar 00root root 0000000 0000000 studies/**/sample-*
studies/named_axis.*
docs/demos/countries.geojson
docs/demos/test-program
docs/demos/test-program.cpp
.clangd/
# Generated files
src/awkward/_connect/cuda/_kernel_signatures.py
src/awkward/_connect/header-only
src/awkward/_version.py
# Kernel tests
awkward-cpp/tests-cpu-kernels
awkward-cpp/tests-cpu-kernels-explicit
awkward-cpp/tests-spec
awkward-cpp/tests-spec-explicit
tests-cuda-kernels
tests-cuda-kernels-explicit
# to use all-contributors-cli without adding it to the project
node_modules
package.json
yarn.lock
array*.parquet
############################################################# Sphinx & Doxygen
docs/_build
docs/reference/generated
# Jupyter Book
docs/.jupyter_cache
docs/jupyter_execute
docs/environment.yml
# Pyodide / JupyterLite
docs/lite/
docs/_contents/
**/.pyodide-xbuildenv
docs/_static/doxygen
############################################################# IDEs
.idea
cmake-build-debug
GTAGS
GPATH
GRTAGS
.vscode
pyrightconfig.json
.ropeproject
# ...
############################################################# LaTeX
*.aux
*.log
_minted-*
############################################################# Python
**/.mypy_cache
# Byte-compiled / optimized / DLL files
__pycache__
*.py[cod]
*$py.class
# Distribution / packaging
_skbuild/
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
cuda-kernels/include
cuda-kernels/src
cuda-kernels/VERSION_INFO
# Virtual Environment folder is not needed
venv
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.nox/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Jupyter Notebook
.ipynb_checkpoints
############################################################# C and C++
# ctest
Testing/
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Python environments
/.env/*
# Pip stuff
/pip-wheel-metadata/*
/_skbuild/*
# MacOS
.DS_Store
awkward-2.8.9/.gitmodules 0000664 0000000 0000000 00000000146 15063547320 0015355 0 ustar 00root root 0000000 0000000 [submodule "rapidjson"]
path = awkward-cpp/rapidjson
url = https://github.com/Tencent/rapidjson.git
awkward-2.8.9/.pre-commit-config.yaml 0000664 0000000 0000000 00000003737 15063547320 0017472 0 ustar 00root root 0000000 0000000 ci:
autoupdate_commit_msg: "chore: update pre-commit hooks"
autofix_commit_msg: "style: pre-commit fixes"
autoupdate_schedule: monthly
exclude: ^(docs|studies|tests/samples|src/awkward/_typeparser/generated_parser.py)
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
- id: check-symlinks
- id: check-yaml
- id: end-of-file-fixer
exclude_types: [svg]
- id: mixed-line-ending
- id: requirements-txt-fixer
- id: trailing-whitespace
- id: name-tests-test
args: ["--pytest-test-first"]
- repo: https://github.com/cheshirekow/cmake-format-precommit
rev: v0.6.13
hooks:
- id: cmake-format
additional_dependencies: [pyyaml]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.11
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
- id: ruff-format
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
args: ["-L", "ue,subjet,parms,fo,numer,thre,nin,nout"]
- repo: local
hooks:
- id: disallow-caps
name: disallow improper capitalization
language: pygrep
entry: PyBind|Cmake|CCache|Github|PyTest
exclude: .pre-commit-config.yaml
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "v0.11.0.1"
hooks:
- id: shellcheck
- repo: local
hooks:
- id: require-test-name-identifier
name: require identifiers for test names
language: python
entry: python dev/validate-test-names.py
types: [file, python]
files: ^tests/
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.3
hooks:
- id: check-github-workflows
args: ["--verbose"]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.17.1
hooks:
- id: mypy
files: src
additional_dependencies:
- numpy>=1.24
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.24.1
hooks:
- id: validate-pyproject
awkward-2.8.9/.readthedocs.yml 0000664 0000000 0000000 00000000374 15063547320 0016271 0 ustar 00root root 0000000 0000000 version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.10"
commands:
- mkdir --parents _readthedocs/html/
- python3 dev/generate-redirects.py docs/redirects.json _readthedocs/html/
- cp dev/robots.txt _readthedocs/html/robots.txt
awkward-2.8.9/CITATION.cff 0000664 0000000 0000000 00000004232 15063547320 0015072 0 ustar 00root root 0000000 0000000 cff-version: 1.2.0
title: "Awkward Array"
message: "If you use this software, please cite it as below."
doi: "10.5281/zenodo.4341376"
date-released: "2018-10-12"
authors:
- family-names: "Pivarski"
given-names: "Jim"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0002-6649-343X"
email: "jpivarski@gmail.com"
- family-names: "Osborne"
given-names: "Ianna"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0002-6955-1033"
email: "iosborne@princeton.edu"
- family-names: "Ifrim"
given-names: "Ioana"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0002-6932-1385"
email: "ii3193@princeton.edu"
- family-names: "Schreiner"
given-names: "Henry"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0002-7833-783X"
email: "henryfs@princeton.edu"
- family-names: "Hollands"
given-names: "Angus"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0003-0788-3814"
email: "goosey15@gmail.com"
- family-names: "Biswas"
given-names: "Anish"
affiliation: "Manipal Institute Of Technology"
orcid: "https://orcid.org/0000-0001-6149-9739"
email: "anishbiswas271@gmail.com"
- family-names: "Das"
given-names: "Pratyush"
affiliation: "Purdue University"
orcid: "https://orcid.org/0000-0001-8140-0097"
email: "reikdas@gmail.com"
- family-names: "Roy Choudhury"
given-names: "Santam"
affiliation: "National Institute of Technology, Durgapur"
orcid: "https://orcid.org/0000-0003-0153-9748"
email: "santamdev404@gmail.com"
- family-names: "Smith"
given-names: "Nicholas"
affiliation: "Fermilab"
orcid: "https://orcid.org/0000-0002-0324-3054"
email: "nick.smith@cern.ch"
- family-names: "Goyal"
given-names: "Manasvi"
affiliation: "Harvard University"
orcid: "https://orcid.org/0000-0001-6321-7491"
email: "mg.manasvi@gmail.com"
- family-names: "Fackeldey"
given-names: "Peter"
affiliation: "Princeton University"
orcid: "https://orcid.org/0000-0003-4932-7162"
email: "pf4572@princeton.edu"
- family-names: "Krommydas"
given-names: "Iason"
affiliation: "Rice University"
orcid: "https://orcid.org/0000-0001-7849-8863"
email: "ik23@rice.edu"
awkward-2.8.9/CONTRIBUTING.md 0000664 0000000 0000000 00000035631 15063547320 0015440 0 ustar 00root root 0000000 0000000 # Contributing to Awkward Array
[](https://github.com/scikit-hep/awkward/actions/workflows/needs-cpp-release.yml)
Thank you for your interest in contributing! We're eager to see your ideas and look forward to working with you.
This document describes the technical procedures we follow in this project. It should also be stressed that as members of the Scikit-HEP community, we are all obliged to maintaining a welcoming, harassment-free environment. See the [Code of Conduct](https://scikit-hep.org/code-of-conduct) for details.
### Where to start
The front page for the Awkward Array project is its [GitHub README](https://github.com/scikit-hep/awkward#readme). This leads directly to tutorials and reference documentation that you may have already seen. It also includes instructions for [compiling for development](https://github.com/scikit-hep/awkward#installation-for-developers).
### Reporting issues
The first thing you should do if you want to fix something is to [submit an issue through GitHub](https://github.com/scikit-hep/awkward/issues). That way, we can all see it and maybe one of us or a member of the community knows of a solution that could save you the time spent fixing it. If you "assign yourself" to the issue (top of right side-bar), you can signal your intent to fix it in the issue report.
### Contributing a pull request
Feel free to [open pull requests in GitHub](https://github.com/scikit-hep/awkward/pulls) from your [forked repo](https://docs.github.com/en/get-started/quickstart/fork-a-repo) when you start working on the problem. We recommend opening the pull request early so that we can see your progress and communicate about it. (Note that you can `git commit --allow-empty` to make an empty commit and start a pull request before you even have new code.)
Please [make the pull request a draft](https://github.blog/2019-02-14-introducing-draft-pull-requests/) to indicate that it is in an incomplete state and shouldn't be merged until you click "ready for review."
### Getting your pull request reviewed
Currently, we have two regular reviewers of pull requests:
* Ianna Osborne ([ianna](https://github.com/ianna))
* Peter Fackeldey ([pfackeldey](https://github.com/pfackeldey))
You can request a review from one of us or just comment in GitHub that you want a review and we'll see it. Only one review is required to be allowed to merge a pull request. We'll work with you to get it into shape.
If you're waiting for a response and haven't heard in a few days, it's possible that we forgot/got distracted/thought someone else was reviewing it/thought we were waiting on you, rather than you waiting on us—just write another comment to remind us.
### Becoming a regular committer
If you want to contribute frequently, we'll grant you write access to the `scikit-hep/awkward` repo itself. This is more convenient than pull requests from forked repos.
### Git practices
Unless you ask us not to, we might commit directly to your pull request as a way of communicating what needs to be changed. That said, most of the commits on a pull request are from a single author: corrections and suggestions are exceptions.
Therefore, we prefer git branches to be named with your GitHub userid, such as `ianna/write-contributing-md`.
The titles of pull requests (and therefore the merge commit messages) should follow [these conventions](https://www.conventionalcommits.org/en/v1.0.0/#summary). Mostly, this means prefixing the title with one of these words and a colon:
* feat: new feature
* fix: bug-fix
* perf: code change that improves performance
* refactor: code change that neither fixes a bug nor adds a feature
* style: changes that do not affect the meaning of the code
* test: adding missing tests or correcting existing tests
* build: changes that affect the build system or external dependencies
* docs: documentation only changes
* ci: changes to our CI configuration files and scripts
* chore: other changes that don't modify src or test files
* revert: reverts a previous commit
Almost all pull requests are merged with the "squash and merge" feature, so details about commit history within a pull request are hidden from the `main` branch's history. Feel free, therefore, to commit with any frequency you're comfortable with.
It is unnecessary to manually edit (rebase) commit history within a pull request.
### Building and testing locally
The [installation for developers](README.md#installation-for-developers) procedure is described in brief on the front page, and in more detail here.
Awkward Array is shipped as two packages: `awkward` and `awkward-cpp`. The `awkward-cpp` package contains the compiled C++ components required for performance, and `awkward` is only Python code. If you do not need to modify any C++ (the usual case), then `awkward-cpp` can simply be installed using `pip` or `conda`.
Subsequent steps require the generation of code and datafiles (kernel specification, header-only includes). This can be done with the `prepare` [nox](https://nox.thea.codes/) session:
```bash
nox -s prepare
```
The `prepare` session accepts flags to specify exact generation targets, e.g.
```bash
nox -s prepare -- --tests --docs
```
This can reduce the time taken to perform the preparation step in the event that only the package-building step is needed.
`nox` also lets us reuse the virtualenvs that it creates for each session with the `-R` flag, eliminating the dependency reinstall time:
```bash
nox -R -s prepare
```
#### Installing the `awkward-cpp` package
The C++ components can be installed by building the `awkward-cpp` package:
```bash
python -m pip install ./awkward-cpp
```
If you are working on the C++ components of Awkward Array, it might be more convenient to skip the build isolation step, which involves creating an isolated build environment. First, you must install the build requirements:
```bash
python -m pip install "scikit-build-core[pyproject,color]" pybind11 ninja cmake
```
Then the installation can be performed without build isolation:
```bash
python -m pip install --no-build-isolation --check-build-dependencies ./awkward-cpp
```
#### Installing the `awkward` package
With `awkward-cpp` installed, an editable installation of the pure-python `awkward` package can be performed with
```bash
python -m pip install -e .
```
#### Testing the installed packages
Finally, let's run the integration test suite to ensure that everything's working as expected:
```bash
python -m pytest -n auto tests
```
For more fine-grained testing, we also have tests of the low-level kernels, which can be invoked with
```bash
python -m pytest -n auto awkward-cpp/tests-spec
python -m pytest -n auto awkward-cpp/tests-cpu-kernels
```
This assumes that the `nox -s prepare` session ran the `--tests` target.
Furthermore, if you have an Nvidia GPU and CuPy installed, you can run the CUDA tests with
```bash
python -m pytest tests-cuda-kernels
python -m pytest tests-cuda
```
#### Unit tests for the kernels
You can also run additional unit tests that have more test coverage for all the low-level kernels for even more detailed fine-grained testing.
For Python Kernels:
```bash
python -m pytest -n auto awkward-cpp/tests-spec-explicit
```
For CPU Kernels:
```bash
python -m pytest -n auto awkward-cpp/tests-cpu-kernels-explicit
```
For CUDA Kernels
```bash
python -m pytest tests-cuda-kernels-explicit
```
### Building wheels
Sometimes it's convenient to build a wheel for the `awkward-cpp` package, so that subsequent re-installs do not require the package to be rebuilt. The `build` package can be used to do this, though care must be taken to specify the *current* Python interpreter in [pipx](https://pypa.github.io/pipx/):
```bash
pipx run --python=$(which python) build --wheel awkward-cpp
```
The built wheel will then be available in `awkward-cpp/dist`.
### Automatic formatting and linting
The Awkward Array project uses [pre-commit](https://pre-commit.com) to handle formatters and linters. This automatically checks (and may push corrections to) your pull request's git branch.
To respond more quickly to pre-commit's feedback, it can help to install it and run it locally. Once it is installed, run
```bash
pre-commit run -a
```
to test all of your files. If you leave off the `-a`, it will run only on currently stashed changes.
### Automated tests
As stated above, we use [pytest](https://docs.pytest.org/) to verify the correctness of the code, and GitHub will reject a pull request if either pre-commit or pytest fails (red "X"). All tests must pass for a pull request to be accepted.
Note that if a pull request doesn't modify code, only the documentation tests will run. That's okay: documentation-only pull requests only need the documentation tests to pass.
### Testing practices
Unless you're refactoring code, such that your changes are fully tested by the existing test suite, new code should be accompanied by new tests. Our testing suite is organized by GitHub issue or pull request number: that is, test file names are
```
tests/test_XXXX-yyyy.py
```
where `XXXX` is either the number of the issue your pull request fixes or the number of the pull request and `yyyy` is descriptive text, often the same as the git branch. This makes it easier to run your test in isolation:
```bash
python -m pytest tests/test_XXXX-yyyy.py
```
and it makes it easier to figure out why a particular test was added. The easiest way to make a new testing file is to copy an existing one and replace its `test_zzzz` functions with your own. The previous tests should also give you a sense of the way we test things and the kinds of things that are constrained in tests.
### Building documentation locally
Documentation is automatically built by each pull request. You usually won't need to build the documentation locally, but if you do, this section describes how.
We use [Sphinx](https://pypi.org/project/Sphinx/) to generate documentation. You may need to install some additional packages:
* [Doxygen](https://www.doxygen.nl/download.html)
* [pycparser](https://pypi.org/project/pycparser/)
* [black](https://pypi.org/project/black/)
* [sphinx](https://pypi.org/project/sphinx/)
* [sphinx-rtd-theme](https://pypi.org/project/sphinx-rtd-theme/)
To build documentation locally, first prepare the generated data files with
```bash
nox -s prepare
```
Only the `--headers` and `--docs` flags are actually required at the time of writing. These can be passed with:
```bash
nox -s prepare -- --docs --headers
```
Then, use `nox` to run the various documentation build steps
```bash
nox -s docs
```
This command executes multiple custom Python scripts (some require a working internet connection), in addition to using Sphinx and Doxygen to generate the required browser viewable documentation.
To view the built documentation, open
```bash
docs/_build/html/index.html
```
from the root directory of the project in your preferred web browser, e.g.
```bash
python -m http.server 8080 --directory docs/_build/html/
```
Before re-building documentation, you might want to delete the files that were generated to create viewable documentation. A simple command to remove all of them is
```bash
rm -rf docs/reference/generated docs/_build docs/_static/doxygen
```
There is also a cache in the `docs/_build/.jupyter_cache` directory for Jupyter Book, which can be removed.
### The main branch
The Awkward Array `main` branch must be kept in an unbroken state. There are two reasons for this: so that developers can work independently on known-to-be-working states and so that users can test the latest changes (usually to see if the bug they've discovered is fixed by a potential correction).
The `main` branch is also never far from the latest released version. We usually deploy patch releases (`z` in a version number like `x.y.z`) within days of a bug-fix.
Committing directly to `main` is not allowed except for
* updating the `pyproject.toml` file to increase the version number, which should be independent of pull requests
* updating documentation or non-code files
* unprecedented emergencies
and only by the the [reviewing team](CONTRIBUTING.md#getting-your-pull-request-reviewed).
### The main-v1 branch
The `main-v1` branch was split from `main` just before Awkward 1.x code was removed, so it exists to make 1.10.x bug-fix releases. These commits must be drawn from `main-v1`, not `main`, and pull requests must target `main-v1` (not the GitHub default). A single commit cannot be applied to both `main` and `main-v1` because they have diverged too much. If a bug-fix needs to be applied to both (unlikely), it will have to be reimplemented on both.
### Releases
Currently, only one person can deploy releases:
* Ianna Osborne ([ianna](https://github.com/ianna))
There are two kinds of releases: (1) `awkward-cpp` updates, which only occur when the C++ is updated (rare) and involves compilation on many platforms (takes hours), and (2) `awkward` updates, which can happen with any bug-fix. The [releases listed in GitHub](https://github.com/scikit-hep/awkward/releases) are `awkward` releases, not `awkward-cpp`.
If you need your merged pull request to be deployed in a release, just ask!
#### `awkward-cpp` releases
To make an `awkward-cpp` release:
1. A commit to `main` should increase the version number in `awkward-cpp/pyproject.toml` and the corresponding dependency in `pyproject.toml`. This ensures that `awkward-cpp` and `awkward` remain in-sync.
2. The [Deploy C++](https://github.com/scikit-hep/awkward/actions/workflows/deploy-cpp.yml) GitHub Actions workflow should be manually triggered.
3. A `git` tag `awkward-cpp-{version}` should be created for the new version epoch.
#### `awkward` releases
To make an `awkward` release:
1. A commit to `main` should increase the version number in `pyproject.toml`
2. A new GitHub release must be published.
3. A `docs/switcher.json` entry must be added for new minor/major versions.
Pushes that modify `docs/switcher.json` on `main` will automatically be synchronised with AWS.
#### Nightly wheels
Nightly wheels of `awkward-cpp` and `awkward` are built and published to the [Scientific Python Nightly Wheels Anaconda Cloud organization](https://anaconda.org/scientific-python-nightly-wheels).
As the `awkward-cpp` and `awkward` nightly wheels do not include version control system information, they will have the same version numbers as the last released versions on the public PyPI. To avoid resolution conflicts when installing the nightly wheels, it is recommended to first install `awkward-cpp` and `awkward` from PyPI to get all of their dependencies, then uninstall `awkward-cpp` and `awkward` and install the nightly wheels from the Scientific Python nightly index.
```
python -m pip install --upgrade awkward
python -m pip uninstall --yes awkward awkward-cpp
python -m pip install --upgrade --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple awkward
```
awkward-2.8.9/LICENSE 0000664 0000000 0000000 00000002760 15063547320 0014211 0 ustar 00root root 0000000 0000000 BSD 3-Clause License
Copyright (c) 2019, Jim Pivarski
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
awkward-2.8.9/README.md 0000664 0000000 0000000 00000074571 15063547320 0014474 0 ustar 00root root 0000000 0000000

[](https://pypi.org/project/awkward)
[](https://github.com/conda-forge/awkward-feedstock)
[](https://www.python.org)
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://github.com/scikit-hep/awkward/actions/workflows/test.yml)
[](https://scikit-hep.org/)
[](https://doi.org/10.5281/zenodo.4341376)
[](https://awkward-array.org/)
[](https://gitter.im/Scikit-HEP/awkward-array)
[](https://nsf.gov/awardsearch/showAward?AWD_ID=1836650)
[](https://nsf.gov/awardsearch/showAward?AWD_ID=2103945)
[](https://nsf.gov/awardsearch/showAward?AWD_ID=2121686)
[](https://nsf.gov/awardsearch/showAward?AWD_ID=2323298)
Awkward Array is a library for **nested, variable-sized data**, including arbitrary-length lists, records, mixed types, and missing data, using **NumPy-like idioms**.
Arrays are **dynamically typed**, but operations on them are **compiled and fast**. Their behavior coincides with NumPy when array dimensions are regular and generalizes when they're not.
# Motivating example
Given an array of lists of objects with `x`, `y` fields (with nested lists in the `y` field),
```python
import awkward as ak
array = ak.Array([
[{"x": 1.1, "y": [1]}, {"x": 2.2, "y": [1, 2]}, {"x": 3.3, "y": [1, 2, 3]}],
[],
[{"x": 4.4, "y": [1, 2, 3, 4]}, {"x": 5.5, "y": [1, 2, 3, 4, 5]}]
])
```
the following slices out the `y` values, drops the first element from each inner list, and runs NumPy's `np.square` function on everything that is left:
```python
output = np.square(array["y", ..., 1:])
```
The result is
```python
[
[[], [4], [4, 9]],
[],
[[4, 9, 16], [4, 9, 16, 25]]
]
```
The equivalent using only Python is
```python
output = []
for sublist in array:
tmp1 = []
for record in sublist:
tmp2 = []
for number in record["y"][1:]:
tmp2.append(np.square(number))
tmp1.append(tmp2)
output.append(tmp1)
```
The expression using Awkward Arrays is more concise, using idioms familiar from NumPy, and it also has NumPy-like performance. For a similar problem 10 million times larger than the one above (single-threaded on a 2.2 GHz processor),
* the Awkward Array one-liner takes **1.5 seconds** to run and uses **2.1 GB** of memory,
* the equivalent using Python lists and dicts takes **140 seconds** to run and uses **22 GB** of memory.
Awkward Array is even faster when used in [Numba](https://numba.pydata.org/)'s JIT-compiled functions.
See the [Getting started](https://awkward-array.org/doc/main/getting-started/index.html) documentation on [awkward-array.org](https://awkward-array.org) for an introduction, including a [no-install demo](https://awkward-array.org/doc/main/_static/try-it.html) you can try in your web browser.
# Getting help
* View the documentation on [awkward-array.org](https://awkward-array.org/).
* Report bugs, request features, and ask for additional documentation on [GitHub Issues](https://github.com/scikit-hep/awkward/issues).
* If you have a "How do I...?" question, start a [GitHub Discussion](https://github.com/scikit-hep/awkward/discussions) with category "Q&A".
* Alternatively, ask about it on [StackOverflow with the [awkward-array] tag](https://stackoverflow.com/questions/tagged/awkward-array). Be sure to include tags for any other libraries that you use, such as Pandas or PyTorch.
* To ask questions in real time, try the Gitter [Scikit-HEP/awkward-array](https://gitter.im/Scikit-HEP/awkward-array) chat room.
# Installation
Awkward Array can be installed from [PyPI](https://pypi.org/project/awkward) using pip:
```bash
pip install awkward
```
The `awkward` package is pure Python, and it will download the `awkward-cpp` compiled components as a dependency. If there is no `awkward-cpp` binary package (wheel) for your platform and Python version, pip will attempt to compile it from source (which has additional dependencies, such as a C++ compiler).
Awkward Array is also available on [conda-forge](https://conda-forge.org/docs/user/introduction.html#how-can-i-install-packages-from-conda-forge):
```bash
conda install -c conda-forge awkward
```
Because of the two packages (`awkward-cpp` may be updated in GitHub but not on PyPI), pip install through git (`pip install git+https://...`) will not work. Instead, use the [Installation for developers](#installation-for-developers) section below.
# Installation for developers
Clone this repository _recursively_ to get the header-only C++ dependencies, then generate sources with [nox](https://nox.thea.codes/), compile and install `awkward-cpp`, and finally install `awkward` as an editable installation:
```bash
git clone --recursive https://github.com/scikit-hep/awkward.git
cd awkward
nox -s prepare
python -m pip install -v ./awkward-cpp
python -m pip install -e .
```
Tests can be run in parallel with [pytest](https://docs.pytest.org/):
```bash
python -m pytest -n auto tests
```
For more details, see [CONTRIBUTING.md](https://github.com/scikit-hep/awkward/blob/main/CONTRIBUTING.md), or one of the links below.
* [Continuous integration](https://github.com/scikit-hep/awkward/actions/workflows/test.yml) and [continuous deployment](https://github.com/scikit-hep/awkward/actions/workflows/wheels.yml) are hosted by [GitHub Actions](https://github.com/features/actions/).
* [Code of conduct](https://scikit-hep.org/code-of-conduct) for how we work together.
* The [LICENSE](LICENSE) is BSD-3.
# Documentation, Release notes, Roadmap, Citations
The documentation is on [awkward-array.org](https://awkward-array.org), including
* [Getting started](https://awkward-array.org/doc/main/getting-started/index.html)
* [User guide](https://awkward-array.org/doc/main/user-guide/index.html)
* [API reference](https://awkward-array.org/doc/main/reference/index.html)
* [Tutorials (with videos)](https://awkward-array.org/doc/main/getting-started/community-tutorials.html)
* [Papers and talks](https://awkward-array.org/doc/main/getting-started/papers-and-talks.html) about Awkward Array
The Release notes for each version are in the [GitHub Releases tab](https://github.com/scikit-hep/awkward/releases).
The Roadmap, Plans, and Deprecation Schedule are in the [GitHub Wiki](https://github.com/scikit-hep/awkward/wiki).
To cite Awkward Array in a paper, see the "Cite this repository" drop-down menu on the top-right of the [GitHub front page](https://github.com/scikit-hep/awkward). The BibTeX is
```bibtex
@software{Pivarski_Awkward_Array_2018,
author = {Pivarski, Jim and Osborne, Ianna and Ifrim, Ioana and Schreiner, Henry and Hollands, Angus and Biswas, Anish and Das, Pratyush and Roy Choudhury, Santam and Smith, Nicholas and Goyal, Manasvi and Fackeldey, Peter and Krommydas, Iason},
doi = {10.5281/zenodo.4341376},
month = {10},
title = {{Awkward Array}},
year = {2018}
}
```
# Acknowledgements
Support for this work was provided by NSF cooperative agreement [OAC-1836650](https://www.nsf.gov/awardsearch/showAward?AWD_ID=1836650) (IRIS-HEP 1), [PHY-2323298](https://www.nsf.gov/awardsearch/showAward?AWD_ID=2323298) (IRIS-HEP 2), grant [OAC-1450377](https://nsf.gov/awardsearch/showAward?AWD_ID=1450377) (DIANA/HEP), [PHY-1520942](https://www.nsf.gov/awardsearch/showAward?AWD_ID=1520942) and [PHY-2121686](https://www.nsf.gov/awardsearch/showAward?AWD_ID=2121686) (US-CMS LHC Ops), and [OAC-2103945](https://www.nsf.gov/awardsearch/showAward?AWD_ID=2103945) (Awkward Array).
We also thank [Erez Shinan](https://github.com/erezsh) and the developers of the [Lark standalone parser](https://github.com/lark-parser/lark), which is used to parse type strings as type objects.
Thanks especially to the gracious help of Awkward Array contributors (including the [original repository](https://github.com/scikit-hep/awkward-0.x)).
💻: code, 📖: documentation, 🚇: infrastructure, 🚧: maintenance, ⚠: tests and feedback, 🤔: foundational ideas.
awkward-2.8.9/awkward-cpp/ 0000775 0000000 0000000 00000000000 15063547320 0015417 5 ustar 00root root 0000000 0000000 awkward-2.8.9/awkward-cpp/.clang-format 0000664 0000000 0000000 00000001205 15063547320 0017770 0 ustar 00root root 0000000 0000000 ---
Language: Cpp
BasedOnStyle: Google
ColumnLimit: 80
NamespaceIndentation: All
SortIncludes: false
IndentWidth: 2
AccessModifierOffset: -2
PenaltyBreakComment: 30
PenaltyExcessCharacter: 100
AlignAfterOpenBracket: Align
AlignTrailingComments: true
AllowAllArgumentsOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterReturnType: All
BinPackParameters: false
AlwaysBreakTemplateDeclarations: Yes
ReflowComments: false
BinPackArguments: false
awkward-2.8.9/awkward-cpp/.clang-tidy 0000664 0000000 0000000 00000003060 15063547320 0017452 0 ustar 00root root 0000000 0000000 ---
Checks: -*,
,boost-use-to-string,
,misc-string-compare,
,misc-uniqueptr-reset-release,
,modernize-deprecated-headers,
,modernize-make-shared,
,modernize-use-bool-literals,
,modernize-use-equals-delete,
,modernize-use-nullptr,
,modernize-use-override,
,modernize-make-unique,
,performance-unnecessary-copy-initialization,
,readability-container-size-empty,
,readability-redundant-string-cstr,
,readability-static-definition-in-anonymous-namespace,
,readability-uniqueptr-delete-release,
,cppcoreguidelines-pro-type-member-init
WarningsAsErrors: ''
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
CheckOptions:
- key: google-readability-braces-around-statements.ShortStatementLines
value: '1'
- key: google-readability-function-size.StatementThreshold
value: '800'
- key: google-readability-namespace-comments.ShortNamespaceLines
value: '10'
- key: google-readability-namespace-comments.SpacesBeforeComments
value: '2'
- key: modernize-loop-convert.MaxCopySize
value: '16'
- key: modernize-loop-convert.MinConfidence
value: reasonable
- key: modernize-loop-convert.NamingStyle
value: CamelCase
- key: modernize-pass-by-value.IncludeStyle
value: llvm
- key: modernize-replace-auto-ptr.IncludeStyle
value: llvm
- key: modernize-use-nullptr.NullMacros
value: 'NULL'
...
awkward-2.8.9/awkward-cpp/.cmake-format.yaml 0000664 0000000 0000000 00000000031 15063547320 0020721 0 ustar 00root root 0000000 0000000 format:
line_width: 99
awkward-2.8.9/awkward-cpp/.gitignore 0000664 0000000 0000000 00000000214 15063547320 0017404 0 ustar 00root root 0000000 0000000 # Doxygen
docs/html
.pyodide-xbuildenv
# Generated
header-only
include/awkward/kernels.h
src/awkward_cpp/_kernel_signatures.py
dist
.venv
awkward-2.8.9/awkward-cpp/CMakeLists.txt 0000664 0000000 0000000 00000011053 15063547320 0020157 0 ustar 00root root 0000000 0000000 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
cmake_minimum_required(VERSION 3.15...3.30)
if(NOT DEFINED SKBUILD)
set(SKBUILD_PROJECT_NAME awkward_cpp)
set(SKBUILD_PROJECT_VERSION 0.0.0)
endif()
# Project must be near the top
project(
${SKBUILD_PROJECT_NAME}
LANGUAGES CXX
VERSION ${SKBUILD_PROJECT_VERSION})
message(STATUS "CMake version ${CMAKE_VERSION}")
message(STATUS "CMAKE_BUILD_TYPE = ${CMAKE_BUILD_TYPE}")
if(CMAKE_CXX_COMPILER_ID MATCHES AppleClang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL
15)
if(CMAKE_VERSION VERSION_LESS 3.29)
message(WARNING "CMake should be 3.29+ to support AppleClang 15+. Trying anyway.")
endif()
set(CMAKE_LINKER_TYPE APPLE_CLASSIC)
endif()
# Check for header-only libraries
if(NOT (EXISTS "header-only" AND EXISTS "include/awkward/kernels.h"))
message(
FATAL_ERROR
"\
awkward-cpp relies upon generated and copied artefacts such as the header-only libraries and generated kernel headers. \
These could not be found, which indicates that\n\n\
nox -s prepare\
\n\nwas skipped or failed. \
Please check https://github.com/scikit-hep/awkward#installation-for-developers to learn more about this process.\
")
endif()
# Setup the RPATH for built libraries
if(APPLE)
set(CMAKE_INSTALL_RPATH "@loader_path")
else()
set(CMAKE_INSTALL_RPATH "\$ORIGIN")
endif()
set(CMAKE_BUILD_RPATH_USE_ORIGIN TRUE)
# Three tiers: [cpu-kernels (extern "C" interface), cuda-kernels (extern "C" interface)],
# libawkward (C++), and Python modules.
file(GLOB CPU_KERNEL_SOURCES CONFIGURE_DEPENDS "src/cpu-kernels/*.cpp")
file(GLOB_RECURSE LIBAWKWARD_SOURCES CONFIGURE_DEPENDS "src/libawkward/*.cpp")
# Shared properties
add_library(awkward-parent INTERFACE)
target_compile_definitions(awkward-parent INTERFACE VERSION_INFO="${SKBUILD_PROJECT_VERSION}")
target_include_directories(awkward-parent INTERFACE include)
target_compile_features(awkward-parent INTERFACE cxx_std_17)
# C++ dependencies (header-only): RapidJSON
target_include_directories(awkward-parent INTERFACE rapidjson/include)
# C++ dependencies (header-only): GrowableBuffer
add_subdirectory(header-only EXCLUDE_FROM_ALL)
target_link_libraries(awkward-parent INTERFACE awkward::growable-buffer)
# WASM needs exceptions enabled
if(CMAKE_SYSTEM_NAME MATCHES Emscripten)
set_property(
TARGET awkward-parent
APPEND
PROPERTY INTERFACE_LINK_OPTIONS -fexceptions)
set_property(
TARGET awkward-parent
APPEND
PROPERTY INTERFACE_COMPILE_OPTIONS -fexceptions)
endif()
# First tier: cpu-kernels
add_library(awkward-cpu-kernels SHARED ${CPU_KERNEL_SOURCES})
target_link_libraries(awkward-cpu-kernels PUBLIC awkward-parent)
set_target_properties(
awkward-cpu-kernels
PROPERTIES CXX_VISIBILITY_PRESET hidden
VISIBILITY_INLINES_HIDDEN ON
CXX_EXTENSIONS NO)
# Second tier: libawkward
add_library(awkward SHARED ${LIBAWKWARD_SOURCES})
if(${CMAKE_CXX_COMPILER_ID} MATCHES "Clang")
# Avoid emitting vtables in the dependent libraries
target_compile_options(
awkward
PRIVATE -Werror=weak-vtables
-Wweak-vtables
-Wshorten-64-to-32
-Wsign-compare
-Wsign-conversion
-Wshift-sign-overflow
-Wreorder
-Wrange-loop-analysis
-Wconversion
-Wunused)
endif()
target_link_libraries(awkward PUBLIC awkward-parent)
set_target_properties(
awkward
PROPERTIES CXX_VISIBILITY_PRESET hidden
VISIBILITY_INLINES_HIDDEN ON
CXX_EXTENSIONS NO)
# Third tier: Python modules.
find_package(Python REQUIRED COMPONENTS Interpreter Development.Module)
find_package(pybind11 CONFIG REQUIRED)
# Install python bindings
file(GLOB LAYOUT_SOURCES "src/python/*.cpp")
pybind11_add_module(_ext MODULE ${LAYOUT_SOURCES})
target_link_libraries(_ext PRIVATE awkward)
set_target_properties(
_ext
PROPERTIES CXX_VISIBILITY_PRESET hidden
VISIBILITY_INLINES_HIDDEN ON
CXX_EXTENSIONS NO)
# This has to be passed explicitly to make Pyodide 0.28 happy
if(CMAKE_SYSTEM_NAME MATCHES Emscripten)
target_link_options(_ext PRIVATE "-Wl,-rpath=\$ORIGIN")
endif()
# Install pure-python files
file(GLOB_RECURSE PYTHON_SOURCES "src/${SKBUILD_PROJECT_NAME}/*.py")
install(
TARGETS awkward awkward-parent awkward-cpu-kernels _ext
LIBRARY DESTINATION "${SKBUILD_PROJECT_NAME}/lib"
RUNTIME DESTINATION "${SKBUILD_PROJECT_NAME}/lib"
ARCHIVE DESTINATION "${SKBUILD_PROJECT_NAME}/lib")
install(FILES ${PYTHON_SOURCES} DESTINATION ${SKBUILD_PROJECT_NAME})
awkward-2.8.9/awkward-cpp/LICENSE 0000664 0000000 0000000 00000002760 15063547320 0016431 0 ustar 00root root 0000000 0000000 BSD 3-Clause License
Copyright (c) 2019, Jim Pivarski
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
awkward-2.8.9/awkward-cpp/README.md 0000664 0000000 0000000 00000000232 15063547320 0016673 0 ustar 00root root 0000000 0000000 # `awkward-cpp`
`awkward-cpp` provides precompiled routines for the `awkward` package. It is not useful on its own, only as a dependency for `awkward `.
awkward-2.8.9/awkward-cpp/docs/ 0000775 0000000 0000000 00000000000 15063547320 0016347 5 ustar 00root root 0000000 0000000 awkward-2.8.9/awkward-cpp/docs/Doxyfile 0000664 0000000 0000000 00000325346 15063547320 0020072 0 ustar 00root root 0000000 0000000 # Doxyfile 1.8.16
# This file describes the settings to be used by the documentation system
# doxygen (www.doxygen.org) for a project.
#
# All text after a double hash (##) is considered a comment and is placed in
# front of the TAG it is preceding.
#
# All text after a single hash (#) is considered a comment and will be ignored.
# The format is:
# TAG = value [value, ...]
# For lists, items can also be appended using:
# TAG += value [value, ...]
# Values that contain spaces should be placed between quotes (\" \").
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
# This tag specifies the encoding used for all characters in the configuration
# file that follow. The default is UTF-8 which is also the encoding used for all
# text before the first occurrence of this tag. Doxygen uses libiconv (or the
# iconv built into libc) for the transcoding. See
# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
# The default value is: UTF-8.
DOXYFILE_ENCODING = UTF-8
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
# double-quotes, unless you are using Doxywizard) that should identify the
# project for which the documentation is generated. This name is used in the
# title of most generated pages and in a few other places.
# The default value is: My Project.
PROJECT_NAME = "Awkward Array"
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER =
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
# quick idea about the purpose of the project. Keep the description short.
PROJECT_BRIEF =
# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
# in the documentation. The maximum height of the logo should not exceed 55
# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
# the logo to the output directory.
PROJECT_LOGO =
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
# into which the generated documentation will be written. If a relative path is
# entered, it will be relative to the location where doxygen was started. If
# left blank the current directory will be used.
OUTPUT_DIRECTORY =
# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
# directories (in 2 levels) under the output directory of each output format and
# will distribute the generated files over these directories. Enabling this
# option can be useful when feeding doxygen a huge amount of source files, where
# putting all generated files in the same directory would otherwise causes
# performance problems for the file system.
# The default value is: NO.
CREATE_SUBDIRS = NO
# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
# characters to appear in the names of generated files. If set to NO, non-ASCII
# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
# U+3044.
# The default value is: NO.
ALLOW_UNICODE_NAMES = NO
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
# documentation generated by doxygen is written. Doxygen will use this
# information to generate all constant output in the proper language.
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
# Ukrainian and Vietnamese.
# The default value is: English.
OUTPUT_LANGUAGE = English
# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all
# documentation generated by doxygen is written. Doxygen will use this
# information to generate all generated output in the proper direction.
# Possible values are: None, LTR, RTL and Context.
# The default value is: None.
OUTPUT_TEXT_DIRECTION = None
# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
# descriptions after the members that are listed in the file and class
# documentation (similar to Javadoc). Set to NO to disable this.
# The default value is: YES.
BRIEF_MEMBER_DESC = YES
# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
# description of a member or function before the detailed description
#
# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
# brief descriptions will be completely suppressed.
# The default value is: YES.
REPEAT_BRIEF = YES
# This tag implements a quasi-intelligent brief description abbreviator that is
# used to form the text in various listings. Each string in this list, if found
# as the leading text of the brief description, will be stripped from the text
# and the result, after processing the whole list, is used as the annotated
# text. Otherwise, the brief description is used as-is. If left blank, the
# following values are used ($name is automatically replaced with the name of
# the entity):The $name class, The $name widget, The $name file, is, provides,
# specifies, contains, represents, a, an and the.
ABBREVIATE_BRIEF = "The $name class" \
"The $name widget" \
"The $name file" \
is \
provides \
specifies \
contains \
represents \
a \
an \
the
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
# doxygen will generate a detailed section even if there is only a brief
# description.
# The default value is: NO.
ALWAYS_DETAILED_SEC = NO
# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
# inherited members of a class in the documentation of that class as if those
# members were ordinary class members. Constructors, destructors and assignment
# operators of the base classes will not be shown.
# The default value is: NO.
INLINE_INHERITED_MEMB = NO
# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
# before files name in the file list and in the header files. If set to NO the
# shortest path that makes the file name unique will be used
# The default value is: YES.
FULL_PATH_NAMES = YES
# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
# Stripping is only done if one of the specified strings matches the left-hand
# part of the path. The tag can be used to show relative paths in the file list.
# If left blank the directory from which doxygen is run is used as the path to
# strip.
#
# Note that you can specify absolute paths here, but also relative paths, which
# will be relative from the directory where doxygen is started.
# This tag requires that the tag FULL_PATH_NAMES is set to YES.
STRIP_FROM_PATH =
# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
# path mentioned in the documentation of a class, which tells the reader which
# header file to include in order to use a class. If left blank only the name of
# the header file containing the class definition is used. Otherwise one should
# specify the list of include paths that are normally passed to the compiler
# using the -I flag.
STRIP_FROM_INC_PATH =
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
# less readable) file names. This can be useful is your file systems doesn't
# support long names like on DOS, Mac, or CD-ROM.
# The default value is: NO.
SHORT_NAMES = NO
# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
# first line (until the first dot) of a Javadoc-style comment as the brief
# description. If set to NO, the Javadoc-style will behave just like regular Qt-
# style comments (thus requiring an explicit @brief command for a brief
# description.)
# The default value is: NO.
JAVADOC_AUTOBRIEF = NO
# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line
# such as
# /***************
# as being the beginning of a Javadoc-style comment "banner". If set to NO, the
# Javadoc-style will behave just like regular comments and it will not be
# interpreted by doxygen.
# The default value is: NO.
JAVADOC_BANNER = NO
# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
# line (until the first dot) of a Qt-style comment as the brief description. If
# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
# requiring an explicit \brief command for a brief description.)
# The default value is: NO.
QT_AUTOBRIEF = NO
# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
# a brief description. This used to be the default behavior. The new default is
# to treat a multi-line C++ comment block as a detailed description. Set this
# tag to YES if you prefer the old behavior instead.
#
# Note that setting this tag to YES also means that rational rose comments are
# not recognized any more.
# The default value is: NO.
MULTILINE_CPP_IS_BRIEF = NO
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
# documentation from any documented member that it re-implements.
# The default value is: YES.
INHERIT_DOCS = YES
# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
# page for each member. If set to NO, the documentation of a member will be part
# of the file/class/namespace that contains it.
# The default value is: NO.
SEPARATE_MEMBER_PAGES = NO
# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
# uses this value to replace tabs by spaces in code fragments.
# Minimum value: 1, maximum value: 16, default value: 4.
TAB_SIZE = 4
# This tag can be used to specify a number of aliases that act as commands in
# the documentation. An alias has the form:
# name=value
# For example adding
# "sideeffect=@par Side Effects:\n"
# will allow you to put the command \sideeffect (or @sideeffect) in the
# documentation, which will result in a user-defined paragraph with heading
# "Side Effects:". You can put \n's in the value part of an alias to insert
# newlines (in the resulting output). You can put ^^ in the value part of an
# alias to insert a newline as if a physical newline was in the original file.
# When you need a literal { or } or , in the value part of an alias you have to
# escape them by means of a backslash (\), this can lead to conflicts with the
# commands \{ and \} for these it is advised to use the version @{ and @} or use
# a double escape (\\{ and \\})
ALIASES =
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
# will allow you to use the command class in the itcl::class meaning.
TCL_SUBST =
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
# only. Doxygen will then generate output that is more tailored for C. For
# instance, some of the names that are used will be different. The list of all
# members will be omitted, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_FOR_C = NO
# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
# Python sources only. Doxygen will then generate output that is more tailored
# for that language. For instance, namespaces will be presented as packages,
# qualified scopes will look different, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_JAVA = NO
# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
# sources. Doxygen will then generate output that is tailored for Fortran.
# The default value is: NO.
OPTIMIZE_FOR_FORTRAN = NO
# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
# sources. Doxygen will then generate output that is tailored for VHDL.
# The default value is: NO.
OPTIMIZE_OUTPUT_VHDL = NO
# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
# sources only. Doxygen will then generate output that is more tailored for that
# language. For instance, namespaces will be presented as modules, types will be
# separated into more groups, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_SLICE = NO
# Doxygen selects the parser to use depending on the extension of the files it
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
# tries to guess whether the code is fixed or free formatted code, this is the
# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
# .inc files as Fortran files (default is PHP), and .f files as C (default is
# Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
# the files are not read by doxygen.
EXTENSION_MAPPING =
# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
# according to the Markdown format, which allows for more readable
# documentation. See https://daringfireball.net/projects/markdown/ for details.
# The output of markdown processing is further processed by doxygen, so you can
# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
# case of backward compatibilities issues.
# The default value is: YES.
MARKDOWN_SUPPORT = YES
# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
# to that level are automatically included in the table of contents, even if
# they do not have an id attribute.
# Note: This feature currently applies only to Markdown headings.
# Minimum value: 0, maximum value: 99, default value: 5.
# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
TOC_INCLUDE_HEADINGS = 5
# When enabled doxygen tries to link words that correspond to documented
# classes, or namespaces to their corresponding documentation. Such a link can
# be prevented in individual cases by putting a % sign in front of the word or
# globally by setting AUTOLINK_SUPPORT to NO.
# The default value is: YES.
AUTOLINK_SUPPORT = YES
# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
# to include (a tag file for) the STL sources as input, then you should set this
# tag to YES in order to let doxygen match functions declarations and
# definitions whose arguments contain STL classes (e.g. func(std::string);
# versus func(std::string) {}). This also make the inheritance and collaboration
# diagrams that involve STL classes more complete and accurate.
# The default value is: NO.
BUILTIN_STL_SUPPORT = YES
# If you use Microsoft's C++/CLI language, you should set this option to YES to
# enable parsing support.
# The default value is: NO.
CPP_CLI_SUPPORT = NO
# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
# will parse them like normal C++ but will assume all classes use public instead
# of private inheritance when no explicit protection keyword is present.
# The default value is: NO.
SIP_SUPPORT = NO
# For Microsoft's IDL there are propget and propput attributes to indicate
# getter and setter methods for a property. Setting this option to YES will make
# doxygen to replace the get and set methods by a property in the documentation.
# This will only work if the methods are indeed getting or setting a simple
# type. If this is not the case, or you want to show the methods anyway, you
# should set this option to NO.
# The default value is: YES.
IDL_PROPERTY_SUPPORT = YES
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
# tag is set to YES then doxygen will reuse the documentation of the first
# member in the group (if any) for the other members of the group. By default
# all members of a group must be documented explicitly.
# The default value is: NO.
DISTRIBUTE_GROUP_DOC = NO
# If one adds a struct or class to a group and this option is enabled, then also
# any nested class or struct is added to the same group. By default this option
# is disabled and one has to add nested compounds explicitly via \ingroup.
# The default value is: NO.
GROUP_NESTED_COMPOUNDS = NO
# Set the SUBGROUPING tag to YES to allow class member groups of the same type
# (for instance a group of public functions) to be put as a subgroup of that
# type (e.g. under the Public Functions section). Set it to NO to prevent
# subgrouping. Alternatively, this can be done per class using the
# \nosubgrouping command.
# The default value is: YES.
SUBGROUPING = YES
# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
# are shown inside the group in which they are included (e.g. using \ingroup)
# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
# and RTF).
#
# Note that this feature does not work in combination with
# SEPARATE_MEMBER_PAGES.
# The default value is: NO.
INLINE_GROUPED_CLASSES = NO
# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
# with only public data fields or simple typedef fields will be shown inline in
# the documentation of the scope in which they are defined (i.e. file,
# namespace, or group documentation), provided this scope is documented. If set
# to NO, structs, classes, and unions are shown on a separate page (for HTML and
# Man pages) or section (for LaTeX and RTF).
# The default value is: NO.
INLINE_SIMPLE_STRUCTS = NO
# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
# enum is documented as struct, union, or enum with the name of the typedef. So
# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
# with name TypeT. When disabled the typedef will appear as a member of a file,
# namespace, or class. And the struct will be named TypeS. This can typically be
# useful for C code in case the coding convention dictates that all compound
# types are typedef'ed and only the typedef is referenced, never the tag name.
# The default value is: NO.
TYPEDEF_HIDES_STRUCT = NO
# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
# cache is used to resolve symbols given their name and scope. Since this can be
# an expensive process and often the same symbol appears multiple times in the
# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
# doxygen will become slower. If the cache is too large, memory is wasted. The
# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
# symbols. At the end of a run doxygen will report the cache usage and suggest
# the optimal cache size from a speed point of view.
# Minimum value: 0, maximum value: 9, default value: 0.
LOOKUP_CACHE_SIZE = 0
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
# documentation are documented, even if no documentation was available. Private
# class members and static file members will be hidden unless the
# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
# Note: This will also disable the warnings about undocumented members that are
# normally produced when WARNINGS is set to YES.
# The default value is: NO.
EXTRACT_ALL = YES
# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
# be included in the documentation.
# The default value is: NO.
EXTRACT_PRIVATE = NO
# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual
# methods of a class will be included in the documentation.
# The default value is: NO.
EXTRACT_PRIV_VIRTUAL = NO
# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
# scope will be included in the documentation.
# The default value is: NO.
EXTRACT_PACKAGE = NO
# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
# included in the documentation.
# The default value is: NO.
EXTRACT_STATIC = NO
# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
# locally in source files will be included in the documentation. If set to NO,
# only classes defined in header files are included. Does not have any effect
# for Java sources.
# The default value is: YES.
EXTRACT_LOCAL_CLASSES = YES
# This flag is only useful for Objective-C code. If set to YES, local methods,
# which are defined in the implementation section but not in the interface are
# included in the documentation. If set to NO, only methods in the interface are
# included.
# The default value is: NO.
EXTRACT_LOCAL_METHODS = NO
# If this flag is set to YES, the members of anonymous namespaces will be
# extracted and appear in the documentation as a namespace called
# 'anonymous_namespace{file}', where file will be replaced with the base name of
# the file that contains the anonymous namespace. By default anonymous namespace
# are hidden.
# The default value is: NO.
EXTRACT_ANON_NSPACES = NO
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
# undocumented members inside documented classes or files. If set to NO these
# members will be included in the various overviews, but no documentation
# section is generated. This option has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.
HIDE_UNDOC_MEMBERS = NO
# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
# undocumented classes that are normally visible in the class hierarchy. If set
# to NO, these classes will be included in the various overviews. This option
# has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.
HIDE_UNDOC_CLASSES = NO
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
# (class|struct|union) declarations. If set to NO, these declarations will be
# included in the documentation.
# The default value is: NO.
HIDE_FRIEND_COMPOUNDS = NO
# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
# documentation blocks found inside the body of a function. If set to NO, these
# blocks will be appended to the function's detailed documentation block.
# The default value is: NO.
HIDE_IN_BODY_DOCS = NO
# The INTERNAL_DOCS tag determines if documentation that is typed after a
# \internal command is included. If the tag is set to NO then the documentation
# will be excluded. Set it to YES to include the internal documentation.
# The default value is: NO.
INTERNAL_DOCS = NO
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
# names in lower-case letters. If set to YES, upper-case letters are also
# allowed. This is useful if you have classes or files whose names only differ
# in case and if your file system supports case sensitive file names. Windows
# (including Cygwin) ands Mac users are advised to set this option to NO.
# The default value is: system dependent.
CASE_SENSE_NAMES = YES
# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
# their full class and namespace scopes in the documentation. If set to YES, the
# scope will be hidden.
# The default value is: NO.
HIDE_SCOPE_NAMES = YES
# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
# append additional text to a page's title, such as Class Reference. If set to
# YES the compound reference will be hidden.
# The default value is: NO.
HIDE_COMPOUND_REFERENCE= NO
# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
# the files that are included by a file in the documentation of that file.
# The default value is: YES.
SHOW_INCLUDE_FILES = YES
# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
# grouped member an include statement to the documentation, telling the reader
# which file to include in order to use the member.
# The default value is: NO.
SHOW_GROUPED_MEMB_INC = NO
# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
# files with double quotes in the documentation rather than with sharp brackets.
# The default value is: NO.
FORCE_LOCAL_INCLUDES = NO
# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
# documentation for inline members.
# The default value is: YES.
INLINE_INFO = YES
# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
# (detailed) documentation of file and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order.
# The default value is: YES.
SORT_MEMBER_DOCS = YES
# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
# descriptions of file, namespace and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order. Note that
# this will also influence the order of the classes in the class list.
# The default value is: NO.
SORT_BRIEF_DOCS = NO
# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
# (brief and detailed) documentation of class members so that constructors and
# destructors are listed first. If set to NO the constructors will appear in the
# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
# member documentation.
# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
# detailed member documentation.
# The default value is: NO.
SORT_MEMBERS_CTORS_1ST = NO
# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
# of group names into alphabetical order. If set to NO the group names will
# appear in their defined order.
# The default value is: NO.
SORT_GROUP_NAMES = NO
# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
# fully-qualified names, including namespaces. If set to NO, the class list will
# be sorted only by class name, not including the namespace part.
# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
# Note: This option applies only to the class list, not to the alphabetical
# list.
# The default value is: NO.
SORT_BY_SCOPE_NAME = YES
# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
# type resolution of all parameters of a function it will reject a match between
# the prototype and the implementation of a member function even if there is
# only one candidate or it is obvious which candidate to choose by doing a
# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
# accept a match between prototype and implementation in such cases.
# The default value is: NO.
STRICT_PROTO_MATCHING = NO
# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
# list. This list is created by putting \todo commands in the documentation.
# The default value is: YES.
GENERATE_TODOLIST = YES
# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
# list. This list is created by putting \test commands in the documentation.
# The default value is: YES.
GENERATE_TESTLIST = YES
# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
# list. This list is created by putting \bug commands in the documentation.
# The default value is: YES.
GENERATE_BUGLIST = YES
# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
# the deprecated list. This list is created by putting \deprecated commands in
# the documentation.
# The default value is: YES.
GENERATE_DEPRECATEDLIST= YES
# The ENABLED_SECTIONS tag can be used to enable conditional documentation
# sections, marked by \if ... \endif and \cond
# ... \endcond blocks.
ENABLED_SECTIONS =
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
# initial value of a variable or macro / define can have for it to appear in the
# documentation. If the initializer consists of more lines than specified here
# it will be hidden. Use a value of 0 to hide initializers completely. The
# appearance of the value of individual variables and macros / defines can be
# controlled using \showinitializer or \hideinitializer command in the
# documentation regardless of this setting.
# Minimum value: 0, maximum value: 10000, default value: 30.
MAX_INITIALIZER_LINES = 30
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
# the bottom of the documentation of classes and structs. If set to YES, the
# list will mention the files that were used to generate the documentation.
# The default value is: YES.
SHOW_USED_FILES = YES
# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
# will remove the Files entry from the Quick Index and from the Folder Tree View
# (if specified).
# The default value is: YES.
SHOW_FILES = YES
# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
# page. This will remove the Namespaces entry from the Quick Index and from the
# Folder Tree View (if specified).
# The default value is: YES.
SHOW_NAMESPACES = NO
# The FILE_VERSION_FILTER tag can be used to specify a program or script that
# doxygen should invoke to get the current version for each file (typically from
# the version control system). Doxygen will invoke the program by executing (via
# popen()) the command <command> <input-file>, where <command> is the value of
# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
# provided
# by doxygen. Whatever the program writes to standard output is used as the file
# version. For an example see the documentation.
FILE_VERSION_FILTER =
# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
# by doxygen. The layout file controls the global structure of the generated
# output files in an output format independent way. To create the layout file
# that represents doxygen's defaults, run doxygen with the -l option. You can
# optionally specify a file name after the option, if omitted DoxygenLayout.xml
# will be used as the name of the layout file.
#
# Note that if you run doxygen from a directory containing a file called
# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
# tag is left empty.
LAYOUT_FILE =
# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
# the reference definitions. This must be a list of .bib files. The .bib
# extension is automatically appended if omitted. This requires the bibtex tool
# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
# For LaTeX the style of the bibliography can be controlled using
# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
# search path. See also \cite for info how to create references.
CITE_BIB_FILES =
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
# The QUIET tag can be used to turn on/off the messages that are generated to
# standard output by doxygen. If QUIET is set to YES this implies that the
# messages are off.
# The default value is: NO.
QUIET = NO
# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
# this implies that the warnings are on.
#
# Tip: Turn warnings on while writing the documentation.
# The default value is: YES.
WARNINGS = YES
# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
# will automatically be disabled.
# The default value is: YES.
WARN_IF_UNDOCUMENTED = YES
# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some parameters
# in a documented function, or documenting parameters that don't exist or using
# markup commands wrongly.
# The default value is: YES.
WARN_IF_DOC_ERROR = YES
# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
# are documented, but have no documentation for their parameters or return
# value. If set to NO, doxygen will only warn about wrong or incomplete
# parameter documentation, but not about the absence of documentation. If
# EXTRACT_ALL is set to YES then this flag will automatically be disabled.
# The default value is: NO.
WARN_NO_PARAMDOC = NO
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
# a warning is encountered.
# The default value is: NO.
WARN_AS_ERROR = NO
# The WARN_FORMAT tag determines the format of the warning messages that doxygen
# can produce. The string should contain the $file, $line, and $text tags, which
# will be replaced by the file and line number from which the warning originated
# and the warning text. Optionally the format may contain $version, which will
# be replaced by the version of the file (if it could be obtained via
# FILE_VERSION_FILTER)
# The default value is: $file:$line: $text.
WARN_FORMAT = "$file:$line: $text"
# The WARN_LOGFILE tag can be used to specify a file to which warning and error
# messages should be written. If left blank the output is written to standard
# error (stderr).
WARN_LOGFILE =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
# The INPUT tag is used to specify the files and/or directories that contain
# documented source files. You may enter file names like myfile.cpp or
# directories like /usr/src/myproject. Separate the files or directories with
# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
# Note: If this tag is empty the current directory is searched.
INPUT = ../include \
../header-only \
index.md
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
# possible encodings.
# The default value is: UTF-8.
INPUT_ENCODING = UTF-8
# If the value of the INPUT tag contains directories, you can use the
# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
# *.h) to filter out the source-files in the directories.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# read by doxygen.
#
# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
FILE_PATTERNS = *.cpp *.h
# The RECURSIVE tag can be used to specify whether or not subdirectories should
# be searched for input files as well.
# The default value is: NO.
RECURSIVE = YES
# The EXCLUDE tag can be used to specify files and/or directories that should be
# excluded from the INPUT source files. This way you can easily exclude a
# subdirectory from a directory tree whose root is specified with the INPUT tag.
#
# Note that relative paths are relative to the directory from which doxygen is
# run.
EXCLUDE =
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
# from the input.
# The default value is: NO.
EXCLUDE_SYMLINKS = NO
# If the value of the INPUT tag contains directories, you can use the
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
# certain files from those directories.
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories for example use the pattern */test/*
EXCLUDE_PATTERNS =
# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
# (namespaces, classes, functions, etc.) that should be excluded from the
# output. The symbol name can be a fully qualified name, a word, or if the
# wildcard * is used, a substring. Examples: ANamespace, AClass,
# AClass::ANamespace, ANamespace::*Test
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories use the pattern */test/*
EXCLUDE_SYMBOLS =
# The EXAMPLE_PATH tag can be used to specify one or more files or directories
# that contain example code fragments that are included (see the \include
# command).
EXAMPLE_PATH =
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
# *.h) to filter out the source-files in the directories. If left blank all
# files are included.
EXAMPLE_PATTERNS = *
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
# searched for input files to be used with the \include or \dontinclude commands
# irrespective of the value of the RECURSIVE tag.
# The default value is: NO.
EXAMPLE_RECURSIVE = NO
# The IMAGE_PATH tag can be used to specify one or more files or directories
# that contain images that are to be included in the documentation (see the
# \image command).
IMAGE_PATH =
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
# by executing (via popen()) the command:
#
#   <filter> <input-file>
#
# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
# name of an input file. Doxygen will then use the output that the filter
# program writes to standard output. If FILTER_PATTERNS is specified, this tag
# will be ignored.
#
# Note that the filter must not add or remove lines; it is applied before the
# code is scanned, but not when the output code is generated. If lines are added
# or removed, the anchors will not be placed correctly.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.
INPUT_FILTER =
# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
# basis. Doxygen will compare the file name with each pattern and apply the
# filter if there is a match. The filters are a list of the form: pattern=filter
# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
# patterns match the file name, INPUT_FILTER is applied.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.
FILTER_PATTERNS =
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
# INPUT_FILTER) will also be used to filter the input files that are used for
# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
# The default value is: NO.
FILTER_SOURCE_FILES = NO
# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
# it is also possible to disable source filtering for a specific pattern using
# *.ext= (so without naming a filter).
# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
FILTER_SOURCE_PATTERNS =
# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
# is part of the input, its contents will be placed on the main page
# (index.html). This can be useful if you have a project on for instance GitHub
# and want to reuse the introduction page also for the doxygen output.
USE_MDFILE_AS_MAINPAGE = index.md
#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
# generated. Documented entities will be cross-referenced with these sources.
#
# Note: To get rid of all source code in the generated output, make sure that
# also VERBATIM_HEADERS is set to NO.
# The default value is: NO.
SOURCE_BROWSER = NO
# Setting the INLINE_SOURCES tag to YES will include the body of functions,
# classes and enums directly into the documentation.
# The default value is: NO.
INLINE_SOURCES = NO
# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
# special comment blocks from generated source code fragments. Normal C, C++ and
# Fortran comments will always remain visible.
# The default value is: YES.
STRIP_CODE_COMMENTS = YES
# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
# entity all documented functions referencing it will be listed.
# The default value is: NO.
REFERENCED_BY_RELATION = NO
# If the REFERENCES_RELATION tag is set to YES then for each documented function
# all documented entities called/used by that function will be listed.
# The default value is: NO.
REFERENCES_RELATION = NO
# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
# to YES then the hyperlinks from functions in REFERENCES_RELATION and
# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
# link to the documentation.
# The default value is: YES.
REFERENCES_LINK_SOURCE = YES
# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
# source code will show a tooltip with additional information such as prototype,
# brief description and links to the definition and documentation. Since this
# will make the HTML file larger and loading of large files a bit slower, you
# can opt to disable this feature.
# The default value is: YES.
# This tag requires that the tag SOURCE_BROWSER is set to YES.
SOURCE_TOOLTIPS = YES
# If the USE_HTAGS tag is set to YES then the references to source code will
# point to the HTML generated by the htags(1) tool instead of doxygen built-in
# source browser. The htags tool is part of GNU's global source tagging system
# (see https://www.gnu.org/software/global/global.html). You will need version
# 4.8.6 or higher.
#
# To use it do the following:
# - Install the latest version of global
# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
# - Make sure the INPUT points to the root of the source tree
# - Run doxygen as normal
#
# Doxygen will invoke htags (and that will in turn invoke gtags), so these
# tools must be available from the command line (i.e. in the search path).
#
# The result: instead of the source browser generated by doxygen, the links to
# source code will now point to the output of htags.
# The default value is: NO.
# This tag requires that the tag SOURCE_BROWSER is set to YES.
USE_HTAGS = NO
# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
# verbatim copy of the header file for each class for which an include is
# specified. Set to NO to disable this.
# See also: Section \class.
# The default value is: YES.
VERBATIM_HEADERS = YES
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
# compounds will be generated. Enable this if the project contains a lot of
# classes, structs, unions or interfaces.
# The default value is: YES.
ALPHABETICAL_INDEX = NO
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
# which the alphabetical index list will be split.
# Minimum value: 1, maximum value: 20, default value: 5.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
COLS_IN_ALPHA_INDEX = 1
# In case all classes in a project start with a common prefix, all classes will
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
# can be used to specify a prefix (or a list of prefixes) that should be ignored
# while generating the index headers.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
# The default value is: YES.
GENERATE_HTML = YES
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
# it.
# The default directory is: html.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_OUTPUT = html
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
# generated HTML page (for example: .htm, .php, .asp).
# The default value is: .html.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FILE_EXTENSION = .html
# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
# each generated HTML page. If the tag is left blank doxygen will generate a
# standard header.
#
# To get valid HTML the header file that includes any scripts and style sheets
# that doxygen needs, which is dependent on the configuration options used (e.g.
# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
# default header using
# doxygen -w html new_header.html new_footer.html new_stylesheet.css
# YourConfigFile
# and then modify the file new_header.html. See also section "Doxygen usage"
# for information on how to generate the default header that doxygen normally
# uses.
# Note: The header is subject to change so you typically have to regenerate the
# default header when upgrading to a newer version of doxygen. For a description
# of the possible markers and block names see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_HEADER = header.html
# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
# generated HTML page. If the tag is left blank doxygen will generate a standard
# footer. See HTML_HEADER for more information on how to generate a default
# footer and what special commands can be used inside the footer. See also
# section "Doxygen usage" for information on how to generate the default footer
# that doxygen normally uses.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FOOTER = footer.html
# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
# sheet that is used by each HTML page. It can be used to fine-tune the look of
# the HTML output. If left blank doxygen will generate a default style sheet.
# See also section "Doxygen usage" for information on how to generate the style
# sheet that doxygen normally uses.
# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
# it is more robust and this tag (HTML_STYLESHEET) will in the future become
# obsolete.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_STYLESHEET =
# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
# cascading style sheets that are included after the standard style sheets
# created by doxygen. Using this option one can overrule certain style aspects.
# This is preferred over using HTML_STYLESHEET since it does not replace the
# standard style sheet and is therefore more robust against future updates.
# Doxygen will copy the style sheet files to the output directory.
# Note: The order of the extra style sheet files is of importance (e.g. the last
# style sheet in the list overrules the setting of the previous ones in the
# list). For an example see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_EXTRA_STYLESHEET = stylesheet.css
# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
# other source files which should be copied to the HTML output directory. Note
# that these files will be copied to the base HTML output directory. Use the
# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
# files will be copied as-is; there are no commands or markers available.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_EXTRA_FILES =
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
# will adjust the colors in the style sheet and background images according to
# this color. Hue is specified as an angle on a colorwheel, see
# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
# purple, and 360 is red again.
# Minimum value: 0, maximum value: 359, default value: 220.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_HUE = 220
# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
# in the HTML output. For a value of 0 the output will use grayscales only. A
# value of 255 will produce the most vivid colors.
# Minimum value: 0, maximum value: 255, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_SAT = 100
# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
# luminance component of the colors in the HTML output. Values below 100
# gradually make the output lighter, whereas values above 100 make the output
# darker. The value divided by 100 is the actual gamma applied, so 80 represents
# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
# change the gamma.
# Minimum value: 40, maximum value: 240, default value: 80.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_GAMMA = 80
# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
# page will contain the date and time when the page was generated. Setting this
# to YES can help to show when doxygen was last run and thus if the
# documentation is up to date.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_TIMESTAMP = NO
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
# documentation will contain a main index with vertical navigation menus that
# are dynamically created via Javascript. If disabled, the navigation index will
# consists of multiple levels of tabs that are statically embedded in every HTML
# page. Disable this option to support browsers that do not have Javascript,
# like the Qt help browser.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_DYNAMIC_MENUS = YES
# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
# documentation will contain sections that can be hidden and shown after the
# page has loaded.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_DYNAMIC_SECTIONS = NO
# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
# shown in the various tree structured indices initially; the user can expand
# and collapse entries dynamically later on. Doxygen will expand the tree to
# such a level that at most the specified number of entries are visible (unless
# a fully collapsed tree already exceeds this amount). So setting the number of
# entries 1 will produce a full collapsed tree by default. 0 is a special value
# representing an infinite number of entries and will result in a full expanded
# tree by default.
# Minimum value: 0, maximum value: 9999, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
# environment (see: https://developer.apple.com/xcode/), introduced with OSX
# 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
# genXcode/_index.html for more information.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_DOCSET = NO
# This tag determines the name of the docset feed. A documentation feed provides
# an umbrella under which multiple documentation sets from a single provider
# (such as a company or product suite) can be grouped.
# The default value is: Doxygen generated docs.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_FEEDNAME = "Doxygen generated docs"
# This tag specifies a string that should uniquely identify the documentation
# set bundle. This should be a reverse domain-name style string, e.g.
# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_BUNDLE_ID = org.doxygen.Project
# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
# the documentation publisher. This should be a reverse domain-name style
# string, e.g. com.mycompany.MyDocSet.documentation.
# The default value is: org.doxygen.Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
# The default value is: Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
# files are now used as the Windows 98 help format, and will replace the old
# Windows help format (.hlp) on all Windows platforms in the future. Compressed
# HTML files also contain an index, a table of contents, and you can search for
# words in the documentation. The HTML workshop also contains a viewer for
# compressed HTML files.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_HTMLHELP = NO
# The CHM_FILE tag can be used to specify the file name of the resulting .chm
# file. You can add a path in front of the file if the result should not be
# written to the html output directory.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
CHM_FILE =
# The HHC_LOCATION tag can be used to specify the location (absolute path
# including file name) of the HTML help compiler (hhc.exe). If non-empty,
# doxygen will try to run the HTML help compiler on the generated index.hhp.
# The file has to be specified with full path.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
HHC_LOCATION =
# The GENERATE_CHI flag controls if a separate .chi index file is generated
# (YES) or that it should be included in the master .chm file (NO).
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
GENERATE_CHI = NO
# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
# and project file content.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
CHM_INDEX_ENCODING =
# The BINARY_TOC flag controls whether a binary table of contents is generated
# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
# enables the Previous and Next buttons.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
BINARY_TOC = NO
# The TOC_EXPAND flag can be set to YES to add extra items for group members to
# the table of contents of the HTML help documentation and to the tree view.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
TOC_EXPAND = NO
# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
# (.qch) of the generated HTML documentation.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_QHP = NO
# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
# the file name of the resulting .qch file. The path specified is relative to
# the HTML output folder.
# This tag requires that the tag GENERATE_QHP is set to YES.
QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_NAMESPACE = org.doxygen.Project
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_ATTRS =
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
# project's filter section matches. Qt Help Project / Filter Attributes (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_SECT_FILTER_ATTRS =
# The QHG_LOCATION tag can be used to specify the location of Qt's
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
# generated .qhp file.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHG_LOCATION =
# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
# generated, together with the HTML files, they form an Eclipse help plugin. To
# install this plugin and make it available under the help contents menu in
# Eclipse, the contents of the directory containing the HTML and XML files needs
# to be copied into the plugins directory of eclipse. The name of the directory
# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
# After copying Eclipse needs to be restarted before the help appears.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_ECLIPSEHELP = NO
# A unique identifier for the Eclipse help plugin. When installing the plugin
# the directory name containing the HTML and XML files should also have this
# name. Each documentation set should have its own identifier.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
ECLIPSE_DOC_ID = org.doxygen.Project
# If you want full control over the layout of the generated HTML pages it might
# be necessary to disable the index and replace it with your own. The
# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
# of each HTML page. A value of NO enables the index and the value YES disables
# it. Since the tabs in the index contain the same information as the navigation
# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
DISABLE_INDEX = NO
# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
# structure should be generated to display hierarchical information. If the tag
# value is set to YES, a side panel will be generated containing a tree-like
# index structure (just like the one that is generated for HTML Help). For this
# to work a browser that supports JavaScript, DHTML, CSS and frames is required
# (i.e. any modern browser). Windows users are probably better off using the
# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
# further fine-tune the look of the index. As an example, the default style
# sheet generated by doxygen has an example that shows how to put an image at
# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
# the same information as the tab index, you could consider setting
# DISABLE_INDEX to YES when enabling this option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_TREEVIEW = YES
# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
# doxygen will group on one line in the generated HTML documentation.
#
# Note that a value of 0 will completely suppress the enum values from appearing
# in the overview section.
# Minimum value: 0, maximum value: 20, default value: 4.
# This tag requires that the tag GENERATE_HTML is set to YES.
ENUM_VALUES_PER_LINE = 4
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
# to set the initial width (in pixels) of the frame in which the tree is shown.
# Minimum value: 0, maximum value: 1500, default value: 250.
# This tag requires that the tag GENERATE_HTML is set to YES.
TREEVIEW_WIDTH = 250
# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
# external symbols imported via tag files in a separate window.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
EXT_LINKS_IN_WINDOW = NO
# Use this tag to change the font size of LaTeX formulas included as images in
# the HTML documentation. When you change the font size after a successful
# doxygen run you need to manually remove any form_*.png images from the HTML
# output directory to force them to be regenerated.
# Minimum value: 8, maximum value: 50, default value: 10.
# This tag requires that the tag GENERATE_HTML is set to YES.
FORMULA_FONTSIZE = 10
# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
# generated for formulas are transparent PNGs. Transparent PNGs are not
# supported properly for IE 6.0, but are supported on all modern browsers.
#
# Note that when changing this option you need to delete any form_*.png files in
# the HTML output directory before the changes have effect.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
FORMULA_TRANSPARENT = YES
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# https://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want formulas to look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
# to it using the MATHJAX_RELPATH option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
USE_MATHJAX = NO
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
# http://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_FORMAT = HTML-CSS
# When MathJax is enabled you need to specify the location relative to the HTML
# output directory using the MATHJAX_RELPATH option. The destination directory
# should contain the MathJax.js script. For instance, if the mathjax directory
# is located at the same level as the HTML output directory, then
# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
# MathJax from https://www.mathjax.org before deployment.
# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_RELPATH = https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
# extension names that should be enabled during MathJax rendering. For example
# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_EXTENSIONS =
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_CODEFILE =
# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
# the HTML output. The underlying search engine uses javascript and DHTML and
# should work on any modern browser. Note that when using HTML help
# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
# there is already a search function so this one should typically be disabled.
# For large projects the javascript based search engine can be slow, then
# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
# search using the keyboard; to jump to the search box use <access key> + S
# (what the <access key> is depends on the OS and browser, but it is typically
# <CTRL>, <ALT>/<option>