././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.509778 bids-validator-1.14.5/0000755000000000000000000000000000000000000014445 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/LICENSE0000644000000000000000000000215000000000000015450 0ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2015 The Board of Trustees of the Leland Stanford Junior University Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/MANIFEST.in0000644000000000000000000000013700000000000016204 0ustar00rootroot00000000000000include LICENSE MANIFEST.in README.md include versioneer.py include bids_validator/_version.py ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.505778 bids-validator-1.14.5/PKG-INFO0000644000000000000000000011410000000000000015537 0ustar00rootroot00000000000000Metadata-Version: 2.1 Name: bids-validator Version: 1.14.5 Summary: Validator for the Brain Imaging Data Structure Author-email: PyBIDS developers License: MIT License Project-URL: Homepage, https://github.com/bids-standard/bids-validator Classifier: Development Status :: 3 - Alpha Classifier: Environment :: Console Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Topic :: Scientific/Engineering Requires-Python: >=3.8 Description-Content-Type: text/markdown License-File: LICENSE [![Node Tests](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml) [![Python tests](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml) [![bids-examples 
tests](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml) [![CircleCI](https://circleci.com/gh/bids-standard/bids-validator.svg?style=shield&circle-token=:circle-token)](https://circleci.com/gh/bids-standard/bids-validator) [![Codecov](https://codecov.io/gh/bids-standard/bids-validator/branch/master/graph/badge.svg)](https://codecov.io/gh/bids-standard/bids-validator) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3688707.svg)](https://doi.org/10.5281/zenodo.3688707) # BIDS-Validator - [BIDS-Validator](#bids-validator) - [Quickstart](#quickstart) - [Support](#support) - [Maintainers and Contributors](#maintainers-and-contributors) - [Use](#use) - [API](#api) - [.bidsignore](#bidsignore) - [Configuration](#configuration) - [In the Browser](#in-the-browser) - [On the Server](#on-the-server) - [Through Command Line](#through-command-line) - [Docker image](#docker-image) - [Python Library](#python-library) - [Example](#example) - [Development](#development) - [Running Locally in a Browser](#running-locally-in-a-browser) - [Testing](#testing) - [Publishing](#publishing) - [Acknowledgments](#acknowledgments) ## Quickstart 1. Web version: 1. Open [Google Chrome](https://www.google.com/chrome/) or [Mozilla Firefox](https://mozilla.org/firefox) (currently the only supported browsers) 1. Go to https://bids-standard.github.io/bids-validator/ and select a folder with your BIDS dataset. If the validator seems to be working longer than couple of minutes please open [developer tools ](https://developer.chrome.com/devtools) and report the error at [https://github.com/bids-standard/bids-validator/issues](https://github.com/bids-standard/bids-validator/issues). 1. Command line version: 1. Install [Node.js](https://nodejs.org) (at least version 18.0.0) 1. Update `npm` to be at least version 7 (`npm install --global npm@^7`) 1. 
From a terminal run `npm install -g bids-validator` 1. Run `bids-validator` to start validating datasets. 1. Docker 1. Install Docker 1. From a terminal run `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` but replace the `/path/to/data` part of the command with your own path on your machine. 1. Python Library: 1. Install [Python](https://www.python.org/) 1. Install [Pip](https://pip.pypa.io/en/stable/installing/) package manager for Python, if not already installed. 1. From a terminal run `pip install bids_validator` to acquire the [BIDS Validator PyPI package](https://pypi.org/project/bids-validator/) or `conda install bids-validator` for the [Conda package](https://anaconda.org/conda-forge/bids-validator). 1. Open a Python terminal and type: `python` 1. Import the BIDS Validator package `from bids_validator import BIDSValidator` 1. Check if a file is BIDS compatible `BIDSValidator().is_bids('/relative/path/to/a/bids/file')` 1. Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Support The BIDS Validator is designed to work in both the browser and in Node.js. We target support for the latest long term stable (LTS) release of Node.js and the latest version of Chrome. There is also a library of helper functions written in Python, for use with BIDS compliant applications written in this language. Please report any issues you experience while using these support targets via the [GitHub issue tracker](https://github.com/bids-standard/bids-validator/issues). If you experience issues outside of these supported environments and believe we should extend our targeted support feel free to open a new issue describing the issue, your support target and why you require extended support and we will address these issues on a case by case basis. 
## Maintainers and Contributors [![All Contributors](https://img.shields.io/badge/all_contributors-43-orange.svg?style=flat-square)](#contributors-) This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind are welcome! The project is maintained by [@rwblair](https://github.com/rwblair/) with the help of many contributors listed below. (The [emoji key](https://allcontributors.org/docs/en/emoji-key) is indicating the kind of contribution) Please also see [Acknowledgments](#acknowledgments).

Adam Li

πŸ’» ⚠️ πŸ““ πŸ›

Adam Thomas

πŸ“–

Alexander Jones

πŸ’» ⚠️ πŸ€”

Ben Beasley

πŸ“¦

Chris Gorgolewski

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Chris Holdgraf

πŸ’»

Chris Markiewicz

πŸ’» ⚠️ πŸ€” πŸ› πŸ’¬ πŸ”§ 🚧

David Nishikawa

πŸ’» ⚠️

Dimitri Papadopoulos Orfanos

πŸ’»

Duncan Macleod

πŸš‡

Franklin Feingold

πŸ“–

Gregory noack

πŸ’» ⚠️

Horea Christian

πŸ’»

Jakub Kaczmarzyk

πŸš‡

Joke Durnez

πŸ’»

Mainak Jas

πŸ’» ⚠️ πŸ€” πŸ‘€ πŸ““

Marco Castellaro

πŸ’» ⚠️

Max

πŸ’» πŸ›

Michael Hanke

πŸ“–

Mikael Naveau

πŸ’»

Nell Hardcastle

πŸ’» πŸ€” πŸš‡ πŸ’¬ πŸ‘€

Nicolas Traut

πŸ’»

Parul Sethi

πŸ’» ⚠️

Patricia Clement

πŸ’»

Remi Gau

πŸ’» πŸ“– πŸ““

Richard HΓΆchenberger

πŸ’» πŸ““ ⚠️ πŸ›

Robert Oostenveld

πŸ’» πŸ€” πŸ› ⚠️

Rohan Goyal

πŸ’»

Ross Blair

🚧 πŸ€” πŸ’» πŸ› πŸš‡ πŸ“† πŸ’¬ πŸ‘€ πŸ”§ ⚠️

Russ Poldrack

πŸ’» πŸ’΅ πŸ”

Soichi Hayashi

πŸ›

Stefan Appelhoff

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Suyash

πŸ’»

Taylor Salo

πŸ’»

Teal Hobson-Lowther

πŸ’» ⚠️

Travis Riddle

πŸ›

VisLab

πŸ€” πŸ’»

Wazeer Zulfikar

πŸ“–

Yaroslav Halchenko

πŸ€” πŸ’» πŸ“– πŸ““

constellates

πŸ’» ⚠️

dewarrn1

πŸ’»

dkp

πŸ’»

goldmund

πŸ’» ⚠️
## Use ### API The BIDS Validator has one primary method that takes a directory as either a path to the directory (node) or the object given by selecting a directory with a file input (browser), an options object, and a callback. Available options include: - ignoreWarnings - (boolean - defaults to false) - ignoreNiftiHeaders - (boolean - defaults to false) For example: `validate.BIDS(directory, {ignoreWarnings: true}, function (issues, summary) {console.log(issues.errors, issues.warnings);});` If you would like to test individual files you can use the file specific checks that we expose. - validate.BIDS() - validate.JSON() - validate.TSV() - validate.NIFTI() Additionally you can reformat stored errors against a new config using `validate.reformat()` ### .bidsignore Optionally one can include a `.bidsignore` file in the root of the dataset. This file lists patterns (compatible with the [.gitignore syntax](https://git-scm.com/docs/gitignore)) defining files that should be ignored by the validator. This option is useful when the validated dataset includes file types not yet supported by BIDS specification. ```Text *_not_bids.txt extra_data/ ``` ### Configuration You can configure the severity of errors by passing a json configuration file with a `-c` or `--config` flag to the command line interface or by defining a config object on the options object passed during javascript usage. If no path is specified a default path of `.bids-validator-config.json` will be used. You can add this file to your dataset to share dataset specific validation configuration. To disable this behavior use `--no-config` and the default configuration will be used. The basic configuration format is outlined below. All configuration is optional. ```JSON { "ignore": [], "warn": [], "error": [], "ignoredFiles": [] } ``` `ignoredFiles` takes a list of file paths or glob patterns you'd like to ignore. Lets say we want to ignore all files and sub-directory under `/derivatives/`. 
**This is not the same syntax as used in the .bidsignore file** ```JSON { "ignoredFiles": ["/derivatives/**"] } ``` Note that adding two stars `**` in path makes validator recognize all files and sub-dir to be ignored. `ignore`, `warn`, and `error` take lists of issue codes or issue keys and change the severity of those issues so they are either ignored or reported as warnings or errors. You can find a list of all available issues at [utils/issues/list](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/utils/issues/list.js). Some issues may be ignored by default, but can be elevated to warnings or errors. These provide a way to check for common things that are more specific than BIDS compatibility. An example is a check for the presence of a T1w modality. The following would raise an error if no T1W image was found in a dataset. ```JSON { "error": ["NO_T1W"] } ``` In addition to issue codes and keys these lists can also contain objects with and "and" or "or" properties set to arrays of codes or keys. These allow some level of conditional logic when configuring issues. For example: ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` In the above example the two issues will only be ignored if both of them are triggered during validation. ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" { "or": [ "ECHO_TIME1-2_NOT_DEFINED", "ECHO_TIME_MUST_DEFINE" ] } ] } ] } ``` And in this example the listed issues will only be ignored if `ECHO_TIME_GREATER_THAN`, `ECHO_TIME_NOT_DEFINED` and either `ECHO_TIME1-2_NOT_DEFINED` or `ECHO_TIME_MUST_DEFINE` are triggered during validation. "or" arrays are not supported at the lowest level because it wouldn't add any functionality. For example the following is not supported. 
```JSON { "ignore": [ { "or": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` because it would be functionally the same as this: ```JSON { "ignore": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ``` For passing a configuration while using the bids-validator on the command line, you can use the following style to for example ignore empty file errors (99) and files that cannot be read (44): ``` bids-validator --config.ignore=99 --config.ignore=44 path/to/bids/dir ``` This style of use puts limits on what configuration you can require, so for complex scenarios, we advise users to create a dedicated configuration file with contents as described above. ### In the Browser The BIDS Validator currently works in the browser with [browserify](https://browserify.org/) or [webpack](https://webpack.js.org/). You can add it to a project by cloning the validator and requiring it with browserify syntax `const validate = require('bids-validator');` or an ES2015 webpack import `import validate from 'bids-validator'`. ### On the Server The BIDS validator works like most npm packages. You can install it by running `npm install bids-validator`. ### Through Command Line If you install the bids validator globally by using `npm install -g bids-validator` you will be able to use it as a command line tool. Once installed you should be able to run `bids-validator /path/to/your/bids/directory` and see any validation issues logged to the terminal. Run `bids-validator` without a directory path to see available options. ## Docker image [![Docker Image Version (latest by date)](https://img.shields.io/docker/v/bids/validator?label=docker)](https://hub.docker.com/r/bids/validator) To use bids validator with [docker](https://www.docker.com/), you simply need to [install docker](https://docs.docker.com/install/) on your system. 
And then from a terminal run: - `docker run -ti --rm bids/validator --version` to print the version of the docker image - `docker run -ti --rm bids/validator --help` to print the help - `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` to validate the dataset `/path/to/data` on your host machine See here for a brief explanation of the commands: - `docker run` is the command to tell docker to run a certain docker image, usually taking the form `docker run ` - the `-ti` flag means the inputs are accepted and outputs are printed to the terminal - the `--rm` flag means that the state of the docker container is not saved after it has run - the `-v` flag is adding your local data to the docker container ([bind-mounts](https://docs.docker.com/storage/bind-mounts/)). Importantly, the input after the `-v` flag consists of three fields separated colons: `:` - the first field is the path to the directory on the host machine: `/path/to/data` - the second field is the path where the directory is mounted in the container - the third field is optional. In our case, we use `ro` to specify that the mounted data is _read only_ ## Python Library [![PyPI version](https://badge.fury.io/py/bids-validator.svg)](https://badge.fury.io/py/bids-validator) [![Conda version](https://img.shields.io/conda/vn/conda-forge/bids-validator)](https://anaconda.org/conda-forge/bids-validator) There are is a limited library of helper functions written in Python. The main function determines if a file extension is compliant with the BIDS specification. You can find the available functions in the library, as well as their descriptions, [here](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/bids_validator/bids_validator.py). To install, run `pip install -U bids_validator` (requires python and pip) or `conda install bids-validator` (requires a Conda environment). 
### Example ```Python from bids_validator import BIDSValidator validator = BIDSValidator() filepaths = ["/sub-01/anat/sub-01_rec-CSD_T1w.nii.gz", "/sub-01/anat/sub-01_acq-23_rec-CSD_T1w.exe"] for filepath in filepaths: print(validator.is_bids(filepath)) # will print True, and then False ``` Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Development To develop locally, clone the project and run `npm install` from the project root. This will install external dependencies. If you wish to install `bids-validator` globally (so that you can run it in other folders), use the following command to install it globally: `cd bids-validator && npm install -g` (for windows users, if in a different drive add /d, e.g. `cd /d F:\bids-validator && npm install -g`) Please see the [CONTRIBUTING.md](../CONTRIBUTING.md) for additional details. ### Bundling bids-validator is bundled with esbuild. While developing, the script `bids-validator/bin/bids-validator` will automatically bundle the project each time it is run. To test a build without publishing it `npm -w bids-validator run build`. This will generate a bids-validator/dist directory containing the local build and `bids-validator/bin/bids-validator` will use this build. To return to automatic bundling on each run, remove the dist directory. ### Running Locally in a Browser A note about OS X, the dependencies for the browser require a npm package called node-gyp which needs xcode to be installed in order to be compiled. 1. The browser version of `bids-validator` lives in the repo subdirectory `/bids-validator-web`. It is a [React.js](https://reactjs.org/) application that uses the [next.js](https://nextjs.org/) framework. 2. To develop `bids-validator` and see how it will act in the browser, simply run `npm run web-dev` in the project root and navigate to `localhost:3000`. 3. 
In development mode, changes to the codebase will trigger rebuilds of the application automatically. 4. Changes to the `/bids-validator` in the codebase will also be reflected in the web application. 5. Tests use the [Jest](https://jestjs.io/index.html) testing library and should be developed in `/bids-validator-web/tests`. We can always use more tests, so please feel free to contribute a test that reduces the chance of any bugs you fix! 6. To ensure that the web application compiles successfully in production, run `npm run web-export` ### Testing If it's your first time running tests, first use the command `git submodule update --init --depth 1` to pull the test example data. This repo contains the [bids-examples github repository](https://github.com/bids-standard/bids-examples) as a [submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules). To start the test suite run `npm run test` from the project root. `npm run test -- --watch` is useful to run tests while making changes. A coverage report is available with `npm run coverage`. To run the linter which checks code conventions run `npm run lint`. ### Install globally from a development branch Global installs are not recommended for development because of the possibility of package conflicts with other Node.js projects. If you do need to test with a global install from a development tree, follow these steps to generate the NPM package without publishing it and install the package locally. 1. `npm -w bids-validator run build` 2. `npm -w bids-validator pack` 3. `npm install -g bids-validator-*.tgz` ### Publishing Publishing is done with [Lerna](https://github.com/lerna/lerna). Use the command `npx lerna publish` and follow instructions to set a new version. Using lerna publish will create a git commit with updated version information and create a version number tag for it, push the tag to GitHub, then publish to NPM and PyPI. The GitHub release is manual following that. 
## Acknowledgments Many contributions to the `bids-validator` were done by members of the BIDS community. See the [list of contributors](https://bids-specification.readthedocs.io/en/stable/99-appendices/01-contributors.html). A large part of the development of `bids-validator` is currently done by [Squishymedia](https://squishymedia.com/), who are in turn financed through different grants offered for the general development of BIDS. See the list below. Development and contributions were supported through the following federally funded projects/grants: - [BIDS Derivatives (NIMH: R24MH114705, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH114705-01) - [OpenNeuro (NIMH: R24MH117179, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH117179-01) - [Spokes: MEDIUM: WEST (NSF: 1760950, PI: Poldrack & Gorgolewski)](https://grantome.com/grant/NSF/IIS-1760950) - [ReproNim](http://repronim.org) [(NIH-NIBIB P41 EB019936, PI: Kennedy)](https://projectreporter.nih.gov/project_info_description.cfm?aid=8999833) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/README.md0000644000000000000000000011263600000000000015735 0ustar00rootroot00000000000000[![Node Tests](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml) [![Python tests](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml) [![bids-examples tests](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml) 
[![CircleCI](https://circleci.com/gh/bids-standard/bids-validator.svg?style=shield&circle-token=:circle-token)](https://circleci.com/gh/bids-standard/bids-validator) [![Codecov](https://codecov.io/gh/bids-standard/bids-validator/branch/master/graph/badge.svg)](https://codecov.io/gh/bids-standard/bids-validator) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3688707.svg)](https://doi.org/10.5281/zenodo.3688707) # BIDS-Validator - [BIDS-Validator](#bids-validator) - [Quickstart](#quickstart) - [Support](#support) - [Maintainers and Contributors](#maintainers-and-contributors) - [Use](#use) - [API](#api) - [.bidsignore](#bidsignore) - [Configuration](#configuration) - [In the Browser](#in-the-browser) - [On the Server](#on-the-server) - [Through Command Line](#through-command-line) - [Docker image](#docker-image) - [Python Library](#python-library) - [Example](#example) - [Development](#development) - [Running Locally in a Browser](#running-locally-in-a-browser) - [Testing](#testing) - [Publishing](#publishing) - [Acknowledgments](#acknowledgments) ## Quickstart 1. Web version: 1. Open [Google Chrome](https://www.google.com/chrome/) or [Mozilla Firefox](https://mozilla.org/firefox) (currently the only supported browsers) 1. Go to https://bids-standard.github.io/bids-validator/ and select a folder with your BIDS dataset. If the validator seems to be working longer than couple of minutes please open [developer tools ](https://developer.chrome.com/devtools) and report the error at [https://github.com/bids-standard/bids-validator/issues](https://github.com/bids-standard/bids-validator/issues). 1. Command line version: 1. Install [Node.js](https://nodejs.org) (at least version 18.0.0) 1. Update `npm` to be at least version 7 (`npm install --global npm@^7`) 1. From a terminal run `npm install -g bids-validator` 1. Run `bids-validator` to start validating datasets. 1. Docker 1. Install Docker 1. 
From a terminal run `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` but replace the `/path/to/data` part of the command with your own path on your machine. 1. Python Library: 1. Install [Python](https://www.python.org/) 1. Install [Pip](https://pip.pypa.io/en/stable/installing/) package manager for Python, if not already installed. 1. From a terminal run `pip install bids_validator` to acquire the [BIDS Validator PyPI package](https://pypi.org/project/bids-validator/) or `conda install bids-validator` for the [Conda package](https://anaconda.org/conda-forge/bids-validator). 1. Open a Python terminal and type: `python` 1. Import the BIDS Validator package `from bids_validator import BIDSValidator` 1. Check if a file is BIDS compatible `BIDSValidator().is_bids('/relative/path/to/a/bids/file')` 1. Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Support The BIDS Validator is designed to work in both the browser and in Node.js. We target support for the latest long term stable (LTS) release of Node.js and the latest version of Chrome. There is also a library of helper functions written in Python, for use with BIDS compliant applications written in this language. Please report any issues you experience while using these support targets via the [GitHub issue tracker](https://github.com/bids-standard/bids-validator/issues). If you experience issues outside of these supported environments and believe we should extend our targeted support feel free to open a new issue describing the issue, your support target and why you require extended support and we will address these issues on a case by case basis. ## Maintainers and Contributors [![All Contributors](https://img.shields.io/badge/all_contributors-43-orange.svg?style=flat-square)](#contributors-) This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. 
Contributions of any kind are welcome! The project is maintained by [@rwblair](https://github.com/rwblair/) with the help of many contributors listed below. (The [emoji key](https://allcontributors.org/docs/en/emoji-key) is indicating the kind of contribution) Please also see [Acknowledgments](#acknowledgments).

Adam Li

πŸ’» ⚠️ πŸ““ πŸ›

Adam Thomas

πŸ“–

Alexander Jones

πŸ’» ⚠️ πŸ€”

Ben Beasley

πŸ“¦

Chris Gorgolewski

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Chris Holdgraf

πŸ’»

Chris Markiewicz

πŸ’» ⚠️ πŸ€” πŸ› πŸ’¬ πŸ”§ 🚧

David Nishikawa

πŸ’» ⚠️

Dimitri Papadopoulos Orfanos

πŸ’»

Duncan Macleod

πŸš‡

Franklin Feingold

πŸ“–

Gregory noack

πŸ’» ⚠️

Horea Christian

πŸ’»

Jakub Kaczmarzyk

πŸš‡

Joke Durnez

πŸ’»

Mainak Jas

πŸ’» ⚠️ πŸ€” πŸ‘€ πŸ““

Marco Castellaro

πŸ’» ⚠️

Max

πŸ’» πŸ›

Michael Hanke

πŸ“–

Mikael Naveau

πŸ’»

Nell Hardcastle

πŸ’» πŸ€” πŸš‡ πŸ’¬ πŸ‘€

Nicolas Traut

πŸ’»

Parul Sethi

πŸ’» ⚠️

Patricia Clement

πŸ’»

Remi Gau

πŸ’» πŸ“– πŸ““

Richard HΓΆchenberger

πŸ’» πŸ““ ⚠️ πŸ›

Robert Oostenveld

πŸ’» πŸ€” πŸ› ⚠️

Rohan Goyal

πŸ’»

Ross Blair

🚧 πŸ€” πŸ’» πŸ› πŸš‡ πŸ“† πŸ’¬ πŸ‘€ πŸ”§ ⚠️

Russ Poldrack

πŸ’» πŸ’΅ πŸ”

Soichi Hayashi

πŸ›

Stefan Appelhoff

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Suyash

πŸ’»

Taylor Salo

πŸ’»

Teal Hobson-Lowther

πŸ’» ⚠️

Travis Riddle

πŸ›

VisLab

πŸ€” πŸ’»

Wazeer Zulfikar

πŸ“–

Yaroslav Halchenko

πŸ€” πŸ’» πŸ“– πŸ““

constellates

πŸ’» ⚠️

dewarrn1

πŸ’»

dkp

πŸ’»

goldmund

πŸ’» ⚠️
## Use ### API The BIDS Validator has one primary method that takes a directory as either a path to the directory (node) or the object given by selecting a directory with a file input (browser), an options object, and a callback. Available options include: - ignoreWarnings - (boolean - defaults to false) - ignoreNiftiHeaders - (boolean - defaults to false) For example: `validate.BIDS(directory, {ignoreWarnings: true}, function (issues, summary) {console.log(issues.errors, issues.warnings);});` If you would like to test individual files you can use the file specific checks that we expose. - validate.BIDS() - validate.JSON() - validate.TSV() - validate.NIFTI() Additionally you can reformat stored errors against a new config using `validate.reformat()` ### .bidsignore Optionally one can include a `.bidsignore` file in the root of the dataset. This file lists patterns (compatible with the [.gitignore syntax](https://git-scm.com/docs/gitignore)) defining files that should be ignored by the validator. This option is useful when the validated dataset includes file types not yet supported by BIDS specification. ```Text *_not_bids.txt extra_data/ ``` ### Configuration You can configure the severity of errors by passing a json configuration file with a `-c` or `--config` flag to the command line interface or by defining a config object on the options object passed during javascript usage. If no path is specified a default path of `.bids-validator-config.json` will be used. You can add this file to your dataset to share dataset specific validation configuration. To disable this behavior use `--no-config` and the default configuration will be used. The basic configuration format is outlined below. All configuration is optional. ```JSON { "ignore": [], "warn": [], "error": [], "ignoredFiles": [] } ``` `ignoredFiles` takes a list of file paths or glob patterns you'd like to ignore. Lets say we want to ignore all files and sub-directory under `/derivatives/`. 
**This is not the same syntax as used in the .bidsignore file** ```JSON { "ignoredFiles": ["/derivatives/**"] } ``` Note that adding two stars `**` in path makes validator recognize all files and sub-dir to be ignored. `ignore`, `warn`, and `error` take lists of issue codes or issue keys and change the severity of those issues so they are either ignored or reported as warnings or errors. You can find a list of all available issues at [utils/issues/list](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/utils/issues/list.js). Some issues may be ignored by default, but can be elevated to warnings or errors. These provide a way to check for common things that are more specific than BIDS compatibility. An example is a check for the presence of a T1w modality. The following would raise an error if no T1W image was found in a dataset. ```JSON { "error": ["NO_T1W"] } ``` In addition to issue codes and keys these lists can also contain objects with and "and" or "or" properties set to arrays of codes or keys. These allow some level of conditional logic when configuring issues. For example: ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` In the above example the two issues will only be ignored if both of them are triggered during validation. ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" { "or": [ "ECHO_TIME1-2_NOT_DEFINED", "ECHO_TIME_MUST_DEFINE" ] } ] } ] } ``` And in this example the listed issues will only be ignored if `ECHO_TIME_GREATER_THAN`, `ECHO_TIME_NOT_DEFINED` and either `ECHO_TIME1-2_NOT_DEFINED` or `ECHO_TIME_MUST_DEFINE` are triggered during validation. "or" arrays are not supported at the lowest level because it wouldn't add any functionality. For example the following is not supported. 
```JSON { "ignore": [ { "or": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` because it would be functionally the same as this: ```JSON { "ignore": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ``` For passing a configuration while using the bids-validator on the command line, you can use the following style to for example ignore empty file errors (99) and files that cannot be read (44): ``` bids-validator --config.ignore=99 --config.ignore=44 path/to/bids/dir ``` This style of use puts limits on what configuration you can require, so for complex scenarios, we advise users to create a dedicated configuration file with contents as described above. ### In the Browser The BIDS Validator currently works in the browser with [browserify](https://browserify.org/) or [webpack](https://webpack.js.org/). You can add it to a project by cloning the validator and requiring it with browserify syntax `const validate = require('bids-validator');` or an ES2015 webpack import `import validate from 'bids-validator'`. ### On the Server The BIDS validator works like most npm packages. You can install it by running `npm install bids-validator`. ### Through Command Line If you install the bids validator globally by using `npm install -g bids-validator` you will be able to use it as a command line tool. Once installed you should be able to run `bids-validator /path/to/your/bids/directory` and see any validation issues logged to the terminal. Run `bids-validator` without a directory path to see available options. ## Docker image [![Docker Image Version (latest by date)](https://img.shields.io/docker/v/bids/validator?label=docker)](https://hub.docker.com/r/bids/validator) To use bids validator with [docker](https://www.docker.com/), you simply need to [install docker](https://docs.docker.com/install/) on your system. 
And then from a terminal run: - `docker run -ti --rm bids/validator --version` to print the version of the docker image - `docker run -ti --rm bids/validator --help` to print the help - `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` to validate the dataset `/path/to/data` on your host machine See here for a brief explanation of the commands: - `docker run` is the command to tell docker to run a certain docker image, usually taking the form `docker run ` - the `-ti` flag means the inputs are accepted and outputs are printed to the terminal - the `--rm` flag means that the state of the docker container is not saved after it has run - the `-v` flag is adding your local data to the docker container ([bind-mounts](https://docs.docker.com/storage/bind-mounts/)). Importantly, the input after the `-v` flag consists of three fields separated by colons: `:` - the first field is the path to the directory on the host machine: `/path/to/data` - the second field is the path where the directory is mounted in the container - the third field is optional. In our case, we use `ro` to specify that the mounted data is _read only_ ## Python Library [![PyPI version](https://badge.fury.io/py/bids-validator.svg)](https://badge.fury.io/py/bids-validator) [![Conda version](https://img.shields.io/conda/vn/conda-forge/bids-validator)](https://anaconda.org/conda-forge/bids-validator) There is a limited library of helper functions written in Python. The main function determines if a file extension is compliant with the BIDS specification. You can find the available functions in the library, as well as their descriptions, [here](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/bids_validator/bids_validator.py). To install, run `pip install -U bids_validator` (requires python and pip) or `conda install bids-validator` (requires a Conda environment). 
### Example ```Python from bids_validator import BIDSValidator validator = BIDSValidator() filepaths = ["/sub-01/anat/sub-01_rec-CSD_T1w.nii.gz", "/sub-01/anat/sub-01_acq-23_rec-CSD_T1w.exe"] for filepath in filepaths: print(validator.is_bids(filepath)) # will print True, and then False ``` Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Development To develop locally, clone the project and run `npm install` from the project root. This will install external dependencies. If you wish to install `bids-validator` globally (so that you can run it in other folders), use the following command to install it globally: `cd bids-validator && npm install -g` (for windows users, if in a different drive add /d, e.g. `cd /d F:\bids-validator && npm install -g`) Please see the [CONTRIBUTING.md](../CONTRIBUTING.md) for additional details. ### Bundling bids-validator is bundled with esbuild. While developing, the script `bids-validator/bin/bids-validator` will automatically bundle the project each time it is run. To test a build without publishing it `npm -w bids-validator run build`. This will generate a bids-validator/dist directory containing the local build and `bids-validator/bin/bids-validator` will use this build. To return to automatic bundling on each run, remove the dist directory. ### Running Locally in a Browser A note about OS X, the dependencies for the browser require a npm package called node-gyp which needs xcode to be installed in order to be compiled. 1. The browser version of `bids-validator` lives in the repo subdirectory `/bids-validator-web`. It is a [React.js](https://reactjs.org/) application that uses the [next.js](https://nextjs.org/) framework. 2. To develop `bids-validator` and see how it will act in the browser, simply run `npm run web-dev` in the project root and navigate to `localhost:3000`. 3. 
In development mode, changes to the codebase will trigger rebuilds of the application automatically. 4. Changes to the `/bids-validator` in the codebase will also be reflected in the web application. 5. Tests use the [Jest](https://jestjs.io/index.html) testing library and should be developed in `/bids-validator-web/tests`. We can always use more tests, so please feel free to contribute a test that reduces the chance of any bugs you fix! 6. To ensure that the web application compiles successfully in production, run `npm run web-export` ### Testing If it's your first time running tests, first use the command `git submodule update --init --depth 1` to pull the test example data. This repo contains the [bids-examples github repository](https://github.com/bids-standard/bids-examples) as a [submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules). To start the test suite run `npm run test` from the project root. `npm run test -- --watch` is useful to run tests while making changes. A coverage report is available with `npm run coverage`. To run the linter which checks code conventions run `npm run lint`. ### Install globally from a development branch Global installs are not recommended for development because of the possibility of package conflicts with other Node.js projects. If you do need to test with a global install from a development tree, follow these steps to generate the NPM package without publishing it and install the package locally. 1. `npm -w bids-validator run build` 2. `npm -w bids-validator pack` 3. `npm install -g bids-validator-*.tgz` ### Publishing Publishing is done with [Lerna](https://github.com/lerna/lerna). Use the command `npx lerna publish` and follow instructions to set a new version. Using lerna publish will create a git commit with updated version information and create a version number tag for it, push the tag to GitHub, then publish to NPM and PyPI. The GitHub release is manual following that. 
## Acknowledgments Many contributions to the `bids-validator` were done by members of the BIDS community. See the [list of contributors](https://bids-specification.readthedocs.io/en/stable/99-appendices/01-contributors.html). A large part of the development of `bids-validator` is currently done by [Squishymedia](https://squishymedia.com/), who are in turn financed through different grants offered for the general development of BIDS. See the list below. Development and contributions were supported through the following federally funded projects/grants: - [BIDS Derivatives (NIMH: R24MH114705, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH114705-01) - [OpenNeuro (NIMH: R24MH117179, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH117179-01) - [Spokes: MEDIUM: WEST (NSF: 1760950, PI: Poldrack & Gorgolewski)](https://grantome.com/grant/NSF/IIS-1760950) - [ReproNim](http://repronim.org) [(NIH-NIBIB P41 EB019936, PI: Kennedy)](https://projectreporter.nih.gov/project_info_description.cfm?aid=8999833) ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.505778 bids-validator-1.14.5/bids_validator/0000755000000000000000000000000000000000000017433 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/__init__.py0000644000000000000000000000027300000000000021546 0ustar00rootroot00000000000000"""BIDS validator common Python package.""" from .bids_validator import BIDSValidator __all__ = ['BIDSValidator'] from . 
import _version __version__ = _version.get_versions()['version'] ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.509778 bids-validator-1.14.5/bids_validator/_version.py0000644000000000000000000000076200000000000021636 0ustar00rootroot00000000000000 # This file was generated by 'versioneer.py' (0.29) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json version_json = ''' { "date": "2024-04-05T13:21:18-0500", "dirty": false, "error": null, "full-revisionid": "da85778c9be0980a9bd04ed741d740281b76fd84", "version": "1.14.5" } ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/bids_validator.py0000644000000000000000000001243100000000000022774 0ustar00rootroot00000000000000"""Validation class for BIDS projects.""" import re import os import json from functools import lru_cache class BIDSValidator(): """Object for BIDS (Brain Imaging Data Structure) verification. The main method of this class is `is_bids()`. You should use it for checking whether a file path is compatible with BIDS. """ def __init__(self, index_associated=True): """Initialize BIDSValidator object. Parameters ---------- index_associated : bool Specifies if an associated data should be checked. If it is true then any file paths in directories `code/`, `derivatives/`, `sourcedata/` and `stimuli/` will pass the validation, else they won't. Defaults to True. """ self.dir_rules = os.path.join(os.path.dirname(__file__)) + "/rules/" self.index_associated = index_associated def is_bids(self, path): """Check if file path adheres to BIDS. Main method of the validator. Uses other class methods for checking different aspects of the file path. 
Parameters ---------- path : str Path of a file to be checked. Must be relative to root of a BIDS dataset, and must include a leading forward slash `/`. Notes ----- When you test a file path, make sure that the path is relative to the root of the BIDS dataset the file is part of. That is, as soon as the file path contains parts outside of the BIDS dataset, the validation will fail. For example "home/username/my_dataset/participants.tsv" will fail, although "/participants.tsv" is a valid BIDS file. Examples -------- >>> from bids_validator import BIDSValidator >>> validator = BIDSValidator() >>> filepaths = ["/sub-01/anat/sub-01_rec-CSD_T1w.nii.gz", ... "/sub-01/anat/sub-01_acq-23_rec-CSD_T1w.exe", # wrong extension ... "home/username/my_dataset/participants.tsv", # not relative to root ... "/participants.tsv"] >>> for filepath in filepaths: ... print(validator.is_bids(filepath)) True False False True """ return any( check(path) for check in ( self.is_top_level, self.is_associated_data, self.is_session_level, self.is_subject_level, self.is_phenotypic, self.is_file ) ) def is_top_level(self, path): """Check if the file has appropriate name for a top-level file.""" regexps = self.get_regular_expressions(self.dir_rules + 'top_level_rules.json') return any(re.search(regexp, path) for regexp in regexps) def is_associated_data(self, path): """Check if file is appropriate associated data.""" if not self.index_associated: return False regexps = self.get_regular_expressions(self.dir_rules + 'associated_data_rules.json') return any(re.search(regexp, path) for regexp in regexps) def is_session_level(self, path): """Check if the file has appropriate name for a session level.""" regexps = self.get_regular_expressions(self.dir_rules + 'session_level_rules.json') return any(self.conditional_match(regexp, path) for regexp in regexps) def is_subject_level(self, path): """Check if the file has appropriate name for a subject level.""" regexps = 
self.get_regular_expressions(self.dir_rules + 'subject_level_rules.json') return any(re.search(regexp, path) for regexp in regexps) def is_phenotypic(self, path): """Check if file is phenotypic data.""" regexps = self.get_regular_expressions(self.dir_rules + 'phenotypic_rules.json') return any(re.search(regexp, path) for regexp in regexps) def is_file(self, path): """Check if file is phenotypic data.""" regexps = self.get_regular_expressions(self.dir_rules + 'file_level_rules.json') return any(re.search(regexp, path) for regexp in regexps) @staticmethod @lru_cache def get_regular_expressions(file_name): """Read regular expressions from a file.""" regexps = [] with open(file_name) as fin: rules = json.load(fin) for key in list(rules.keys()): rule = rules[key] regexp = rule["regexp"] if "tokens" in rule: tokens = rule["tokens"] for token in list(tokens): regexp = regexp.replace(token, "|".join(tokens[token])) regexps.append(regexp) return regexps @staticmethod def conditional_match(expression, path): """Find conditional match.""" match = re.compile(expression).findall(path) match = match[0] if len(match) >= 1 else False # adapted from JS code and JS does not support conditional groups return bool(match) and (match[1] == match[2][1:] or not match[1]) ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.505778 bids-validator-1.14.5/bids_validator/rules/0000755000000000000000000000000000000000000020565 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/rules/associated_data_rules.json0000644000000000000000000000040000000000000025774 0ustar00rootroot00000000000000{ "associated_data": { "regexp": "^[\\/\\\\](?:@@@_associated_data_type_@@@)[\\/\\\\](?:.*)$", "tokens": { "@@@_associated_data_type_@@@": [ "code", "derivatives", "sourcedata", "stimuli" ] } } } 
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/rules/file_level_rules.json0000644000000000000000000005405400000000000025010 0ustar00rootroot00000000000000{ "anat_nonparametric": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:_chunk-[0-9]+)?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": [ "T1w", "T2w", "PDw", "T2starw", "FLAIR", "inplaneT1", "inplaneT2", "PDT2", "angio", "T2star", "FLASH", "PD" ], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_parametric": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": [ "T1map", "T2map", "T2starmap", "R1map", "R2map", "R2starmap", "PDmap", "MTRmap", "MTsat", "UNIT1", "T1rho", "MWFmap", "MTVmap", "PDT2map", "Chimap", "S0map", "M0map" ], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_defacemask": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_mod-(?:@@@_anat_suffixes_@@@))?_defacemask\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": [ "T1w", "T2w", "PDw", "T2starw", "FLAIR", "inplaneT1", "inplaneT2", "PDT2", "angio", "T1map", "T2map", "T2starmap", "R1map", "R2map", "R2starmap", "PDmap", "MTRmap", "MTsat", "UNIT1", "T1rho", "MWFmap", "MTVmap", "PDT2map", "Chimap", "TB1map", "RB1map", "S0map", "M0map", "MESE", "MEGRE", "VFA", "IRT1", "MP2RAGE", "MPM", "MTS", "MTR", 
"T2star", "FLASH", "PD" ], "@@@_anat_ext_@@@": ["nii.gz", "nii"] } }, "anat_multiecho": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_echo-[0-9]+?(_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["MESE", "MEGRE"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_multiflip": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_flip-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["VFA"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_multiinv": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["IRT1"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_mp2rage": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_flip-[0-9]+)?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["MP2RAGE"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_vfa_mt": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?_flip-[0-9]+?_mt-(on|off)?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["MPM", "MTS"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "anat_mtr": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_mt-(on|off)?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", "tokens": { "@@@_anat_suffixes_@@@": ["MTR"], "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "behavioral": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?beh[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?((?:@@@_behavioral_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_behavioral_ext_@@@": [ "_beh\\.json", "_beh\\.tsv", "_events\\.json", "_events\\.tsv" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "dwi": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?dwi[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_part-(imag|mag|phase|real))?((?:@@@_dwi_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_dwi_ext_@@@": [ "_dwi\\.nii\\.gz", "_dwi\\.nii", "_dwi\\.json", "_dwi\\.bvec", "_dwi\\.bval", "_sbref\\.nii\\.gz", "_sbref\\.nii", "_sbref\\.json" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "fmap_gre": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": [ "phasediff", "phase1", "phase2", "magnitude1", "magnitude2", "magnitude", "fieldmap" ], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_pepolar_asl": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?_dir-[a-zA-Z0-9]+(?:_run-[0-9]+)?(?:_part-(mag|phase|real|imag))?(?:_chunk-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": ["m0scan", "epi"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_TB1DAM": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_flip-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": ["TB1DAM"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_TB1EPI": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_echo-[0-9]+?_flip-[0-9]+?(?:_inv-[0-9]+)?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": ["TB1EPI"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_rf": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_flip-[0-9]+)?(?:_inv-[0-9]+)?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { 
"@@@_field_map_type_@@@": ["TB1AFI", "TB1TFL", "TB1RFM", "RB1COR"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_TB1SRGE": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?_flip-[0-9]+?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": ["TB1SRGE"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "fmap_parametric": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", "tokens": { "@@@_field_map_type_@@@": ["TB1map", "RB1map"], "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] } }, "func": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:@@@_func_ext_@@@)$", "tokens": { "@@@_func_ext_@@@": [ "_bold\\.nii\\.gz", "_bold\\.nii", "_bold\\.json", "_cbv\\.nii\\.gz", "_cbv\\.nii", "_cbv\\.json", "_sbref\\.nii\\.gz", "_sbref\\.nii", "_sbref\\.json" ] } }, "func_phase_deprecated": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:@@@_func_ext_@@@)$", "tokens": { "@@@_func_ext_@@@": ["_phase\\.nii\\.gz", "_phase\\.nii", "_phase\\.json"] } }, "func_events": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_func_ext_@@@)$", "tokens": { "@@@_func_ext_@@@": ["_events\\.tsv", "_events\\.json"] } }, "func_timeseries": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@)$", "tokens": { "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "func_bold": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:@@@_func_bold_ext_@@@)$", "tokens": { "@@@_func_bold_ext_@@@": [ "_bold\\.nii\\.gz", "_bold\\.nii", "_sbref\\.nii\\.gz", "_sbref\\.nii" ] } }, "asl": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?perf[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:(?:@@@_asl_type_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_asl_type_@@@": [ "_asl\\.nii\\.gz", "_asl\\.nii", "_asl\\.json", "_m0scan\\.nii\\.gz", "_m0scan\\.nii", "_m0scan\\.json", "_aslcontext\\.tsv", "_asllabeling\\.jpg" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "eeg": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?eeg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(?:_space-(@@@_eeg_space_@@@))?((_eeg\\.(@@@_eeg_type_@@@)|(@@@_eeg_ext_@@@))|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_eeg_space_@@@": [ "Other", "CapTrak", "EEGLAB", "EEGLAB-HJ", "CTF", "ElektaNeuromag", "4DBti", "KitYokogawa", "ChietiItab", "ICBM452AirSpace", "ICBM452Warp5Space", "IXI549Space", "fsaverage", "fsaverageSym", "fsLR", "MNIColin27", "MNI152Lin", "MNI152NLin2009aSym", "MNI152NLin2009bSym", "MNI152NLin2009cSym", "MNI152NLin2009aAsym", "MNI152NLin2009bAsym", "MNI152NLin2009cAsym", "MNI152NLin6Sym", "MNI152NLin6ASym", "MNI305", "NIHPD", "OASIS30AntsOASISAnts", "OASIS30Atropos", "Talairach", "UNCInfant", "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "fsaveragesym", "UNCInfant0V21", "UNCInfant1V21", "UNCInfant2V21", "UNCInfant0V22", "UNCInfant1V22", "UNCInfant2V22", "UNCInfant0V23", "UNCInfant1V23", "UNCInfant2V23" ], "@@@_eeg_type_@@@": ["vhdr", "vmrk", "eeg", "edf", "bdf", "set", "fdt"], "@@@_eeg_ext_@@@": [ "_events\\.json", "_events\\.tsv", "_electrodes\\.json", "_electrodes\\.tsv", "_channels\\.json", "_channels\\.tsv", "_eeg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "ieeg": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?ieeg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(?:_space-(@@@_ieeg_space_@@@))?((_ieeg\\.(@@@_ieeg_type_@@@)|(@@@_ieeg_ext_@@@))|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_ieeg_space_@@@": [ "Other", "Pixels", "ACPC", "ScanRAS", "ICBM452AirSpace", "ICBM452Warp5Space", "IXI549Space", "fsaverage", "fsaverageSym", "fsLR", 
"MNIColin27", "MNI152Lin", "MNI152NLin2009aSym", "MNI152NLin2009bSym", "MNI152NLin2009cSym", "MNI152NLin2009aAsym", "MNI152NLin2009bAsym", "MNI152NLin2009cAsym", "MNI152NLin6Sym", "MNI152NLin6ASym", "MNI305", "NIHPD", "OASIS30AntsOASISAnts", "OASIS30Atropos", "Talairach", "UNCInfant", "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "fsaveragesym", "UNCInfant0V21", "UNCInfant1V21", "UNCInfant2V21", "UNCInfant0V22", "UNCInfant1V22", "UNCInfant2V22", "UNCInfant0V23", "UNCInfant1V23", "UNCInfant2V23" ], "@@@_ieeg_type_@@@": [ "edf", "vhdr", "vmrk", "eeg", "set", "fdt", "nwb", "mefd[\\/\\\\].*" ], "@@@_ieeg_ext_@@@": [ "_events\\.json", "_events\\.tsv", "_electrodes\\.json", "_electrodes\\.tsv", "_channels\\.json", "_channels\\.tsv", "_ieeg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "meg": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(_digitizer\\.txt|_meg(@@@_meg_type_@@@[\\/\\\\](.(?!\\.(sqd|con|fif|raw|raw\\.mhd|trg|kdf|chn)$))*|[\\/\\\\](.(?!\\.(sqd|con|fif|raw|raw\\.mhd|trg|kdf|chn)$))*)|(@@@_meg_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_meg_type_@@@": [ "\\.ds[\\/\\\\].*", "\\.(?:chn|kdf|trg)", "\\.(?:raw|raw\\.mhd)", "\\.fif", "\\.(?:con|sqd)", "\\.(?:kdf|chn|trg)" ], "@@@_meg_ext_@@@": [ "_events\\.json", "_events\\.tsv", "_channels\\.json", "_channels\\.tsv", "_electrodes\\.json", "_electrodes\\.tsv", "_meg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_headshape\\.pos", "_markers\\.(?:mrk|sqd)" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "meg_calbibration": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/]\\1(_\\2)?_acq-calibration_meg\\.dat$" }, "meg_crosstalk": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/]\\1(_\\2)?_acq-crosstalk_meg\\.fif$" }, "stimuli": { "regexp": "^[\\/\\\\](?:stimuli)[\\/\\\\](?:.*)$" }, "nirs": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?nirs[\\/\\\\]\\1(_\\2)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|((?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_part-[0-9]+)?(_nirs\\.(@@@_nirs_type_@@@)|(@@@_nirs_ext_@@@))))$", "tokens": { "@@@_nirs_type_@@@": ["snirf"], "@@@_nirs_ext_@@@": [ "_events\\.json", "_events\\.tsv", "_channels\\.json", "_channels\\.tsv", "_nirs\\.json", "_photo\\.jpg" ], "@@@_nirs_optodes_@@@": [ "_optodes\\.tsv", "_optodes\\.json", "_coordsystem\\.json" ] } }, "pet": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?pet[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:(?:@@@_pet_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { "@@@_pet_ext_@@@": [ "_pet\\.nii\\.gz", "_pet\\.nii", "_pet\\.json", "_events\\.json", "_events\\.tsv" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", "_stim\\.tsv\\.gz", "_physio\\.json", "_stim\\.json" ] } }, "pet_blood": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?pet[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_recording-[a-zA-Z0-9]+)_(@@@_pet_ext_@@@)$", "tokens": { "@@@_pet_ext_@@@": ["blood\\.tsv\\.gz", "blood\\.tsv", "blood\\.json"] } }, "motion": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?motion[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(_tracksys-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(@@@_motion_ext_@@@))|((?:_tracksys-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(@@@_cont_ext_@@@))$", "tokens": { "@@@_motion_ext_@@@": [ "channels\\.json", "channels\\.tsv", "motion\\.json", "motion\\.tsv" ], "@@@_cont_ext_@@@": ["events\\.json", "events\\.tsv"] } }, "microscopy": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", "_SEM", "_uCT", "_BF", "_DF", "_PC", "_DIC", "_FLUO", "_CONF", "_PLI", "_CARS", "_2PE", "_MPE", "_SR", "_NLO", "_OCT", "_SPIM" ], "@@@_microscopy_ext_@@@": [ "\\.ome\\.tif", "\\.ome\\.btf", "\\.ome\\.zarr[\\/\\\\].*", "\\.tif", "\\.png" ] } }, "microscopy_photo": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(@@@_photo_ext_@@@)$", "tokens": { "@@@_photo_ext_@@@": [ "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_photo\\.json" ] } }, "microscopy_json": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", "_SEM", "_uCT", "_BF", "_DF", "_PC", "_DIC", "_FLUO", "_CONF", "_PLI", "_CARS", "_2PE", "_MPE", "_SR", "_NLO", "_OCT", "_SPIM" ] } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 
bids-validator-1.14.5/bids_validator/rules/phenotypic_rules.json0000644000000000000000000000014700000000000025056 0ustar00rootroot00000000000000{ "phenotypic_data": { "regexp": "^[\\/\\\\](?:phenotype)[\\/\\\\](?:.*\\.tsv|.*\\.json)$" } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/rules/session_level_rules.json0000644000000000000000000001664200000000000025555 0ustar00rootroot00000000000000{ "scans": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(@@@_scat_ext_@@@)$", "tokens": { "@@@_scat_ext_@@@": ["_scans\\.tsv", "_scans\\.json"] } }, "func_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(@@@_func_ses_ext_@@@)$", "tokens": { "@@@_func_ses_ext_@@@": [ "_bold\\.json", "_sbref\\.json", "_events\\.json", "_events\\.tsv", "_physio\\.json", "_stim\\.json" ] } }, "asl_ses": { "regexp": "^\\/(sub-[a-zA-Z0-9]+)\\/(?:(ses-[a-zA-Z0-9]+)\\/)?\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_asl_ses_ext_@@@)$", "tokens": { "@@@_asl_ses_ext_@@@": [ "_asl\\.json", "_aslcontext\\.tsv", "_m0scan\\.json", "_asllabeling\\.jpg" ] } }, "pet_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_pet_ses_type_@@@)$", "tokens": { "@@@_pet_ses_type_@@@": ["_pet\\.json", "_events\\.json", "_events\\.tsv"] } }, "anat_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_anat_ses_type_@@@)\\.json$", "tokens": { "@@@_anat_ses_type_@@@": [ "T1w", "T2w", "T1map", "T2map", "T1rho", "FLAIR", "PD", "PDT2", "inplaneT1", 
"inplaneT2", "angio", "defacemask", "SWImagandphase" ] } }, "dwi_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_)?dwi\\.(?:@@@_dwi_ses_ext_@@@)$", "tokens": { "@@@_dwi_ses_ext_@@@": ["json", "bval", "bvec"] } }, "meg_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(@@@_meg_ses_type_@@@)$", "tokens": { "@@@_meg_ses_type_@@@": [ "_events\\.tsv", "_channels\\.tsv", "_channels\\.json", "_meg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_headshape\\.pos" ] } }, "eeg_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_space-(@@@_eeg_space_@@@))?(@@@_eeg_ses_type_@@@)$", "tokens": { "@@@_eeg_ses_type_@@@": [ "_events\\.tsv", "_channels\\.tsv", "_channels\\.json", "_electrodes\\.tsv", "_electrodes\\.json", "_eeg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif" ], "@@@_eeg_space_@@@": [ "Other", "CapTrak", "EEGLAB", "EEGLAB-HJ", "CTF", "ElektaNeuromag", "4DBti", "KitYokogawa", "ChietiItab", "ICBM452AirSpace", "ICBM452Warp5Space", "IXI549Space", "fsaverage", "fsaverageSym", "fsLR", "MNIColin27", "MNI152Lin", "MNI152NLin2009aSym", "MNI152NLin2009bSym", "MNI152NLin2009cSym", "MNI152NLin2009aAsym", "MNI152NLin2009bAsym", "MNI152NLin2009cAsym", "MNI152NLin6Sym", "MNI152NLin6ASym", "MNI305", "NIHPD", "OASIS30AntsOASISAnts", "OASIS30Atropos", "Talairach", "UNCInfant", "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "fsaveragesym", "UNCInfant0V21", "UNCInfant1V21", "UNCInfant2V21", "UNCInfant0V22", "UNCInfant1V22", "UNCInfant2V22", "UNCInfant0V23", "UNCInfant1V23", "UNCInfant2V23" ] } }, "ieeg_ses": { "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_space-(@@@_ieeg_space_@@@))?(@@@_ieeg_ses_type_@@@)$", "tokens": { "@@@_ieeg_ses_type_@@@": [ "_events\\.tsv", "_channels\\.tsv", "_channels\\.json", "_electrodes\\.tsv", "_electrodes\\.json", "_ieeg\\.json", "_coordsystem\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif" ], "@@@_ieeg_space_@@@": [ "Other", "Pixels", "ACPC", "ScanRAS", "ICBM452AirSpace", "ICBM452Warp5Space", "IXI549Space", "fsaverage", "fsaverageSym", "fsLR", "MNIColin27", "MNI152Lin", "MNI152NLin2009aSym", "MNI152NLin2009bSym", "MNI152NLin2009cSym", "MNI152NLin2009aAsym", "MNI152NLin2009bAsym", "MNI152NLin2009cAsym", "MNI152NLin6Sym", "MNI152NLin6ASym", "MNI305", "NIHPD", "OASIS30AntsOASISAnts", "OASIS30Atropos", "Talairach", "UNCInfant", "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "fsaveragesym", "UNCInfant0V21", "UNCInfant1V21", "UNCInfant2V21", "UNCInfant0V22", "UNCInfant1V22", "UNCInfant2V22", "UNCInfant0V23", "UNCInfant1V23", "UNCInfant2V23" ] } }, "motion_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(@@@_motion_ses_type_@@@)$", "tokens": { "@@@_motion_ses_type_@@@": [ "_events.tsv", "_events.json", "_channels.tsv", "_channels.json", "_motion.json", "_coordsystem.json" ] } }, "microscopy_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[0-9]+)?(@@@_microscopy_ses_type_@@@)$", "tokens": { "@@@_microscopy_ses_type_@@@": [ "_TEM\\.json", "_SEM\\.json", "_uCT\\.json", "_BF\\.json", "_DF\\.json", "_PC\\.json", "_DIC\\.json", "_FLUO\\.json", "_CONF\\.json", "_PLI\\.json", "_CARS\\.json", "_2PE\\.json", "_MPE\\.json", "_SR\\.json", "_NLO\\.json", "_OCT\\.json", "_SPIM\\.json" ] } }, 
"nirs_ses": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|((?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(@@@_nirs_ses_type_@@@)))$", "tokens": { "@@@_nirs_ses_type_@@@": [ "_events\\.tsv", "_channels\\.tsv", "_nirs\\.json", "_photo\\.jpg" ], "@@@_nirs_optodes_@@@": [ "_optodes\\.tsv", "_optodes\\.json", "_coordsystem\\.json" ] } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/rules/subject_level_rules.json0000644000000000000000000000032400000000000025517 0ustar00rootroot00000000000000{ "subject_level": { "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\]\\1(@@@_subject_level_ext_@@@)$", "tokens": { "@@@_subject_level_ext_@@@": ["_sessions\\.tsv", "_sessions\\.json"] } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/rules/top_level_rules.json0000644000000000000000000002076200000000000024672 0ustar00rootroot00000000000000{ "root_top": { "regexp": "^[\\/\\\\]?(@@@_root_files_@@@)$", "tokens": { "@@@_root_files_@@@": [ "README", "README\\.md", "README\\.rst", "README\\.txt", "CHANGES", "CITATION\\.cff", "LICENSE", "dataset_description\\.json", "genetic_info\\.json", "participants\\.tsv", "participants\\.json", "phasediff.json", "phase1\\.json", "phase2\\.json", "fieldmap\\.json", "TB1DAM\\.json", "TB1EPI\\.json", "TB1AFI\\.json", "TB1TFL\\.json", "TB1RFM\\.json", "TB1SRGE\\.json", "RB1COR\\.json", "events\\.json", "scans\\.json", "samples\\.json", "samples\\.tsv" ] } }, "func_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?((?:@@@_func_top_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", "tokens": { 
"@@@_func_top_ext_@@@": [ "_bold\\.json", "_sbref\\.json", "_events\\.json", "_events\\.tsv", "_beh\\.json" ], "@@@_cont_ext_@@@": ["_physio\\.json", "_stim\\.json"] } }, "asl_top": { "regexp": "^\\/(?:ses-[a-zA-Z0-9]+_)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_asl_top_ext_@@@)$", "tokens": { "@@@_asl_top_ext_@@@": [ "_asl\\.json", "_m0scan\\.json", "_aslcontext\\.tsv", "_labeling.jpg" ] } }, "pet_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:trc-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(@@@_pet_suffixes_@@@)\\.json$", "tokens": { "@@@_pet_suffixes_@@@": ["pet"] } }, "anat_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(@@@_anat_suffixes_@@@)\\.json$", "tokens": { "@@@_anat_suffixes_@@@": [ "T1w", "T2w", "T1map", "T2map", "T1rho", "FLAIR", "PD", "PDT2", "inplaneT1", "inplaneT2", "angio", "SWImagandphase", "T2star", "FLASH", "PDmap", "photo" ] } }, "VFA_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mese_megre_suffixes_@@@)\\.json$", "tokens": { "@@@_mese_megre_suffixes_@@@": ["VFA"] } }, "megre_mese_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mese_megre_suffixes_@@@)\\.json$", "tokens": { "@@@_mese_megre_suffixes_@@@": ["MEGRE", "MESE"] } }, "irt1_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:inv-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_irt1_suffixes_@@@)\\.json$", "tokens": { "@@@_irt1_suffixes_@@@": ["IRT1"] } }, "mpm_top": { "regexp": 
"^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:mt-(on|off)_)(?:part-(mag|phase|real|imag)_)?(@@@_mpm_suffixes_@@@)\\.json$", "tokens": { "@@@_mpm_suffixes_@@@": ["MPM"] } }, "mts_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_mt-(on|off)_)?(?:part-(mag|phase|real|imag)_)?(@@@_mts_suffixes_@@@)\\.json$", "tokens": { "@@@_mts_suffixes_@@@": ["MTS"] } }, "mtr_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:mt-(on|off)_)?(?:part-(mag|phase|real|imag)_)?(@@@_mtr_suffixes_@@@)\\.json$", "tokens": { "@@@_mtr_suffixes_@@@": ["MTR"] } }, "mp2rage_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:inv-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mp2rage_suffixes_@@@)\\.json$", "tokens": { "@@@_mp2rage_suffixes_@@@": ["MP2RAGE"] } }, "dwi_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:dir-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(?:chunk-[0-9]+_)?(dwi\\.(?:@@@_dwi_top_ext_@@@)|sbref\\.json)$", "tokens": { "@@@_dwi_top_ext_@@@": ["json", "bval", "bvec"] } }, "eeg_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_eeg_top_ext_@@@)$", "tokens": { "@@@_eeg_top_ext_@@@": [ "_eeg\\.json", "_channels\\.tsv", "_channels\\.json", "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_coordsystem\\.json" ] } }, "ieeg_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_ieeg_top_ext_@@@)$", "tokens": { "@@@_ieeg_top_ext_@@@": 
[ "_ieeg\\.json", "_channels\\.tsv", "_channels\\.json", "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_coordsystem\\.json" ] } }, "meg_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_meg_top_ext_@@@)$", "tokens": { "@@@_meg_top_ext_@@@": [ "_meg\\.json", "_channels\\.tsv", "_channels\\.json", "_photo\\.jpg", "_photo\\.png", "_photo\\.tif", "_coordsystem\\.json" ] } }, "motion_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:@@@_motion_top_ext_@@@)$", "tokens": { "@@@_motion_top_ext_@@@": [ "_motion\\.json", "_channels\\.tsv", "_channels\\.json", "_coordsystem\\.json" ] } }, "nirs_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|(task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_nirs_top_ext_@@@)))$", "tokens": { "@@@_nirs_top_ext_@@@": [ "_nirs\\.json", "_channels\\.tsv", "_photo\\.jpg" ], "@@@_nirs_optodes_@@@": [ "_optodes\\.tsv", "_optodes\\.json", "_coordsystem\\.json" ] } }, "fmap_epi_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:dir-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(?:chunk-[0-9]+_)?(?:@@@_field_map_type_@@@)\\.json$", "tokens": { "@@@_field_map_type_@@@": ["m0scan", "epi"] } }, "fmap_gre_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:chunk-[0-9]+_)?(@@@_fmap_gre_suffixes_@@@)\\.json$", "tokens": { "@@@_fmap_gre_suffixes_@@@": [ "magnitude1", "magnitude2", "phasediff", "phase1", "phase2", "magnitude", "fieldmap" ] } }, "other_top_files": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:recording-[a-zA-Z0-9]+_)?(@@@_other_top_files_ext_@@@)$", "tokens": { "@@@_other_top_files_ext_@@@": ["physio\\.json", "stim\\.json"] } }, "microscopy_top": { 
"regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", "_SEM\\.json", "_uCT\\.json", "_BF\\.json", "_DF\\.json", "_PC\\.json", "_DIC\\.json", "_FLUO\\.json", "_CONF\\.json", "_PLI\\.json", "_CARS\\.json", "_2PE\\.json", "_MPE\\.json", "_SR\\.json", "_NLO\\.json", "_OCT\\.json", "_SPIM\\.json" ] } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/bids_validator/test_bids_validator.py0000644000000000000000000001224200000000000024033 0ustar00rootroot00000000000000"""Test BIDSValidator functionality. git-annex and datalad are used to download a test data structure without the actual file contents. """ import os import pytest import datalad.api from bids_validator import BIDSValidator HOME = os.path.expanduser('~') TEST_DATA_DICT = { 'eeg_matchingpennies': ( 'https://gin.g-node.org/sappelhoff/eeg_matchingpennies' ), } EXCLUDE_KEYWORDS = ['git', 'datalad', 'sourcedata', 'bidsignore'] def _download_test_data(test_data_dict, dsname): """Download test data using datalad.""" url = test_data_dict[dsname] dspath = os.path.join(HOME, dsname) datalad.api.clone(source=url, path=dspath) return dspath def _gather_test_files(dspath, exclude_keywords): """Get test files from dataset path, relative to dataset.""" files = [] for r, _, f in os.walk(dspath): for file in f: fname = os.path.join(r, file) fname = fname.replace(dspath, '') if not any(keyword in fname for keyword in exclude_keywords): files.append(fname) return files dspath = _download_test_data(TEST_DATA_DICT, 'eeg_matchingpennies') files = _gather_test_files(dspath, EXCLUDE_KEYWORDS) @pytest.fixture(scope='module') def validator(): """Return a BIDSValidator instance.""" validator = BIDSValidator() return validator @pytest.mark.parametrize('fname', files) def 
test_datasets(validator, fname): """Test that is_bids returns true for each file in a valid BIDS dataset.""" assert validator.is_bids(fname) @pytest.mark.parametrize('fname, matches', [ ('/T1w.json', True), ('/dataset_description.json', True), ('/README', True), ('/CHANGES', True), ('/participants.tsv', True), ('/sub-01/anat/sub-01_T1w.nii.gz', False), ]) def test_top_level(validator, fname, matches): """Test that is_top_level returns true for top-level files.""" assert validator.is_top_level(fname) is matches @pytest.mark.parametrize('fname, matches', [ ('/sourcedata/unstructured_data.nii.gz', True), ('/sourcedata/dicom_dir/xyz.dcm', True), ('/code/my_analysis/analysis.py', True), ('/derivatives/preproc/sub-01/anat/sub-01_desc-preproc_T1w.nii.gz', True), ('/stimuli/pic.jpg', True), ('/sub-01/anat/sub-01_T1w.nii.gz', False), ]) def test_associated_data(validator, fname, matches): """Test that is_associated_data returns true for associated data.""" assert validator.is_associated_data(fname) is matches @pytest.mark.parametrize('fname, matches', [ ('/sub-01/ses-1/sub-01_ses-1_scans.tsv', True), ('/sub-01/ses-1/sub-01_ses-1_scans.json', True), ('/sub-01/sub-01_scans.tsv', True), ('/sub-01/sub-01_scans.json', True), ('/sub-01/ses-1/sub-01_ses-1_task-rest_bold.json', True), ('/sub-01/sub-01_task-rest_bold.json', True), ('/sub-01/ses-1/sub-01_ses-1_asl.json', True), ('/sub-01/sub-01_asl.json', True), ('/sub-01/ses-1/sub-01_ses-1_pet.json', True), ('/sub-01/sub-01_pet.json', True), ('/sub-01/ses-1/sub-01_ses-1_proc-test_channels.tsv', True), ('/sub-01/ses-1/sub-01_ses-1_channels.json', True), ('/sub-01/sub-01_proc-test_channels.tsv', True), ('/sub-01/sub-01_channels.json', True), ('/sub-01/ses-1/sub-01_ses-1_space-CapTrak_electrodes.tsv', True), ('/sub-01/ses-1/sub-01_ses-1_coordsystem.json', True), ('/sub-01/sub-01_space-CapTrak_electrodes.tsv', True), ('/sub-01/sub-01_coordsystem.json', True), ('/sub-01/ses-1/sub-01_ses-1_motion.json', True), 
('/sub-01/sub-01_motion.json', True), ('/sub-01/ses-1/sub-01_ses-1_TEM.json', True), ('/sub-01/sub-01_TEM.json', True), ('/sub-01/ses-1/sub-01_ses-1_nirs.json', True), ('/sub-01/sub-01_nirs.json', True), # Mismatch sessions ('/sub-01/sub-01_ses-1_scans.tsv', False), ('/sub-01/sub-01_ses-1_scans.json', False), ('/sub-01/ses-1/sub-01_ses-2_scans.tsv', False), # File-level ('/sub-01/ses-1/func/sub-01_ses-1_task-rest_bold.nii.gz', False), ('/sub-01/anat/sub-01_T1w.nii.gz', False), ]) def test_session_level(validator, fname, matches): """Test that is_session_level returns true for session level files.""" assert validator.is_session_level(fname) is matches @pytest.mark.parametrize('fname, matches', [ ('/sub-01/sub-01_sessions.tsv', True), ('/sub-01/sub-01_sessions.json', True), ('/sub-01/anat/sub-01_T1w.nii.gz', False), ]) def test_subject_level(validator, fname, matches): """Test that is_subject_level returns true for subject level files.""" assert validator.is_subject_level(fname) is matches @pytest.mark.parametrize('fname, matches', [ ('/phenotype/measure.tsv', True), ('/phenotype/measure.json', True), ('/sub-01/anat/sub-01_T1w.nii.gz', False), ]) def test_phenotpic(validator, fname, matches): """Test that is_phenotypic returns true for phenotypic files.""" assert validator.is_phenotypic(fname) is matches @pytest.mark.parametrize('fname, matches', [ ('/sub-01/ses-1/func/sub-01_ses-1_task-rest_bold.nii.gz', True), ('/sub-01/anat/sub-01_T1w.nii.gz', True), ]) def test_file_level(validator, fname, matches): """Test that is_file returns true for file level files.""" assert validator.is_file(fname) is matches ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.505778 bids-validator-1.14.5/bids_validator/tsv/0000755000000000000000000000000000000000000020247 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 
bids-validator-1.14.5/bids_validator/tsv/non_custom_columns.json0000644000000000000000000000342700000000000025074 0ustar00rootroot00000000000000{ "channels": [ "description", "high_cutoff", "low_cutoff", "name", "notch", "source", "detector", "wavelength_actual", "wavelength_nominal", "wavelength_emission_actual", "component", "short_channel", "sampling_frequency", "software_filters", "status", "status_description", "type", "units", "reference", "group", "tracking_system", "tracked_point", "component", "placement", "HED" ], "electrodes": [ "name", "x", "y", "z", "size", "material", "manufacturer", "group", "hemisphere", "type", "impedance", "dimension", "HED" ], "optodes": [ "name", "type", "x", "y", "z", "template_x", "template_y", "template_z", "source_type", "detector_type", "HED" ], "events": [ "duration", "HED", "onset", "trial_type", "response_time", "stim_file", "HED" ], "misc": [], "participants": ["participant_id", "HED"], "phenotype": ["participant_id", "HED"], "scans": ["acq_time", "filename", "HED"], "samples": [ "sample_id", "participant_id", "sample_type", "pathology", "derived_from", "HED" ], "sessions": ["acq_time", "session_id", "HED"], "aslcontext": ["volume_type", "HED"], "blood": [ "time", "plasma_radioactivity", "whole_blood_radioactivity", "metabolite_parent_fraction", "hplc_recovery_fractions", "HED" ], "nirs": [ "name", "type", "source", "detector", "wavelength_nominal", "units", "sampling_frequency", "component", "wavelength_actual", "description", "wavelength_emission_actual", "short_channel", "status", "status_description", "HED" ] } ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.505778 bids-validator-1.14.5/bids_validator.egg-info/0000755000000000000000000000000000000000000021125 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341552.0 
bids-validator-1.14.5/bids_validator.egg-info/PKG-INFO0000644000000000000000000011410000000000000022217 0ustar00rootroot00000000000000Metadata-Version: 2.1 Name: bids-validator Version: 1.14.5 Summary: Validator for the Brain Imaging Data Structure Author-email: PyBIDS developers License: MIT License Project-URL: Homepage, https://github.com/bids-standard/bids-validator Classifier: Development Status :: 3 - Alpha Classifier: Environment :: Console Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Topic :: Scientific/Engineering Requires-Python: >=3.8 Description-Content-Type: text/markdown License-File: LICENSE [![Node Tests](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/node_tests.yml) [![Python tests](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/python_tests.yml) [![bids-examples tests](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml/badge.svg)](https://github.com/bids-standard/bids-validator/actions/workflows/test-bids-examples.yml) [![CircleCI](https://circleci.com/gh/bids-standard/bids-validator.svg?style=shield&circle-token=:circle-token)](https://circleci.com/gh/bids-standard/bids-validator) [![Codecov](https://codecov.io/gh/bids-standard/bids-validator/branch/master/graph/badge.svg)](https://codecov.io/gh/bids-standard/bids-validator) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3688707.svg)](https://doi.org/10.5281/zenodo.3688707) # BIDS-Validator - [BIDS-Validator](#bids-validator) - [Quickstart](#quickstart) - [Support](#support) - [Maintainers and Contributors](#maintainers-and-contributors) - [Use](#use) - [API](#api) - 
[.bidsignore](#bidsignore) - [Configuration](#configuration) - [In the Browser](#in-the-browser) - [On the Server](#on-the-server) - [Through Command Line](#through-command-line) - [Docker image](#docker-image) - [Python Library](#python-library) - [Example](#example) - [Development](#development) - [Running Locally in a Browser](#running-locally-in-a-browser) - [Testing](#testing) - [Publishing](#publishing) - [Acknowledgments](#acknowledgments) ## Quickstart 1. Web version: 1. Open [Google Chrome](https://www.google.com/chrome/) or [Mozilla Firefox](https://mozilla.org/firefox) (currently the only supported browsers) 1. Go to https://bids-standard.github.io/bids-validator/ and select a folder with your BIDS dataset. If the validator seems to be working longer than couple of minutes please open [developer tools ](https://developer.chrome.com/devtools) and report the error at [https://github.com/bids-standard/bids-validator/issues](https://github.com/bids-standard/bids-validator/issues). 1. Command line version: 1. Install [Node.js](https://nodejs.org) (at least version 18.0.0) 1. Update `npm` to be at least version 7 (`npm install --global npm@^7`) 1. From a terminal run `npm install -g bids-validator` 1. Run `bids-validator` to start validating datasets. 1. Docker 1. Install Docker 1. From a terminal run `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` but replace the `/path/to/data` part of the command with your own path on your machine. 1. Python Library: 1. Install [Python](https://www.python.org/) 1. Install [Pip](https://pip.pypa.io/en/stable/installing/) package manager for Python, if not already installed. 1. From a terminal run `pip install bids_validator` to acquire the [BIDS Validator PyPI package](https://pypi.org/project/bids-validator/) or `conda install bids-validator` for the [Conda package](https://anaconda.org/conda-forge/bids-validator). 1. Open a Python terminal and type: `python` 1. 
Import the BIDS Validator package `from bids_validator import BIDSValidator` 1. Check if a file is BIDS compatible `BIDSValidator().is_bids('/relative/path/to/a/bids/file')` 1. Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Support The BIDS Validator is designed to work in both the browser and in Node.js. We target support for the latest long term stable (LTS) release of Node.js and the latest version of Chrome. There is also a library of helper functions written in Python, for use with BIDS compliant applications written in this language. Please report any issues you experience while using these support targets via the [GitHub issue tracker](https://github.com/bids-standard/bids-validator/issues). If you experience issues outside of these supported environments and believe we should extend our targeted support feel free to open a new issue describing the issue, your support target and why you require extended support and we will address these issues on a case by case basis. ## Maintainers and Contributors [![All Contributors](https://img.shields.io/badge/all_contributors-43-orange.svg?style=flat-square)](#contributors-) This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind are welcome! The project is maintained by [@rwblair](https://github.com/rwblair/) with the help of many contributors listed below. (The [emoji key](https://allcontributors.org/docs/en/emoji-key) is indicating the kind of contribution) Please also see [Acknowledgments](#acknowledgments).

Adam Li

πŸ’» ⚠️ πŸ““ πŸ›

Adam Thomas

πŸ“–

Alexander Jones

πŸ’» ⚠️ πŸ€”

Ben Beasley

πŸ“¦

Chris Gorgolewski

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Chris Holdgraf

πŸ’»

Chris Markiewicz

πŸ’» ⚠️ πŸ€” πŸ› πŸ’¬ πŸ”§ 🚧

David Nishikawa

πŸ’» ⚠️

Dimitri Papadopoulos Orfanos

πŸ’»

Duncan Macleod

πŸš‡

Franklin Feingold

πŸ“–

Gregory noack

πŸ’» ⚠️

Horea Christian

πŸ’»

Jakub Kaczmarzyk

πŸš‡

Joke Durnez

πŸ’»

Mainak Jas

πŸ’» ⚠️ πŸ€” πŸ‘€ πŸ““

Marco Castellaro

πŸ’» ⚠️

Max

πŸ’» πŸ›

Michael Hanke

πŸ“–

Mikael Naveau

πŸ’»

Nell Hardcastle

πŸ’» πŸ€” πŸš‡ πŸ’¬ πŸ‘€

Nicolas Traut

πŸ’»

Parul Sethi

πŸ’» ⚠️

Patricia Clement

πŸ’»

Remi Gau

πŸ’» πŸ“– πŸ““

Richard HΓΆchenberger

πŸ’» πŸ““ ⚠️ πŸ›

Robert Oostenveld

πŸ’» πŸ€” πŸ› ⚠️

Rohan Goyal

πŸ’»

Ross Blair

🚧 πŸ€” πŸ’» πŸ› πŸš‡ πŸ“† πŸ’¬ πŸ‘€ πŸ”§ ⚠️

Russ Poldrack

πŸ’» πŸ’΅ πŸ”

Soichi Hayashi

πŸ›

Stefan Appelhoff

πŸ› πŸ’» πŸ”£ πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ βœ… πŸ“’ πŸ““

Suyash

πŸ’»

Taylor Salo

πŸ’»

Teal Hobson-Lowther

πŸ’» ⚠️

Travis Riddle

πŸ›

VisLab

πŸ€” πŸ’»

Wazeer Zulfikar

πŸ“–

Yaroslav Halchenko

πŸ€” πŸ’» πŸ“– πŸ““

constellates

πŸ’» ⚠️

dewarrn1

πŸ’»

dkp

πŸ’»

goldmund

πŸ’» ⚠️
## Use ### API The BIDS Validator has one primary method that takes a directory as either a path to the directory (node) or the object given by selecting a directory with a file input (browser), an options object, and a callback. Available options include: - ignoreWarnings - (boolean - defaults to false) - ignoreNiftiHeaders - (boolean - defaults to false) For example: `validate.BIDS(directory, {ignoreWarnings: true}, function (issues, summary) {console.log(issues.errors, issues.warnings);});` If you would like to test individual files you can use the file specific checks that we expose. - validate.BIDS() - validate.JSON() - validate.TSV() - validate.NIFTI() Additionally you can reformat stored errors against a new config using `validate.reformat()` ### .bidsignore Optionally one can include a `.bidsignore` file in the root of the dataset. This file lists patterns (compatible with the [.gitignore syntax](https://git-scm.com/docs/gitignore)) defining files that should be ignored by the validator. This option is useful when the validated dataset includes file types not yet supported by BIDS specification. ```Text *_not_bids.txt extra_data/ ``` ### Configuration You can configure the severity of errors by passing a json configuration file with a `-c` or `--config` flag to the command line interface or by defining a config object on the options object passed during javascript usage. If no path is specified a default path of `.bids-validator-config.json` will be used. You can add this file to your dataset to share dataset specific validation configuration. To disable this behavior use `--no-config` and the default configuration will be used. The basic configuration format is outlined below. All configuration is optional. ```JSON { "ignore": [], "warn": [], "error": [], "ignoredFiles": [] } ``` `ignoredFiles` takes a list of file paths or glob patterns you'd like to ignore. Lets say we want to ignore all files and sub-directory under `/derivatives/`. 
**This is not the same syntax as used in the .bidsignore file** ```JSON { "ignoredFiles": ["/derivatives/**"] } ``` Note that adding two stars `**` in path makes validator recognize all files and sub-dir to be ignored. `ignore`, `warn`, and `error` take lists of issue codes or issue keys and change the severity of those issues so they are either ignored or reported as warnings or errors. You can find a list of all available issues at [utils/issues/list](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/utils/issues/list.js). Some issues may be ignored by default, but can be elevated to warnings or errors. These provide a way to check for common things that are more specific than BIDS compatibility. An example is a check for the presence of a T1w modality. The following would raise an error if no T1W image was found in a dataset. ```JSON { "error": ["NO_T1W"] } ``` In addition to issue codes and keys these lists can also contain objects with and "and" or "or" properties set to arrays of codes or keys. These allow some level of conditional logic when configuring issues. For example: ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` In the above example the two issues will only be ignored if both of them are triggered during validation. ```JSON { "ignore": [ { "and": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" { "or": [ "ECHO_TIME1-2_NOT_DEFINED", "ECHO_TIME_MUST_DEFINE" ] } ] } ] } ``` And in this example the listed issues will only be ignored if `ECHO_TIME_GREATER_THAN`, `ECHO_TIME_NOT_DEFINED` and either `ECHO_TIME1-2_NOT_DEFINED` or `ECHO_TIME_MUST_DEFINE` are triggered during validation. "or" arrays are not supported at the lowest level because it wouldn't add any functionality. For example the following is not supported. 
```JSON { "ignore": [ { "or": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ] } ``` because it would be functionally the same as this: ```JSON { "ignore": [ "ECHO_TIME_GREATER_THAN", "ECHO_TIME_NOT_DEFINED" ] } ``` For passing a configuration while using the bids-validator on the command line, you can use the following style to for example ignore empty file errors (99) and files that cannot be read (44): ``` bids-validator --config.ignore=99 --config.ignore=44 path/to/bids/dir ``` This style of use puts limits on what configuration you can require, so for complex scenarios, we advise users to create a dedicated configuration file with contents as described above. ### In the Browser The BIDS Validator currently works in the browser with [browserify](https://browserify.org/) or [webpack](https://webpack.js.org/). You can add it to a project by cloning the validator and requiring it with browserify syntax `const validate = require('bids-validator');` or an ES2015 webpack import `import validate from 'bids-validator'`. ### On the Server The BIDS validator works like most npm packages. You can install it by running `npm install bids-validator`. ### Through Command Line If you install the bids validator globally by using `npm install -g bids-validator` you will be able to use it as a command line tool. Once installed you should be able to run `bids-validator /path/to/your/bids/directory` and see any validation issues logged to the terminal. Run `bids-validator` without a directory path to see available options. ## Docker image [![Docker Image Version (latest by date)](https://img.shields.io/docker/v/bids/validator?label=docker)](https://hub.docker.com/r/bids/validator) To use bids validator with [docker](https://www.docker.com/), you simply need to [install docker](https://docs.docker.com/install/) on your system. 
And then from a terminal run: - `docker run -ti --rm bids/validator --version` to print the version of the docker image - `docker run -ti --rm bids/validator --help` to print the help - `docker run -ti --rm -v /path/to/data:/data:ro bids/validator /data` to validate the dataset `/path/to/data` on your host machine See here for a brief explanation of the commands: - `docker run` is the command to tell docker to run a certain docker image, usually taking the form `docker run ` - the `-ti` flag means the inputs are accepted and outputs are printed to the terminal - the `--rm` flag means that the state of the docker container is not saved after it has run - the `-v` flag is adding your local data to the docker container ([bind-mounts](https://docs.docker.com/storage/bind-mounts/)). Importantly, the input after the `-v` flag consists of three fields separated colons: `:` - the first field is the path to the directory on the host machine: `/path/to/data` - the second field is the path where the directory is mounted in the container - the third field is optional. In our case, we use `ro` to specify that the mounted data is _read only_ ## Python Library [![PyPI version](https://badge.fury.io/py/bids-validator.svg)](https://badge.fury.io/py/bids-validator) [![Conda version](https://img.shields.io/conda/vn/conda-forge/bids-validator)](https://anaconda.org/conda-forge/bids-validator) There are is a limited library of helper functions written in Python. The main function determines if a file extension is compliant with the BIDS specification. You can find the available functions in the library, as well as their descriptions, [here](https://github.com/bids-standard/bids-validator/blob/master/bids-validator/bids_validator/bids_validator.py). To install, run `pip install -U bids_validator` (requires python and pip) or `conda install bids-validator` (requires a Conda environment). 
### Example ```Python from bids_validator import BIDSValidator validator = BIDSValidator() filepaths = ["/sub-01/anat/sub-01_rec-CSD_T1w.nii.gz", "/sub-01/anat/sub-01_acq-23_rec-CSD_T1w.exe"] for filepath in filepaths: print(validator.is_bids(filepath)) # will print True, and then False ``` Note, the file path must be relative to the root of the BIDS dataset, and a leading forward slash `/` must be added to the file path. ## Development To develop locally, clone the project and run `npm install` from the project root. This will install external dependencies. If you wish to install `bids-validator` globally (so that you can run it in other folders), use the following command to install it globally: `cd bids-validator && npm install -g` (for windows users, if in a different drive add /d, e.g. `cd /d F:\bids-validator && npm install -g`) Please see the [CONTRIBUTING.md](../CONTRIBUTING.md) for additional details. ### Bundling bids-validator is bundled with esbuild. While developing, the script `bids-validator/bin/bids-validator` will automatically bundle the project each time it is run. To test a build without publishing it `npm -w bids-validator run build`. This will generate a bids-validator/dist directory containing the local build and `bids-validator/bin/bids-validator` will use this build. To return to automatic bundling on each run, remove the dist directory. ### Running Locally in a Browser A note about OS X, the dependencies for the browser require a npm package called node-gyp which needs xcode to be installed in order to be compiled. 1. The browser version of `bids-validator` lives in the repo subdirectory `/bids-validator-web`. It is a [React.js](https://reactjs.org/) application that uses the [next.js](https://nextjs.org/) framework. 2. To develop `bids-validator` and see how it will act in the browser, simply run `npm run web-dev` in the project root and navigate to `localhost:3000`. 3. 
In development mode, changes to the codebase will trigger rebuilds of the application automatically. 4. Changes to the `/bids-validator` in the codebase will also be reflected in the web application. 5. Tests use the [Jest](https://jestjs.io/index.html) testing library and should be developed in `/bids-validator-web/tests`. We can always use more tests, so please feel free to contribute a test that reduces the chance of any bugs you fix! 6. To ensure that the web application compiles successfully in production, run `npm run web-export` ### Testing If it's your first time running tests, first use the command `git submodule update --init --depth 1` to pull the test example data. This repo contains the [bids-examples github repository](https://github.com/bids-standard/bids-examples) as a [submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules). To start the test suite run `npm run test` from the project root. `npm run test -- --watch` is useful to run tests while making changes. A coverage report is available with `npm run coverage`. To run the linter which checks code conventions run `npm run lint`. ### Install globally from a development branch Global installs are not recommended for development because of the possibility of package conflicts with other Node.js projects. If you do need to test with a global install from a development tree, follow these steps to generate the NPM package without publishing it and install the package locally. 1. `npm -w bids-validator run build` 2. `npm -w bids-validator pack` 3. `npm install -g bids-validator-*.tgz` ### Publishing Publishing is done with [Lerna](https://github.com/lerna/lerna). Use the command `npx lerna publish` and follow instructions to set a new version. Using lerna publish will create a git commit with updated version information and create a version number tag for it, push the tag to GitHub, then publish to NPM and PyPI. The GitHub release is manual following that. 
## Acknowledgments Many contributions to the `bids-validator` were done by members of the BIDS community. See the [list of contributors](https://bids-specification.readthedocs.io/en/stable/99-appendices/01-contributors.html). A large part of the development of `bids-validator` is currently done by [Squishymedia](https://squishymedia.com/), who are in turn financed through different grants offered for the general development of BIDS. See the list below. Development and contributions were supported through the following federally funded projects/grants: - [BIDS Derivatives (NIMH: R24MH114705, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH114705-01) - [OpenNeuro (NIMH: R24MH117179, PI: Poldrack)](https://grantome.com/grant/NIH/R24-MH117179-01) - [Spokes: MEDIUM: WEST (NSF: 1760950, PI: Poldrack & Gorgolewski)](https://grantome.com/grant/NSF/IIS-1760950) - [ReproNim](http://repronim.org) [(NIH-NIBIB P41 EB019936, PI: Kennedy)](https://projectreporter.nih.gov/project_info_description.cfm?aid=8999833) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341552.0 bids-validator-1.14.5/bids_validator.egg-info/SOURCES.txt0000644000000000000000000000121300000000000023006 0ustar00rootroot00000000000000LICENSE MANIFEST.in README.md pyproject.toml setup.cfg setup.py bids_validator/__init__.py bids_validator/_version.py bids_validator/bids_validator.py bids_validator/test_bids_validator.py bids_validator.egg-info/PKG-INFO bids_validator.egg-info/SOURCES.txt bids_validator.egg-info/dependency_links.txt bids_validator.egg-info/top_level.txt bids_validator/rules/associated_data_rules.json bids_validator/rules/file_level_rules.json bids_validator/rules/phenotypic_rules.json bids_validator/rules/session_level_rules.json bids_validator/rules/subject_level_rules.json bids_validator/rules/top_level_rules.json bids_validator/tsv/non_custom_columns.json././@PaxHeader0000000000000000000000000000002600000000000011453 
xustar000000000000000022 mtime=1712341552.0 bids-validator-1.14.5/bids_validator.egg-info/dependency_links.txt0000644000000000000000000000000100000000000025173 0ustar00rootroot00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341552.0 bids-validator-1.14.5/bids_validator.egg-info/top_level.txt0000644000000000000000000000001700000000000023655 0ustar00rootroot00000000000000bids_validator ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/pyproject.toml0000644000000000000000000000207200000000000017362 0ustar00rootroot00000000000000[build-system] requires = ["setuptools", "versioneer[toml]"] build-backend = "setuptools.build_meta" [project] name = "bids-validator" dynamic = ["version"] description = "Validator for the Brain Imaging Data Structure" readme = "README.md" license = { text = "MIT License" } authors = [ { name = "PyBIDS developers", email = "bids-discussion@googlegroups.com" }, ] classifiers = [ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering", ] requires-python = ">=3.8" [project.urls] Homepage = "https://github.com/bids-standard/bids-validator" [tool.setuptools.packages.find] include = ["bids_validator*"] [tool.setuptools.package-data] bids_validator = ["*/*.json"] [tool.versioneer] VCS = "git" style = "pep440" versionfile_source = "bids_validator/_version.py" versionfile_build = "bids_validator/_version.py" tag_prefix = "v" parentdir_prefix = "" ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1712341552.509778 bids-validator-1.14.5/setup.cfg0000644000000000000000000000015600000000000016270 0ustar00rootroot00000000000000[tool:pytest] ignore = _version.py [flake8] exclude = 
_version.py [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1712341521.0 bids-validator-1.14.5/setup.py0000644000000000000000000000023000000000000016152 0ustar00rootroot00000000000000#!/usr/bin/env python from setuptools import setup import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass())